コード例 #1
0
    def __init__(self):
        """Initialize the TF broadcaster and the fixed poses of scene objects.

        Positions are (x, y, z) tuples in the robot base frame; composed
        frames are stored as (translation, quaternion) pairs from pm.toTf
        so they can be broadcast directly over TF.
        """
        self.seq = 0
        self.tf_pub = tf.TransformBroadcaster()
        # Candidate object positions; z is constant (table height level).
        self.orange1 = (0.641782207489, -0.224464386702, -0.363829042912)
        self.orange2 = (0.69, -0.31, -0.363829042912)
        self.orange3 = (0.68, -0.10, -0.363829042912)

        self.blue1 = (0.641782207489, -0.224464386702, -0.363829042912)
        self.red1 = (0.69, -0.31, -0.363829042912)
        self.green1 = (0.68, -0.10, -0.363829042912)
        self.yellow1 = (0.68, -0.20, -0.363829042912)

        table_pos = (0.5, 0., 0.863 - 0.5)
        self.table_pos, self.table_rot = pm.toTf(
            kdl.Frame(kdl.Rotation.RotZ(-np.pi / 2.), kdl.Vector(*table_pos)))
        # NOTE: the box transform was previously computed twice with
        # identical inputs; the redundant second computation was removed.
        box_pos = (0.82, -0.4, 0.863 - 0.5 + 0.1025)
        self.box_pos, self.box_rot = pm.toTf(
            kdl.Frame(kdl.Rotation.RotZ(1.5), kdl.Vector(*box_pos)))
        b1_pos = (0.78, -0.03, 0.863 - 0.5 + 0.0435)
        self.block1_pos, self.block1_rot = pm.toTf(
            kdl.Frame(kdl.Rotation.RotZ(-np.pi / 2.), kdl.Vector(*b1_pos)))
        # Renamed from b1_pos (was reused) to avoid confusion with block 1.
        b2_pos = (0.73, 0.12, 0.863 - 0.5 + 0.0435)
        self.block2_pos, self.block2_rot = pm.toTf(
            kdl.Frame(kdl.Rotation.RotZ(-np.pi / 2. + 0.07),
                      kdl.Vector(*b2_pos)))
コード例 #2
0
    def test_custom_reference_relative_move(self):
        """ Test if relative moves work with custom reference frame as expected

            Test sequence:
                1. Get the test data and publish reference frame via TF.
                2. Create a relative Ptp with and without custom reference.
                3. convert the goals.
                4. Evaluate the results.
        """
        rospy.loginfo("test_custom_reference_frame_relative")

        self.robot.move(Ptp(goal=[0, 0, 0, 0, 0, 0]))

        # get and transform test data
        ref_frame = self.test_data.get_pose("Blend_1_Mid", PLANNING_GROUP_NAME)
        goal_pose_bf = self.test_data.get_pose("Blend_1_Start", PLANNING_GROUP_NAME)
        goal_pose_bf_tf = pm.toTf(pm.fromMsg(goal_pose_bf))
        ref_frame_tf = pm.toTf(pm.fromMsg(ref_frame))

        rospy.sleep(rospy.Duration.from_sec(0.5))

        # init; identity is the fallback if the TF setup below fails
        base_frame_name = self.robot._robot_commander.get_planning_frame()
        time_tf = rospy.Time.now()
        goal_pose_in_rf = [[0, 0, 0], [0, 0, 0, 1]]

        try:
            self.tf.sendTransform(goal_pose_bf_tf[0], goal_pose_bf_tf[1], time_tf,
                                  "rel_goal_pose_bf", base_frame_name)
            self.tf.sendTransform(ref_frame_tf[0], ref_frame_tf[1], time_tf,
                                  "ref_rel_frame", base_frame_name)

            self.tf_listener.waitForTransform("rel_goal_pose_bf", "ref_rel_frame", time_tf, rospy.Duration(2, 0))

            rospy.sleep(rospy.Duration.from_sec(0.1))

            goal_pose_in_rf = self.tf_listener.lookupTransform("ref_rel_frame", "rel_goal_pose_bf", time_tf)
        except Exception as e:
            # Narrowed from a bare 'except:' (which also swallowed
            # SystemExit/KeyboardInterrupt); log the actual cause.
            rospy.logerr("Failed to setup transforms for test! %s", e)

        goal_pose_rf_msg = pm.toMsg(pm.fromTf(goal_pose_in_rf))

        # move to initial position and use relative move to reach goal
        self.robot.move(Ptp(goal=ref_frame))

        self.tf.sendTransform(ref_frame_tf[0], ref_frame_tf[1], rospy.Time.now(), "ref_rel_frame", base_frame_name)
        rospy.sleep(rospy.Duration.from_sec(0.1))
        self.tf_listener.waitForTransform(base_frame_name, "ref_rel_frame", rospy.Time(0), rospy.Duration(1, 0))

        ptp = Ptp(goal=goal_pose_rf_msg, reference_frame="ref_rel_frame", relative=True)
        req = ptp._cmd_to_request(self.robot)

        self.assertIsNotNone(req)
        self._analyze_request_pose(TARGET_LINK_NAME, goal_pose_bf, req)
コード例 #3
0
    def tick(self):
        '''
        Estimate this object's pose by averaging the transforms of all
        observed frames in self.transforms (each expressed relative to
        self.root), optionally smooth over a short history, and broadcast
        the result as a single TF frame named self.name.
        '''

        if not self.listener.frameExists(self.root):
            rospy.logwarn("%s was missing" % self.root)
            return

        count = 0
        avg_p = np.zeros(3)  # running sum of translations
        avg_q = np.zeros(4)  # running sum of quaternions
        for name, pose in self.transforms.items():
            if self.listener.frameExists(name):
                t = self.listener.getLatestCommonTime(self.root, name)
                p, q = self.listener.lookupTransform(self.root, name, t)
                F = pm.fromTf((p, q))
                # Apply the per-frame correction pose before averaging.
                F = F * pose
                p, q = pm.toTf(F)
                avg_p += np.array(p)
                avg_q += np.array(q)
                count += 1
        if count > 0:
            avg_p /= count
            avg_q /= count
            # NOTE(review): componentwise quaternion averaging (then
            # renormalizing) is only a good approximation when the source
            # rotations are close to each other.
            avg_q /= np.linalg.norm(avg_q)

            if self.history_length > 0:
                # use history to smooth predictions
                # self.history is used as a deque: newest at the left,
                # oldest dropped from the right when the window is full.
                if len(self.history) >= self.history_length:
                    self.history.pop()
                self.history.appendleft((avg_p, avg_q))
                avg_p = np.zeros(3)
                avg_q = np.zeros(4)
                for p, q in self.history:
                    avg_p += p
                    avg_q += q

                avg_p /= len(self.history)
                avg_q /= len(self.history)
                avg_q /= np.linalg.norm(avg_q)

            if self.offset is not None:
                # apply some offset after we act
                F = pm.fromTf((avg_p, avg_q))
                F = F * self.offset
                avg_p, avg_q = pm.toTf(F)

            self.broadcaster.sendTransform(avg_p, avg_q, rospy.Time.now(),
                                           self.name, self.root)
コード例 #4
0
 def update_tf(self):
     """Re-broadcast a TF frame for every known waypoint in every group."""
     for group in self.waypoints.keys():
         poses, names = self.get_waypoints_srv.get_waypoints(
             group, [], self.waypoints[group], self.waypoint_names[group])
         for pose, frame_name in zip(poses, names):
             trans, rot = pm.toTf(pm.fromMsg(pose))
             self.broadcaster.sendTransform(
                 trans, rot, rospy.Time.now(), frame_name, self.world)
コード例 #5
0
def publish_grasp_tran(tran):
    """@brief - Add a grasp relative to the world origin to the scene graph

    @param tran - the grasp transform (4x4 matrix) in world coordinates
    """
    translation, rotation = pm.toTf(pm.fromMatrix(tran))
    broadcaster = tf.TransformBroadcaster()
    broadcaster.sendTransform(translation, rotation, rospy.Time.now(),
                              "/hand_goal_pose", "/world")
コード例 #6
0
ファイル: omni_im.py プロジェクト: danepowell/omni_im
def processMarkerFeedback(feedback):
    """Track the interactive marker's pose (with a z offset) in marker_tf."""
    global marker_tf, marker_name
    marker_name = feedback.marker_name
    # Constant offset along the marker's z axis, identity rotation.
    offset_tf = ((0, 0, marker_offset),
                 tf.transformations.quaternion_from_euler(0, 0, 0))
    pose_mat = pm.toMatrix(pm.fromMsg(feedback.pose))
    offset_mat = pm.toMatrix(pm.fromTf(offset_tf))
    marker_tf = pm.toTf(pm.fromMatrix(numpy.dot(pose_mat, offset_mat)))
    # Any menu interaction resets the command frame.
    if feedback.menu_entry_id:
        marker_tf = zero_tf
コード例 #7
0
 def pub_graspit_grasp_tf(self, object_name, graspit_grasp_msg):
     # type: (str, graspit_msgs.msg.Grasp) -> ()
     """Broadcast the final grasp pose as /grasp_approach_tran under object_name."""
     # Is this needed anymore?
     trans, rot = pm.toTf(pm.fromMsg(graspit_grasp_msg.final_grasp_pose))
     self.tf_broadcaster.sendTransform(trans, rot, rospy.Time.now(),
                                       "/grasp_approach_tran", object_name)
コード例 #8
0
    def step(self):
        """Convert the latest android odometry and republish it.

        Waits for init_tf() to establish self.T_ first; then, for each newly
        received odometry (self.recv_ flag), composes
        self.T_ * T0 * self.Ti_ and publishes it as an odom->base_link TF
        (if enabled) and as a pose message in the 'map' frame.
        """
        if self.T_ is None:
            self.init_tf()
            return

        if self.recv_:
            self.recv_ = False
            pose = self.odom_.pose.pose
            T0 = pm.toMatrix(pm.fromMsg(pose))  # android_odom --> android

            # send odom -> base_link transform
            T = tf.transformations.concatenate_matrices(self.T_, T0, self.Ti_)
            frame = pm.fromMatrix(T)
            if self.pub_tf_:
                txn, qxn = pm.toTf(frame)
                self.tfb_.sendTransform(txn, qxn, self.odom_.header.stamp,
                                        'odom', 'base_link')

            # send as msg
            # TODO : does not deal with twist/covariance information
            # (a dead duplicate 'msg = pm.toMsg(frame)' assignment was removed)
            self.cvt_msg_.pose.pose = pm.toMsg(frame)
            self.cvt_msg_.header.frame_id = 'map'  # experimental
            self.cvt_msg_.header.stamp = self.odom_.header.stamp
            self.pub_.publish(self.cvt_msg_)
コード例 #9
0
    def process_grasp_msg(self, grasp_msg):
        """@brief - Attempt to make the adjustment specified by grasp_msg
        1. plan a path to a place 15cm back from the new pose
        """
        print 'regular move:'
        print grasp_msg
        #release the object
        tp.open_barrett()

        #get the robot's current location and calculate the absolute tran after the adjustment
        #update the robot status
        tp.update_robot(self.global_data.or_env.GetRobots()[0])
        handGoalInBase = pm.toMatrix(pm.fromMsg(grasp_msg.final_grasp_pose))

        try:
            # Broadcast the hand goal, then read back where the arm tip must
            # go via the TF tree.
            # NOTE(review): "hand_goal" is broadcast but "arm_goal" is looked
            # up -- presumably another node links them; confirm.
            hand_tf = pm.toTf(pm.fromMatrix(handGoalInBase))
            bc = tf.TransformBroadcaster()
            now = rospy.Time.now()
            bc.sendTransform(hand_tf[0], hand_tf[1], now, "hand_goal",
                             "armbase")
            self.global_data.listener.waitForTransform("armbase",
                                                       "arm_goal", now,
                                                       rospy.Duration(1.0))
            armtip_robot = self.global_data.listener.lookupTransform(
                'armbase', 'arm_goal', now)
            armGoalInBase = pm.toMatrix(pm.fromTf(armtip_robot))
        except Exception, e:
            # Any TF failure is fatal for this adjustment.
            handle_fatal('convert_cartesian_world_goal failed: error %s.' % e)
コード例 #10
0
 def pub_moveit_grasp_tf(self, object_name, moveit_grasp_msg):
     # type: (str, moveit_msgs.msg.Grasp) -> ()
     """Broadcast the MoveIt grasp pose as the end-effector frame of object_name."""
     trans, rot = pm.toTf(pm.fromMsg(moveit_grasp_msg.grasp_pose.pose))
     self.tf_broadcaster.sendTransform(trans, rot,
                                       rospy.Time.now(),
                                       "/moveit_end_effector_frame",
                                       object_name)
コード例 #11
0
def publish_grasp_tran(tran):
    """@brief - Add a grasp relative to the world origin to the scene graph

    @param tran - the grasp transform (4x4 matrix) in world coordinates
    """
    broadcaster = tf.TransformBroadcaster()
    trans, rot = pm.toTf(pm.fromMatrix(tran))
    broadcaster.sendTransform(trans, rot, rospy.Time.now(),
                              "/hand_goal_pose", "/world")
コード例 #12
0
 def publish_tf(self):
     """Broadcast a TF frame for every stored waypoint in every group."""
     for group in self.waypoints.keys():
         poses, names = self.get_waypoints_srv.get_waypoints(
             group, [], self.waypoints[group], self.waypoint_names[group])
         for pose, frame_name in zip(poses, names):
             trans, rot = pm.toTf(pm.fromMsg(pose))
             self.broadcaster.sendTransform(
                 trans, rot, rospy.Time.now(), frame_name, self.world)
コード例 #13
0
def broadcast_table_frame(args):
    """Broadcast the detected table pose as '/table' at three nearby stamps."""
    if table_pose is None:
        return
    with tf_lock:
        trans, rot = pm.toTf(pm.fromMsg(table_pose.pose))
        # Publish at now and +/-5 ms so lookups near 'now' always resolve.
        for offset in (0.0, 0.005, -0.005):
            br.sendTransform(trans, rot,
                             rospy.Time.now() + rospy.Duration(offset),
                             "/table", table_pose.header.frame_id)
コード例 #14
0
    def test_current_pose(self):
        """ Test the current pose method

            1. create and publish tf
            2. get current pose with base and ref
            3. move roboter to ref
            4. get current pose with ref and base
            5. analyse positions
        """
        rospy.loginfo("test_current_pose")

        self.robot.move(Ptp(goal=[0, 0, 0, 0, 0, 0]))

        # get and transform test data
        ref_frame = self.test_data.get_pose("Blend_1_Mid", PLANNING_GROUP_NAME)
        ref_frame_tf = pm.toTf(pm.fromMsg(ref_frame))

        rospy.sleep(rospy.Duration.from_sec(0.5))

        # init; identity fallbacks in case the TF setup below fails
        tcp_ref = [[0, 0, 0], [0, 0, 0, 1]]
        tcp_base = [[0, 0, 0], [0, 0, 0, 1]]
        time_tf = rospy.Time.now()
        base_frame = self.robot._robot_commander.get_planning_frame()

        try:
            # Publish the reference frame, then read the TCP pose expressed
            # in both the reference frame and the base frame.
            self.tf.sendTransform(ref_frame_tf[0], ref_frame_tf[1],
                                  time_tf, "ref_frame", base_frame)

            self.tf_listener.waitForTransform(base_frame, "ref_frame", time_tf, rospy.Duration(2, 0))
            rospy.sleep(rospy.Duration.from_sec(0.1))
            tcp_ref = self.tf_listener.lookupTransform("ref_frame", "prbt_tcp", time_tf)
            tcp_base = self.tf_listener.lookupTransform(base_frame, "prbt_tcp", time_tf)
        except Exception as e:
            print e
            rospy.logerr("Failed to setup transforms for test!")

        tcp_ref_msg = pm.toMsg(pm.fromTf(tcp_ref))
        tcp_base_msg = pm.toMsg(pm.fromTf(tcp_base))

        # read current pose, move robot and do it again
        start_bf = self.robot.get_current_pose()
        start_rf = self.robot.get_current_pose(base="ref_frame")

        self.robot.move(Ptp(goal=ref_frame))

        # resending tf (otherwise transform pose could read old transform to keep time close together.
        time_tf = rospy.Time.now()
        self.tf.sendTransform(ref_frame_tf[0], ref_frame_tf[1], time_tf, "ref_frame", base_frame)
        self.tf_listener.waitForTransform(base_frame, "ref_frame", time_tf, rospy.Duration(1, 0))

        ref_frame_bf = self.robot.get_current_pose()
        ref_frame_rf = self.robot.get_current_pose(base="ref_frame")

        # After moving to ref_frame, the pose relative to ref_frame should be
        # identity (zero translation, identity quaternion).
        self._analyze_pose(tcp_base_msg, start_bf)
        self._analyze_pose(tcp_ref_msg, start_rf)
        self._analyze_pose(ref_frame, ref_frame_bf)
        self._analyze_pose(Pose(orientation=Quaternion(0, 0, 0, 1)), ref_frame_rf)
コード例 #15
0
 def publish_target_pointcloud(self):
     """Publish the point cloud of the nearest model in the '/object' frame."""
     self.model_list.sort(key=ModelManager.get_dist)
     nearest = self.model_list[0]
     trans, rot = pm.toTf(pm.fromMsg(nearest.pose))
     nearest.bc.sendTransform(trans, rot, rospy.Time.now(), "/object",
                              "/camera_rgb_optical_frame")
     nearest.listener.waitForTransform("/world", "/object", rospy.Time(0),
                                       rospy.Duration(5))
     nearest.point_cloud_data.header.frame_id = "/object"
     pub = rospy.Publisher('/object_pointcloud', sensor_msgs.msg.PointCloud2)
     pub.publish(nearest.point_cloud_data)
コード例 #16
0
 def add_tf(self, frame_name, ps):
     """Register a new named frame from a stamped pose.

     :type frame_name: str
     :type ps: geometry_msgs.msg.PoseStamped
     :param frame_name: the name of the new frame
     :param ps: the pose stamped for the new frame
     :return: None
     """
     trans, rot = pm.toTf(pm.fromMsg(ps.pose))
     self._tfs[frame_name] = (ps, trans, rot)
コード例 #17
0
    def test_roundtrip(self):
        """A Frame must survive conversion to msg/matrix/tf and back."""
        original = Frame()
        for forward, backward in ((pm.toMsg, pm.fromMsg),
                                  (pm.toMatrix, pm.fromMatrix),
                                  (pm.toTf, pm.fromTf)):
            restored = backward(forward(original))
            self.assertEqual(repr(original), repr(restored))
コード例 #18
0
ファイル: posemath.py プロジェクト: MMwaveOctomap/OctomapROS
    def test_roundtrip(self):
        """Each conversion pair (msg, matrix, tf) must invert cleanly."""
        frame = Frame()

        via_msg = pm.fromMsg(pm.toMsg(frame))
        self.assertEqual(repr(frame), repr(via_msg))

        via_matrix = pm.fromMatrix(pm.toMatrix(frame))
        self.assertEqual(repr(frame), repr(via_matrix))

        via_tf = pm.fromTf(pm.toTf(frame))
        self.assertEqual(repr(frame), repr(via_tf))
コード例 #19
0
def broadcast_table_frame(args):
    """Broadcast the table pose as '/table' at three closely spaced stamps."""
    if table_pose is None:
        return
    with tf_lock:
        trans, rot = pm.toTf(pm.fromMsg(table_pose.pose))
        parent = table_pose.header.frame_id
        # Stamp at now and +/-5 ms so nearby lookups always resolve.
        for delta in (0.0, 0.005, -0.005):
            br.sendTransform(trans, rot,
                             rospy.Time.now() + rospy.Duration(delta),
                             '/table', parent)
コード例 #20
0
ファイル: hydra_teleop.py プロジェクト: aagudel/lcsr_barrett
    def joy_cb(self, msg):
        """Handle one hydra joystick message.

        Toggles per-finger/spread/all enable flags on button rising edges,
        runs hand and cartesian command handlers while the deadman trigger
        is held, and broadcasts the current command frame plus a
        TelemanipCommand message.
        """
        # Convenience
        side = self.side
        b = msg.buttons
        lb = self.last_buttons

        # Rate-limit hand commands to 10 Hz.
        if (rospy.Time.now() - self.last_hand_cmd) < rospy.Duration(0.1):
            return

        # Update enabled fingers
        # XOR flips each flag exactly on a rising edge of its button.
        self.move_f[0] =   self.move_f[0] ^   (b[self.B1[side]] and not lb[self.B1[side]])
        self.move_f[1] =   self.move_f[1] ^   (b[self.B2[side]] and not lb[self.B2[side]])
        self.move_f[2] =   self.move_f[2] ^   (b[self.B3[side]] and not lb[self.B3[side]])
        self.move_spread = self.move_spread ^ (b[self.B4[side]] and not lb[self.B4[side]])
        self.move_all =    self.move_all ^    (b[self.B_CENTER[side]] and not lb[self.B_CENTER[side]])

        # Check if the deadman is engaged
        if msg.axes[self.DEADMAN[side]] < 0.01:
            # Deadman released: send one zero-velocity command if we were
            # previously engaged, then reset the engagement state.
            if self.deadman_engaged:
                self.hand_cmd.mode = [oro_barrett_msgs.msg.BHandCmd.MODE_VELOCITY] * 4
                self.hand_cmd.cmd = [0.0, 0.0, 0.0, 0.0]
                self.hand_pub.publish(self.hand_cmd)
                self.last_hand_cmd = rospy.Time.now()
            self.deadman_engaged = False
            self.deadman_max = 0.0
        else:
            self.handle_hand_cmd(msg)
            self.handle_cart_cmd(msg)

        self.last_buttons = msg.buttons
        self.last_axes = msg.axes

        # Broadcast the command if it's defined
        if self.cmd_frame:
            # Broadcast command frame
            tform = toTf(self.cmd_frame)
            self.broadcaster.sendTransform(tform[0], tform[1], rospy.Time.now(), self.cmd_frame_id, 'world')

            # Broadcast telemanip command
            telemanip_cmd = TelemanipCommand()
            telemanip_cmd.header.frame_id = 'world'
            telemanip_cmd.header.stamp = rospy.Time.now()
            telemanip_cmd.posetwist.pose = toMsg(self.cmd_frame)
            telemanip_cmd.resync_pose = msg.buttons[self.THUMB_CLICK[side]] == 1
            telemanip_cmd.deadman_engaged = self.deadman_engaged
            # Map thumb-stick Y to a three-level grasp opening.
            if msg.axes[self.THUMB_Y[side]] > 0.5:
                telemanip_cmd.grasp_opening = 0.0
            elif msg.axes[self.THUMB_Y[side]] < -0.5:
                telemanip_cmd.grasp_opening = 1.0
            else:
                telemanip_cmd.grasp_opening = 0.5
            telemanip_cmd.estop = False  # TODO: add master estop control
            self.telemanip_cmd_pub.publish(telemanip_cmd)
コード例 #21
0
    def compute_net_transform(self, base_pose):
        """Compose base_pose with the current base_footprint->odom_combined
        transform and store the result (as a tf tuple) in
        self.broad.transform.

        @param base_pose: a PyKDL Frame to left-multiply onto the transform
        """
        base_to_odom = self.tfl.lookupTransform("/base_footprint", "/odom_combined", rospy.Time())

        bto = pm.fromTf(base_to_odom)

        net_t = base_pose * bto

        print "Setting "
        print pm.toMsg(net_t)
        self.broad.transform = pm.toTf(net_t)
コード例 #22
0
ファイル: tune_camera.py プロジェクト: zchen24/ros_example
 def thread_update_pose(self):
     """Continuously broadcast self.frame as cam_frame relative to ref_frame.

     Runs at 30 Hz until ROS shutdown. Intended to run in its own thread.
     """
     rate = rospy.Rate(30)
     while not rospy.is_shutdown():
         # Renamed from 'tf' -- that name shadowed the imported tf module.
         cam_tf = pm.toTf(self.frame)
         self.br.sendTransform(cam_tf[0],
                               cam_tf[1],
                               rospy.Time.now(),
                               self.cam_frame,
                               self.ref_frame)
         rate.sleep()
コード例 #23
0
 def publish_target_pointcloud(self):
     """Broadcast the closest model as '/object' and publish its cloud."""
     self.model_list.sort(key=ModelManager.get_dist)
     target = self.model_list[0]
     trans, rot = pm.toTf(pm.fromMsg(target.pose))
     target.bc.sendTransform(trans, rot, rospy.Time.now(), "/object",
                             "/camera_rgb_optical_frame")
     target.listener.waitForTransform("/world", "/object", rospy.Time(0),
                                      rospy.Duration(5))
     target.point_cloud_data.header.frame_id = "/object"
     cloud_pub = rospy.Publisher('/object_pointcloud',
                                 sensor_msgs.msg.PointCloud2)
     cloud_pub.publish(target.point_cloud_data)
コード例 #24
0
    def publish_poses(self):
        """Continuously broadcast every bin pose, corrected by the current
        gripper_base->gripper transform of the arm it belongs to.

        Retries once per second until both gripper transforms are available,
        then loops at self._rate until ROS shutdown.
        """

        while not rospy.is_shutdown():

            try:
                # Refresh the gripper_base->gripper frames for both arms.
                self._F_left_gripper_base_left_gripper = posemath.fromTf(
                    self._tf_listener.lookupTransform('left_gripper_base',
                                                      'left_gripper',
                                                      rospy.Time(0)))

                self._F_right_gripper_base_right_gripper = posemath.fromTf(
                    self._tf_listener.lookupTransform('right_gripper_base',
                                                      'right_gripper',
                                                      rospy.Time(0)))

            except (tf.LookupException, tf.ConnectivityException,
                    tf.ExtrapolationException):
                rospy.logwarn(
                    '[bin_pose_publisher]: haven\'t received gripper frames')
                rospy.sleep(1.0)
                continue

            for pose in self._bin_poses_tf:
                F = posemath.fromTf(
                    (tuple(pose['translation']), tuple(pose['rotation'])))
                # left arm
                if pose['bin_pose_id'].rfind('left_arm') > 0:
                    translation, rotation = posemath.toTf(
                        F * self._F_left_gripper_base_left_gripper)
                # right arm
                else:
                    translation, rotation = posemath.toTf(
                        F * self._F_right_gripper_base_right_gripper)

                self._tf_broadcaster.sendTransform(translation=translation,
                                                   rotation=rotation,
                                                   time=rospy.Time.now(),
                                                   child=pose['bin_pose_id'],
                                                   parent=pose['bin'])
            self._rate.sleep()
コード例 #25
0
    def moveRelative(self, arm, x=0.0, y=0.0, z=0.0, rx=0.0, ry=0.0, rz=0.0, frameId=None,
                     interpolate=False, stepSize=0.02, execute=True, mustReachGoal=True,
                     controlMode='position_w_id', startConfig=None):
        """ Move the end effector relative to its current pose.
            @param arm - which arm (left_arm or right_arm)
            @param x,y,z,rx,ry,rz - position and orientation (in roll, pitch, yaw)
            @param frameId - relative to which frame to (None means EEF)
            @param interpolate - if true, we attempt to move on a straight line in cartesian space
                                Note that this means, we can not use the roadmap.
            @param stepSize - step size for straight line motion. The smaller the more accurate the
                            line at the cost of slower execution.
            @param mustReachGoal - indicates whether MoveResult.Success should only be returned
                    if the goal pose can be reached. If false, the longest valid trajectory towards
                    the goal is returned. Note this option is only considered if interpolate=True.
            @param startConfig - start configuration from where to plan from. If set, execute is set to
                    False, if not set the current configuration is used as start configuration.
            @return (MoveResult, trajectory list or None)
        """
        with self._lock:
            self.checkProcessState()
            if frameId is None:
                frameId = self.getGripperFrameName(arm)
            if startConfig is None:
                startConfig = self.getCurrentConfiguration(arm)

            globalPlanner = self.getGlobalPlanner(arm)
            # express eef pose in frameId
            # FK gives [x, y, z, qx, qy, qz, qw]; build a KDL frame from it.
            eefPoseInBase = globalPlanner.computeForwardPositionKinematics(startConfig)
            F_base_eef = kdl.Frame(kdl.Rotation.Quaternion(*eefPoseInBase[3:7]), kdl.Vector(*eefPoseInBase[0:3]))
            F_base_frame = posemath.fromMsg(self.getTransform(frameId, 'base').pose)
            # EEF expressed in frameId, then pre-multiplied by the requested
            # relative displacement.
            F_frame_eef = F_base_frame.Inverse() * F_base_eef
            F_rel = kdl.Frame(kdl.Rotation.RPY(rx, ry, rz), kdl.Vector(x, y, z))
            F_frame_eef = F_rel * F_frame_eef

            (position, rotation) = posemath.toTf(F_frame_eef)
            goalPose = utils.ArgumentsCollector.createROSPose(position=position, rotation=rotation, frame_id=frameId)
            convertedPoses = self._convertPosesToFrame([goalPose], goalFrame='base')
            if interpolate:
                # Straight-line cartesian motion (no roadmap).
                self.moveHeadAside(arm)
                moveGroup = self.getMoveGroup(arm)
                (planningSuccess, trajList) = globalPlanner.planCartesianPath(startConfig=startConfig,
                                                                              goalPose=convertedPoses[0],
                                                                              stepSize=stepSize,
                                                                              linkName=self.getGripperFrameName(arm),
                                                                              mustReachGoal=mustReachGoal)
                if not planningSuccess:
                    return (MoveResult.PlanFail, None)
                elif not execute:
                    return (MoveResult.Success, trajList)
                else:
                    return self._executeTrajectoryList(trajList, moveGroup, controlMode=controlMode)
            return self.moveToPoses([goalPose], arm, startConfig=startConfig)
コード例 #26
0
def publish_true_object_tran(height):
    """@brief - A debugging function to visualize the origin of the objects
    from openrave in the scene graph.

    @param height - the offset from the table to the origin of the object.
    @return [] on failure, None on success.
    """
    try:
        # Pure z-translation lifting the object origin above the table.
        object_height_offset = eye(4)
        object_height_offset[2,3] = height
        object_height_tf = pm.toTf(pm.fromMatrix(object_height_offset))
        bc = tf.TransformBroadcaster()
        bc.sendTransform(object_height_tf[0], object_height_tf[1], rospy.Time.now(), "/true_obj_pose", "/object")
    except Exception,e:
        rospy.logwarn("failed to publish true object height: %s"%e)
        return []
コード例 #27
0
ファイル: TFCache.py プロジェクト: ARC-2017/team_cvap
    def lookupTransform(self, frame_id, child_frame_id, time):
        '''
        Return (position, orientation) of child_frame_id relative to
        frame_id. Frames cached in self._tfs are composed locally; gripper
        frames are resolved via self._get_gripper_frame. Unknown frames fall
        back to the live TF listener.

        @param frame_id: name of the parent (reference) frame
        @param child_frame_id: name of the child frame
        @param time: unused; the TF fallback queries the latest transform
        @return: (position, orientation) tuple as produced by posemath.toTf
        '''
        def _drop_base_suffix(name):
            # BUGFIX: str.strip('_base') removes *characters* from the set
            # {_, b, a, s, e} at both ends, not the suffix, and can mangle
            # frame names; remove the literal suffix instead.
            if name.endswith('_base'):
                return name[:-len('_base')]
            return name

        with self._lock:

            f_base_child = self._tfs.get(child_frame_id, None)
            f_base_parent = self._tfs.get(frame_id, None)

            gripper = _drop_base_suffix(child_frame_id)
            if child_frame_id == 'left_gripper' or child_frame_id == 'right_gripper':
                f_base_gripper = self._get_gripper_frame(gripper)
                f_base_child = copy.deepcopy(f_base_gripper)

            elif child_frame_id == 'left_gripper_base' or child_frame_id == 'right_gripper_base':

                f_base_gripper = self._get_gripper_frame(gripper)
                f_gripper_base_gripper = self._tfs.get(gripper)
                f_base_child = copy.deepcopy(f_base_gripper *
                                             f_gripper_base_gripper.Inverse())

            gripper = _drop_base_suffix(frame_id)
            if frame_id == 'left_gripper' or frame_id == 'right_gripper':
                f_base_gripper = self._get_gripper_frame(gripper)
                f_base_parent = copy.deepcopy(f_base_gripper)

            elif frame_id == 'left_gripper_base' or frame_id == 'right_gripper_base':
                f_base_gripper = self._get_gripper_frame(gripper)
                f_gripper_base_gripper = self._tfs.get(gripper)
                f_base_parent = copy.deepcopy(f_base_gripper *
                                              f_gripper_base_gripper.Inverse())

            # 'is None' instead of '== None' (identity test on None).
            if f_base_child is None or f_base_parent is None:
                try:
                    position, orientation = self._tf_listener.lookupTransform(
                        frame_id, child_frame_id, rospy.Time(0))

                except (tf.LookupException, tf.ConnectivityException,
                        tf.ExtrapolationException):
                    raise tf.LookupException

                return (position, orientation)

            return posemath.toTf(f_base_parent.Inverse() * f_base_child)
コード例 #28
0
def publish_true_object_tran(height):
    """@brief - A debugging function to visualize the origin of the objects
    from openrave in the scene graph.

    @param height - the offset from the table to the origin of the object.
    @return [] on failure, None on success.
    """
    try:
        # Pure z-translation lifting the object origin above the table.
        object_height_offset = eye(4)
        object_height_offset[2, 3] = height
        object_height_tf = pm.toTf(pm.fromMatrix(object_height_offset))
        bc = tf.TransformBroadcaster()
        bc.sendTransform(object_height_tf[0], object_height_tf[1],
                         rospy.Time.now(), "/true_obj_pose", "/object")
    except Exception, e:
        rospy.logwarn("failed to publish true object height: %s" % e)
        return []
コード例 #29
0
    def lookupTransform(self, frame_id, child_frame_id, time):
        """
        Return (position, orientation) of child_frame_id relative to
        frame_id. Cached frames in self._tfs are composed locally; gripper
        frames come from self._get_gripper_frame; anything unknown falls
        back to the live TF listener.

        @param frame_id: name of the parent (reference) frame
        @param child_frame_id: name of the child frame
        @param time: unused; the TF fallback queries the latest transform
        @return: (position, orientation) tuple as produced by posemath.toTf
        """
        def _drop_base_suffix(name):
            # BUGFIX: str.strip("_base") strips *characters* from both ends,
            # not the suffix, and can corrupt frame names; remove the literal
            # suffix instead.
            if name.endswith("_base"):
                return name[:-len("_base")]
            return name

        with self._lock:

            f_base_child = self._tfs.get(child_frame_id, None)
            f_base_parent = self._tfs.get(frame_id, None)

            gripper = _drop_base_suffix(child_frame_id)
            if child_frame_id == "left_gripper" or child_frame_id == "right_gripper":
                f_base_gripper = self._get_gripper_frame(gripper)
                f_base_child = copy.deepcopy(f_base_gripper)

            elif child_frame_id == "left_gripper_base" or child_frame_id == "right_gripper_base":

                f_base_gripper = self._get_gripper_frame(gripper)
                f_gripper_base_gripper = self._tfs.get(gripper)
                f_base_child = copy.deepcopy(f_base_gripper * f_gripper_base_gripper.Inverse())

            gripper = _drop_base_suffix(frame_id)
            if frame_id == "left_gripper" or frame_id == "right_gripper":
                f_base_gripper = self._get_gripper_frame(gripper)
                f_base_parent = copy.deepcopy(f_base_gripper)

            elif frame_id == "left_gripper_base" or frame_id == "right_gripper_base":
                f_base_gripper = self._get_gripper_frame(gripper)
                f_gripper_base_gripper = self._tfs.get(gripper)
                f_base_parent = copy.deepcopy(f_base_gripper * f_gripper_base_gripper.Inverse())

            # 'is None' instead of '== None' (identity test on None).
            if f_base_child is None or f_base_parent is None:
                try:
                    position, orientation = self._tf_listener.lookupTransform(frame_id, child_frame_id, rospy.Time(0))

                except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
                    raise tf.LookupException

                return (position, orientation)

            return posemath.toTf(f_base_parent.Inverse() * f_base_child)
コード例 #30
0
def convert_cartesian_relative_goal(global_data, move_tran):
    """@brief - Convert a position relative to the hand's current coordinate system to the robot's base coordinate system.
    @param move_tran - the relative position in the hand's coordinate system.

    Publishes the relative goal as the TF frame "hand_goal" under "hand",
    then reads back the corresponding "arm_goal" pose in "armbase"
    coordinates and returns it as a 4x4 homogeneous matrix.
    NOTE(review): relies on an external node deriving "arm_goal" from the
    broadcast "hand_goal" frame -- TODO confirm that node is running.
    """
    try:
        # Broadcast the relative goal under the hand frame.
        move_tf = pm.toTf(pm.fromMatrix(move_tran))
        print move_tf
        bc = tf.TransformBroadcaster()
        now = rospy.Time.now()
        bc.sendTransform(move_tf[0], move_tf[1], now, "hand_goal", "hand")
        #listener = tf.TransformListener()
        # Wait until the goal is resolvable in the arm base frame, then read it.
        global_data.listener.waitForTransform("armbase", "arm_goal", now, rospy.Duration(1.0))
        armtip_robot = global_data.listener.lookupTransform('armbase','arm_goal', now)

        #arm_robot = global_data.listener.lookupTransform('armbase', 'arm', now)
        return pm.toMatrix(pm.fromTf(armtip_robot))
    except Exception, e:
        # Any failure here is fatal for the grasp pipeline.
        handle_fatal('convert_cartesian_relative_goal failed: error %s.'%e)
コード例 #31
0
    def __init__(self,
                 marker="ar_marker_0",
                 camera_link="camera_link",
                 ee_marker="ee_marker",
                 base_link="base_link",
                 listener=None,
                 broadcaster=None):
        """Hand-eye calibration helper.

        Sets up TF access, the 'calibrate' service, and the librarian
        storage services, then restores a previously stored calibration
        (saved under the id "<base_link>_to_<camera_link>") if one exists.

        @param marker: TF frame of the marker observed by the camera
        @param camera_link: TF frame of the camera being calibrated
        @param ee_marker: TF frame of the marker on the end effector
        @param base_link: TF frame of the robot base
        @param listener: optional shared tf.TransformListener
        @param broadcaster: optional shared tf.TransformBroadcaster
        """

        # TF listener/broadcaster are expensive; reuse the caller's if given.
        if broadcaster is None:
            self.broadcaster = tf.TransformBroadcaster()
        else:
            self.broadcaster = broadcaster

        if listener is None:
            self.listener = tf.TransformListener()
        else:
            self.listener = listener

        self.ee_marker = ee_marker
        self.base_link = base_link
        self.marker = marker
        self.camera_link = camera_link

        # Storage key identifying this base->camera pair.
        self.id = "%s_to_%s" % (base_link, camera_link)

        # Identity transform until a calibration is loaded or computed.
        self.trans = (0, 0, 0)
        self.rot = (0, 0, 0, 1)

        self.srv = rospy.Service('calibrate', Empty, self.calibrate)
        self.add_type_service = rospy.ServiceProxy('/librarian/add_type',
                                                   librarian_msgs.srv.AddType)
        self.save_service = rospy.ServiceProxy('/librarian/save',
                                               librarian_msgs.srv.Save)
        self.load_service = rospy.ServiceProxy('/librarian/load',
                                               librarian_msgs.srv.Load)

        # Block until the librarian is up, then register our data type.
        rospy.wait_for_service('/librarian/add_type')
        self.add_type_service(type="handeye_calibration")

        # status.result > 0 indicates a stored calibration exists; restore it.
        resp = self.load_service(type="handeye_calibration", id=self.id)
        print resp
        if resp.status.result > 0:
            (self.trans, self.rot) = pm.toTf(pm.fromMsg(yaml.load(resp.text)))
コード例 #32
0
def convert_cartesian_world_goal(global_data, world_tran):
    """@brief - Get armtip goal pose in the arms coordinate system from hand world goal pose.
    This is useful to convert between a hand goal in the world coordinate system to a cartesian goal that
    can be sent directly to the staubli controller. 
    
    @param world_tf - tf of hand goal pose in world coordinates
    @return 4x4 homogeneous matrix of the armtip goal in the "armbase" frame
    NOTE(review): relies on an external node deriving "arm_goal" from the
    broadcast "hand_goal" frame -- TODO confirm.
    """
    try:    
        # Publish the hand goal as a TF frame under "world".
        world_tf = pm.toTf(pm.fromMatrix(world_tran))
        bc = tf.TransformBroadcaster()
        now = rospy.Time.now()
        bc.sendTransform(world_tf[0], world_tf[1], now, "hand_goal", "world")
        #listener = tf.TransformListener()
        # Read back the matching arm tip goal in the arm base frame.
        global_data.listener.waitForTransform("armbase", "arm_goal", now, rospy.Duration(1.0))
        armtip_robot = global_data.listener.lookupTransform('armbase', 'arm_goal', now)
        
        return pm.toMatrix(pm.fromTf(armtip_robot))
    except Exception, e:
        # Any failure here is fatal for the grasp pipeline.
        handle_fatal('convert_cartesian_world_goal failed: error %s.'%e) 
コード例 #33
0
ファイル: costar_arm.py プロジェクト: jacknlliu/costar_stack
    def query_cb(self, req):
        """Service callback: look up waypoints matching the query and expose
        the selected match as the reserved query TF frame.

        Repeating an identical query advances through the match list,
        wrapping at the end; a new query restarts at the first match.
        Returns "SUCCESS" or "FAILURE".
        """
        matches = self.query(req)
        if not matches:
            return "FAILURE"

        # set param and publish TF frame appropriately under reserved name
        if self.last_query is not None and self.last_query == req:
            # Same query again: step to the next match, wrapping around.
            self.last_query_idx = (self.last_query_idx + 1) % len(matches)
        else:
            self.last_query = req
            self.last_query_idx = 0

        # Each entry is (distance, transform, object transform, name).
        dist, T, object_t, name = matches[self.last_query_idx]
        self.query_frame = pm.toTf(T)

        return "SUCCESS"
コード例 #34
0
    def computeAndSendRotations(self,obj_id,dim,num_symmetries,rotation,kdlRot):
            """Broadcast TF frames for the rotational symmetries of an object.

            @param obj_id: TF name of the object (used as the parent frame)
            @param dim: axis label used when naming the child frames
            @param num_symmetries: number of discrete symmetries about the axis
            @param rotation: explicit step angle; 0 means divide 2*pi evenly
            @param kdlRot: kdl.Rotation factory (e.g. kdl.Rotation.RotZ)
            @return: list of the child frame names that were published
            """
            names = []

            angle = rotation
            if angle == 0 and num_symmetries > 0:
                angle = 2*np.pi / float(num_symmetries)
            # NOTE(review): the loop starts at i=2, so the first two
            # symmetries are never published -- i=0 is presumably the
            # identity, but skipping i=1 as well looks suspicious; TODO
            # confirm the intended range.
            for i in range(2,num_symmetries):
                # rotate the object
                R = kdl.Frame(kdlRot(angle*(i)))
                # Only the quaternion is used below; the translation of a
                # pure-rotation frame is (0,0,0) anyway.
                (trans,rot) = pm.toTf(R)
                obj_symmetry_name = "%s/%s%d"%(obj_id,dim,i)
                self.broadcaster.sendTransform(
                        (0,0,0),
                        rot,
                        rospy.Time.now(),
                        obj_symmetry_name,
                        obj_id)
                names.append(obj_symmetry_name)

            return names
コード例 #35
0
def pubTF(pose, parentName, childName):
    """Broadcast *pose* (a PyKDL frame) over TF as childName under parentName."""
    xyz, quat = posemath.toTf(pose)

    msg = geometry_msgs.msg.TransformStamped()
    msg.header.stamp = rospy.Time.now()
    msg.header.frame_id = parentName
    msg.child_frame_id = childName

    # Unpack (x, y, z) translation and (x, y, z, w) quaternion into the message.
    msg.transform.translation.x, msg.transform.translation.y, msg.transform.translation.z = xyz
    (msg.transform.rotation.x, msg.transform.rotation.y,
     msg.transform.rotation.z, msg.transform.rotation.w) = quat

    tf2_ros.TransformBroadcaster().sendTransform(msg)
コード例 #36
0
    def calibrate(self, req):
        """Service callback: compute T = T_(camera<-marker) * T_(base<-ee_marker)^-1,
        cache it as (self.trans, self.rot) and persist it via the librarian
        save service as a YAML-dumped pose message.  Returns EmptyResponse.
        NOTE(review): the product assumes a fixed relationship between the
        two marker observations -- verify against the physical rig.
        """

        print "%s <- %s, %s <- %s" % (self.camera_link, self.marker,
                                      self.base_link, self.ee_marker)

        # Current TF snapshots: camera<-marker and base<-ee_marker.
        T_cm = pm.fromTf(self.lookup(self.camera_link, self.marker))
        T_be = pm.fromTf(self.lookup(self.base_link, self.ee_marker))

        print T_cm
        print T_be

        T = T_cm * T_be.Inverse()
        (self.trans, self.rot) = pm.toTf(T)

        print(self.trans, self.rot)

        # Persist so the calibration survives restarts (reloaded in __init__).
        print self.save_service(type="handeye_calibration",
                                id=self.id,
                                text=yaml.dump(pm.toMsg(T)))

        return EmptyResponse()
コード例 #37
0
def convert_cartesian_world_goal(global_data, world_tran):
    """@brief - Get armtip goal pose in the arms coordinate system from hand world goal pose.
    This is useful to convert between a hand goal in the world coordinate system to a cartesian goal that
    can be sent directly to the staubli controller. 
    
    @param world_tf - tf of hand goal pose in world coordinates
    @return 4x4 homogeneous matrix of the armtip goal in the "armbase" frame
    NOTE(review): relies on an external node deriving "arm_goal" from the
    broadcast "hand_goal" frame -- TODO confirm.
    """
    try:
        # Publish the hand goal as a TF frame under "world".
        world_tf = pm.toTf(pm.fromMatrix(world_tran))
        bc = tf.TransformBroadcaster()
        now = rospy.Time.now()
        bc.sendTransform(world_tf[0], world_tf[1], now, "hand_goal", "world")
        #listener = tf.TransformListener()
        # Read back the matching arm tip goal in the arm base frame.
        global_data.listener.waitForTransform("armbase", "arm_goal", now,
                                              rospy.Duration(1.0))
        armtip_robot = global_data.listener.lookupTransform(
            'armbase', 'arm_goal', now)

        return pm.toMatrix(pm.fromTf(armtip_robot))
    except Exception, e:
        # Any failure here is fatal for the grasp pipeline.
        handle_fatal('convert_cartesian_world_goal failed: error %s.' % e)
コード例 #38
0
def convert_cartesian_relative_goal(global_data, move_tran):
    """@brief - Convert a position relative to the hand's current coordinate system to the robot's base coordinate system.
    @param move_tran - the relative position in the hand's coordinate system.

    Publishes the relative goal as the TF frame "hand_goal" under "hand",
    then reads back the corresponding "arm_goal" pose in "armbase"
    coordinates and returns it as a 4x4 homogeneous matrix.
    NOTE(review): relies on an external node deriving "arm_goal" from the
    broadcast "hand_goal" frame -- TODO confirm.
    """
    try:
        # Broadcast the relative goal under the hand frame.
        move_tf = pm.toTf(pm.fromMatrix(move_tran))
        print move_tf
        bc = tf.TransformBroadcaster()
        now = rospy.Time.now()
        bc.sendTransform(move_tf[0], move_tf[1], now, "hand_goal", "hand")
        #listener = tf.TransformListener()
        # Wait until the goal is resolvable in the arm base frame, then read it.
        global_data.listener.waitForTransform("armbase", "arm_goal", now,
                                              rospy.Duration(1.0))
        armtip_robot = global_data.listener.lookupTransform(
            'armbase', 'arm_goal', now)

        #arm_robot = global_data.listener.lookupTransform('armbase', 'arm', now)
        return pm.toMatrix(pm.fromTf(armtip_robot))
    except Exception, e:
        # Any failure here is fatal for the grasp pipeline.
        handle_fatal('convert_cartesian_relative_goal failed: error %s.' % e)
コード例 #39
0
ファイル: wam_teleop.py プロジェクト: jhu-lcsr/lcsr_barrett
    def publish_cmd(self, resync_pose, augmenter_engaged, grasp_opening, time):
        """Broadcast the current command frame over TF and publish the
        matching TelemanipCommand message.  No-op until a command frame
        has been established."""
        if not self.cmd_frame:
            return

        # TF: command frame expressed in the world frame.
        trans, rot = toTf(self.cmd_frame)
        self.broadcaster.sendTransform(trans, rot, time, self.cmd_frame_id, 'world')

        # Telemanip command mirroring the same pose plus the teleop state.
        cmd = TelemanipCommand()
        cmd.header.frame_id = 'world'
        cmd.header.stamp = time
        cmd.posetwist.pose = toMsg(self.cmd_frame)
        cmd.resync_pose = resync_pose
        cmd.deadman_engaged = self.deadman_engaged
        cmd.augmenter_engaged = augmenter_engaged
        cmd.grasp_opening = grasp_opening
        cmd.estop = False  # TODO: add master estop control
        self.telemanip_cmd_pub.publish(cmd)
コード例 #40
0
ファイル: calibration.py プロジェクト: cpaxton/costar_stack
    def __init__(self,
            marker="ar_marker_0",
            camera_link="camera_link",
            ee_marker="ee_marker",
            base_link="base_link",
            listener = None, 
            broadcaster = None):
        """Hand-eye calibration helper.

        Sets up TF access, the 'calibrate' service, and the librarian
        storage services, then restores a previously stored calibration
        (saved under the id "<base_link>_to_<camera_link>") if one exists.

        @param marker: TF frame of the marker observed by the camera
        @param camera_link: TF frame of the camera being calibrated
        @param ee_marker: TF frame of the marker on the end effector
        @param base_link: TF frame of the robot base
        @param listener: optional shared tf.TransformListener
        @param broadcaster: optional shared tf.TransformBroadcaster
        """
        
        # TF listener/broadcaster are expensive; reuse the caller's if given.
        if broadcaster is None:
            self.broadcaster = tf.TransformBroadcaster()
        else:
            self.broadcaster = broadcaster

        if listener is None:
            self.listener = tf.TransformListener()
        else:
            self.listener = listener
        
        self.ee_marker = ee_marker
        self.base_link = base_link
        self.marker = marker
        self.camera_link = camera_link

        # Storage key identifying this base->camera pair.
        self.id = "%s_to_%s"%(base_link,camera_link)

        # Identity transform until a calibration is loaded or computed.
        self.trans = (0,0,0)
        self.rot = (0,0,0,1)
    
        self.srv = rospy.Service('calibrate',Empty,self.calibrate)
        self.add_type_service = rospy.ServiceProxy('/librarian/add_type', librarian_msgs.srv.AddType)
        self.save_service = rospy.ServiceProxy('/librarian/save', librarian_msgs.srv.Save)
        self.load_service = rospy.ServiceProxy('/librarian/load', librarian_msgs.srv.Load)

        # Block until the librarian is up, then register our data type.
        rospy.wait_for_service('/librarian/add_type')
        self.add_type_service(type="handeye_calibration")

        # status.result > 0 indicates a stored calibration exists; restore it.
        resp = self.load_service(type="handeye_calibration", id=self.id)
        print resp
        if resp.status.result > 0:
          (self.trans, self.rot) = pm.toTf(pm.fromMsg(yaml.load(resp.text)))
コード例 #41
0
ファイル: costar_arm.py プロジェクト: jacknlliu/costar_stack
    def handle_tick(self):
        """Periodic tick: republish helper TF frames (endpoint alias,
        base_link alias, the current query frame, optional debug backoff
        waypoints) and refresh the cached table pose."""
        br = tf.TransformBroadcaster()
        # Identity transforms aliasing the configured links to fixed names.
        br.sendTransform((0,0,0),tf.transformations.quaternion_from_euler(0,0,0),rospy.Time.now(),"/endpoint",self.end_link)
        if not self.base_link == "base_link":
            br.sendTransform((0,0,0),tf.transformations.quaternion_from_euler(0,0,0),rospy.Time.now(),"/base_link",self.base_link)

        # Latest query result, published under its reserved frame name.
        if self.query_frame is not None:
            trans, rot = self.query_frame
            br.sendTransform(trans, rot, rospy.Time.now(),self.query_frame_name,self.world)

        # In debug mode also visualize the backoff waypoints.
        if self.debug:
            if self.backoff_waypoints is not None:
                for tf_name, transform in self.backoff_waypoints:
                    trans, rot = pm.toTf(transform)
                    br.sendTransform(trans, rot, rospy.Time.now(),tf_name,self.world)

        # Refresh the cached table pose; extrapolation errors are only logged
        # so a transient TF hiccup does not kill the tick loop.
        if self.table_frame is not None:
            self.listener.waitForTransform(self.world, self.table_frame, rospy.Time(), rospy.Duration(1.0))
            try:
                self.table_pose = self.listener.lookupTransform(self.world, self.table_frame, rospy.Time(0))
            except tf.ExtrapolationException, e:
                rospy.logwarn(str(e))
コード例 #42
0
ファイル: costar_arm.py プロジェクト: cpaxton/costar_stack
    def handle_tick(self):
        """Periodic tick: republish helper TF frames (endpoint alias,
        base_link alias, the current query frame, optional debug backoff
        waypoints) and refresh the cached table pose."""
        br = tf.TransformBroadcaster()
        # Identity transforms aliasing the configured links to fixed names.
        br.sendTransform((0,0,0),tf.transformations.quaternion_from_euler(0,0,0),rospy.Time.now(),"/endpoint",self.end_link)
        if not self.base_link == "base_link":
            br.sendTransform((0,0,0),tf.transformations.quaternion_from_euler(0,0,0),rospy.Time.now(),"/base_link",self.base_link)

        # Latest query result, published under its reserved frame name.
        if self.query_frame is not None:
            trans, rot = self.query_frame
            br.sendTransform(trans, rot, rospy.Time.now(),self.query_frame_name,self.world)

        # In debug mode also visualize the backoff waypoints.
        if self.debug:
            if self.backoff_waypoints is not None:
                for tf_name, transform in self.backoff_waypoints:
                    trans, rot = pm.toTf(transform)
                    br.sendTransform(trans, rot, rospy.Time.now(),tf_name,self.world)

        # Refresh the cached table pose; extrapolation errors are only logged
        # so a transient TF hiccup does not kill the tick loop.
        if self.table_frame is not None:
            self.listener.waitForTransform(self.world, self.table_frame, rospy.Time(), rospy.Duration(1.0))
            try:
                self.table_pose = self.listener.lookupTransform(self.world, self.table_frame, rospy.Time(0))
            except tf.ExtrapolationException, e:
                rospy.logwarn(str(e))
コード例 #43
0
ファイル: costar_arm.py プロジェクト: cpaxton/costar_stack
    def query_cb(self,req):
        '''
        Wrapper for the QUERY service. This gets the list of waypoints matching some criteria.
        Publishes the selected waypoint as the reserved query frame and
        returns "SUCCESS", or "FAILURE" when nothing matches.
        '''
        list_of_waypoints = self.query(req)
        if len(list_of_waypoints) == 0:
            return "FAILURE"
        else:
            # set param and publish TF frame appropriately under reserved name
            # Repeating an identical query cycles through the matches,
            # wrapping back to the first one at the end of the list.
            if self.last_query is not None and self.last_query == req:
                # return idx + 1
                self.last_query_idx += 1
                if self.last_query_idx >= len(list_of_waypoints):
                    self.last_query_idx = 0
            else:
                self.last_query = req
                self.last_query_idx = 0

            # Each entry is (distance, transform, object transform, name).
            dist,T,object_t,name = list_of_waypoints[self.last_query_idx]
            self.query_frame = pm.toTf(T)

            return "SUCCESS"
コード例 #44
0
ファイル: calibration.py プロジェクト: cpaxton/costar_stack
    def calibrate(self,req):
        """Service callback: compute T = T_(camera<-marker) * T_(base<-ee_marker)^-1,
        cache it as (self.trans, self.rot) and persist it via the librarian
        save service as a YAML-dumped pose message.  Returns EmptyResponse.
        NOTE(review): the product assumes a fixed relationship between the
        two marker observations -- verify against the physical rig.
        """

        print "%s <- %s, %s <- %s"%(self.camera_link,self.marker,self.base_link,self.ee_marker)

        # Current TF snapshots: camera<-marker and base<-ee_marker.
        T_cm = pm.fromTf(self.lookup(self.camera_link,self.marker))
        T_be = pm.fromTf(self.lookup(self.base_link,self.ee_marker))

        print T_cm
        print T_be
        
        T = T_cm * T_be.Inverse()
        (self.trans, self.rot) = pm.toTf(T)

        print (self.trans, self.rot)


        # Persist so the calibration survives restarts (reloaded in __init__).
        print self.save_service(
            type="handeye_calibration",
            id=self.id,
            text=yaml.dump(pm.toMsg(T)))

        return EmptyResponse()
コード例 #45
0
    def odom_cb(self, msg):
        """Odometry callback: combine the world->base pose from *msg* with
        the odom->base pose from TF into a world->odom correction, and
        broadcast it as the 'odom' frame under 'world'."""
        stamp = msg.header.stamp

        # odom -> base_footprint from the TF tree at the message time.
        try:
            self._tfl.waitForTransform('base_footprint', 'odom', stamp, timeout=self._timeout)
            odom_pos, odom_quat = self._tfl.lookupTransform('odom', 'base_footprint', stamp)#'base_footprint', 'odom', stamp)
        except tf.Exception as e:
            rospy.loginfo('w2o tf error  : {}'.format(e))
            return

        T_odom_base = tx.concatenate_matrices(
                tx.translation_matrix(odom_pos),
                tx.quaternion_matrix(odom_quat))

        # world -> base comes from the odometry message itself.
        T_world_base = pm.toMatrix(pm.fromMsg(msg.pose.pose))

        # world -> odom = (world -> base) * (odom -> base)^-1
        T_world_odom = tx.concatenate_matrices(
                T_world_base, tx.inverse_matrix(T_odom_base))
        pos, quat = pm.toTf(pm.fromMatrix(T_world_odom))

        self._tfb.sendTransform(pos, quat, stamp, 'odom', 'world')
コード例 #46
0
    def process_grasp_msg(self, grasp_msg):
        """@brief - Attempt to make the adjustment specified by grasp_msg
        1. plan a path to the a place 15cm back from the new pose

        Opens the hand, refreshes the robot state, then converts the hand
        goal pose from grasp_msg into the corresponding armtip goal in the
        "armbase" frame via TF.
        NOTE(review): relies on an external node deriving "arm_goal" from
        the broadcast "hand_goal" frame -- TODO confirm.
        """
        print 'regular move:'
        print grasp_msg
        #release the object
        tp.open_barrett()

        #get the robot's current location and calculate the absolute tran after the adjustment
        #update the robot status
        tp.update_robot(self.global_data.or_env.GetRobots()[0])
        handGoalInBase = pm.toMatrix(pm.fromMsg(grasp_msg.final_grasp_pose))

        try:
            # Publish the hand goal under the arm base frame, then read back
            # the matching arm tip goal from TF.
            hand_tf = pm.toTf(pm.fromMatrix(handGoalInBase))
            bc = tf.TransformBroadcaster()
            now = rospy.Time.now()
            bc.sendTransform(hand_tf[0], hand_tf[1], now, "hand_goal", "armbase")
            self.global_data.listener.waitForTransform("armbase", "arm_goal", now, rospy.Duration(1.0))
            armtip_robot = self.global_data.listener.lookupTransform('armbase', 'arm_goal', now)
            armGoalInBase = pm.toMatrix(pm.fromTf(armtip_robot))
        except Exception, e:
            handle_fatal('convert_cartesian_world_goal failed: error %s.'%e) 
コード例 #47
0
 def __call__(self):
     """Publish this object's pose over TF, parented to the camera RGB optical frame."""
     trans, rot = pm.toTf(pm.fromMsg(self.pose))
     self.bc.sendTransform(trans, rot, rospy.Time.now(), self.object_name, "/camera_rgb_optical_frame")
コード例 #48
0
ファイル: loc_eval.py プロジェクト: yycho0108/udacity_bot
def roll_rospose(x):
    """Collapse a Pose message *x* to a planar [x, y, yaw] triple."""
    (px, py, _), quat = pm.toTf(pm.fromMsg(x))
    yaw = tx.euler_from_quaternion(quat)[-1]
    return [px, py, yaw]
コード例 #49
0
ファイル: hydra_teleop.py プロジェクト: cpaxton/lcsr_barrett
    def joy_cb(self, msg):
        """Joystick callback: toggle per-finger enables on button edges,
        issue hand/cartesian commands while the deadman is held, update the
        marker colors, and publish the telemanip command frame."""
        # Convenience
        side = self.side
        b = msg.buttons
        lb = self.last_buttons

        # Drop messages arriving within 30 ms of the last hand command.
        if (rospy.Time.now() - self.last_hand_cmd) < rospy.Duration(0.03):
            return

        # Update enabled fingers
        # (XOR toggles each flag on a rising edge of its button)
        self.move_f[0] =   self.move_f[0] ^   (b[self.B1[side]] and not lb[self.B1[side]])
        self.move_f[1] =   self.move_f[1] ^   (b[self.B2[side]] and not lb[self.B2[side]])
        self.move_f[2] =   self.move_f[2] ^   (b[self.B3[side]] and not lb[self.B3[side]])
        self.move_spread = self.move_spread ^ (b[self.B4[side]] and not lb[self.B4[side]])
        self.move_all =    self.move_all ^    (b[self.B_CENTER[side]] and not lb[self.B_CENTER[side]])

        # Check if the deadman is engaged
        if msg.axes[self.DEADMAN[side]] < 0.01:
            # Deadman released: send one zero-velocity command to stop the hand.
            if self.deadman_engaged:
                self.hand_cmd.mode = [oro_barrett_msgs.msg.BHandCmd.MODE_VELOCITY] * 4
                self.hand_cmd.cmd = [0.0, 0.0, 0.0, 0.0]
                self.hand_pub.publish(self.hand_cmd)
                self.last_hand_cmd = rospy.Time.now()
            self.deadman_engaged = False
            self.deadman_max = 0.0
        else:
            self.handle_hand_cmd(msg)
            self.handle_cart_cmd(msg)

        self.last_buttons = msg.buttons
        self.last_axes = msg.axes

        # republish markers
        # Color encodes state: orange = finger disabled, red = move_all off,
        # green = deadman held, gray = idle.
        for i,m in enumerate(self.master_target_markers.markers):
            m.header.stamp = rospy.Time.now()

            if (i <3 and not self.move_f[i]) or (i==3 and not self.move_spread):
                m.color = self.color_orange
            elif not self.move_all:
                m.color = self.color_red
            else:
                if self.deadman_engaged:
                    m.color = self.color_green
                else:
                    m.color = self.color_gray

            if i < 3:
                if i != 2:
                    # Markers 0 and 1 are mirrored via the sign s.
                    p = m.pose.position
                    s = (-1.0 if i == 0 else 1.0)
                    p.x, p.y, p.z = finger_point(
                        self.hand_position[3] * s, 
                        l = 0.025 * s)
        self.marker_pub.publish(self.master_target_markers)

        # Broadcast the command if it's defined
        if self.cmd_frame:
            # Broadcast command frame
            tform = toTf(self.cmd_frame)
            self.broadcaster.sendTransform(tform[0], tform[1], rospy.Time.now(), self.cmd_frame_id, 'world')

            # Broadcast telemanip command
            telemanip_cmd = TelemanipCommand()
            telemanip_cmd.header.frame_id = 'world'
            telemanip_cmd.header.stamp = rospy.Time.now()
            telemanip_cmd.posetwist.pose = toMsg(self.cmd_frame)
            telemanip_cmd.resync_pose = msg.buttons[self.THUMB_CLICK[side]] == 1
            telemanip_cmd.deadman_engaged = self.deadman_engaged
            # Thumb stick Y maps to grasp opening: >0.5 -> 0.0,
            # < -0.5 -> 1.0, otherwise 0.5.
            if msg.axes[self.THUMB_Y[side]] > 0.5:
                telemanip_cmd.grasp_opening = 0.0
            elif msg.axes[self.THUMB_Y[side]] < -0.5:
                telemanip_cmd.grasp_opening = 1.0
            else:
                telemanip_cmd.grasp_opening = 0.5
            telemanip_cmd.estop = False  # TODO: add master estop control
            self.telemanip_cmd_pub.publish(telemanip_cmd)
コード例 #50
0
 def broadcast_tf(self):
     """Publish this object's stamped pose as a TF frame named after the object."""
     trans, rot = pm.toTf(pm.fromMsg(self.pose.pose))
     self.bc.sendTransform(trans, rot, rospy.Time.now(), self.object_name, self.pose.header.frame_id)
コード例 #51
0
 def pub_graspit_grasp_tf(self, object_name, graspit_grasp_msg):
     # type: (str, graspit_msgs.msg.Grasp) -> ()
     """Broadcast the final grasp pose of *graspit_grasp_msg* over TF as
     /grasp_approach_tran, parented to *object_name*."""
     # Is this needed anymore?
     trans, rot = pm.toTf(pm.fromMsg(graspit_grasp_msg.final_grasp_pose))
     self.tf_broadcaster.sendTransform(trans, rot, rospy.Time.now(), "/grasp_approach_tran", object_name)
コード例 #52
0
ファイル: grasping_lib.py プロジェクト: fevb/team_cvap
    

    r = kdl.Rotation()
    for i in range(3):
        for j in range(3):
            if i == 0:
                r[j,i] = x[j]
            elif i == 1:
                r[j,i] = y[j]
            else:
                r[j,i] = z[j]

    t = F_obj_frame.p - F_bin_frame.p

    f = kdl.Frame(r, t)
    graspFrame = posemath.toTf(f)

    return graspFrame

def publishGraspingFrame(bin_name, obj_name):
    
    br = tf.TransformBroadcaster()
    listener = tf.TransformListener()
    r = rospy.Rate(10)

    while not rospy.is_shutdown():
        graspFrame = getGraspFrame(listener, bin_name, obj_name)

        br.sendTransform(graspFrame[0], graspFrame[1], \
                 rospy.Time.now(), \
                 "/grasp_frame",  \
コード例 #53
0
ファイル: calibrate.py プロジェクト: gt-ros-pkg/hrl
    listener = tf.TransformListener()
    trans_list = []
    rot_list = []

    rate = rospy.Rate(10.0)
    while not rospy.is_shutdown():
        try:
            (trans, rot) = listener.lookupTransform(sys.argv[1], sys.argv[2], rospy.Time(0))
            (trans2, rot2) = listener.lookupTransform(sys.argv[3], sys.argv[4], rospy.Time(0))
            msg1 = Pose()
            msg2 = Pose()
            msg1.position.x, msg1.position.y, msg1.position.z = trans[0], trans[1], trans[2]
            msg2.position.x, msg2.position.y, msg2.position.z = trans2[0], trans2[1], trans2[2]
            msg1.orientation.x, msg1.orientation.y, msg1.orientation.z, msg1.orientation.w = rot[0], rot[1], rot[2], rot[3]
            msg2.orientation.x, msg2.orientation.y, msg2.orientation.z, msg2.orientation.w = rot2[0], rot2[1], rot2[2], rot2[3]
            (trans_tot, rot_tot) = pm.toTf(pm.fromMsg(msg1)*pm.fromMsg(msg2))
            print "translation: ", trans_tot, ", rotation :", rot_tot
            trans_list.append(trans_tot)
            rot_list.append(rot_tot)
            
        except (tf.LookupException, tf.ConnectivityException):
            continue
        rate.sleep()
    trans_str = str(np.median(np.array(trans_list), axis = 0))
    rot_str = str(np.median(np.array(rot_list), axis = 0))
    print "median of translation :", trans_str
    print "median of rotation :", rot_str
    
    try:
        os.remove('../../launch/kinect_playpen_to_torso_lift_link.launch')
    except:
コード例 #54
0
ファイル: MoveItInterface.py プロジェクト: ARC-2017/team_cvap
    def moveRelative(self,
                     arm,
                     x=0.0,
                     y=0.0,
                     z=0.0,
                     rx=0.0,
                     ry=0.0,
                     rz=0.0,
                     frameId=None,
                     interpolate=False,
                     stepSize=0.02,
                     execute=True,
                     mustReachGoal=True,
                     controlMode='position_w_id',
                     startConfig=None):
        """ Move the end effector relative to its current pose.
            @param arm - which arm (left_arm or right_arm)
            @param x,y,z,rx,ry,rz - position and orientation (in roll, pitch, yaw)
            @param frameId - relative to which frame to (None means EEF)
            @param interpolate - if true, we attempt to move on a straight line in cartesian space
                                Note that this means, we can not use the roadmap.
            @param stepSize - step size for straight line motion. The smaller the more accurate the
                            line at the cost of slower execution.
            @param mustReachGoal - indicates whether MoveResult.Success should only be returned
                    if the goal pose can be reached. If false, the longest valid trajectory towards
                    the goal is returned. Note this option is only considered if interpolate=True.
            @param startConfig - start configuration from where to plan from. If set, execute is set to
                    False, if not set the current configuration is used as start configuration.
            @param execute - whether to execute the planned trajectory (only
                    honored in the interpolate=True branch below).
            @param controlMode - controller mode forwarded to trajectory execution.

            NOTE(review): despite the startConfig note above, this code does
            not actually force execute=False when startConfig is given.
        """
        with self._lock:
            self.checkProcessState()
            # Default to moving relative to the gripper (EEF) frame.
            if frameId is None:
                frameId = self.getGripperFrameName(arm)
            if startConfig is None:
                startConfig = self.getCurrentConfiguration(arm)

            globalPlanner = self.getGlobalPlanner(arm)
            # express eef pose in frameId
            # eefPoseInBase is [x, y, z, qx, qy, qz, qw]: position then quaternion.
            eefPoseInBase = globalPlanner.computeForwardPositionKinematics(
                startConfig)
            F_base_eef = kdl.Frame(
                kdl.Rotation.Quaternion(*eefPoseInBase[3:7]),
                kdl.Vector(*eefPoseInBase[0:3]))
            F_base_frame = posemath.fromMsg(
                self.getTransform(frameId, 'base').pose)
            F_frame_eef = F_base_frame.Inverse() * F_base_eef
            # Apply the requested relative offset in the reference frame.
            F_rel = kdl.Frame(kdl.Rotation.RPY(rx, ry, rz),
                              kdl.Vector(x, y, z))
            F_frame_eef = F_rel * F_frame_eef

            # Goal pose expressed in frameId, then converted to base for planning.
            (position, rotation) = posemath.toTf(F_frame_eef)
            goalPose = utils.ArgumentsCollector.createROSPose(
                position=position, rotation=rotation, frame_id=frameId)
            convertedPoses = self._convertPosesToFrame([goalPose],
                                                       goalFrame='base')
            if interpolate:
                # Straight-line cartesian motion: use the cartesian planner
                # instead of the roadmap.
                self.moveHeadAside(arm)
                moveGroup = self.getMoveGroup(arm)
                (planningSuccess, trajList) = globalPlanner.planCartesianPath(
                    startConfig=startConfig,
                    goalPose=convertedPoses[0],
                    stepSize=stepSize,
                    linkName=self.getGripperFrameName(arm),
                    mustReachGoal=mustReachGoal)
                if not planningSuccess:
                    return (MoveResult.PlanFail, None)
                elif not execute:
                    return (MoveResult.Success, trajList)
                else:
                    return self._executeTrajectoryList(trajList,
                                                       moveGroup,
                                                       controlMode=controlMode)
            # Otherwise plan with the regular motion planner.
            return self.moveToPoses([goalPose], arm, startConfig=startConfig)
コード例 #55
0
 def pub_moveit_grasp_tf(self, object_name, moveit_grasp_msg):
     # type: (str, moveit_msgs.msg.Grasp) -> ()
     """Broadcast the grasp pose of *moveit_grasp_msg* over TF as
     /moveit_end_effector_frame, parented to *object_name*."""
     # Is this needed anymore?
     trans, rot = pm.toTf(pm.fromMsg(moveit_grasp_msg.grasp_pose.pose))
     self.tf_broadcaster.sendTransform(trans, rot, rospy.Time.now(), "/moveit_end_effector_frame", object_name)
コード例 #56
0
 def __call__(self):
     """Publish this object's pose over TF, parented to the camera RGB optical frame."""
     translation, orientation = pm.toTf(pm.fromMsg(self.pose))
     self.bc.sendTransform(translation, orientation,
                           rospy.Time.now(),
                           self.object_name,
                           "/camera_rgb_optical_frame")
コード例 #57
0
    def publish_cartesian_waypoints(self):
        """Broadcast every named cartesian waypoint as a TF frame under the world frame."""
        for name, pose in self.cart_waypoints:
            trans, rot = pm.toTf(pm.fromMsg(pose))
            self.broadcaster.sendTransform(
                trans, rot, rospy.Time.now(), name, self.world)
コード例 #58
0
    rospy.wait_for_service('getCartesian')
    try:
        get_cartesian = rospy.ServiceProxy( 'getCartesian', staubli_tx60.srv.GetCartesian )
        resp1 = get_cartesian()
        # make srv x y z  rx ry rz euler angle representation into pose message
        pos = geometry_msgs.msg.Point( x = resp1.x, y = resp1.y, z = resp1.z )
        q = tf.transformations.quaternion_from_euler( resp1.rx , resp1.ry, resp1.rz ,'rxyz' )
        quat =  geometry_msgs.msg.Quaternion( x = q[0], y = q[1], z = q[2], w = q[3] )
        return geometry_msgs.msg.Pose( position = pos, orientation = quat )
    except rospy.ServiceException, e:
        print "Service call failed: %s"%e
        return []

def get_staubli_cartesian_as_tran():
    """Return the current Staubli end-effector pose as a 4x4 homogeneous matrix."""
    return pm.toMatrix(pm.fromMsg(get_staubli_cartesian_as_pose_msg()))


rospy.init_node('arm_position_publisher')
tf_publisher = tf.TransformBroadcaster()
loop = rospy.Rate(3.0)
# Republish the Staubli arm pose over TF at ~3 Hz until shutdown.
while not rospy.is_shutdown():
    trans, rot = pm.toTf(pm.fromMatrix(get_staubli_cartesian_as_tran()))
    tf_publisher.sendTransform(trans, rot, rospy.Time.now(), "/arm", "/armbase")
    loop.sleep()


 
  
コード例 #59
0
ファイル: republisher.py プロジェクト: cpaxton/costar_stack
@author: Chris
"""

import rospy
import tf
import tf_conversions.posemath as pm
from geometry_msgs.msg import PoseArray

last_pose = None

def callback(msg):
    """PoseArray subscriber callback: cache the first pose of a non-empty
    message in the module-level ``last_pose`` (read by the republish loop).
    Empty arrays are ignored so the last good pose keeps being published."""
    if msg.poses:  # truthiness instead of len(...) > 0
        global last_pose
        last_pose = msg.poses[0]
        
if __name__ == '__main__':
    rospy.init_node('posearray_tf_republisher')
    
    # callback caches the newest first pose from 'poses_out'; the loop
    # below republishes it over TF at 10 Hz.
    sub = rospy.Subscriber('poses_out',PoseArray,callback)

    br = tf.TransformBroadcaster()
    try:
        rate = rospy.Rate(10)    
        while not rospy.is_shutdown():
            # Nothing to publish until the first PoseArray arrives.
            if not (last_pose is None):
                (trans, rot) = pm.toTf(pm.fromMsg(last_pose))
                br.sendTransform(trans, rot, rospy.Time.now(), 'drill', 'world')
            rate.sleep()
    except rospy.ROSInterruptException, e:
        print e