    def getTransformations(self):
        pose = self.listener.lookupTransform('torso_base', self.prefix+'_arm_7_link', rospy.Time(0))
        self.T_B_W = pm.fromTf(pose)

        pose_tool = self.listener.lookupTransform(self.prefix+'_arm_7_link', self.prefix+'_arm_tool', rospy.Time(0))
        self.T_W_T = pm.fromTf(pose_tool)
        self.T_T_W = self.T_W_T.Inverse()
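
The T_B_W / T_W_T naming above reads "pose of W in B" / "pose of T in W"; chaining matching indices composes transforms. A minimal PyKDL sketch of the convention, with made-up numbers:

import PyKDL as kdl

# T_B_W: wrist pose in the base frame; T_W_T: tool pose in the wrist frame.
T_B_W = kdl.Frame(kdl.Rotation.RPY(0.0, 0.0, 1.57), kdl.Vector(0.4, 0.0, 1.2))
T_W_T = kdl.Frame(kdl.Rotation.Identity(), kdl.Vector(0.0, 0.0, 0.2))

# Chaining matching indices yields the tool pose in the base frame.
T_B_T = T_B_W * T_W_T

# Inverting a transform swaps the indices, as in T_T_W above.
T_T_W = T_W_T.Inverse()
print(T_B_T.p)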
    def _collect_static_tfs(self):

        while len(self._missing_frames) > 0 and not rospy.is_shutdown():
            self._lock.acquire()
            # iterate over a copy: frames are removed from the list as they resolve
            for frame in list(self._missing_frames):

                for trial in range(10):
                    if frame.rfind("gripper") > 0:
                        try:
                            (trans, rot) = self._tf_listener.lookupTransform(frame + "_base", frame, rospy.Time(0))
                            self._missing_frames.remove(frame)
                            self._tfs[frame] = posemath.fromTf((trans, rot))
                            break

                        except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
                            rospy.sleep(0.5)

                    else:
                        try:
                            (trans, rot) = self._tf_listener.lookupTransform("/base", frame, rospy.Time(0))
                            self._missing_frames.remove(frame)
                            self._tfs[frame] = posemath.fromTf((trans, rot))
                            break

                        except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
                            rospy.sleep(0.5)

            self._lock.release()
            self._rate.sleep()
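
The per-frame retry loop above is a pattern that recurs throughout these examples. A generic sketch of the same idea (the helper name lookup_frame_with_retry is hypothetical; it assumes a running tf.TransformListener):

import rospy
import tf
from tf_conversions import posemath

def lookup_frame_with_retry(listener, target, source, tries=10, delay=0.5):
    """Return the source frame's pose in target as a kdl.Frame, or None."""
    for _ in range(tries):
        try:
            trans, rot = listener.lookupTransform(target, source, rospy.Time(0))
            return posemath.fromTf((trans, rot))
        except (tf.LookupException, tf.ConnectivityException,
                tf.ExtrapolationException):
            rospy.sleep(delay)
    return None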
Example 4
    def handle_cart_cmd(self, scaling):
        """Update the commanded end-effector frame from the current input device pose."""

        try:
            # Get the current position of the hydra
            input_frame = fromTf(self.listener.lookupTransform(
                self.input_ref_frame_id,
                self.input_frame_id,
                rospy.Time(0)))

            # Get the current position of the end-effector
            tip_frame = fromTf(self.listener.lookupTransform(
                self.input_ref_frame_id,
                self.tip_link,
                rospy.Time(0)))

            # Capture the current position if we're starting to move
            if not self.deadman_engaged:
                self.deadman_engaged = True
                self.cmd_origin = input_frame
                self.tip_origin = tip_frame
            else:
                self.cart_scaling = scaling
                # Update commanded TF frame
                cmd_twist = kdl.diff(self.cmd_origin, input_frame)
                cmd_twist.vel = self.scale*self.cart_scaling*cmd_twist.vel
                #rospy.logwarn(cmd_twist)
                self.cmd_frame = kdl.addDelta(self.tip_origin, cmd_twist)

        except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException) as ex:
            rospy.logwarn(str(ex))
Example 5
    def handle_cart_cmd(self, msg):
        side = self.side
        try:
            # Get the current position of the hydra
            hydra_frame = fromTf(self.listener.lookupTransform(
                '/hydra_base',
                '/hydra_'+self.SIDE_STR[side]+'_grab',
                rospy.Time(0)))

            # Get the current position of the end-effector
            tip_frame = fromTf(self.listener.lookupTransform(
                '/world',
                self.tip_link,
                rospy.Time(0)))

            # Capture the current position if we're starting to move
            if not self.deadman_engaged:
                self.deadman_engaged = True
                self.cmd_origin = hydra_frame
                self.tip_origin = tip_frame
            else:
                self.deadman_max = max(self.deadman_max, msg.axes[self.DEADMAN[side]])
                # Update commanded TF frame
                cmd_twist = kdl.diff(self.cmd_origin, hydra_frame)
                cmd_twist.vel = self.scale*self.deadman_max*cmd_twist.vel
                self.cmd_frame = kdl.addDelta(self.tip_origin, cmd_twist)

        except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException) as ex:
            rospy.logwarn(str(ex))
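
Both handle_cart_cmd variants use the same incremental-teleop idiom: capture the input and tip poses when the deadman engages, then command the captured tip pose displaced by the scaled twist between the captured and current input poses. A standalone sketch with made-up poses:

import PyKDL as kdl

cmd_origin = kdl.Frame(kdl.Vector(0.0, 0.0, 0.0))    # input pose at engage time
input_frame = kdl.Frame(kdl.Vector(0.1, 0.0, 0.0))   # input pose now
tip_origin = kdl.Frame(kdl.Vector(0.5, 0.2, 0.3))    # end-effector at engage time

scale = 0.5
cmd_twist = kdl.diff(cmd_origin, input_frame)  # input displacement as a twist
cmd_twist.vel = scale * cmd_twist.vel          # scale only the linear part
cmd_frame = kdl.addDelta(tip_origin, cmd_twist)
print(cmd_frame.p)  # [0.55, 0.2, 0.3]: the tip origin shifted by half the input motion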
    def test_current_pose(self):
        """ Test the current pose method

            1. create and publish tf
            2. get current pose with base and ref
            3. move roboter to ref
            4. get current pose with ref and base
            5. analyse positions
        """
        rospy.loginfo("test_current_pose")

        self.robot.move(Ptp(goal=[0, 0, 0, 0, 0, 0]))

        # get and transform test data
        ref_frame = self.test_data.get_pose("Blend_1_Mid", PLANNING_GROUP_NAME)
        ref_frame_tf = pm.toTf(pm.fromMsg(ref_frame))

        rospy.sleep(rospy.Duration.from_sec(0.5))

        # init
        tcp_ref = [[0, 0, 0], [0, 0, 0, 1]]
        tcp_base = [[0, 0, 0], [0, 0, 0, 1]]
        time_tf = rospy.Time.now()
        base_frame = self.robot._robot_commander.get_planning_frame()

        try:
            self.tf.sendTransform(ref_frame_tf[0], ref_frame_tf[1],
                                  time_tf, "ref_frame", base_frame)

            self.tf_listener.waitForTransform(base_frame, "ref_frame", time_tf, rospy.Duration(2, 0))
            rospy.sleep(rospy.Duration.from_sec(0.1))
            tcp_ref = self.tf_listener.lookupTransform("ref_frame", "prbt_tcp", time_tf)
            tcp_base = self.tf_listener.lookupTransform(base_frame, "prbt_tcp", time_tf)
        except Exception as e:
            print(e)
            rospy.logerr("Failed to setup transforms for test!")

        tcp_ref_msg = pm.toMsg(pm.fromTf(tcp_ref))
        tcp_base_msg = pm.toMsg(pm.fromTf(tcp_base))

        # read current pose, move robot and do it again
        start_bf = self.robot.get_current_pose()
        start_rf = self.robot.get_current_pose(base="ref_frame")

        self.robot.move(Ptp(goal=ref_frame))

        # resend the tf (otherwise the transformed pose could read an old transform); keeps the timestamps close together
        time_tf = rospy.Time.now()
        self.tf.sendTransform(ref_frame_tf[0], ref_frame_tf[1], time_tf, "ref_frame", base_frame)
        self.tf_listener.waitForTransform(base_frame, "ref_frame", time_tf, rospy.Duration(1, 0))

        ref_frame_bf = self.robot.get_current_pose()
        ref_frame_rf = self.robot.get_current_pose(base="ref_frame")

        self._analyze_pose(tcp_base_msg, start_bf)
        self._analyze_pose(tcp_ref_msg, start_rf)
        self._analyze_pose(ref_frame, ref_frame_bf)
        self._analyze_pose(Pose(orientation=Quaternion(0, 0, 0, 1)), ref_frame_rf)
    def getContactPointsInFrame(self, threshold, frame_name):
        self.tactile_lock.acquire()
        latest_index = copy.copy(self.tactile_data_index)
        self.tactile_lock.release()

        tactile_frames_names = [
        '/'+self.prefix+'_HandFingerOneKnuckleThreeLink',
        '/'+self.prefix+'_HandFingerTwoKnuckleThreeLink',
        '/'+self.prefix+'_HandFingerThreeKnuckleThreeLink',
        '/'+self.prefix+'_HandPalmLink']
        contacts = []
        forces = []

        pressure_frames = [self.pressure_frames, self.pressure_frames, self.pressure_frames, self.palm_pressure_frames]
        for tact_idx in range(len(tactile_frames_names)):
            tact_name = tactile_frames_names[tact_idx]
            for buf_prev_idx in range(20, self.tactile_data_len-2):
                buf_idx = latest_index - buf_prev_idx
                if buf_idx < 0:
                    buf_idx += self.tactile_data_len

                time = self.tactile_data[buf_idx][0]
                tactile_data = self.tactile_data[buf_idx][tact_idx+1]

                if self.listener.canTransform('torso_base', tact_name, time) and self.listener.canTransform('torso_base', frame_name, time):
                    T_B_F = pm.fromTf(self.listener.lookupTransform('torso_base', tact_name, time))
                    T_B_R = pm.fromTf(self.listener.lookupTransform('torso_base', frame_name, time))
                    T_R_B = T_B_R.Inverse()
                    for i in range(0, len(pressure_frames[tact_idx])):
                        neighbourhood_ok = True
                        # check the time neighbourhood
                        for buf_neigh in range(-19, 19):
                            buf_neigh_idx = buf_idx+buf_neigh
                            if buf_neigh_idx < 0:
                                buf_neigh_idx += self.tactile_data_len
                            elif buf_neigh_idx >= self.tactile_data_len:
                                buf_neigh_idx -= self.tactile_data_len
                            if self.tactile_data[buf_neigh_idx][tact_idx+1][i] < threshold:
                                neighbourhood_ok = False
                                break
                        if neighbourhood_ok:
                            h1 = self.pressure_info.sensor[tact_idx].halfside1[i]
                            h2 = self.pressure_info.sensor[tact_idx].halfside2[i]
                            contacts.append( (T_R_B * T_B_F * pressure_frames[tact_idx][i], PyKDL.Vector(h1.x, h1.y, h1.z).Norm(), PyKDL.Vector(h2.x, h2.y, h2.z).Norm()) )
                    break

        return contacts
Example 8
    def calib(self):

        self._left_gripper_pose = kdl.Frame()
        self._right_gripper_pose = kdl.Frame()
        self._tote_pose = kdl.Frame()

        while not rospy.is_shutdown():
            try:
                (trans, rot) = self._tf_listener.lookupTransform(
                    '/base', 'left_gripper', rospy.Time(0))
                self._left_gripper_pose = posemath.fromTf((trans, rot))

                (trans, rot) = self._tf_listener.lookupTransform(
                    '/base', 'right_gripper', rospy.Time(0))
                self._right_gripper_pose = posemath.fromTf((trans, rot))
                break
            except:
                rospy.logerr(
                    rospy.get_name() +
                    ': could not get left/right gripper poses for calibration of the tote'
                )
                rospy.sleep(1.0)

        vl = copy.deepcopy(self._left_gripper_pose.p)
        vl[2] = 0.0

        vr = copy.deepcopy(self._right_gripper_pose.p)
        vr[2] = 0.0

        v = vr - vl

        # center on the lower edge of tote closest to the Baxter
        c = vl + 0.5 * v

        # calculate the perpendicular vector
        v.Normalize()
        v_perp = kdl.Rotation.RotZ(0.5 * np.pi) * v

        # calculate the center of the tote
        center = c + self._tote_width * 0.5 * v_perp
        self._tote_pose.p[0] = center[0]
        self._tote_pose.p[1] = center[1]
        self._tote_pose.p[2] = (
            self._left_gripper_pose.p[2] +
            self._right_gripper_pose.p[2]) * 0.5 - self._tote_height

        # calculate the angle of rotation along the Z axis
        rotz_angle = np.arctan(v[0] / -v[1])
        rotz_angle = rotz_angle - np.pi * 0.5
        R = kdl.Rotation.RPY(0, 0, rotz_angle)

        self._tote_pose.M = R

        return True
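
The tote-center construction above is plain 2-D geometry: take the midpoint between the two grippers, then step half the tote width along the perpendicular of the gripper-to-gripper direction. A numeric sketch with made-up gripper positions and an assumed 0.4 m tote width:

import numpy as np
import PyKDL as kdl

vl = kdl.Vector(0.5, 0.3, 0.0)   # left gripper position, z zeroed
vr = kdl.Vector(0.5, -0.3, 0.0)  # right gripper position, z zeroed
tote_width = 0.4

v = vr - vl
c = vl + 0.5 * v                 # midpoint of the near tote edge
v.Normalize()
v_perp = kdl.Rotation.RotZ(0.5 * np.pi) * v
center = c + tote_width * 0.5 * v_perp
print(center)  # [0.7, 0, 0]: half a tote width in from the edge midpoint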
Example 9
def getGraspingAxis(bin_frame, obj_frame, object_name, simtrackUsed):

    '''
    This function assumes all frames are expressed as quaternions in the /base_link frame.
    '''
    if object_name.endswith('_scan'):
        object_name = object_name[:-5]
    dictObj = objDict()
    objSpec = dictObj.getEntry(object_name)

    F_bin_frame = posemath.fromTf(bin_frame)
    F_obj_frame = posemath.fromTf(obj_frame)

    objRed = F_obj_frame.M * kdl.Vector(1.0, 0.0, 0.0)
    objGreen = F_obj_frame.M * kdl.Vector(0.0, 1.0, 0.0)
    objBlue = F_obj_frame.M * kdl.Vector(0.0, 0.0, 1.0)
    
    binRed = F_bin_frame.M * kdl.Vector(1.0, 0.0, 0.0)
    binGreen = F_bin_frame.M * kdl.Vector(0.0, 1.0, 0.0)
    binBlue = F_bin_frame.M * kdl.Vector(0.0, 0.0, 1.0)
    
    rRProj = kdl.dot(objRed , binRed)
    gRProj = kdl.dot(objGreen, binRed)
    bRProj = kdl.dot(objBlue, binRed)

    tmpApproach1 = [abs(rRProj), abs(gRProj), abs(bRProj)]

    if simtrackUsed:
        for i in range(3):
            if i in objSpec.invalidApproachAxis:
                tmpApproach1[i] = 0

    tmpApproach2 = [rRProj, gRProj, bRProj]
    axisApproach = tmpApproach1.index(max(tmpApproach1))


    objAxes = [objRed, objGreen, objBlue]
    tmpGrasp1 = []

    for i in range(3):
        if simtrackUsed:
            if i == axisApproach or i in objSpec.invalidGraspAxis:
                tmpGrasp1.append(0)
                continue
        tmpGrasp1.append(kdl.dot(objAxes[i], binBlue))

    tmpGrasp2 = [abs(t) for t in tmpGrasp1]

    axisGrasp = tmpGrasp2.index(max(tmpGrasp2))


    return axisApproach, tmpApproach2[axisApproach]/abs(tmpApproach2[axisApproach]), axisGrasp, tmpGrasp1[axisGrasp]/abs(tmpGrasp1[axisGrasp])
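
The axis selection above reduces to projecting each object axis onto a bin axis, taking the argmax of the absolute projections, and keeping the sign of the winning projection. A compact sketch of that core step (axis values are hypothetical):

import PyKDL as kdl

bin_red = kdl.Vector(1.0, 0.0, 0.0)
obj_axes = [kdl.Vector(0.0, 0.0, -1.0),  # object red (X) axis in /base_link
            kdl.Vector(0.0, 1.0, 0.0),   # object green (Y) axis
            kdl.Vector(1.0, 0.0, 0.0)]   # object blue (Z) axis

projections = [kdl.dot(a, bin_red) for a in obj_axes]
axis = max(range(3), key=lambda i: abs(projections[i]))
sign = projections[axis] / abs(projections[axis])
print(axis, sign)  # 2 1.0: approach along the object's blue axis, positive sense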
    def within_tolerance(self, tip_pose, target_pose, scale=1.0):
        # compute cartesian command error
        tip_frame = fromTf(tip_pose)
        target_frame = fromTf(target_pose)

        twist_err = kdl.diff(tip_frame, target_frame)
        linear_err = twist_err.vel.Norm()
        angular_err = twist_err.rot.Norm()

        #print("linear: %g, angular %g" % (linear_err, angular_err))

        # decide if planning is needed
        return linear_err < scale*self.linear_err_threshold and angular_err < scale*self.angular_err_threshold
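
A quick numeric check of the error metric used by within_tolerance (the 0.01 m / 0.1 rad thresholds here are assumed values):

import PyKDL as kdl
from tf_conversions import posemath as pm

tip = pm.fromTf(((0.500, 0.0, 0.30), (0, 0, 0, 1)))
target = pm.fromTf(((0.505, 0.0, 0.30), (0, 0, 0, 1)))

twist_err = kdl.diff(tip, target)
print(twist_err.vel.Norm())  # 0.005 m linear error
print(twist_err.rot.Norm())  # 0.0 rad angular error
# With linear_err_threshold = 0.01 and angular_err_threshold = 0.1,
# within_tolerance would return True for this pair.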
Example 13
    def tick(self):
        '''
        Look for all transforms in the list
        '''

        if not self.listener.frameExists(self.root):
            rospy.logwarn("%s was missing" % self.root)
            return

        count = 0
        avg_p = np.zeros(3)
        avg_q = np.zeros(4)
        for name, pose in self.transforms.items():
            if self.listener.frameExists(name):
                t = self.listener.getLatestCommonTime(self.root, name)
                p, q = self.listener.lookupTransform(self.root, name, t)
                F = pm.fromTf((p, q))
                F = F * pose
                p, q = pm.toTf(F)
                avg_p += np.array(p)
                avg_q += np.array(q)
                count += 1
        if count > 0:
            avg_p /= count
            avg_q /= count
            avg_q /= np.linalg.norm(avg_q)

            if self.history_length > 0:
                # use history to smooth predictions
                if len(self.history) >= self.history_length:
                    self.history.pop()
                self.history.appendleft((avg_p, avg_q))
                avg_p = np.zeros(3)
                avg_q = np.zeros(4)
                for p, q in self.history:
                    avg_p += p
                    avg_q += q

                avg_p /= len(self.history)
                avg_q /= len(self.history)
                avg_q /= np.linalg.norm(avg_q)

            if self.offset is not None:
                # apply some offset after we act
                F = pm.fromTf((avg_p, avg_q))
                F = F * self.offset
                avg_p, avg_q = pm.toTf(F)

            self.broadcaster.sendTransform(avg_p, avg_q, rospy.Time.now(),
                                           self.name, self.root)
Example 14
def omni_callback(joint_state):
    global update_pub, last_button_state
    sendTf(marker_tf, '/marker', fixed_frame)
    sendTf(zero_tf, '/base', fixed_frame)
    sendTf(marker_ref, '/marker_ref', fixed_frame)
    sendTf(stylus_ref, '/stylus_ref', fixed_frame)
        
    try:
        update = InteractionCursorUpdate()
        update.pose.header = std_msgs.msg.Header()
        update.pose.header.stamp = rospy.Time.now()
        update.pose.header.frame_id = 'marker_ref'
        if button_clicked and last_button_state == update.GRAB:
            update.button_state = update.KEEP_ALIVE
        elif button_clicked and last_button_state == update.KEEP_ALIVE:
            update.button_state = update.KEEP_ALIVE
        elif button_clicked:
            update.button_state = update.GRAB
        elif last_button_state == update.KEEP_ALIVE:
            update.button_state = update.RELEASE
        else:
            update.button_state = update.NONE
            updateRefs()
        update.key_event = 0

        control_tf = ((0, 0, 0), tf.transformations.quaternion_from_euler(0, 0, control_offset))
        if button_clicked:
            # Get pose corresponding to transform between stylus reference and current position.
            stylus_tf = listener.lookupTransform('/stylus_ref', '/stylus', rospy.Time(0))
            stylus_matrix = pm.toMatrix(pm.fromTf(stylus_tf))
            control_matrix = pm.toMatrix(pm.fromTf(control_tf))
            p = pm.toMsg(pm.fromMatrix(numpy.dot(control_matrix, stylus_matrix)))
        else:
            p = pm.toMsg(pm.fromTf(control_tf))
        
        # Simply scale this up a bit to increase the workspace.
        workspace = 4
        p.position.x = p.position.x * workspace
        p.position.y = p.position.y * workspace
        p.position.z = p.position.z * workspace

        update.pose.pose = p

        last_button_state = update.button_state

        # Publish feedback.
        update_pub.publish(update)
    except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
        rospy.logerr("Couldn't look up transform. These things happen...")
Example 16
    def process_grasp_msg(self, grasp_msg):
        """@brief - Attempt to make the adjustment specified by grasp_msg
        1. plan a path to the a place 15cm back from the new pose
        """
        print 'regular move:'
        print grasp_msg
        #release the object
        tp.open_barrett()

        #get the robot's current location and calculate the absolute tran after the adjustment
        #update the robot status
        tp.update_robot(self.global_data.or_env.GetRobots()[0])
        handGoalInBase = pm.toMatrix(pm.fromMsg(grasp_msg.final_grasp_pose))

        try:
            hand_tf = pm.toTf(pm.fromMatrix(handGoalInBase))
            bc = tf.TransformBroadcaster()
            now = rospy.Time.now()
            bc.sendTransform(hand_tf[0], hand_tf[1], now, "hand_goal",
                             "armbase")
            self.global_data.listener.waitForTransform("armbase",
                                                       "arm_goal", now,
                                                       rospy.Duration(1.0))
            armtip_robot = self.global_data.listener.lookupTransform(
                'armbase', 'arm_goal', now)
            armGoalInBase = pm.toMatrix(pm.fromTf(armtip_robot))
        except Exception as e:
            handle_fatal('convert_cartesian_world_goal failed: error %s.' % e)
Example 17
 def getJarMarkerPose(self):
     try:
         self.listener.waitForTransform('torso_base', 'ar_marker_0', rospy.Time.now(), rospy.Duration(4.0))
         jar_marker = self.listener.lookupTransform('torso_base', 'ar_marker_0', rospy.Time(0))
     except:
         return None
     return pm.fromTf(jar_marker)
    def get_new_waypoint(self,obj):
        try:
            # TODO: make the snap configurable
            #(trans,rot) = self.listener.lookupTransform(obj,self.endpoint,rospy.Time(0))
            (eg_trans,eg_rot) = self.listener.lookupTransform(self.gripper_center,self.endpoint,rospy.Time(0))
            (og_trans,og_rot) = self.listener.lookupTransform(obj,self.gripper_center,rospy.Time(0))

            rospy.logwarn("gripper obj:" + str((og_trans, og_rot)))
            rospy.logwarn("endpoint gripper:" + str((eg_trans, eg_rot)))

            xyz, rpy = [], []
            for dim in og_trans:
                # TODO: test to see if we should re-enable this
                if abs(dim) < 0.0001:
                    xyz.append(0.)
                else:
                    xyz.append(dim)
            Rog = kdl.Rotation.Quaternion(*og_rot)
            for dim in Rog.GetRPY():
                rpy.append(np.round(dim / np.pi * 8.) * np.pi/8.)
            Rog_corrected = kdl.Rotation.RPY(*rpy)
            Vog_corrected = kdl.Vector(*xyz)
            Tog_corrected = kdl.Frame(Rog_corrected, Vog_corrected)

            rospy.logwarn(str(Tog_corrected) + ", " + str(Rog_corrected.GetRPY()))

            Teg = pm.fromTf((eg_trans, eg_rot))
            return pm.toMsg(Tog_corrected * Teg)
        except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
            rospy.logerr('Failed to lookup transform from %s to %s'%(obj,self.endpoint))
        return None
Example 19
    def update(self,maxt=0.01):
        '''
        Look up what the world should look like, based on the provided arguments.
        "objs" should be a list of all possible objects that we might want to
        aggregate. They'll be saved in the "objs" dictionary. It's the role of the
        various options/policies to choose and use them intelligently.

        Parameters:
        -----------
        maxt: max duration used when waiting for transforms

        Returns:
        --------
        n/a

        Access via the self.observation member or the getPose() function.
        '''
        self.observation = {}
        for obj in self.objects_to_track:
            try:
                self.tf_listener.waitForTransform(self.base_link, obj, rospy.Time.now(), rospy.Duration(maxt))
                (trans, rot) = self.tf_listener.lookupTransform(
                    self.base_link, obj, rospy.Time(0.))
                self.observation[obj] = pm.fromTf((trans, rot))

            except (tf.LookupException,
                    tf.ConnectivityException,
                    tf.ExtrapolationException,
                    tf2.TransformException) as e:
                self.observation[obj] = None

        # update each actor's state and timestamp
        for actor in self.actors:
            actor.state = actor.getState()
            actor.state.t = rospy.Time.now().to_sec()
Example 20
    def tick(self):

        #TODO: figure out why this is bad
        #if not self.listener.frameExists(self.root):
        #    rospy.logerr("missing root: %s"%self.root)
        #    return
        self.t = rospy.Time.now()

        for name, urdf in self.urdfs.items():
            if self.objs[name] is None:
                operation = CollisionObject.ADD
            else:
                operation = CollisionObject.MOVE
            if not self.listener.frameExists(name):
                #rospy.logwarn("Frame %s does not exist"%name)
                continue
            try:
                t = self.listener.getLatestCommonTime(self.root, name)
            except tf.Exception as e:
                rospy.logerr(str(e))
                continue
            dt = (self.t - t).to_sec()
            if dt > self.max_dt:
                rospy.logwarn(
                    "object %s has not been observed in the last %f seconds" %
                    (name, dt))
                continue
            pose = self.listener.lookupTransform(self.root, name, t)
            pose = pm.fromTf(pose)
            co = _getCollisionObject(name, urdf, pose, operation)
            co.header.frame_id = self.root
            self.objs[name] = co
            self.co_pub.publish(co)
Example 21
 def transformPose(self, pose, targetFrame):
     with self._lock:
         tTargetFramePoseFrame = None
         for trial in range(10):
             self.checkPreemption()
             try:
                 tTargetFramePoseFrame = self._tfListener.lookupTransform(
                     targetFrame, pose.header.frame_id, rospy.Time(0))
                 break
             except (tf.LookupException, tf.ConnectivityException,
                     tf.ExtrapolationException):
                 rospy.sleep(0.5)
         if tTargetFramePoseFrame is None:
             raise ValueError('Could not transform pose from ' +
                              pose.header.frame_id + ' to ' + targetFrame)
         # Convert poses to KDL
         fTargetFramePoseFrame = posemath.fromTf(tTargetFramePoseFrame)
         fPoseFrameChild = posemath.fromMsg(pose.pose)
         # Compute transformation
         fTargetFrameChild = fTargetFramePoseFrame * fPoseFrameChild
         # Convert back to ROS message
         poseStampedTargetFrameChild = PoseStamped()
         poseStampedTargetFrameChild.pose = posemath.toMsg(
             fTargetFrameChild)
         poseStampedTargetFrameChild.header.stamp = rospy.Time.now()
         poseStampedTargetFrameChild.header.frame_id = targetFrame
         return poseStampedTargetFrameChild
Example 22
def transform_wrist_frame(T_tool, ft, tool_x_offset=0.0):
    '''
    :param T_tool: desired  *_gripper_tool_frame pose w.r.t. some reference frame. Can be of type kdl.Frame or geometry_msgs.Pose.
    :param ft: bool. True if the arm has a force-torque sensor
    :param tool_x_offset: (double) offset in tool length
    :return: T_torso_wrist. geometry_msgs.Pose type. Wrist pose w.r.t. same ref frame as T_tool
    '''

    if ft:
        T_wrist_tool = kdl.Frame(kdl.Rotation.Identity(), kdl.Vector(0.216 + tool_x_offset, 0.0, 0.0))

    else:
        T_wrist_tool = kdl.Frame(kdl.Rotation.Identity(), kdl.Vector(0.180 + tool_x_offset, 0.0, 0.0))

    if type(T_tool) is Pose:

        T_tool_ = posemath.fromMsg(T_tool)

    elif type(T_tool) is tuple and len(T_tool) == 2:
        T_tool_ = posemath.fromTf(T_tool)

    else:
        T_tool_ = T_tool


    T_wrist_ = T_tool_*T_wrist_tool.Inverse()

    T_wrist = posemath.toMsg(T_wrist_)

    return T_wrist
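
A usage sketch for transform_wrist_frame, feeding it a geometry_msgs Pose (the tool pose values are made up; the 0.216 m offset comes from the ft=True branch above):

from geometry_msgs.msg import Pose

tool_pose = Pose()
tool_pose.position.x = 0.6
tool_pose.position.y = 0.1
tool_pose.position.z = 0.4
tool_pose.orientation.w = 1.0

# With a force-torque sensor the wrist sits 0.216 m behind the tool frame
# along x, so the returned wrist pose has position.x == 0.6 - 0.216.
wrist_pose = transform_wrist_frame(tool_pose, ft=True)
print(wrist_pose.position.x)  # 0.384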
Example 23
    def getKDLtf(self, base_frame, frame):
        try:
            pose = self.listener.lookupTransform(base_frame, frame, rospy.Time(0))
        except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
            print("could not transform:", base_frame, frame)
            return None
        return pm.fromTf(pose)
 def get_new_waypoint(self,obj):
     try:
         (trans,rot) = self.listener.lookupTransform(obj,self.endpoint,rospy.Time(0))
         return pm.toMsg(pm.fromTf((trans,rot)))
     except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
         rospy.logerr('Failed to lookup transform from %s to %s'%(obj,self.endpoint))
     return None
Example 25
    def get_waypoints(self,frame_type,predicates,transforms,names):
        self.and_srv.wait_for_service()

        type_predicate = PredicateStatement()
        type_predicate.predicate = frame_type
        type_predicate.params = ['*','','']

        res = self.and_srv([type_predicate]+predicates)

        print "Found matches: " + str(res.matching)
        #print res

        if (not res.found) or len(res.matching) < 1:
          return None

        poses = []
        for tform in transforms:
            poses.append(pm.fromMsg(tform))

        #print poses
        new_poses = []
        new_names = []

        for match in res.matching:
            try:
                (trans,rot) = self.listener.lookupTransform(self.world,match,rospy.Time(0))
                for (pose, name) in zip(poses,names):
                    #resp.waypoints.poses.append(pm.toMsg(pose * pm.fromTf((trans,rot))))
                    new_poses.append(pm.toMsg(pm.fromTf((trans,rot)) * pose))
                    new_names.append(match + "/" + name)
            except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
                rospy.logwarn('Could not find transform from %s to %s!'%(self.world,match))
        
        return (new_poses, new_names)
Example 26
def ground_area(cm, src, tf_x):
    """ Compute 4-corners of ground-plane from camera.

    Args:
        cm(image_geometry.PinholeCameraModel): camera model for image.
        src(np.ndarray): [4,2] array of four corners, formatted (u,v).
            Represents the area in the frame to compute the groundplane projection.
        tf_x(tuple): (txn,qxn) tuple for translation and rotation(quaternion).
            Refer to results from tf.TransformListener.lookupTransform()
    Returns:
        points(np.ndarray): [4,2] array of four corners, formatted (x,y)
            z value is implicitly zero w.r.t. the source tf frame.
    """

    txn, qxn = tf_x

    # K = 3x3 camera projection
    w, h = cm.width, cm.height
    M_x = pm.toMatrix(pm.fromTf(tf_x))

    cray = [cm.projectPixelTo3dRay(e)
            for e in src]  # [4,3] formatted (x,y,z) ??
    # convert ray to map coord
    mray = np.dot(cray, M_x[:3, :3].T)  # right-multiply rotation matrix

    # extend ray to groundplane
    l = -txn[2] / mray[:, 2]
    # need to flip z since mray is pointing downwards
    # i.e. same as mray[:,2].dot([0,0,-1]) which is the correct distance
    gray = l[:, np.newaxis] * mray
    return gray[:, :2] + np.reshape(txn[:2], [-1, 2])
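
The ray-extension step in ground_area solves for the scale l at which each camera ray reaches z == 0 in the source frame. A minimal numeric check of just that step, with a synthetic camera 2 m above the ground (no image_geometry dependency):

import numpy as np

txn = (1.0, 2.0, 2.0)                 # camera translation: 2 m above ground
mray = np.array([[0.1, 0.0, -1.0]])   # one ray in map coords, pointing down

l = -txn[2] / mray[:, 2]              # scale so that txn_z + l * ray_z == 0
gray = l[:, np.newaxis] * mray
points = gray[:, :2] + np.reshape(txn[:2], [-1, 2])
print(points)  # [[1.2, 2.0]]: ground hit 0.2 m ahead of the camera in x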
 def getCameraPose(self):
     try:
         self.listener.waitForTransform('torso_base', "camera", rospy.Time.now(), rospy.Duration(4.0))
         T_B_C_tf = self.listener.lookupTransform('torso_base', "camera", rospy.Time(0))
     except:
         return None
     return pm.fromTf(T_B_C_tf)
Example 28
    def calib(self):

        self._left_gripper_pose = kdl.Frame()
        self._right_gripper_pose = kdl.Frame()
        self._camera_mount_pose = kdl.Frame()

        while not rospy.is_shutdown():
            try:
                (trans, rot) = self._tf_listener.lookupTransform(
                    '/base', 'left_gripper', rospy.Time(0))
                self._left_gripper_pose = posemath.fromTf((trans, rot))

                (trans, rot) = self._tf_listener.lookupTransform(
                    '/base', 'right_gripper', rospy.Time(0))
                self._right_gripper_pose = posemath.fromTf((trans, rot))
                break
            except:
                rospy.logerr(
                    rospy.get_name() +
                    ': could not get left/right gripper poses for calibration of the camera mount'
                )
                rospy.sleep(1.0)

        vl = copy.deepcopy(self._left_gripper_pose.p)
        vl[2] = 0.0

        vr = copy.deepcopy(self._right_gripper_pose.p)
        vr[2] = 0.0

        v = vr - vl

        # center point between the 2 tripods of the camera mount
        center = vl + 0.5 * v

        # discard the information about height (which is to be measured separately)
        self._camera_mount_pose.p[0] = center[0]
        self._camera_mount_pose.p[1] = center[1]
        self._camera_mount_pose.p[2] = -0.93

        # calculate the angle of rotation along the Z axis
        rotz_angle = np.arctan(v[0] / -v[1])
        rotz_angle = rotz_angle - np.pi * 0.5
        R = kdl.Rotation.RPY(0, 0, rotz_angle)

        self._camera_mount_pose.M = R

        return True
Example 29
 def transform(self):
   for f in list(self.tool_features.values()) + list(self.world_features.values()):
     try:
       frame = self.listener.lookupTransform(self.base_frame_id,
                                             f.frame_id, rospy.Time(0))
     except:
       continue
     f.transform(pm.fromTf(frame))
 def get_world_pose(self):
     self.__call__()
     self.listener.waitForTransform("/world", self.object_name,
                                    rospy.Time(0), rospy.Duration(10))
     return pm.toMsg(
         pm.fromTf(
             self.listener.lookupTransform("/world", self.object_name,
                                           rospy.Time(0))))
 def _get_transform(self, from_, to_):
      try:
          #rospy.loginfo("Waiting for the transform %s -> %s" % (from_, to_))
          self.tf_listener.waitForTransform(from_, to_, rospy.Time(0), rospy.Duration(5))
          return posemath.fromTf( self.tf_listener.lookupTransform(from_, to_, rospy.Time(0)) )
      except (tf.Exception):
          rospy.logdebug("Transform lookup from %s to %s failed." % (from_, to_))
      
      return None
Example 32
 def test_fromTf(self):
     transformer = Transformer(True, rospy.Duration(10.0))
     m = TransformStamped()
     m.header.frame_id = 'wim'
     m.child_frame_id = 'james'
     m.transform.translation.x = 2.71828183
     m.transform.rotation.w = 1.0
     transformer.setTransform(m)
     b = pm.fromTf(transformer.lookupTransform('wim', 'james', rospy.Time(0)))
def axis_marker(tw, id = 0, ns = 'twist'):
  """ make a marker message showing the instantaneous
      rotation axis of a twist message"""

  t = kdl.Twist(kdl.Vector(tw.linear.x,  tw.linear.y,  tw.linear.z),
                kdl.Vector(tw.angular.x, tw.angular.y, tw.angular.z))

  try:
    (x,     rot)  = listener.lookupTransform(target_frame, ref_frame, rospy.Time(0))
    (trans, rotp) = listener.lookupTransform(target_frame, ref_point, rospy.Time(0))
  except (tf.LookupException, tf.ConnectivityException):
    print('tf exception!')
    return marker.create(id=id, ns=ns, action=Marker.DELETE)

  # put the twist in the right frame
  f = posemath.fromTf( (trans, rot) )
  t = f*t

  direction = t.rot
  location = t.rot * t.vel / kdl.dot(t.rot, t.rot)

  lr = t.rot.Norm()
  lt = t.vel.Norm()

  m = marker.create(id=id, ns=ns, type=Marker.CYLINDER)

  if lr < 0.0001 and lt < 0.0001:
    return marker.create(id=id, ns=ns, action=Marker.DELETE)

  if lr < 0.001:
    # very short axis, assume linear movement
    location = kdl.Vector(0,0,0)
    direction = t.vel
    if lt < min_length:
      direction *= min_length / lt
    m.type = Marker.CUBE
    m = marker.align(m, location, location + direction, 0.02)
  elif lr < min_length:
    direction = direction / lr * min_length
    m = marker.align(m, location - direction, location + direction, 0.02)
  elif lr > max_length:
    direction = direction / lr * max_length
    m = marker.align(m, location - direction, location + direction, 0.02) 
  else:
    # axis length already within bounds; align symmetrically about the location
    m = marker.align(m, location - direction, location + direction, 0.02)

  m.header.frame_id = target_frame
  m.frame_locked = True

  if(use_colors):
    m.color = colors[id % len(colors)]
  else:
    m.color = ColorRGBA(0.3, 0.3, 0.3, 1)

  return m
    def test_custom_reference_relative_move(self):
        """ Test if relative moves work with custom reference frame as expected

            Test sequence:
                1. Get the test data and publish reference frame via TF.
                2. Create a relative Ptp with and without custom reference.
                3. Convert the goals.
                4. Evaluate the results.
        """
        rospy.loginfo("test_custom_reference_frame_relative")

        self.robot.move(Ptp(goal=[0, 0, 0, 0, 0, 0]))

        # get and transform test data
        ref_frame = self.test_data.get_pose("Blend_1_Mid", PLANNING_GROUP_NAME)
        goal_pose_bf = self.test_data.get_pose("Blend_1_Start", PLANNING_GROUP_NAME)
        goal_pose_bf_tf = pm.toTf(pm.fromMsg(goal_pose_bf))
        ref_frame_tf = pm.toTf(pm.fromMsg(ref_frame))

        rospy.sleep(rospy.Duration.from_sec(0.5))

        # init
        base_frame_name = self.robot._robot_commander.get_planning_frame()
        time_tf = rospy.Time.now()
        goal_pose_in_rf = [[0, 0, 0], [0, 0, 0, 1]]

        try:
            self.tf.sendTransform(goal_pose_bf_tf[0], goal_pose_bf_tf[1], time_tf,
                                  "rel_goal_pose_bf", base_frame_name)
            self.tf.sendTransform(ref_frame_tf[0], ref_frame_tf[1], time_tf,
                                  "ref_rel_frame", base_frame_name)

            self.tf_listener.waitForTransform("rel_goal_pose_bf", "ref_rel_frame", time_tf, rospy.Duration(2, 0))

            rospy.sleep(rospy.Duration.from_sec(0.1))

            goal_pose_in_rf = self.tf_listener.lookupTransform("ref_rel_frame", "rel_goal_pose_bf", time_tf)
        except Exception:
            rospy.logerr("Failed to setup transforms for test!")

        goal_pose_rf_msg = pm.toMsg(pm.fromTf(goal_pose_in_rf))

        # move to initial position and use relative move to reach goal
        self.robot.move(Ptp(goal=ref_frame))

        self.tf.sendTransform(ref_frame_tf[0], ref_frame_tf[1], rospy.Time.now(), "ref_rel_frame", base_frame_name)
        rospy.sleep(rospy.Duration.from_sec(0.1))
        self.tf_listener.waitForTransform(base_frame_name, "ref_rel_frame", rospy.Time(0), rospy.Duration(1, 0))

        ptp = Ptp(goal=goal_pose_rf_msg, reference_frame="ref_rel_frame", relative=True)
        req = ptp._cmd_to_request(self.robot)

        self.assertIsNotNone(req)
        self._analyze_request_pose(TARGET_LINK_NAME, goal_pose_bf, req)
Example 39
    def test_roundtrip(self):
        c = Frame()

        d = pm.fromMsg(pm.toMsg(c))
        self.assertEqual(repr(c), repr(d))

        d = pm.fromMatrix(pm.toMatrix(c))
        self.assertEqual(repr(c), repr(d))

        d = pm.fromTf(pm.toTf(c))
        self.assertEqual(repr(c), repr(d))
Example 41
  def _transform(self):
    ''' uses tf to compute the transform for the twists '''
    try:
      (x,     rot)  = listener.lookupTransform(self.target_frame, self.ref_frame, rospy.Time(0))
      (trans, rotp) = listener.lookupTransform(self.target_frame, self.ref_point, rospy.Time(0))
    except (tf.LookupException, tf.ConnectivityException) as e:
      print('tf exception! ' + str(e))
      return kdl.Frame()
 
    # put the twist in the right frame
    return posemath.fromTf( (trans, rot) )
Example 42
 def pose_callback(self, pose):
     if self.T_cam2base is None:
         try:
             tfpose = self.tf_listener.lookupTransform('/camera', '/base_link', rospy.Time(0))
         except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
             rospy.loginfo("failed to find tf between base_link and camera")
             return
         self.T_cam2base = pm.toMatrix(pm.fromTf(tfpose))
         self.t_cam2base = self.T_cam2base[0:3, 3]
         self.R_cam2base = self.T_cam2base[0:3, 0:3].dot(ez.eulerZYX(0.05, -0.13, 0))
     self.compute_pose_label(self.label)
 def get_odom(self):
     """ Get Odometry Information via TF, returns None if fails """
     try:
         pose_tf = self.tfl_.lookupTransform('base_link', 'odom',
                                             rospy.Time(0))
     except tf.Exception as e:
         rospy.loginfo_throttle(1.0, 'Failed TF Transform : {}'.format(e))
         return None
     msg = pm.toMsg(pm.fromTf(pose_tf))
     pose = self.to_pose2d(msg)
     return pose
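
The yaw extraction in get_odom (and in get_heading further below) relies on the identity yaw = 2*atan2(q_z, q_w), which holds for rotations about Z only. A quick check against PyKDL:

import numpy as np
import PyKDL as kdl

yaw_in = 0.7
q = kdl.Rotation.RPY(0.0, 0.0, yaw_in).GetQuaternion()  # (x, y, z, w)
yaw_out = 2.0 * np.arctan2(q[2], q[3])
print(np.isclose(yaw_in, yaw_out))  # True for pure-Z rotations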
Example 44
    def compute_net_transform(self, base_pose):

        base_to_odom = self.tfl.lookupTransform("/base_footprint", "/odom_combined", rospy.Time())

        bto = pm.fromTf(base_to_odom)
        

        net_t = base_pose * bto

        print "Setting "
        print pm.toMsg(net_t)
        self.broad.transform = pm.toTf(net_t)
Example 46
 def align_pose(self, pose):
     objectInCamera = pm.toMatrix(pm.fromMsg(pose))
     ModelRecManager.tf_listener.waitForTransform("/world", "/camera_rgb_optical_frame", rospy.Time(0), rospy.Duration(5))
     cameraInWorld = pm.toMatrix(pm.fromTf(ModelRecManager.tf_listener.lookupTransform("/world", "/camera_rgb_optical_frame",rospy.Time(0))))
     objectInWorld = dot(cameraInWorld, objectInCamera)
     """45 degrees rotated around z axis"""
     objectInWorld[0:3,0:3] = mat([[0.7071,-0.7071,0],
                                   [0.7071,0.7071,0],
                                   [0,0,1]]);
     worldInCamera = linalg.inv(cameraInWorld)
     objectInCameraModified = dot(worldInCamera, objectInWorld)
     return pm.toMsg(pm.fromMatrix(objectInCameraModified))
Example 47
    def calibrate(self, req):

        print "%s <- %s, %s <- %s" % (self.camera_link, self.marker,
                                      self.base_link, self.ee_marker)

        T_cm = pm.fromTf(self.lookup(self.camera_link, self.marker))
        T_be = pm.fromTf(self.lookup(self.base_link, self.ee_marker))

        print(T_cm)
        print(T_be)

        T = T_cm * T_be.Inverse()
        (self.trans, self.rot) = pm.toTf(T)

        print(self.trans, self.rot)

        print(self.save_service(type="handeye_calibration",
                                id=self.id,
                                text=yaml.dump(pm.toMsg(T))))

        return EmptyResponse()
    def updateTransformations(self):

        pose = self.listener.lookupTransform('/'+self.prefix+'_HandPalmLink', '/'+self.prefix+'_HandFingerThreeKnuckleThreeLink', rospy.Time(0))
        self.T_E_F = pm.fromTf(pose)
        self.T_F_E = self.T_E_F.Inverse()

        pose = self.listener.lookupTransform('/'+self.prefix+'_HandPalmLink', '/'+self.prefix+'_HandFingerOneKnuckleThreeLink', rospy.Time(0))
        self.T_E_F13 = pm.fromTf(pose)
        self.T_F13_E = self.T_E_F13.Inverse()

        pose = self.listener.lookupTransform('/'+self.prefix+'_HandPalmLink', '/'+self.prefix+'_HandFingerTwoKnuckleThreeLink', rospy.Time(0))
        self.T_E_F23 = pm.fromTf(pose)
        self.T_F23_E = self.T_E_F23.Inverse()

        pose = self.listener.lookupTransform('/'+self.prefix+'_HandPalmLink', '/'+self.prefix+'_HandFingerThreeKnuckleThreeLink', rospy.Time(0))
        self.T_E_F33 = pm.fromTf(pose)
        self.T_F33_E = self.T_E_F33.Inverse()

        pose = self.listener.lookupTransform(self.prefix+'_HandPalmLink', self.prefix+'_HandFingerOneKnuckleOneLink', rospy.Time(0))
        self.T_E_F11 = pm.fromTf(pose)

        pose = self.listener.lookupTransform(self.prefix+'_HandPalmLink', self.prefix+'_HandFingerTwoKnuckleOneLink', rospy.Time(0))
        self.T_E_F21 = pm.fromTf(pose)

        self.T_E_F31 = PyKDL.Frame()
Example 49
    def publish_poses(self):

        while not rospy.is_shutdown():

            try:
                self._F_left_gripper_base_left_gripper = posemath.fromTf(
                    self._tf_listener.lookupTransform('left_gripper_base',
                                                      'left_gripper',
                                                      rospy.Time(0)))

                self._F_right_gripper_base_right_gripper = posemath.fromTf(
                    self._tf_listener.lookupTransform('right_gripper_base',
                                                      'right_gripper',
                                                      rospy.Time(0)))

            except (tf.LookupException, tf.ConnectivityException,
                    tf.ExtrapolationException):
                rospy.logwarn(
                    '[bin_pose_publisher]: haven\'t received gripper frames')
                rospy.sleep(1.0)
                continue

            for pose in self._bin_poses_tf:
                F = posemath.fromTf(
                    (tuple(pose['translation']), tuple(pose['rotation'])))
                # left arm
                if pose['bin_pose_id'].rfind('left_arm') > 0:
                    translation, rotation = posemath.toTf(
                        F * self._F_left_gripper_base_left_gripper)
                # right arm
                else:
                    translation, rotation = posemath.toTf(
                        F * self._F_right_gripper_base_right_gripper)

                self._tf_broadcaster.sendTransform(translation=translation,
                                                   rotation=rotation,
                                                   time=rospy.Time.now(),
                                                   child=pose['bin_pose_id'],
                                                   parent=pose['bin'])
            self._rate.sleep()
def pregrasp_object(global_data,
                    object_name,
                    adjust_height,
                    grasp_tran,
                    object_height_col=2,
                    pregrasp_dist=-.05,
                    object_height_adjustment=0.0,
                    run_it=True):
    """@brief - Move the hand to the pregrasp position. This procedure is suprisingly intricate.

    Given the object filename and the grasp transform, this transforms the grasp to world coordinates
    then runs a planner to avoid collisions in moving the arm the long distance to the
    object. This uses the scene graph, which is currently provided by a camera.

    Returns whether the pregrasp location was reachable, the filename in which the trajectory from OpenRave
    is stored, and the list of the dof values that are contained in the trajectory.
    """
    """ Sets up object location in openrave -- this code is all to compensate for the current object localization
    method only knowing the location of the base of the object - the height of the object's origin from the base
    must be used to adjust it's location in openrave before planning. 
    """

    print "Pregrasping object: %s" % (object_name)

    #Get the object transform in the world.
    obj_tran = pm.toMatrix(
        pm.fromTf(
            global_data.listener.lookupTransform("/world", object_name,
                                                 rospy.Time(0))))

    if (obj_tran == []):
        warn("Scene graph in TF is broken! Most\
           likely, the object transform could not be found.")
        return [], [], [], []
    """Set up and visualize the grasp transforms"""

    pregrasp_tran = get_pregrasp_tran_from_tran(grasp_tran, pregrasp_dist)
    publish_grasp_tran(pregrasp_tran)
    final_tran = dot(obj_tran, pregrasp_tran)
    publish_grasp_tran(final_tran)

    success = False
    """Run the planner"""
    plan = []
    j = []
    try:
        success, trajectory_filename, dof_list, j = run_cbirrt_with_tran(
            global_data.or_env, final_tran)

    except Exception as e:
        warn("Failed running planner with\
                       error %s" % e)
Example 51
 def get_heading(self, source='odom', target='base_link'):
     # pts = [N,2]
     try:
         #T_tf = self.tfl_.lookupTransform(target, source, rospy.Time(0))
         T_tf = self.tfl_.lookupTransform(source, target, rospy.Time(0))
     except tf.Exception as e:
         rospy.logerr('Scan tf failed : {}'.format(e))
         return None
     T_msg = pm.toMsg(pm.fromTf(T_tf))
     x, y = T_msg.position.x, T_msg.position.y
     q = 2.0 * np.arctan2(T_msg.orientation.z, T_msg.orientation.w)
     #pts_odom = np.dot(pts, U.R(q).T) + np.reshape([x,y], [-1,2])
     return q
Example 52
 def getMarkerPose(self, marker_id, wait = True, timeBack = None):
     try:
         marker_name = 'ar_marker_'+str(int(marker_id))
         if wait:
             self.listener.waitForTransform('torso_base', marker_name, rospy.Time.now(), rospy.Duration(4.0))
         if timeBack is not None:
             time = rospy.Time.now() - rospy.Duration(timeBack)
         else:
             time = rospy.Time(0)
         jar_marker = self.listener.lookupTransform('torso_base', marker_name, time)
     except:
         return None
     return pm.fromTf(jar_marker)
Example 54
    def get_gripper_frames(self):

        while not rospy.is_shutdown() and not self._got_frames:

            try:
                self._F_left_gripper_base_left_gripper = posemath.fromTf(
                    self._tf_listener.lookupTransform('left_gripper_base',
                                                      'left_gripper',
                                                      rospy.Time(0)))

                self._F_right_gripper_base_right_gripper = posemath.fromTf(
                    self._tf_listener.lookupTransform('right_gripper_base',
                                                      'right_gripper',
                                                      rospy.Time(0)))

                self._got_frames = True

            except (tf.LookupException, tf.ConnectivityException,
                    tf.ExtrapolationException):
                rospy.logwarn(
                    '[bin_pose_publisher]: haven\'t received gripper frames')
                rospy.sleep(1.0)
                continue
Example 55
 def smartmove_place_cb(self, req):
     stamp = self.acquire()
     distance = req.backoff
     T_base_world = pm.fromTf(self.listener.lookupTransform(self.world,self.base_link,rospy.Time(0)))
     pose = req.pose
     T = T_base_world.Inverse()*pm.fromMsg(pose)
     # Create list of waypoints for a place action
     list_of_waypoints = [(0.,T,req.name,str(req.name)+"_goal")]
     return self.smartmove_multipurpose_gripper(stamp,
             list_of_waypoints, # list of waypoints
             distance, # backup distance
             self.detach, # detach object
             req.vel, req.accel,
             False, # is backup in table frame
             )
Example 56
def goto(kdl_kin, pub, listener, trans, rot): 

  try:
    T = pm.fromTf((trans, rot))

    q0 = [-1.0719114121799995, -1.1008140645600006, 1.7366724169200003,
            -0.8972388608399999, 1.25538042294, -0.028902652380000227,]

    # DEFAULT
    objt, objr = ((0.470635159016, 0.0047549889423, -0.428045094013),(0,0,0,1))
    T_orig = pm.fromTf((objt,objr))
    # MOVEd
    objt, objr = ((0.52, 0.00, -0.43),(0,0,0,1))
    T_new = pm.fromTf((objt,objr))

    T_pose = pm.toMatrix(T)
    Q = kdl_kin.inverse(T_pose, q0)

    print "----------------------------"
    print "[ORIG] Closest joints =", Q

    msg = JointState(name=CONFIG['joints'], position=Q)
    pub.publish(msg)
    rospy.sleep(0.2)

    T_goal = T_orig.Inverse() * T
    T2 = T_new * T_goal
    T2_pose = pm.toMatrix(T2)
    Q = kdl_kin.inverse(T2_pose, q0)
    print "[NEW] Closest joints =", Q
    msg = JointState(name=CONFIG['joints'], position=Q)
    pub.publish(msg)
    rospy.sleep(0.2)

  except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException) as e:
    pass
Example 57
def testIK():
    rate = rospy.Rate(10)
    while not rospy.is_shutdown():
        tf_listener.waitForTransform("base", "link_camera", rospy.Time(), rospy.Duration(10.0))
        targetFrame_tf = tf_listener.lookupTransform('base', 'link_camera', rospy.Time(0))
        
        targetFrame = posemath.fromTf(targetFrame_tf)

        print("Input quaternion: {}".format(targetFrame.M.GetQuaternion()))
        print("Input vector: {}".format(targetFrame.p))
        
        jointAngles = solveIK(targetFrame)
        print("sent!")
        rate.sleep()
 def align_pose(self, pose):
     objectInCamera = pm.toMatrix(pm.fromMsg(pose))
     ModelRecManager.tf_listener.waitForTransform(
         "/world", "/camera_rgb_optical_frame", rospy.Time(0),
         rospy.Duration(5))
     cameraInWorld = pm.toMatrix(
         pm.fromTf(
             ModelRecManager.tf_listener.lookupTransform(
                 "/world", "/camera_rgb_optical_frame", rospy.Time(0))))
     objectInWorld = dot(cameraInWorld, objectInCamera)
     """45 degrees rotated around z axis"""
     objectInWorld[0:3, 0:3] = mat([[0.7071, -0.7071, 0],
                                    [0.7071, 0.7071, 0], [0, 0, 1]])
     worldInCamera = linalg.inv(cameraInWorld)
     objectInCameraModified = dot(worldInCamera, objectInWorld)
     return pm.toMsg(pm.fromMatrix(objectInCameraModified))
    def get_transform(self):
        '''
        Gets the transform between the robot base frame and the gripper base frame.
        @return: kdl.Frame pose of the gripper base frame in the robot base frame
        '''

        while not rospy.is_shutdown():
            try:
                (trans, rot) = self._tf_listener.lookupTransform(self._robot_base_frame, self._gripper_base_frame, rospy.Time(0))
                break
            except:
                rospy.logerr(rospy.get_name() + ': could not get transform between ' +
                             self._robot_base_frame + ' and  ' + self._gripper_base_frame)
                rospy.sleep(1.0)

        return posemath.fromTf((trans, rot))
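
Nearly every example above ends with the same lookupTransform -> posemath.fromTf conversion. A condensed sketch of the blocking variant used by _get_transform above (the helper name get_frame, frame arguments, and timeout are illustrative):

import rospy
import tf
from tf_conversions import posemath

def get_frame(listener, target, source, timeout=5.0):
    """Wait for the transform, then return it as a kdl.Frame."""
    listener.waitForTransform(target, source, rospy.Time(0),
                              rospy.Duration(timeout))
    return posemath.fromTf(
        listener.lookupTransform(target, source, rospy.Time(0)))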