def go_x(self, x, speed):
        """Translate the base |x| meters along its x axis at the given speed.

        Open-loop velocity control: publishes Twist commands at 100 Hz and
        stops once the odometry-measured displacement exceeds |x|.  The sign
        of x selects forward/backward motion.
        """
        print "go x called!"
        # Drive forward for positive x, backward for negative x.
        vel = speed * np.sign(x)
        self.tl.waitForTransform("base_footprint", "odom_combined", rospy.Time(), rospy.Duration(10))
        # Base pose in the odom frame at the start of the motion.
        p0_base = tfu.transform("base_footprint", "odom_combined", self.tl)
        r = rospy.Rate(100)

        while not rospy.is_shutdown():
            pcurrent_base = tfu.transform("base_footprint", "odom_combined", self.tl)
            # Displacement of the current base pose relative to the start pose.
            relative_trans = np.linalg.inv(p0_base) * pcurrent_base
            dist_moved = np.linalg.norm(relative_trans[0:3, 3])
            print "%s" % str(dist_moved)

            if dist_moved > np.abs(x):
                rospy.loginfo("stopped! error %f" % (np.abs(dist_moved - np.abs(x))))
                break

            tw = gm.Twist()
            tw.linear.x = vel
            tw.linear.y = 0
            tw.linear.z = 0
            tw.angular.x = 0
            tw.angular.y = 0
            tw.angular.z = 0

            self.tw_pub.publish(tw)
            r.sleep()
Exemple #2
0
    def go_x(self, x, speed):
        """Translate the base |x| meters along its x axis at the given speed.

        Publishes Twist commands at 100 Hz and stops once odometry reports
        the base has moved farther than |x| from its starting pose.  The
        sign of x selects forward/backward motion.
        """
        print 'go x called!'
        # Forward for positive x, backward for negative x.
        vel = speed * np.sign(x)
        self.tl.waitForTransform('base_footprint', 'odom_combined',
                                 rospy.Time(), rospy.Duration(10))
        # Base pose in the odom frame at the start of the motion.
        p0_base = tfu.transform('base_footprint', 'odom_combined', self.tl)
        r = rospy.Rate(100)

        while not rospy.is_shutdown():
            pcurrent_base = tfu.transform('base_footprint', 'odom_combined',
                                          self.tl)
            # Displacement of the current pose relative to the start pose.
            relative_trans = np.linalg.inv(p0_base) * pcurrent_base
            dist_moved = np.linalg.norm(relative_trans[0:3, 3])
            print "%s" % str(dist_moved)

            if dist_moved > np.abs(x):
                rospy.loginfo('stopped! error %f' %
                              (np.abs(dist_moved - np.abs(x))))
                break

            tw = gm.Twist()
            tw.linear.x = vel
            tw.linear.y = 0
            tw.linear.z = 0
            tw.angular.x = 0
            tw.angular.y = 0
            tw.angular.z = 0

            self.tw_pub.publish(tw)
            r.sleep()
    def approach_location(self, point_bl, coarse_stop, fine_stop, voi_radius=.2):
        """Drive the base toward a base-link-frame point in two stages.

        First a coarse drive (stop at coarse_stop meters), then a fine
        approach perpendicular to the surface around the point (stop at
        fine_stop meters).  Points already within coarse_stop + .1 m are
        ignored.

        point_bl: target point as a column matrix in the base_link frame.
        voi_radius: size of the volume of interest used by the fine stage.
        Returns (success_bool, status_string) where status is one of
        'done', 'failed', 'ignored'.
        """
        point_dist = np.linalg.norm(point_bl[0:2,0])
        rospy.loginfo('approach_location: Point is %.3f away.' % point_dist)
        # Cache the target in the fixed map frame so it can be re-expressed
        # in base_link after each drive step.
        map_T_base_link = tfu.transform('map', 'base_link', self.tf_listener)
        point_map = tfu.transform_points(map_T_base_link, point_bl)

        # NOTE(review): "theshold" is a typo for "threshold" (local name only).
        dist_theshold = coarse_stop + .1
        if point_dist > dist_theshold:
            rospy.loginfo('approach_location: Point is greater than %.1f m away (%.3f).  Driving closer.' % (dist_theshold, point_dist))
            rospy.loginfo('approach_location: point_bl ' + str(point_bl.T))

            ret = self.drive_approach_behavior(point_bl, dist_far=coarse_stop)
            base_link_T_map = tfu.transform('base_link', 'map', self.tf_listener)
            point_bl_t1 = tfu.transform_points(base_link_T_map, point_map)
            # ret == 3 is the nav success status; otherwise verify we at
            # least ended up inside the coarse threshold.
            if ret != 3:
                dist_end = np.linalg.norm(point_bl_t1[0:2,0])
                if dist_end > dist_theshold:
                    rospy.logerr('approach_location: drive_approach_behavior failed! %.3f' % dist_end)
                    self.robot.sound.say("I am unable to navigate to that location")
                    return False, 'failed'

            ret = self.approach_perpendicular_to_surface(point_bl_t1, voi_radius=voi_radius, dist_approach=fine_stop)
            if ret != 3:
                rospy.logerr('approach_location: approach_perpendicular_to_surface failed!')
                return False, 'failed'

            self.robot.sound.say('done')
            rospy.loginfo('approach_location: DONE DRIVING!')
            return True, 'done'
        else:
            return False, 'ignored'
Exemple #4
0
    def run(self):
        """Continuously point the head at the midpoint between the two
        gripper tool frames, re-targeting at 50 Hz."""
        loop_rate = rospy.Rate(50)
        while not rospy.is_shutdown():
            try:
                ref = '/base_link'
                # Both gripper tool-frame poses expressed in the base frame.
                r_mat = tfu.transform(ref, '/r_gripper_tool_frame',
                                      self.tflistener)
                l_mat = tfu.transform(ref, '/l_gripper_tool_frame',
                                      self.tflistener)
                offset = tr.translation_matrix([0, 0, 0.])
                right_pos = np.matrix(
                    tr.translation_from_matrix(r_mat * offset))
                left_pos = np.matrix(
                    tr.translation_from_matrix(l_mat * offset))
                midpoint = ((right_pos + left_pos) / 2.0).A1.tolist()

                # Ask the head controller to look at the midpoint.
                goal = PointHeadGoal()
                goal.target.header.frame_id = '/base_link'
                goal.target.point = Point(*midpoint)
                goal.min_duration = rospy.Duration(1.0)
                goal.max_velocity = 10.
                self.head_client.send_goal(goal)
                self.head_client.wait_for_result(rospy.Duration(1.))
                loop_rate.sleep()
            except tf.LookupException as e:
                print(e)
Exemple #5
0
 def omni_T_torso(self, torso_mat):
     """Map a pose matrix given in /torso_lift_link into omni coordinates.

     The translation is divided by self.scale_omni_l0 (robot meters to
     omni workspace units) in the omni center frame, then the pose is
     re-expressed in the omni base frame.  Returns a (trans, quat) tf
     tuple.
     """
     center_mat = tfu.transform(self.omni_name + '_center', '/torso_lift_link',
                                self.tflistener) * torso_mat
     # Scale the translation; the rotation passes through unchanged.
     trans = (np.array(tr.translation_from_matrix(center_mat)) /
              np.array(self.scale_omni_l0)).tolist()
     quat = tr.quaternion_from_matrix(center_mat)
     omni_mat = tfu.transform(self.omni_name, self.omni_name + '_center',
                              self.tflistener) * tfu.tf_as_matrix((trans, quat))
     return tfu.matrix_as_tf(omni_mat)
 def move_base_planner(self, trans, rot):
     """Drive to the map-frame pose (trans, rot) with the nav planner, then
     fine-tune position with a direct local base move.

     Returns (reached, err): reached is True when set_pose reported status
     3, err is the final XY distance to trans in meters.
     """
     #pdb.set_trace()
     p_bl = tfu.transform_points(tfu.transform('base_link', 'map', self.tf_listener), np.matrix(trans).T)
     #Do this to clear out any hallucinated obstacles
     self.turn_to_point(p_bl)
     rvalue = self.robot.base.set_pose(trans, rot, '/map', block=True)
     # Re-express the goal in base_link after the planner finishes, then
     # close the remaining gap with a direct move.
     p_bl = tfu.transform_points(tfu.transform('base_link', 'map', self.tf_listener), np.matrix(trans).T)
     #pdb.set_trace()
     self.robot.base.move_to(p_bl[0:2,0], True)
     t_end, r_end = self.robot.base.get_pose()
     return rvalue==3, np.linalg.norm(t_end[0:2] - np.array(trans)[0:2])
Exemple #7
0
    def joint_state_cb(self, jmsg):
        """Joint-state callback: look up the fingertip transforms and record
        the time this message was handled."""
        tdict = {}
        rtip, ltip = self.ftip_frames[0], self.ftip_frames[1]

        # Base-footprint-relative tips: good for display purposes
        # (independent of torso link etc).
        tdict['bf_T_rtip'] = tfu.transform('/base_footprint', rtip, self.tflistener)
        tdict['bf_T_ltip'] = tfu.transform('/base_footprint', ltip, self.tflistener)

        # Torso-relative tips: want FK from torso.
        tdict['torso_T_rtip'] = tfu.transform('/torso_lift_link', rtip, self.tflistener)
        tdict['torso_T_ltip'] = tfu.transform('/torso_lift_link', ltip, self.tflistener)

        #self.jstate_msgs.append(SimpleJointStateMsg(jmsg.header, tdict))
        self.last_jstate_time = time.time()
    def go_xy(self, target_base, tolerance=.01, max_speed=.1, func=None):
        """Servo the base to an XY target given in the base_footprint frame.

        Proportional control (gain k) on the odom-frame position error at
        100 Hz, speed capped at max_speed.  Stops when the error drops
        below tolerance, on ROS shutdown, or when the go_xy action server
        is preempted.  func, if given, is called each cycle with the
        current odom-frame error vector.  Returns the final error vector.
        """
        k = 2.
        self.tl.waitForTransform('base_footprint', 'odom_combined', rospy.Time(), rospy.Duration(10))
        rate = rospy.Rate(100)

        # Convert the base-frame target into the fixed odom frame once.
        od_T_bf = tfu.transform('odom_combined', 'base_footprint', self.tl)
        target_odom = (od_T_bf * np.row_stack([target_base, np.matrix([0,1.]).T]))[0:2,0]
        #print 'target base', target_base.T
        #print 'target odom', target_odom.T

        robot_odom = np.matrix(tfu.transform('odom_combined', 'base_footprint', self.tl)[0:2,3])
        diff_odom  = target_odom - robot_odom
        mag        = np.linalg.norm(diff_odom)
        rospy.loginfo('go_xy: error %s' % str(mag))
        while not rospy.is_shutdown():
            robot_odom = np.matrix(tfu.transform('odom_combined', 'base_footprint', self.tl)[0:2,3])
            #rospy.loginfo('go_xy: odom %s' % str(robot_odom.T))

            diff_odom  = target_odom - robot_odom
            mag        = np.linalg.norm(diff_odom)
            if func != None:
                func(diff_odom)
            if mag < tolerance:
                break

            if self.go_xy_server.is_preempt_requested():
                self.go_xy_server.set_preempted()
                break

            # Proportional command along the error direction, speed capped.
            direc    = diff_odom / mag
            speed = min(mag * k, max_speed)
            vel_odom = direc * speed
            #vel_odom = direc * mag * k
            #print mag*k, max_speed, speed

            # Twist velocities are expressed in the base frame.
            bf_T_odom = tfu.transform('base_footprint', 'odom_combined', self.tl)
            vel_base = bf_T_odom * np.row_stack([vel_odom, np.matrix([0,0.]).T])
            #pdb.set_trace()
            #rospy.loginfo('vel_base %f %f' % (vel_base[0,0], vel_base[1,0]))

            tw = gm.Twist()
            tw.linear.x = vel_base[0,0]
            tw.linear.y = vel_base[1,0]
            #rospy.loginfo('commanded %s' % str(tw))
            self.tw_pub.publish(tw)
            rate.sleep()

        rospy.loginfo('go_xy: finished go_xy %f' % np.linalg.norm(diff_odom))
        return diff_odom
Exemple #9
0
    def approach_perpendicular_to_surface(self, point_bl, voi_radius,
                                          dist_approach):
        """Position the base dist_approach meters in front of a surface
        point, facing it.

        Turns toward the point, laser-scans the scene, estimates the
        surface normal from points within voi_radius of the target, drives
        to a standoff pose along that normal, then turns to face the point
        again.  Returns the nav status code (3 on success).

        point_bl: target point (column matrix) in the base_link frame.
        """
        # Cache the target in the fixed map frame; base_link moves as we turn.
        map_T_base_link0 = tfu.transform('map', 'base_link', self.tf_listener)
        point_map0 = tfu.transform_points(map_T_base_link0, point_bl)
        #pdb.set_trace()
        self.turn_to_point(point_bl, block=False)

        # Re-express the target in base_link after commanding the turn.
        point_bl = tfu.transform_points(tfu.transform('base_link', 'map', self.tf_listener), \
                                        point_map0)
        point_cloud_bl = self.laser_scan.scan(math.radians(180.),
                                              math.radians(-180.), 2.5)
        point_cloud_np_bl = ru.pointcloud_to_np(point_cloud_bl)
        rospy.loginfo('approach_perpendicular_to_surface: pointcloud size %d' \
                % point_cloud_np_bl.shape[1])
        voi_points_bl, limits_bl = i3d.select_rect(point_bl, voi_radius,
                                                   voi_radius, voi_radius,
                                                   point_cloud_np_bl)
        #TODO: use closest plane instead of closest points determined with KDTree
        normal_bl = i3d.calc_normal(voi_points_bl)
        # Standoff pose: dist_approach along the surface normal from target.
        point_in_front_mechanism_bl = point_bl + normal_bl * dist_approach
        map_T_base_link = tfu.transform('map', 'base_link', self.tf_listener)
        point_in_front_mechanism_map = tfu.transform_points(
            map_T_base_link, point_in_front_mechanism_bl)

        #Navigate to point (TODO: check for collisions)
        point_map = tfu.transform_points(map_T_base_link, point_bl)
        t_current_map, r_current_map = self.robot.base.get_pose()
        rospy.loginfo('approach_perpendicular_to_surface: driving for %.3f m to front of surface' \
                % np.linalg.norm(t_current_map[0:2] - point_in_front_mechanism_map[0:2,0].T))
        #pdb.set_trace()
        rvalue = self.robot.base.set_pose(
            point_in_front_mechanism_map.T.A1.tolist(), r_current_map, 'map')
        if rvalue != 3:
            return rvalue

        t1_current_map, r1_current_map = self.robot.base.get_pose()
        rospy.loginfo(
            'approach_perpendicular_to_surface: %.3f m away from from of surface'
            % np.linalg.norm(t1_current_map[0:2] -
                             point_in_front_mechanism_map[0:2, 0].T))

        #Rotate to face point (TODO: check for collisions)
        base_link_T_map = tfu.transform('base_link', 'map', self.tf_listener)
        point_bl = tfu.transform_points(base_link_T_map, point_map)
        #pdb.set_trace()
        self.turn_to_point(point_bl, block=False)
        time.sleep(2.)

        return rvalue
Exemple #10
0
    def go_xy(self, target_base, tolerance=.01, max_speed=.1, func=None):
        """Servo the base to an XY target given in the base_footprint frame.

        Proportional control (gain k) on the odom-frame position error at
        100 Hz, speed capped at max_speed.  Stops when the error falls
        below tolerance or on ROS shutdown.  func, if given, is called
        each cycle with the current odom-frame error vector.  Returns the
        final error vector.
        """
        k = 2.
        self.tl.waitForTransform('base_footprint', 'odom_combined',
                                 rospy.Time(), rospy.Duration(10))
        rate = rospy.Rate(100)

        # Convert the base-frame target into the fixed odom frame once.
        od_T_bf = tfu.transform('odom_combined', 'base_footprint', self.tl)
        target_odom = (
            od_T_bf *
            np.row_stack([target_base, np.matrix([0, 1.]).T]))[0:2, 0]
        #print 'target base', target_base.T
        #print 'target odom', target_odom.T

        while not rospy.is_shutdown():
            robot_odom = np.matrix(
                tfu.transform('odom_combined', 'base_footprint', self.tl)[0:2,
                                                                          3])
            #rospy.loginfo('odom %s' % str(robot_odom.T))

            diff_odom = target_odom - robot_odom
            mag = np.linalg.norm(diff_odom)
            #rospy.loginfo('error %s' % str(mag))
            if func != None:
                func(diff_odom)
            if mag < tolerance:
                break

            # Proportional command along the error direction, speed capped.
            direc = diff_odom / mag
            speed = min(mag * k, max_speed)
            vel_odom = direc * speed
            #vel_odom = direc * mag * k
            #print mag*k, max_speed, speed

            # Twist velocities are expressed in the base frame.
            bf_T_odom = tfu.transform('base_footprint', 'odom_combined',
                                      self.tl)
            vel_base = bf_T_odom * np.row_stack(
                [vel_odom, np.matrix([0, 0.]).T])
            #pdb.set_trace()
            #rospy.loginfo('vel_base %f %f' % (vel_base[0,0], vel_base[1,0]))

            tw = gm.Twist()
            tw.linear.x = vel_base[0, 0]
            tw.linear.y = vel_base[1, 0]
            #rospy.loginfo('commanded %s' % str(tw))
            self.tw_pub.publish(tw)
            rate.sleep()
        rospy.loginfo('finished go_xy %f' % np.linalg.norm(diff_odom))
        return diff_odom
Exemple #11
0
    def go_angle(self,
                 target_odom,
                 tolerance=math.radians(5.),
                 max_ang_vel=math.radians(20.),
                 func=None):
        """Rotate the base to the absolute odom-frame heading target_odom
        (radians).

        Proportional control (gain k) on the heading error at 100 Hz,
        angular speed capped at max_ang_vel.  func, if given, is called
        each cycle with the current error.  Returns the final heading
        error in radians.
        """
        self.tl.waitForTransform('base_footprint', 'odom_combined',
                                 rospy.Time(), rospy.Duration(10))
        rate = rospy.Rate(100)
        k = math.radians(80)

        #current_ang_odom = tr.euler_from_matrix(tfu.transform('base_footprint', 'odom_combined', self.tl)[0:3, 0:3], 'sxyz')[2]
        #target_odom = current_ang_odom + delta_ang

        while not rospy.is_shutdown():
            robot_odom = tfu.transform('base_footprint', 'odom_combined',
                                       self.tl)
            current_ang_odom = tr.euler_from_matrix(robot_odom[0:3, 0:3],
                                                    'sxyz')[2]
            # NOTE(review): error here is current - target; the action
            # server variant of go_angle uses target - current.  Verify the
            # sign convention against the base's angular.z direction.
            diff = ut.standard_rad(current_ang_odom - target_odom)
            p = k * diff
            #print diff
            if func != None:
                func(diff)
            if np.abs(diff) < tolerance:
                break

            tw = gm.Twist()
            # Use the smaller (in magnitude) of the proportional command
            # and the velocity cap.
            vels = [p, np.sign(p) * max_ang_vel]
            tw.angular.z = vels[np.argmin(np.abs(vels))]
            #rospy.loginfo('diff %.3f vel %.3f' % (math.degrees(diff), math.degrees(tw.angular.z)))
            self.tw_pub.publish(tw)
            #rospy.loginfo('commanded %s' % str(tw))
            rate.sleep()
        rospy.loginfo('finished %.3f' % math.degrees(diff))
        return diff
Exemple #12
0
    def fk(self,
           joint_poses_mat,
           frame='torso_lift_link',
           sol_link=None,
           use_tool_frame=True):
        """Forward kinematics via the arm's GetPositionFK service.

        joint_poses_mat: column np.matrix of joint angles, ordered per the
            solver's kinematic_solver_info.joint_names.
        frame: frame the solution pose is expressed in.
        sol_link: link to solve FK for; defaults to self.ik_frame.
        use_tool_frame: if True, post-multiply by the static wrist-to-tool
            transform so the result is the pose of self.tool_frame.

        Returns a 4x4 homogeneous transform matrix.
        """
        # Fix: identity check for the sentinel default — `== None` invokes
        # __eq__ and misbehaves for array-like arguments (PEP 8: use `is`).
        if sol_link is None:
            sol_link = self.ik_frame

        fk_req = ks.GetPositionFKRequest()
        fk_req.header.frame_id = frame
        fk_req.fk_link_names = [sol_link]
        fk_req.robot_state.joint_state.name = self.fk_info_resp.kinematic_solver_info.joint_names
        fk_req.robot_state.joint_state.position = joint_poses_mat.T.A1.tolist()
        fk_resp = self._fk(fk_req)

        solframe_T_wr = tfu.pose_as_matrix(fk_resp.pose_stamped[0].pose)
        if not use_tool_frame:
            return solframe_T_wr

        # Append the static solver-link-to-tool transform so the answer is
        # the tool frame's pose rather than the solver link's.
        self.tflistener.waitForTransform(self.tool_frame, sol_link,
                                         rospy.Time(), rospy.Duration(10))
        wr_T_toolframe = tfu.transform(sol_link, self.tool_frame,
                                       self.tflistener)
        return solframe_T_wr * wr_T_toolframe
def script():
    """Demo: query the PR2 right-arm FK service at a fixed joint
    configuration and print the resulting r_gripper_tool_frame pose."""
    rospy.init_node('forward_kin')
    tflistener = tf.TransformListener()
    print 'waiting for transform'
    tflistener.waitForTransform('r_gripper_tool_frame', 'r_wrist_roll_link', rospy.Time(), rospy.Duration(10))
    print 'waiting for services'
    rospy.wait_for_service('pr2_right_arm_kinematics/get_fk_solver_info')#, ks.GetKinematicSolverInfo )
    rospy.wait_for_service('pr2_right_arm_kinematics/get_fk')#,             ks.GetPositionFK)
    print 'done init'

    r_fk_info = rospy.ServiceProxy('pr2_right_arm_kinematics/get_fk_solver_info', ks.GetKinematicSolverInfo )
    r_fk      = rospy.ServiceProxy('pr2_right_arm_kinematics/get_fk',             ks.GetPositionFK)

    resp = r_fk_info()
    print 'get_fk_solver_info returned', resp.kinematic_solver_info.joint_names
    print 'get_fk_solver_info returned', resp.kinematic_solver_info.limits
    print 'get_fk_solver_info returned', resp.kinematic_solver_info.link_names

    # Build an FK request with every joint set to 0.5 rad.
    fk_req = ks.GetPositionFKRequest()
    fk_req.header.frame_id = 'torso_lift_link'
    fk_req.fk_link_names = ['r_wrist_roll_link']
    fk_req.robot_state.joint_state.name = resp.kinematic_solver_info.joint_names
    fk_req.robot_state.joint_state.position = [.5 for i in range(len(resp.kinematic_solver_info.joint_names))]
    fk_resp = r_fk(fk_req)

    # Append the static wrist-to-tool transform to get the tool pose.
    rtip_T_wr = tfu.transform('r_gripper_tool_frame', 'r_wrist_roll_link', tflistener)
    right_wr = tfu.pose_as_matrix(fk_resp.pose_stamped[0].pose)
    rtip_pose = rtip_T_wr * right_wr
    print tfu.matrix_as_tf(rtip_pose)
Exemple #14
0
    def go_angle(self, target_odom, tolerance=math.radians(5.), max_ang_vel=math.radians(20.), func=None):
        """Rotate the base to the absolute odom-frame heading target_odom
        (radians).

        Proportional control (gain k) on the heading error at 100 Hz,
        angular speed capped at max_ang_vel.  func, if given, is called
        each cycle with the current error.  Returns the final heading
        error in radians.
        """
        self.tl.waitForTransform('base_footprint', 'odom_combined', rospy.Time(), rospy.Duration(10))
        rate = rospy.Rate(100)
        k = math.radians(80)

        #current_ang_odom = tr.euler_from_matrix(tfu.transform('base_footprint', 'odom_combined', self.tl)[0:3, 0:3], 'sxyz')[2]
        #target_odom = current_ang_odom + delta_ang

        while not rospy.is_shutdown():
            robot_odom = tfu.transform('base_footprint', 'odom_combined', self.tl)
            current_ang_odom = tr.euler_from_matrix(robot_odom[0:3, 0:3], 'sxyz')[2]
            # NOTE(review): error here is current - target; the action
            # server variant uses target - current.  Verify sign convention.
            diff = ut.standard_rad(current_ang_odom - target_odom)
            p = k * diff
            #print diff
            if func != None:
                func(diff)
            if np.abs(diff) < tolerance:
                break

            tw = gm.Twist()
            # Smaller (in magnitude) of proportional command and cap.
            vels = [p, np.sign(p) * max_ang_vel]
            tw.angular.z = vels[np.argmin(np.abs(vels))]
            #rospy.loginfo('diff %.3f vel %.3f' % (math.degrees(diff), math.degrees(tw.angular.z)))
            self.tw_pub.publish(tw)
            #rospy.loginfo('commanded %s' % str(tw))
            rate.sleep()
        rospy.loginfo('finished %.3f' % math.degrees(diff))
        return diff
Exemple #15
0
def pose_cb(ps):
    """Haptic anchor callback: publish a wrench pulling the omni end
    effector toward a fixed workspace center point."""
    pose_mat, frame = tfu.posestamped_as_matrix(ps)
    tip_o1 = tfu.transform('/omni1', frame, listener) * pose_mat
    tip_pt = np.matrix(tr.translation_from_matrix(tip_o1)).T
    anchor = np.matrix([-.10, 0, .30]).T
    # Proportional restoring force (gain 30) toward the anchor.
    force_o1 = 30 * (anchor - tip_pt)

    # Express the force in the sensable device frame before publishing.
    sens_T_o1 = tfu.transform('/omni1_sensable', '/omni1', listener)
    force_s = sens_T_o1 * np.row_stack((force_o1, np.matrix([1.])))
    print(np.linalg.norm(anchor - tip_pt))

    wr = Wrench()
    wr.force.x = force_s[0]
    wr.force.y = force_s[1]
    wr.force.z = force_s[2]
    wpub.publish(wr)
Exemple #16
0
 def transform_point(self, point_stamped):
     """Express a PointStamped's point in self.target_frame.

     Returns the transformed point as a 3x1 np.matrix.
     """
     pt = point_stamped.point
     target_T_src = tfu.transform(self.target_frame,
                                  point_stamped.header.frame_id,
                                  self.tf_listener)
     # Carry the point through as a pure-translation matrix, then read the
     # translation back out of the product.
     pt_mat = tfu.translation_matrix([pt.x, pt.y, pt.z])
     trans, _ = tfu.matrix_as_tf(target_T_src * pt_mat)
     return np.matrix(trans).T
    def go_angle(self, target_base, tolerance=math.radians(5.), max_ang_vel=math.radians(20.), func=None):
        """Rotate the base by target_base radians relative to its current
        base_footprint heading (action-server variant).

        The relative target is converted to an absolute odom heading once,
        then tracked with proportional control (gain k) at 100 Hz, capped
        at max_ang_vel.  Stops on convergence, ROS shutdown, or action
        preemption.  func, if given, is called each cycle with the current
        error.  Returns the final heading error in radians.
        """
        self.tl.waitForTransform('odom_combined', 'base_footprint', rospy.Time(), rospy.Duration(10))
        rate = rospy.Rate(100)
        k = math.radians(80)

        # Convert the base-relative rotation into an absolute odom heading.
        od_T_bf = tfu.transform('odom_combined', 'base_footprint', self.tl)
        target_odom_mat = od_T_bf * tfu.tf_as_matrix( ([0, 0, 0.], tr.quaternion_from_euler(0, 0, target_base)) )
        target_odom = tr.euler_from_quaternion(tfu.matrix_as_tf(target_odom_mat)[1])[2]
        #target_odom = (od_T_bf * np.row_stack([target_base, np.matrix([0,1.]).T]))[0:2,0]

        #current_ang_odom = tr.euler_from_matrix(tfu.transform('base_footprint', 'odom_combined', self.tl)[0:3, 0:3], 'sxyz')[2]
        #target_odom = current_ang_odom + delta_ang

        robot_odom = tfu.transform('odom_combined', 'base_footprint', self.tl)
        current_ang_odom = tr.euler_from_matrix(robot_odom[0:3, 0:3], 'sxyz')[2]
        diff = ut.standard_rad(current_ang_odom - target_odom)
        rospy.loginfo('go_angle: target %.2f' %  np.degrees(target_odom))
        rospy.loginfo('go_angle: current %.2f' %  np.degrees(current_ang_odom))
        rospy.loginfo('go_angle: diff %.2f' %  np.degrees(diff))

        while not rospy.is_shutdown():
            robot_odom = tfu.transform('odom_combined', 'base_footprint', self.tl)
            current_ang_odom = tr.euler_from_matrix(robot_odom[0:3, 0:3], 'sxyz')[2]
            diff = ut.standard_rad(target_odom - current_ang_odom)
            rospy.loginfo('go_angle: current %.2f diff %.2f' %  (np.degrees(current_ang_odom), np.degrees(diff)))
            p = k * diff
            if func != None:
                func(diff)
            if np.abs(diff) < tolerance:
                break

            if self.go_ang_server.is_preempt_requested():
                self.go_ang_server.set_preempted()
                break

            tw = gm.Twist()
            # Smaller (in magnitude) of proportional command and cap.
            vels = [p, np.sign(p) * max_ang_vel]
            tw.angular.z = vels[np.argmin(np.abs(vels))]
            #rospy.loginfo('diff %.3f vel %.3f' % (math.degrees(diff), math.degrees(tw.angular.z)))
            self.tw_pub.publish(tw)
            #rospy.loginfo('commanded %s' % str(tw))
            rate.sleep()
        rospy.loginfo('go_ang: finished %.3f' % math.degrees(diff))
        return diff
Exemple #18
0
def pose_cb(ps):
    """Haptic anchor callback: publish a wrench pulling the omni end
    effector toward a fixed workspace center point.

    ps: PoseStamped of the end effector.  The force is a proportional
    (gain 30) pull toward `center`, re-expressed in the sensable device
    frame before publishing.
    """
    m_f, frame = tfu.posestamped_as_matrix(ps)
    m_o1 = tfu.transform('/omni1', frame, listener) * m_f
    ee_point = np.matrix(tr.translation_from_matrix(m_o1)).T
    center = np.matrix([-.10, 0, .30]).T
    dif = 30 * (center - ee_point)
    #force_dir = dif / np.linalg.norm(dif)
    force_o1 = dif  #force_dir * np.sum(np.power(dif, 2))

    force_s = tfu.transform('/omni1_sensable', '/omni1',
                            listener) * np.row_stack(
                                (force_o1, np.matrix([1.])))
    print np.linalg.norm(center - ee_point)

    wr = Wrench()
    wr.force.x = force_s[0]
    wr.force.y = force_s[1]
    wr.force.z = force_s[2]
    wpub.publish(wr)
Exemple #19
0
    def go_ang(self, ang, speed):
        """Rotate the base by ang degrees at speed deg/s, integrating the
        odometry yaw until the accumulated rotation passes the target.

        NOTE(review): positive ang commands a NEGATIVE angular velocity
        (sign = -1 below) while the stop test expects the accumulated yaw
        to exceed a positive dt — confirm this matches the intended frame
        convention.
        """
        dt = math.radians(ang)

        if dt > 0:
            sign = -1
        elif dt < 0:
            sign = 1
        else:
            sign = 0

        self.tl.waitForTransform('base_footprint', 'odom_combined',
                                 rospy.Time(), rospy.Duration(10))
        p0_base = tfu.transform('base_footprint', 'odom_combined',
                                self.tl)  # \
        start_ang = tr.euler_from_matrix(p0_base[0:3, 0:3], 'sxyz')[2]
        r = rospy.Rate(100)
        dist_so_far = 0.

        last_ang = start_ang
        while not rospy.is_shutdown():
            pcurrent_base = tfu.transform('base_footprint', 'odom_combined',
                                          self.tl)  #\
            current_ang = tr.euler_from_matrix(pcurrent_base[0:3, 0:3],
                                               'sxyz')[2]
            # Accumulate the wrapped per-cycle yaw change.
            dist_so_far = dist_so_far + (ut.standard_rad(current_ang -
                                                         last_ang))
            if dt > 0 and dist_so_far > dt:
                rospy.loginfo('stopped! %f %f' % (dist_so_far, dt))
                break
            elif dt < 0 and dist_so_far < dt:
                rospy.loginfo('stopped! %f %f' % (dist_so_far, dt))
                break
            elif dt == 0:
                rospy.loginfo('stopped! %f %f' % (dist_so_far, dt))
                break

            tw = gm.Twist()
            tw.angular.z = math.radians(speed * sign)

            self.tw_pub.publish(tw)
            r.sleep()
            last_ang = current_ang
Exemple #20
0
    def print_pose(self):
        """Print the base_footprint pose in the map frame, both as a matrix
        and as (translation, quaternion).  Errors are printed, not raised.
        """
        try:
            tfu.wait('map', 'base_footprint', self.tflistener, 1.)
            m_T_b = tfu.transform('map', 'base_footprint', self.tflistener)
            t, q = tfu.matrix_as_tf(m_T_b)

            print m_T_b
            print 't', t, 'q', q

        except Exception, e:
            print e
Exemple #21
0
    def project(self, p, tf_listener=None, from_frame=None):
        """Project a 3D point (3x1 np.matrix) to pixel coordinates using
        the camera matrix self.P.

        If from_frame names a frame other than the camera's, the point is
        first transformed into the camera frame via tf_listener.  Returns
        a 2x1 pixel coordinate matrix.
        """
        if from_frame != None and from_frame != self.frame:
            # Re-express p in the camera frame (identity orientation).
            cam_mat = tfu.transform(self.frame, from_frame, tf_listener) \
                    * tfu.tf_as_matrix((p.A1.tolist(), tr.quaternion_from_euler(0, 0, 0)))
            trans, _ = tfu.matrix_as_tf(cam_mat)
            p = np.matrix(trans).T

        # Homogenize, apply the projection, then normalize by depth.
        homog = np.row_stack((p, np.matrix([1.])))
        pix = self.P * homog
        pix = pix / pix[2, 0]
        return pix[0:2, 0]
Exemple #22
0
 def move_relative(self,
                   movement_target,
                   target_frame,
                   stop='pressure_accel',
                   pressure=150):
     """Rotate a displacement given in target_frame into the base frame
     and delegate to move_relative_base."""
     rot = tfu.transform('base_link', target_frame,
                         self.tf_listener)[0:3, 0:3]
     # Only the rotation applies: a displacement has no translation part.
     movement_base = rot * movement_target
     return self.move_relative_base(movement_base,
                                    stop=stop,
                                    pressure=pressure)
Exemple #23
0
    def project(self, p, tf_listener=None, from_frame=None):
        """Project 3D point(s) to pixel coordinates using the camera matrix
        self.P.

        If from_frame names a frame other than the camera's, p (3x1) is
        first transformed into the camera frame.  Normalization here is
        per-column (pp[2,:]), so multi-column input yields one pixel per
        column.
        """
        if not(from_frame == None or from_frame == self.frame):
            # Re-express p in the camera frame (identity orientation).
            p_cam = tfu.transform(self.frame, from_frame, tf_listener) \
                           * tfu.tf_as_matrix((p.A1.tolist(), tr.quaternion_from_euler(0,0,0)))
            trans, q = tfu.matrix_as_tf(p_cam)
            p = np.matrix(trans).T

        # Homogenize, project, normalize each column by its depth.
        p = np.row_stack((p, np.matrix([1.])))
        pp = self.P * p
        pp = pp / pp[2,:]
        return pp[0:2,:]
    def torso_lift_link_T_omni(self, tip_omni, msg_frame):
        """Map an omni tip pose (given in msg_frame) into /torso_lift_link,
        storing the result in self.tip_tt / self.tip_tq.

        The translation is multiplied by self.scale_omni_l0 to convert omni
        workspace units into robot meters (the inverse of the division done
        when mapping torso poses into omni coordinates).
        """
        center_T_6 = tfu.transform(self.omni_name+'_center', msg_frame, self.tflistener)
        tip_center = center_T_6*tip_omni
        tip_center_t = (np.array(tr.translation_from_matrix(tip_center))*np.array(self.scale_omni_l0)).tolist()
        tip_center_q = tr.quaternion_from_matrix(tip_center)

        # z_T_6 = tfu.transform(self.omni_name+'_link0', msg_frame, self.tflistener)
        # tip_0 = z_T_6*tip_omni
        # tip_0t = (np.array(tr.translation_from_matrix(tip_0))*np.array(self.scale_omni_l0)).tolist()
        # tip_0q = tr.quaternion_from_matrix(tip_0)

    #     #Transform into torso frame so we can bound arm workspace
        tll_T_0 = tfu.transform('/torso_lift_link', self.omni_name + '_center', self.tflistener)
        tip_torso_mat = tll_T_0 * tfu.tf_as_matrix([tip_center_t, tip_center_q])
        tip_tt, tip_tq = tfu.matrix_as_tf(tip_torso_mat)

        #don't need to bound, arm controller should do this
    #     tip_tt[0] = limit_range(tip_tt[0], self.X_BOUNDS[0], self.X_BOUNDS[1])
        self.tip_tt = tip_tt
        self.tip_tq = tip_tq
Exemple #25
0
    def run(self):
        """Continuously point the head at the midpoint between the two
        gripper tool frames, re-targeting at 50 Hz.  tf lookup failures
        are printed and the loop continues."""
        r = rospy.Rate(50)
        while not rospy.is_shutdown():
            try:
                common_link = "/base_link"
                # Both gripper tool-frame origins in the base frame.
                c_T_rgrip = tfu.transform(common_link, "/r_gripper_tool_frame", self.tflistener)
                c_T_lgrip = tfu.transform(common_link, "/l_gripper_tool_frame", self.tflistener)
                gripper_right_c = np.matrix(tr.translation_from_matrix(c_T_rgrip * tr.translation_matrix([0, 0, 0.0])))
                gripper_left_c = np.matrix(tr.translation_from_matrix(c_T_lgrip * tr.translation_matrix([0, 0, 0.0])))
                look_at_point_c = ((gripper_right_c + gripper_left_c) / 2.0).A1.tolist()

                # Ask the head controller to look at the midpoint.
                g = PointHeadGoal()
                g.target.header.frame_id = "/base_link"
                g.target.point = Point(*look_at_point_c)
                g.min_duration = rospy.Duration(1.0)
                g.max_velocity = 10.0
                self.head_client.send_goal(g)
                self.head_client.wait_for_result(rospy.Duration(1.0))
                r.sleep()
            except tf.LookupException, e:
                print e
Exemple #26
0
 def move_relative_gripper(self,
                           movement_tool,
                           stop='pressure_accel',
                           pressure=150):
     """Rotate a displacement given in the tool frame into the base frame
     and delegate to move_relative_base."""
     base_T_tool = tfu.transform('base_link', self.tool_frame,
                                 self.tf_listener)
     # Only the rotation applies: a displacement has no translation part.
     movement_base = base_T_tool[0:3, 0:3] * movement_tool
     return self.move_relative_base(movement_base,
                                    stop=stop,
                                    pressure=pressure)
    def _move_cartesian_ik(self, position, orientation, \
            stop_funcs=[], timeout = 30., settling_time = 0.25, \
            frame='base_link', vel=.15):
        """Move the tool to (position, orientation) in `frame` via the
        Cartesian IK controller, monitoring stop conditions.

        position, orientation: np.matrix translation / quaternion of the
            desired tool-frame pose.
        stop_funcs: list of (callable, name) pairs; when a callable returns
            True the trajectory is stopped and `name` is returned.
        timeout: seconds before giving up (0. disables the timeout).
        settling_time: seconds to keep monitoring after the goal finishes.

        Returns the name of the stop condition that fired, None if the
        motion completed or timed out, or the error string from
        move_cartesian_ik if the goal could not be sent.

        NOTE(review): stop_funcs=[] is a mutable default argument — safe
        only as long as no caller mutates it.
        """
        #pdb.set_trace()
        #self.arm_obj.set_cart_pose_ik(cart_pose, total_time=motion_length, frame=frame, block=False)
        #cart_pose = tfu.tf_as_matrix((position.A1.tolist(), orientation.A1.tolist()))

        # The goal is given for the tool frame, but the IK solver works in
        # the IK frame: append the static tool->ik transform.
        self.tf_listener.waitForTransform(self.ik_frame, self.tool_frame, rospy.Time(), rospy.Duration(10))
        toolframe_T_ikframe = tfu.transform(self.tool_frame, self.ik_frame, self.tf_listener)
        cart_pose = tfu.tf_as_matrix((position.A1.tolist(), orientation.A1.tolist()))
        cart_pose = cart_pose * toolframe_T_ikframe
        position, orientation = tfu.matrix_as_tf(cart_pose)

        #goal_pose_ps = create_pose_stamped(position.A1.tolist() + orientation.A1.tolist(), frame)
        goal_pose_ps = cf.create_pose_stamped(position.tolist() + orientation.tolist(), frame)
        r = self.reactive_gr.cm.move_cartesian_ik(goal_pose_ps, blocking=0, step_size=.005, \
                pos_thres=.02, rot_thres=.1, timeout=rospy.Duration(timeout),
                settling_time=rospy.Duration(settling_time), vel=vel)
        if not (r == 'sent goal' or r == 'success'):
            return r

        #move_cartesian_ik(self, goal_pose, collision_aware = 0, blocking = 1,                            
        #                  step_size = .005, pos_thres = .02, rot_thres = .1,                                
        #                  timeout = rospy.Duration(30.),                                                    
        #                  settling_time = rospy.Duration(0.25), vel = .15):   

        # Monitor the motion: stop conditions, overall timeout, and a
        # settling period after the goal becomes inactive.
        stop_trigger = None
        done_time = None
        start_time = rospy.get_rostime()
        while stop_trigger == None:
            for f, name in stop_funcs:
                if f():
                    self.arm_obj.stop_trajectory_execution()
                    stop_trigger = name
                    rospy.loginfo('"%s" requested that motion should be stopped.' % (name))
                    break

            if timeout != 0. and rospy.get_rostime() - start_time > rospy.Duration(timeout):
                rospy.loginfo("_move_cartesian_ik: motion timed out")
                break

            if (not done_time) and (not self.arm_obj.has_active_goal()):
                #rospy.loginfo("check_cartesian_done returned 1")
                done_time = rospy.get_rostime()

            if done_time and rospy.get_rostime() - done_time > rospy.Duration(settling_time):
                rospy.loginfo("_move_cartesian_ik: done settling")
                break

        if stop_trigger == 'pressure_safety':
            print 'ROBOT SAFETY ERROR'
            #raise RobotSafetyError(stop_trigger)
        return stop_trigger
Exemple #28
0
    def joint_state_cb(self, jmsg):
        """Cache fingertip transforms whenever a joint state message arrives.

        Looks up both fingertip frames relative to base_footprint (good for
        display, independent of torso link) and torso_lift_link (for FK),
        then records the receipt time in self.last_jstate_time.
        """
        transforms = {}
        # (dict key prefix, reference frame); rtip/ltip select the fingertip.
        for prefix, ref_frame in [('bf', '/base_footprint'),
                                  ('torso', '/torso_lift_link')]:
            for side, tip_frame in zip(('rtip', 'ltip'), self.ftip_frames):
                transforms['%s_T_%s' % (prefix, side)] = \
                        tfu.transform(ref_frame, tip_frame, self.tflistener)

        #self.jstate_msgs.append(SimpleJointStateMsg(jmsg.header, transforms))
        self.last_jstate_time = time.time()
    def approach_perpendicular_to_surface(self, point_bl, voi_radius, dist_approach):
        """Drive the base so it faces point_bl head-on from dist_approach meters.

        Estimates the surface normal at the point from a fresh laser scan,
        drives to a standoff point along that normal, then turns to face the
        point.

        point_bl: np.matrix target point in the base_link frame.
        voi_radius: half-extent (m) of the box of scan points used for the
            normal estimate.
        dist_approach: standoff distance (m) along the estimated normal.
        Returns the nav stack result from set_pose; values other than 3 are
        returned early as failures (3 presumably means success -- TODO confirm).
        """
        # Remember the target in the fixed map frame before any base motion.
        map_T_base_link0 = tfu.transform('map', 'base_link', self.tf_listener)
        point_map0 = tfu.transform_points(map_T_base_link0, point_bl)
        #pdb.set_trace()
        self.turn_to_point(point_bl, block=False)

        # Re-express the target in base_link after the (non-blocking) turn.
        point_bl = tfu.transform_points(tfu.transform('base_link', 'map', self.tf_listener), \
                                        point_map0)
        point_cloud_bl = self.laser_scan.scan(math.radians(180.), math.radians(-180.), 2.5)
        point_cloud_np_bl = ru.pointcloud_to_np(point_cloud_bl)
        rospy.loginfo('approach_perpendicular_to_surface: pointcloud size %d' \
                % point_cloud_np_bl.shape[1])
        # Estimate the surface normal from scan points around the target.
        voi_points_bl, limits_bl = i3d.select_rect(point_bl, voi_radius, voi_radius, voi_radius, point_cloud_np_bl)
        #TODO: use closest plane instead of closest points determined with KDTree
        normal_bl = i3d.calc_normal(voi_points_bl)
        # Standoff point dist_approach along the normal, expressed in map.
        point_in_front_mechanism_bl = point_bl + normal_bl * dist_approach
        map_T_base_link = tfu.transform('map', 'base_link', self.tf_listener)
        point_in_front_mechanism_map = tfu.transform_points(map_T_base_link, point_in_front_mechanism_bl)

        #Navigate to point (TODO: check for collisions)
        point_map = tfu.transform_points(map_T_base_link, point_bl)
        t_current_map, r_current_map = self.robot.base.get_pose()
        rospy.loginfo('approach_perpendicular_to_surface: driving for %.3f m to front of surface' \
                % np.linalg.norm(t_current_map[0:2] - point_in_front_mechanism_map[0:2,0].T))
        #pdb.set_trace()
        rvalue = self.robot.base.set_pose(point_in_front_mechanism_map.T.A1.tolist(), r_current_map, 'map')
        if rvalue != 3:
            return rvalue

        t1_current_map, r1_current_map = self.robot.base.get_pose()
        rospy.loginfo('approach_perpendicular_to_surface: %.3f m away from from of surface' % np.linalg.norm(t1_current_map[0:2] - point_in_front_mechanism_map[0:2,0].T))

        #Rotate to face point (TODO: check for collisions)
        base_link_T_map = tfu.transform('base_link', 'map', self.tf_listener)
        point_bl = tfu.transform_points(base_link_T_map, point_map)
        #pdb.set_trace()
        self.turn_to_point(point_bl, block=False)
        # Give the non-blocking turn time to complete before returning.
        time.sleep(2.)

        return rvalue
    def project(self, p, tf_listener=None, from_frame=None):
        """Project 3D point(s) into image pixel coordinates.

        p: 3xN np.matrix of points.  If from_frame is given and differs from
            the camera frame, points are first transformed into the camera
            frame using tf_listener.
        Returns a 2xN np.matrix of pixel coordinates.
        Raises RuntimeError if no CameraInfo message has been received.
        """
        if not self.has_msg:
            raise RuntimeError('Has not been initialized with a CameraInfo message (call camera_info).')
        # Idiom fix: identity comparison with None ('is'), not equality.
        if from_frame is not None and from_frame != self.frame:
            p_cam = tfu.transform(self.frame, from_frame, tf_listener) \
                           * tfu.tf_as_matrix((p.A1.tolist(), tr.quaternion_from_euler(0,0,0)))
            trans, q = tfu.matrix_as_tf(p_cam)
            p = np.matrix(trans).T

        # NOTE(review): appends a *zero* homogeneous row (not ones), which
        # drops any translation column of P -- confirm this is intended.
        hrow = np.matrix(np.zeros((1,p.shape[1])))
        p = np.row_stack((p, hrow))
        pp = self.P * p
        pp = pp / pp[2,:]
        return pp[0:2,:]
    def transform_point(self, point_stamped):
        """Express a stamped point in /base_link and compute the heading the
        robot should turn to in order to face it.

        Returns (translation, quaternion, target_frame).
        """
        target_link = '/base_link'
        pt = point_stamped.point

        # Move the point into the base frame.
        base_T_head = tfu.transform(target_link, point_stamped.header.frame_id, self.tflistener)
        point_mat_base = base_T_head * tfu.translation_matrix([pt.x, pt.y, pt.z])
        t_base, _ = tfu.matrix_as_tf(point_mat_base)

        # Yaw that points the robot at the target.
        angle = math.atan2(t_base[1], t_base[0])
        q_base = tf.transformations.quaternion_from_euler(0, 0, angle)
        return (t_base, q_base, target_link)
Exemple #32
0
    def ik(self, cart_pose, frame_of_pose='torso_lift_link', seed=None):
        #if frame_of_pose == self.tool_frame or frame_of_pose == None:

        self.tflistener.waitForTransform(self.ik_frame, self.tool_frame, rospy.Time(), rospy.Duration(10))
        #wr_T_toolframe = tfu.transform(sol_link, self.tool_frame, self.tflistener)
        #solframe_T_wr * wr_T_toolframe
        #print 'undoing'
        toolframe_T_ikframe = tfu.transform(self.tool_frame, self.ik_frame, self.tflistener)
        cart_pose = cart_pose * toolframe_T_ikframe 
        #frame_of_pose = self.tool_frame

        trans, rot = tfu.matrix_as_tf(cart_pose)

        ik_req = ks.GetPositionIKRequest()
        ik_req.timeout = rospy.Duration(5.0)
        ik_req.ik_request.ik_link_name = self.ik_frame

        #set pose
        ik_req.ik_request.pose_stamped.header.frame_id = frame_of_pose
        ik_req.ik_request.pose_stamped.pose.position.x = trans[0]#cart_pose[0][0,0]
        ik_req.ik_request.pose_stamped.pose.position.y = trans[1]#cart_pose[0][1,0]
        ik_req.ik_request.pose_stamped.pose.position.z = trans[2]#cart_pose[0][2,0]

        ik_req.ik_request.pose_stamped.pose.orientation.x = rot[0]#cart_pose[1][0,0];
        ik_req.ik_request.pose_stamped.pose.orientation.y = rot[1]#cart_pose[1][1,0];
        ik_req.ik_request.pose_stamped.pose.orientation.z = rot[2]#cart_pose[1][2,0];
        ik_req.ik_request.pose_stamped.pose.orientation.w = rot[3]#cart_pose[1][3,0];

        #seed solver
        ik_req.ik_request.ik_seed_state.joint_state.name = self.ik_joint_names
        if seed == None:
            p = []
            for i in range(len(self.ik_joint_names)):
                minp = self.ik_info_resp.kinematic_solver_info.limits[i].min_position
                maxp = self.ik_info_resp.kinematic_solver_info.limits[i].max_position
                p.append((minp + maxp) / 2.0)
            ik_req.ik_request.ik_seed_state.joint_state.position = p
        else:
            if seed.__class__ == np.matrix:
                seed = seed.T.A1.tolist()
            ik_req.ik_request.ik_seed_state.joint_state.position = seed

        response = self._ik(ik_req)
        if response.error_code.val == response.error_code.SUCCESS:
            #print 'success'
            return np.matrix(response.solution.joint_state.position).T
        else:
            #print 'fail', response.__class__, response
            print response
            return None
    def transform_point(self, point_stamped):
        """Express a stamped point in /base_link and compute the yaw needed to
        face it.  Returns (translation, quaternion, target_frame)."""
        target_link = '/base_link'
        head_point = point_stamped.point

        # Transform the point into the base frame.
        base_T_head = tfu.transform(target_link,
                                    point_stamped.header.frame_id,
                                    self.tflistener)
        head_mat = tfu.translation_matrix([head_point.x, head_point.y, head_point.z])
        t_base, _unused_rot = tfu.matrix_as_tf(base_T_head * head_mat)

        # Heading toward the point, as a quaternion about z.
        q = tf.transformations.quaternion_from_euler(
                0, 0, math.atan2(t_base[1], t_base[0]))
        return (t_base, q, target_link)
    def go_ang(self, ang, speed):
        """Rotate the base in place by 'ang' degrees, monitoring odometry and
        stopping once the accumulated rotation reaches the target.

        ang: rotation amount in degrees; its sign selects the direction.
        speed: rotation speed magnitude in degrees/sec.
        """
        dt = math.radians(ang)

        # NOTE(review): commanded angular velocity sign is the *opposite* of
        # the requested angle's sign -- presumably matches this platform's
        # odometry convention; confirm before reuse.
        if dt > 0:
            sign = -1
        elif dt < 0:
            sign = 1
        else:
            sign = 0

        self.tl.waitForTransform("base_footprint", "odom_combined", rospy.Time(), rospy.Duration(10))
        p0_base = tfu.transform("base_footprint", "odom_combined", self.tl)  # \
        start_ang = tr.euler_from_matrix(p0_base[0:3, 0:3], "sxyz")[2]
        r = rospy.Rate(100)
        dist_so_far = 0.0

        last_ang = start_ang
        while not rospy.is_shutdown():
            # Accumulate signed yaw increments; ut.standard_rad presumably
            # wraps each increment into (-pi, pi] so totals beyond a full
            # revolution don't alias -- verify.
            pcurrent_base = tfu.transform("base_footprint", "odom_combined", self.tl)  # \
            current_ang = tr.euler_from_matrix(pcurrent_base[0:3, 0:3], "sxyz")[2]
            dist_so_far = dist_so_far + (ut.standard_rad(current_ang - last_ang))
            if dt > 0 and dist_so_far > dt:
                rospy.loginfo("stopped! %f %f" % (dist_so_far, dt))
                break
            elif dt < 0 and dist_so_far < dt:
                rospy.loginfo("stopped! %f %f" % (dist_so_far, dt))
                break
            elif dt == 0:
                rospy.loginfo("stopped! %f %f" % (dist_so_far, dt))
                break

            # Command pure rotation at the requested speed.
            tw = gm.Twist()
            tw.angular.z = math.radians(speed * sign)

            self.tw_pub.publish(tw)
            r.sleep()
            last_ang = current_ang
Exemple #35
0
    def torso_lift_link_T_omni(self, tip_omni, msg_frame):
        """Map the omni (haptic device) tip pose into the PR2 torso frame.

        tip_omni: 4x4 pose matrix of the omni tip, expressed in msg_frame.
        msg_frame: tf frame the incoming pose is expressed in.
        Side effect: stores the result in self.tip_tt (translation) and
        self.tip_tq (quaternion) instead of returning it.
        """
        # Express the tip in the omni's center frame, then scale the
        # translation into the arm's workspace.
        center_T_6 = tfu.transform(self.omni_name + '_center', msg_frame,
                                   self.tflistener)
        tip_center = center_T_6 * tip_omni
        tip_center_t = (np.array(tr.translation_from_matrix(tip_center)) *
                        np.array(self.scale_omni_l0)).tolist()
        tip_center_q = tr.quaternion_from_matrix(tip_center)

        # z_T_6 = tfu.transform(self.omni_name+'_link0', msg_frame, self.tflistener)
        # tip_0 = z_T_6*tip_omni
        # tip_0t = (np.array(tr.translation_from_matrix(tip_0))*np.array(self.scale_omni_l0)).tolist()
        # tip_0q = tr.quaternion_from_matrix(tip_0)

        #     #Transform into torso frame so we can bound arm workspace
        tll_T_0 = tfu.transform('/torso_lift_link', self.omni_name + '_center',
                                self.tflistener)
        tip_torso_mat = tll_T_0 * tfu.tf_as_matrix(
            [tip_center_t, tip_center_q])
        tip_tt, tip_tq = tfu.matrix_as_tf(tip_torso_mat)

        #don't need to bound, arm controller should do this
        #     tip_tt[0] = limit_range(tip_tt[0], self.X_BOUNDS[0], self.X_BOUNDS[1])
        self.tip_tt = tip_tt
        self.tip_tq = tip_tq
Exemple #36
0
    def project(self, p, tf_listener=None, from_frame=None):
        """Project 3D point(s) into image pixel coordinates.

        p: 3xN np.matrix of points.  If from_frame is given and differs from
            the camera frame, points are first transformed into the camera
            frame using tf_listener.
        Returns a 2xN np.matrix of pixel coordinates.
        Raises RuntimeError if no CameraInfo message has been received.
        """
        if not self.has_msg:
            raise RuntimeError(
                'Has not been initialized with a CameraInfo message (call camera_info).'
            )
        # Idiom fix: identity comparison with None ('is'), not equality.
        if from_frame is not None and from_frame != self.frame:
            p_cam = tfu.transform(self.frame, from_frame, tf_listener) \
                           * tfu.tf_as_matrix((p.A1.tolist(), tr.quaternion_from_euler(0,0,0)))
            trans, q = tfu.matrix_as_tf(p_cam)
            p = np.matrix(trans).T

        # NOTE(review): appends a *zero* homogeneous row (not ones), which
        # drops any translation column of P -- confirm this is intended.
        hrow = np.matrix(np.zeros((1, p.shape[1])))
        p = np.row_stack((p, hrow))
        pp = self.P * p
        pp = pp / pp[2, :]
        return pp[0:2, :]
    def drive_approach_behavior(self, point_bl, dist_far):
        """Navigate the base to within dist_far meters of a point.

        point_bl: np.matrix point in the base_link frame.
        dist_far: desired standoff distance (m) from the point.
        Returns the nav stack's result value from set_pose.
        """
        map_T_base_link = tfu.transform('map', 'base_link', self.tf_listener)
        point_map = tfu.transform_points(map_T_base_link, point_bl)
        t_current_map, r_current_map = self.robot.base.get_pose()
        rospy.loginfo('drive_approach_behavior: point is %.3f m away"' % np.linalg.norm(t_current_map[0:2].T - point_map[0:2,0].T))

        point_dist = np.linalg.norm(point_bl)
        # BUG FIX: np.max(x, 0) treats the 0 as the *axis* argument, not a
        # lower bound, so the travel distance was never clamped; use the
        # builtin max for a scalar clamp at zero.
        bounded_dist = max(point_dist - dist_far, 0)
        # Goal: slide toward the point along the line of sight, stopping
        # dist_far short of it.
        point_close_bl = (point_bl / point_dist) * bounded_dist
        point_close_map = tfu.transform_points(map_T_base_link, point_close_bl)
        rvalue = self.robot.base.set_pose(point_close_map.T.A1.tolist(), \
                                          r_current_map, '/map', block=True)
        t_end, r_end = self.robot.base.get_pose()
        rospy.loginfo('drive_approach_behavior: ended up %.3f m away from laser point' % np.linalg.norm(t_end[0:2] - point_map[0:2,0].T))
        rospy.loginfo('drive_approach_behavior: ended up %.3f m away from goal' % np.linalg.norm(t_end[0:2] - point_close_map[0:2,0].T))
        rospy.loginfo('drive_approach_behavior: returned %d' % rvalue)
        return rvalue
    def scan(self, point3d):
        """Scan the scene with the tilting laser, grab a prosilica image, and
        build the interest-point feature extractor around point3d.

        Side effects: stores the extractor in self.feature_extractor and
        displays the scan; feature vectorization itself is commented out.
        """
        rospy.loginfo('InterestPointPerception: scanning..')
        point_cloud_bl = self.laser_scan.scan(math.radians(180.), math.radians(-180.), 15)
        prosilica_image = self.prosilica.get_frame()
        # Camera extrinsics for registering the cloud with the image.
        image_T_laser = tfu.transform('/high_def_optical_frame', '/base_link', self.tf_listener)

        #Extract features
        self.feature_extractor = r3d.IntensityCloudData(point_cloud_bl, 
                prosilica_image, image_T_laser, self.prosilica_cal, 
                point3d, self.rec_params)

        fex = self.feature_extractor
        self.disp.display_scan(fex.point_cloud3d_orig, fex.points3d_valid_laser, fex.colors_valid,
                prosilica_image, self.prosilica_cal)

        rospy.loginfo('InterestPointPerception: extracting features..')
        #self.instances, self.points2d, self.points3d = self.feature_extractor.extract_vectorized_features()
        rospy.loginfo('InterestPointPerception: finished extraction.')
Exemple #39
0
    def fk(self, joint_poses_mat, frame='torso_lift_link', sol_link=None, use_tool_frame=True):
        """Forward kinematics via the FK service.

        joint_poses_mat: column np.matrix of joint angles in solver order.
        frame: frame the solution pose is expressed in.
        sol_link: link to solve for; defaults to self.ik_frame.
        use_tool_frame: if True, compose with the fixed sol_link -> tool_frame
            transform so the returned pose is the tool frame's.
        Returns a 4x4 pose matrix.
        """
        # Idiom fix: 'is None' instead of '== None' for the sentinel check.
        if sol_link is None:
            sol_link = self.ik_frame

        fk_req = ks.GetPositionFKRequest()
        fk_req.header.frame_id = frame
        fk_req.fk_link_names = [sol_link]
        fk_req.robot_state.joint_state.name = self.fk_info_resp.kinematic_solver_info.joint_names
        fk_req.robot_state.joint_state.position = joint_poses_mat.T.A1.tolist()
        fk_resp = self._fk(fk_req)

        solframe_T_wr = tfu.pose_as_matrix(fk_resp.pose_stamped[0].pose)
        if not use_tool_frame:
            return solframe_T_wr
        else:
            #print 'redoing'
            self.tflistener.waitForTransform(self.tool_frame, sol_link, rospy.Time(), rospy.Duration(10))
            wr_T_toolframe = tfu.transform(sol_link, self.tool_frame, self.tflistener)
            return solframe_T_wr * wr_T_toolframe
    def scan(self, point3d):
        """Scan the scene with the tilting laser, grab a prosilica image, and
        build the interest-point feature extractor around point3d.

        Side effects: stores the extractor in self.feature_extractor and
        displays the scan; feature vectorization itself is commented out.
        """
        rospy.loginfo('InterestPointPerception: scanning..')
        point_cloud_bl = self.laser_scan.scan(math.radians(180.),
                                              math.radians(-180.), 15)
        prosilica_image = self.prosilica.get_frame()
        # Camera extrinsics for registering the cloud with the image.
        image_T_laser = tfu.transform('/high_def_optical_frame', '/base_link',
                                      self.tf_listener)

        #Extract features
        self.feature_extractor = r3d.IntensityCloudData(
            point_cloud_bl, prosilica_image, image_T_laser, self.prosilica_cal,
            point3d, self.rec_params)

        fex = self.feature_extractor
        self.disp.display_scan(fex.point_cloud3d_orig,
                               fex.points3d_valid_laser, fex.colors_valid,
                               prosilica_image, self.prosilica_cal)

        rospy.loginfo('InterestPointPerception: extracting features..')
        #self.instances, self.points2d, self.points3d = self.feature_extractor.extract_vectorized_features()
        rospy.loginfo('InterestPointPerception: finished extraction.')
def script():
    """Demonstrate PR2 right-arm forward kinematics via ROS services.

    Queries the FK solver info, runs FK with all joints at 0.5 rad, and
    prints the resulting gripper tool frame pose.
    """
    rospy.init_node('forward_kin')
    tflistener = tf.TransformListener()
    print 'waiting for transform'
    tflistener.waitForTransform('r_gripper_tool_frame', 'r_wrist_roll_link',
                                rospy.Time(), rospy.Duration(10))
    print 'waiting for services'
    rospy.wait_for_service('pr2_right_arm_kinematics/get_fk_solver_info'
                           )  #, ks.GetKinematicSolverInfo )
    rospy.wait_for_service(
        'pr2_right_arm_kinematics/get_fk')  #,             ks.GetPositionFK)
    print 'done init'

    r_fk_info = rospy.ServiceProxy(
        'pr2_right_arm_kinematics/get_fk_solver_info',
        ks.GetKinematicSolverInfo)
    r_fk = rospy.ServiceProxy('pr2_right_arm_kinematics/get_fk',
                              ks.GetPositionFK)

    resp = r_fk_info()
    print 'get_fk_solver_info returned', resp.kinematic_solver_info.joint_names
    print 'get_fk_solver_info returned', resp.kinematic_solver_info.limits
    print 'get_fk_solver_info returned', resp.kinematic_solver_info.link_names

    # FK request for the wrist with every joint set to 0.5 rad.
    fk_req = ks.GetPositionFKRequest()
    fk_req.header.frame_id = 'torso_lift_link'
    fk_req.fk_link_names = ['r_wrist_roll_link']
    fk_req.robot_state.joint_state.name = resp.kinematic_solver_info.joint_names
    fk_req.robot_state.joint_state.position = [
        .5 for i in range(len(resp.kinematic_solver_info.joint_names))
    ]
    fk_resp = r_fk(fk_req)

    # Compose the wrist pose with the fixed wrist -> tool transform to get
    # the tool frame pose.
    rtip_T_wr = tfu.transform('r_gripper_tool_frame', 'r_wrist_roll_link',
                              tflistener)
    right_wr = tfu.pose_as_matrix(fk_resp.pose_stamped[0].pose)
    rtip_pose = rtip_T_wr * right_wr
    print tfu.matrix_as_tf(rtip_pose)
Exemple #42
0
    def follow_point_cb(self, point_stamped):
        """Command the base to drive toward a point received in any frame.

        Transforms the point into /base_link, computes the heading toward it,
        and publishes a PoseStamped goal on self.move_pub.
        """
        point_head = point_stamped.point

        base_T_head = tfu.transform('/base_link', point_stamped.header.frame_id, self.tflistener)
        # BUG FIX: was tfu.translation_matrix([point.x, ...]) -- 'point' is
        # undefined in this scope (NameError on every call); the stamped
        # point is 'point_head'.
        point_mat_head = tfu.translation_matrix([point_head.x, point_head.y, point_head.z])
        point_mat_base = base_T_head * point_mat_head
        t_base, o_base = tfu.matrix_as_tf(point_mat_base)
        x = t_base[0]
        y = t_base[1]
        angle = math.atan2(y, x)

        ps = PoseStamped()
        ps.header.frame_id = '/base_link'
        ps.pose.position.x = x
        ps.pose.position.y = y
        ps.pose.position.z = 0.

        # BUG FIX: the heading is a rotation about z (yaw); the original
        # passed 'angle' as roll.  Now consistent with transform_point's
        # quaternion_from_euler(0, 0, angle).
        q = tf.transformations.quaternion_from_euler(0, 0, angle)
        ps.pose.orientation.x = q[0]
        ps.pose.orientation.y = q[1]
        ps.pose.orientation.z = q[2]
        ps.pose.orientation.w = q[3]
        self.move_pub.publish(ps)
Exemple #43
0
 def _turn_by(self, delta_ang, block=True):
     """Rotate the base by delta_ang radians relative to its current heading
     (yaw measured from base_footprint in odom_combined)."""
     bf_T_odom = tfu.transform('base_footprint', 'odom_combined', self.tflistener)
     current_ang_odom = tr.euler_from_matrix(bf_T_odom[0:3, 0:3], 'sxyz')[2]
     self.turn_to(current_ang_odom + delta_ang, block)
Exemple #44
0
    def _move_cartesian_cart(self, position, orientation, \
            stop_funcs=[], timeout = 3.0, settling_time = 0.5, frame='base_link'):
        """Move the gripper using the Cartesian controller, polling stop
        conditions and the arm's motion until done.

        position, orientation: tool-frame goal (np.matrix translation and
            quaternion) expressed in 'frame'.
        stop_funcs: list of (callable, name) pairs; if a callable returns
            True the arm is frozen and 'name' is returned.
        Returns the triggering stop function's name, or None when the arm
        reached the pose or simply stopped moving.
        NOTE(review): stop_funcs=[] is a mutable default argument; harmless
        here since the list is only iterated, never mutated.
        """
        # TODO: Test this function...
        # Transform the pose from 'frame' to 'base_link'
        self.tf_listener.waitForTransform('base_link', frame, rospy.Time(),
                                          rospy.Duration(10))
        frame_T_base = tfu.transform('base_link', frame, self.tf_listener)
        init_cart_pose = tfu.tf_as_matrix(
            (position.A1.tolist(), orientation.A1.tolist()))
        base_cart_pose = frame_T_base * init_cart_pose

        # Get IK from tool frame to wrist frame for control input
        self.tf_listener.waitForTransform(self.ik_frame, self.tool_frame,
                                          rospy.Time(), rospy.Duration(10))
        toolframe_T_ikframe = tfu.transform(self.tool_frame, self.ik_frame,
                                            self.tf_listener)
        #base_cart_pose = tfu.tf_as_matrix((base_position.A1.tolist(), base_orientation.A1.tolist()))
        base_cart_pose = base_cart_pose * toolframe_T_ikframe
        base_position, base_orientation = tfu.matrix_as_tf(base_cart_pose)

        pose_stamped = cf.create_pose_stamped(base_position.tolist() +
                                              base_orientation.tolist())
        rg = self.reactive_gr
        rg.check_preempt()

        #send the goal to the Cartesian controllers
        #rospy.loginfo("sending goal to Cartesian controllers")
        (pos, rot) = cf.pose_stamped_to_lists(rg.cm.tf_listener, pose_stamped,
                                              'base_link')
        rg.move_cartesian_step(pos + rot, timeout, settling_time)

        #watch the fingertip/palm sensors until the controllers are done and then some
        start_time = rospy.get_rostime()
        done_time = None
        #stopped = False
        stop_trigger = None
        #print 'enterning loop'
        stop_detector = ArmStoppedDetector()
        # Poll until a stop function fires, the pose is reached, or the arm
        # stops making progress (the timeout/settling checks are commented
        # out below, so those are the only exits).
        while (1):

            rg.check_preempt()
            if len(stop_funcs) > 0:
                for f, name in stop_funcs:
                    if f():
                        rg.cm.switch_to_joint_mode()
                        rg.cm.freeze_arm()
                        #stopped = True
                        stop_trigger = name
                        rospy.loginfo(
                            '"%s" requested that motion should be stopped.' %
                            (name))
                        break
                if stop_trigger != None:
                    break

            #if stop_func != None and stop_func():
            #    rg.cm.switch_to_joint_mode()
            #    rg.cm.freeze_arm()
            #    stopped = True
            #    break

            #check if we're actually there
            if rg.cm.check_cartesian_near_pose(pose_stamped, .0025, .05):
                rospy.loginfo("_move_cartesian_cart: reached pose")
                #stop_trigger = 'at_pose'
                break

            stop_detector.record_diff(self.arm_obj.pose_cartesian())
            if stop_detector.is_stopped():
                rospy.loginfo("_move_cartesian_cart: arm stopped")
                #stop_trigger = 'stopped'
                break

            # if rg.cm.check_cartesian_really_done(pose_stamped, .0025, .05):
            #     #rospy.loginfo("actually got there")
            #     break
            #
            # #check if the controllers think we're done
            # if not done_time and rg.cm.check_cartesian_done():
            #     #rospy.loginfo("check_cartesian_done returned 1")
            #     done_time = rospy.get_rostime()

            # #done settling
            # if done_time and rospy.get_rostime() - done_time > rospy.Duration(settling_time):
            #     rospy.loginfo("done settling")
            #     break

            #timed out
            #if timeout != 0. and rospy.get_rostime() - start_time > rospy.Duration(timeout):
            #    rospy.loginfo("_move_cartesian_cart: timed out")
            #    break

        #if stop_trigger == 'pressure_safety' or stop_trigger == 'self_collision':
        if stop_trigger == 'pressure_safety':
            print 'ROBOT SAFETY ERROR'
            #raise RobotSafetyError(stop_trigger)
        #name = ut.formatted_time() + '_stop_detector.pkl'
        #print 'saved', name
        #ut.save_pickle(stop_detector, name)
        return stop_trigger
Exemple #45
0
 def get_pose(self):
     """Return the base_footprint pose as (translation, quaternion) in map."""
     map_T_bf = tfu.transform('map', 'base_footprint', self.tflistener)
     origin = tfu.tf_as_matrix(([0., 0., 0., 1.], tr.quaternion_from_euler(0,0,0)))
     return tfu.matrix_as_tf(map_T_bf * origin)
 def move_relative(self, movement_target, target_frame, stop='pressure_accel', pressure=150):
     """Rotate a displacement expressed in target_frame into base_link and
     execute it via move_relative_base."""
     rot = tfu.transform('base_link', target_frame, self.tf_listener)[0:3, 0:3]
     movement_base = rot * movement_target
     return self.move_relative_base(movement_base, stop=stop, pressure=pressure)
 def move_relative_gripper(self, movement_tool, stop='pressure_accel', pressure=150):
     """Rotate a displacement expressed in the tool frame into base_link and
     execute it via move_relative_base."""
     rot = tfu.transform('base_link', self.tool_frame, self.tf_listener)[0:3, 0:3]
     movement_base = rot * movement_tool
     return self.move_relative_base(movement_base, stop=stop, pressure=pressure)
    def omni_pose_cb(self, msg):
        """Handle a new omni (haptic device) pose message.

        When control is enabled: map the omni tip pose into the torso frame
        and publish it as the arm's goal pose, zeroing omni forces once.
        When disabled: re-publish the last goal (zero-order hold) and servo
        the omni's force well toward the arm's actual pose so the two stay
        registered.
        """
        self.publish_rviz_markers()
        if self.enabled:
            #Get the omni's tip pose in the PR2's torso frame
            tip_omni, msg_frame = tfu.posestamped_as_matrix(msg)
            self.torso_lift_link_T_omni(tip_omni, msg_frame)
            #self.torso_T_omni(tip_omni, msg_frame)

            #Publish new arm pose
            ps = PoseStamped()
            ps.header.frame_id = '/torso_lift_link'
            ps.header.stamp = rospy.get_rostime()
            ps.pose.position.x = self.tip_tt[0]
            ps.pose.position.y = self.tip_tt[1]
            ps.pose.position.z = self.tip_tt[2]
            ps.pose.orientation.x = self.tip_tq[0]
            ps.pose.orientation.y = self.tip_tq[1]
            ps.pose.orientation.z = self.tip_tq[2]
            ps.pose.orientation.w = self.tip_tq[3]

            self.set_goal_pub.publish(ps)

            if self.zero_out_forces:
                wr = OmniFeedback()
                wr.force.x = 0
                wr.force.y = 0
                wr.force.z = 0
                self.omni_fb.publish(wr)
                self.zero_out_forces = False
        else:
            #this is a zero order hold publishing the last received values until the control loop is active again
            tip_tt = self.tip_tt
            tip_tq = self.tip_tq
            # BUG FIX: this branch constructed a PointStamped but then wrote
            # to ps.pose.orientation.*, which PointStamped does not have
            # (AttributeError at runtime); the goal topic carries PoseStamped
            # in the enabled branch, so publish a PoseStamped here too.
            ps = PoseStamped()
            ps.header.frame_id = '/torso_lift_link'
            ps.header.stamp = rospy.get_rostime()
            ps.pose.position.x = tip_tt[0]
            ps.pose.position.y = tip_tt[1]
            ps.pose.position.z = tip_tt[2]
            ps.pose.orientation.x = tip_tq[0]
            ps.pose.orientation.y = tip_tq[1]
            ps.pose.orientation.z = tip_tq[2]
            ps.pose.orientation.w = tip_tq[3]

            self.set_goal_pub.publish(ps)

            #this is to make the omni force well move if the arm has moved but the commanded
            #position of the arm has not changed
            tip_omni, msg_frame = tfu.posestamped_as_matrix(msg)
            m_o1 = tfu.transform(self.omni_name, msg_frame, self.tflistener) * tip_omni
            ee_point = np.matrix(tr.translation_from_matrix(m_o1)).T
            q = self.robot.get_joint_angles()
            pos, rot = self.robot.kinematics.FK(q, self.robot.kinematics.n_jts)
            # tip_torso = tfu.transform('/torso_lift_link', self.gripper_tip_frame, self.tflistener) \
            #                       * tfu.tf_as_matrix(([0.,0.,0.], tr.quaternion_from_euler(0,0,0)))

            tip_torso = hrl_tr.composeHomogeneousTransform(rot, pos)
            #tip_torso = tfu.tf_as_matrix(([pos, rot]))
            #def composeHomogeneousTransform(rot, disp):

            center_t, center_q = self.omni_T_torso(tip_torso)
            center_col_vec = np.matrix(center_t).T

            #Send force control info
            wr = OmniFeedback()
            # offsets (0, -.268, -.15) introduced by Hai in phantom driver
            # should be cleaned up at some point so that it is consistent with position returned by phantom -marc
            lock_pos = tr.translation_matrix(np.matrix([0,-.268,-.150]))*tfu.transform(self.omni_name+'_sensable', self.omni_name, self.tflistener)*np.row_stack((center_col_vec, np.matrix([1.])))

            wr.position.x = (lock_pos[0,0])*1000.0  #multiply by 1000 mm/m to get units phantom expects
            wr.position.y = (lock_pos[1,0])*1000.0
            wr.position.z = (lock_pos[2,0])*1000.0

            # wr.position.x = tip_tt[0]  #multiply by 1000 mm/m to get units phantom expects
            # wr.position.y = tip_tt[1]
            # wr.position.z = tip_tt[2]
            self.omni_fb.publish(wr)
    def _move_cartesian_cart(self, position, orientation, \
            stop_funcs=[], timeout = 3.0, settling_time = 0.5, frame='base_link'):
        """Move the gripper using the Cartesian controller, polling stop
        conditions and the arm's motion until done.

        position, orientation: tool-frame goal (np.matrix translation and
            quaternion) expressed in 'frame'.
        stop_funcs: list of (callable, name) pairs; if a callable returns
            True the arm is frozen and 'name' is returned.
        Returns the triggering stop function's name, or None when the arm
        reached the pose or simply stopped moving.
        NOTE(review): stop_funcs=[] is a mutable default argument; harmless
        here since the list is only iterated, never mutated.
        """
        # TODO: Test this function...
        # Transform the pose from 'frame' to 'base_link'
        self.tf_listener.waitForTransform('base_link', frame, rospy.Time(),
                                          rospy.Duration(10))
        frame_T_base = tfu.transform('base_link', frame, self.tf_listener)
        init_cart_pose = tfu.tf_as_matrix((position.A1.tolist(),
                                           orientation.A1.tolist()))
        base_cart_pose = frame_T_base*init_cart_pose

        # Get IK from tool frame to wrist frame for control input
        self.tf_listener.waitForTransform(self.ik_frame, self.tool_frame, rospy.Time(), rospy.Duration(10))
        toolframe_T_ikframe = tfu.transform(self.tool_frame, self.ik_frame, self.tf_listener)
        #base_cart_pose = tfu.tf_as_matrix((base_position.A1.tolist(), base_orientation.A1.tolist()))
        base_cart_pose = base_cart_pose * toolframe_T_ikframe
        base_position, base_orientation = tfu.matrix_as_tf(base_cart_pose)

        pose_stamped = cf.create_pose_stamped(base_position.tolist() + base_orientation.tolist())
        rg = self.reactive_gr
        rg.check_preempt()

        #send the goal to the Cartesian controllers
        #rospy.loginfo("sending goal to Cartesian controllers")
        (pos, rot) = cf.pose_stamped_to_lists(rg.cm.tf_listener, pose_stamped, 'base_link')
        rg.move_cartesian_step(pos+rot, timeout, settling_time)

        #watch the fingertip/palm sensors until the controllers are done and then some
        start_time = rospy.get_rostime()
        done_time = None
        #stopped = False
        stop_trigger = None
        #print 'enterning loop'
        stop_detector = ArmStoppedDetector()
        # Poll until a stop function fires, the pose is reached, or the arm
        # stops making progress (the timeout/settling checks are commented
        # out below, so those are the only exits).
        while(1):

            rg.check_preempt()
            if len(stop_funcs) > 0:
                for f, name in stop_funcs:
                    if f():
                        rg.cm.switch_to_joint_mode()
                        rg.cm.freeze_arm()
                        #stopped = True
                        stop_trigger = name
                        rospy.loginfo('"%s" requested that motion should be stopped.' % (name))
                        break
                if stop_trigger != None:
                    break

            #if stop_func != None and stop_func():
            #    rg.cm.switch_to_joint_mode()
            #    rg.cm.freeze_arm()
            #    stopped = True
            #    break

            #check if we're actually there
            if rg.cm.check_cartesian_near_pose(pose_stamped, .0025, .05):
                rospy.loginfo("_move_cartesian_cart: reached pose")
                #stop_trigger = 'at_pose'
                break

            stop_detector.record_diff(self.arm_obj.pose_cartesian())
            if stop_detector.is_stopped():
                rospy.loginfo("_move_cartesian_cart: arm stopped")
                #stop_trigger = 'stopped'
                break

            # if rg.cm.check_cartesian_really_done(pose_stamped, .0025, .05):
            #     #rospy.loginfo("actually got there")
            #     break
            #
            # #check if the controllers think we're done
            # if not done_time and rg.cm.check_cartesian_done():
            #     #rospy.loginfo("check_cartesian_done returned 1")
            #     done_time = rospy.get_rostime()

            # #done settling
            # if done_time and rospy.get_rostime() - done_time > rospy.Duration(settling_time):
            #     rospy.loginfo("done settling")
            #     break

            #timed out
            #if timeout != 0. and rospy.get_rostime() - start_time > rospy.Duration(timeout):
            #    rospy.loginfo("_move_cartesian_cart: timed out")
            #    break

        #if stop_trigger == 'pressure_safety' or stop_trigger == 'self_collision':
        if stop_trigger == 'pressure_safety':
            print 'ROBOT SAFETY ERROR'
            #raise RobotSafetyError(stop_trigger)
        #name = ut.formatted_time() + '_stop_detector.pkl'
        #print 'saved', name
        #ut.save_pickle(stop_detector, name)
        return stop_trigger
Exemple #50
0
    def _move_cartesian_ik(self, position, orientation, \
            stop_funcs=[], timeout = 30., settling_time = 0.25, \
            frame='base_link', vel=.15):
        """Move the gripper with the IK-based Cartesian trajectory controller.

        position, orientation: tool-frame goal (np.matrix translation and
            quaternion) expressed in 'frame'.
        stop_funcs: list of (callable, name) pairs polled while moving; a
            True return halts the trajectory and 'name' is returned.
        timeout: seconds before giving up (0. disables the check).
        settling_time: seconds to wait after the active goal finishes.
        Returns the controller's error string if the goal was rejected, the
        triggering stop function's name, or None on normal completion.
        NOTE(review): stop_funcs=[] is a mutable default argument; harmless
        here since the list is only iterated, never mutated.
        """
        #pdb.set_trace()
        # Convert the tool-frame goal into the IK (wrist) frame the
        # controller expects.
        self.tf_listener.waitForTransform(self.ik_frame, self.tool_frame,
                                          rospy.Time(), rospy.Duration(10))
        toolframe_T_ikframe = tfu.transform(self.tool_frame, self.ik_frame,
                                            self.tf_listener)
        cart_pose = tfu.tf_as_matrix(
            (position.A1.tolist(), orientation.A1.tolist()))
        cart_pose = cart_pose * toolframe_T_ikframe
        position, orientation = tfu.matrix_as_tf(cart_pose)

        goal_pose_ps = cf.create_pose_stamped(
            position.tolist() + orientation.tolist(), frame)
        r = self.reactive_gr.cm.move_cartesian_ik(goal_pose_ps, blocking=0, step_size=.005, \
                pos_thres=.02, rot_thres=.1, timeout=rospy.Duration(timeout),
                settling_time=rospy.Duration(settling_time), vel=vel)
        if not (r == 'sent goal' or r == 'success'):
            return r

        # Poll stop conditions until a stop function fires, the motion times
        # out, or the trajectory finishes and settles.
        stop_trigger = None
        done_time = None
        start_time = rospy.get_rostime()
        # Idiom fix: 'is None' instead of '== None'.
        while stop_trigger is None:
            for f, name in stop_funcs:
                if f():
                    self.arm_obj.stop_trajectory_execution()
                    stop_trigger = name
                    rospy.loginfo(
                        '"%s" requested that motion should be stopped.' %
                        (name))
                    break

            if timeout != 0. and rospy.get_rostime(
            ) - start_time > rospy.Duration(timeout):
                rospy.loginfo("_move_cartesian_ik: motion timed out")
                break

            if (not done_time) and (not self.arm_obj.has_active_goal()):
                #rospy.loginfo("check_cartesian_done returned 1")
                done_time = rospy.get_rostime()

            if done_time and rospy.get_rostime() - done_time > rospy.Duration(
                    settling_time):
                rospy.loginfo("_move_cartesian_ik: done settling")
                break

        if stop_trigger == 'pressure_safety':
            print 'ROBOT SAFETY ERROR'
            #raise RobotSafetyError(stop_trigger)
        return stop_trigger
Exemple #51
0
 def frame_loc(from_frame):
     """Return from_frame's origin pose as (translation, quaternion)
     expressed in self.base_frame."""
     base_T_from = tfu.transform(self.base_frame, from_frame, self.tflistener)
     origin = tfu.tf_as_matrix(([0., 0., 0., 1.], tr.quaternion_from_euler(0,0,0)))
     return tfu.matrix_as_tf(base_T_from * origin)
Exemple #52
0
 def pose_cartesian(self, frame='base_link'):
     """Return the gripper tool frame's pose as a 4x4 matrix in 'frame'."""
     tool_frame = '%s_gripper_tool_frame' % self.arm
     return tfu.transform(frame, tool_frame, self.tf_listener)
Exemple #53
0
def process_bag(full_bag_name,
                prosilica_image_file,
                model_image_file,
                experiment_start_condition_pkl,
                arm_used='left'):
    """Replay a recorded demonstration bag and distill it into a pickle.

    Plays `full_bag_name` back in a child process while listening for tf,
    extracts object-localization features from the experiment's starting
    conditions, segments the recorded messages into movement states based
    on gripper pressure contacts, runs FK to express gripper-tip poses in
    both the base and object frames, and saves everything to
    '<bag_path>/<bag_name>_processed.pkl'.

    full_bag_name -- path to the .bag file to process.
    prosilica_image_file -- high-resolution camera image of the scene.
    model_image_file -- model image used for localization.
    experiment_start_condition_pkl -- pickle holding the start conditions.
    arm_used -- 'left' or 'right'; selects which gripper's pressure sensor
                drives the contact segmentation. Raises RuntimeError for
                any other value.
    """
    bag_path, bag_name_ext = os.path.split(full_bag_name)
    filename, ext = os.path.splitext(bag_name_ext)

    ###############################################################################
    # Playback the bag
    # Run playback in a separate process so this process can consume the
    # republished topics (tf, joint states, pressure) live.
    bag_playback = Process(target=playback_bag, args=(full_bag_name, ))
    bag_playback.start()

    ###############################################################################
    ## Listen for transforms using rosbag
    # NOTE(review): node name contains a typo ('proceessor'); left as-is
    # because it is runtime-visible on the ROS graph.
    rospy.init_node('bag_proceessor')

    tl = tf.TransformListener()

    print 'waiting for transform'
    tl.waitForTransform('map', 'base_footprint', rospy.Time(),
                        rospy.Duration(20))
    # Extract the starting location map_T_bf
    p_base = tfu.transform('map', 'base_footprint', tl) \
            * tfu.tf_as_matrix(([0., 0., 0., 1.], tr.quaternion_from_euler(0,0,0)))
    t, r = tfu.matrix_as_tf(p_base)
    pose_base = (t, r)
    print 'done with tf'

    ##########################################################
    ## Find contact locations
    start_conditions = ut.load_pickle(experiment_start_condition_pkl)
    start_conditions['highdef_image'] = prosilica_image_file
    start_conditions['model_image'] = model_image_file
    rospy.loginfo('extracting object localization features')
    start_conditions[
        'pose_parameters'] = extract_object_localization_features2(
            start_conditions, tl, arm_used, p_base)

    # Stop playback once tf and localization are captured; the remaining
    # data is read directly from the bag file below.
    # NOTE(review): terminate() is called twice with 1s pauses --
    # presumably to make sure the child really dies; confirm intent.
    if bag_playback.is_alive():
        rospy.loginfo('Terminating playback process')
        bag_playback.terminate()
        time.sleep(1)
        bag_playback.terminate()
        time.sleep(1)
        rospy.loginfo('Playback process terminated? %s' %
                      str(not bag_playback.is_alive()))

    ###############################################################################
    #Read bag using programmatic API
    pr2_kinematics = pr2k.PR2Kinematics(tl)
    converter = JointMsgConverter()
    rospy.loginfo('opening bag, reading state topics')
    topics_dict = ru.bag_sel(full_bag_name, [
        '/joint_states', '/l_cart/command_pose', '/r_cart/command_pose',
        '/torso_controller/state', '/pressure/l_gripper_motor',
        '/pressure/r_gripper_motor'
    ])

    ## Select the arm that has been moving, segment joint states based on contact states.
    if arm_used == 'left':
        pressures = topics_dict['/pressure/l_gripper_motor']
    elif arm_used == 'right':
        pressures = topics_dict['/pressure/r_gripper_motor']
    else:
        raise RuntimeError('arm_used invalid')

    ## find contact times
    rospy.loginfo('Finding contact times')
    left_f, right_f, ptimes = hpr2.pressure_state_to_mat(pressures['msg'])

    ## create segments based on contacts
    # TODO: make this accept more contact stages
    # 250 is the pressure threshold for declaring contact -- presumably
    # sensor units; verify against find_contact_times.
    contact_times = find_contact_times(left_f, right_f, ptimes, 250)
    # With at least two contacts the recording splits into three phases:
    # approach, in-contact, and retreat; otherwise use the whole recording
    # as a single segment.
    if len(contact_times) > 2:
        time_segments = [['start', contact_times[0]],
                         [contact_times[0], contact_times[-1]],
                         [contact_times[-1], 'end']]
    else:
        time_segments = [['start', 'end']]

    rospy.loginfo('Splitting messages based on contact times')
    ## split pressure readings based on contact times
    pressure_lseg = segment_msgs(
        time_segments, topics_dict['/pressure/l_gripper_motor']['msg'])
    pressure_rseg = segment_msgs(
        time_segments, topics_dict['/pressure/r_gripper_motor']['msg'])

    ## split cartesian commands based on contact times
    lcart_seg = segment_msgs(time_segments,
                             topics_dict['/l_cart/command_pose']['msg'])
    rcart_seg = segment_msgs(time_segments,
                             topics_dict['/r_cart/command_pose']['msg'])

    ## split joint states
    joint_states = topics_dict['/joint_states']['msg']
    print 'there are %d joint state messages in bag' % len(joint_states)

    j_segs = segment_msgs(time_segments, topics_dict['/joint_states']['msg'])
    jseg_dicts = [converter.msgs_to_dict(seg) for seg in j_segs]
    # find the first set of joint states
    j0_dict = jseg_dicts[0][0]

    ## perform FK
    rospy.loginfo('Performing FK to find tip locations')
    # bf_T_obj: object pose in the base_footprint frame, built from the
    # localization result; its inverse maps base-frame points into the
    # object frame.
    bf_T_obj = htf.composeHomogeneousTransform(
        start_conditions['pose_parameters']['frame_bf'],
        start_conditions['pose_parameters']['center_bf'])
    obj_T_bf = np.linalg.inv(bf_T_obj)
    # For every joint-state sample, compute both gripper tool-frame poses
    # via FK and store them in base_footprint and object coordinates.
    for jseg_dict in jseg_dicts:
        for d in jseg_dict:
            rtip_bf = pr2_kinematics.right.fk('base_footprint',
                                              'r_wrist_roll_link',
                                              'r_gripper_tool_frame',
                                              d['poses']['rarm'].A1.tolist())
            ltip_bf = pr2_kinematics.left.fk('base_footprint',
                                             'l_wrist_roll_link',
                                             'l_gripper_tool_frame',
                                             d['poses']['larm'].A1.tolist())
            rtip_obj = obj_T_bf * rtip_bf
            ltip_obj = obj_T_bf * ltip_bf

            d['rtip_obj'] = tfu.matrix_as_tf(rtip_obj)
            d['ltip_obj'] = tfu.matrix_as_tf(ltip_obj)

            d['rtip_bf'] = tfu.matrix_as_tf(rtip_bf)
            d['ltip_bf'] = tfu.matrix_as_tf(ltip_bf)

    ###############################################################################
    # make movement state dictionaries, one for each state
    movement_states = []
    for i, seg in enumerate(time_segments):
        name = "state_%d" % i
        # Use the earliest timestamp across all message streams as the
        # state's start time, since the streams are not synchronized.
        start_times = [
            lcart_seg[i][0].header.stamp.to_time(),
            rcart_seg[i][0].header.stamp.to_time(), jseg_dicts[i][0]['time'],
            pressure_lseg[i][0].header.stamp.to_time(),
            pressure_rseg[i][0].header.stamp.to_time()
        ]

        sdict = {
            'name':
            name,
            'start_time':
            np.min(start_times),
            'cartesian': [[ru.ros_to_dict(ps) for ps in lcart_seg[i]],
                          [ru.ros_to_dict(ps) for ps in rcart_seg[i]]],
            'joint_states':
            jseg_dicts[i]
            #'pressure': [pressure_lseg[i], pressure_rseg[i]]
        }

        movement_states.append(sdict)

    # store in a dict
    data = {
        'start_conditions':
        start_conditions,  # ['camera_info', 'map_T_bf', 'pro_T_bf', 'points' (in base_frame), 
        # 'highdef_image', 'model_image',
        ## 'pose_parameters'
        ## 'descriptors'
        ## 'directions' (wrt to cannonical orientation)
        ## 'closest_feature'
        'base_pose': pose_base,
        'robot_pose': j0_dict,
        'arm': arm_used,
        'movement_states': movement_states
    }

    # save dicts to pickles
    processed_bag_name = '%s_processed.pkl' % os.path.join(bag_path, filename)
    rospy.loginfo('saving to %s' % processed_bag_name)
    ut.save_pickle(data, processed_bag_name)
    # Reap the (already terminated) playback child before returning.
    bag_playback.join()
    rospy.loginfo('finished!')
Exemple #54
0
 def get_pose(self):
     # Robot base pose in the map frame, returned as a
     # (translation, rotation-quaternion) tf tuple.
     map_T_bf = tfu.transform('map', 'base_footprint', self.tflistener)
     origin = tfu.tf_as_matrix(([0., 0., 0., 1.],
                                tr.quaternion_from_euler(0, 0, 0)))
     return tfu.matrix_as_tf(map_T_bf * origin)
Exemple #55
0
 def frame_loc(from_frame):
     # Pose of `from_frame` relative to base_footprint, returned as a
     # (translation, rotation-quaternion) tf tuple. `self` comes from the
     # enclosing scope.
     bf_T_frame = tfu.transform('base_footprint', from_frame, self.tflistener)
     origin = tfu.tf_as_matrix(([0., 0., 0., 1.],
                                tr.quaternion_from_euler(0, 0, 0)))
     return tfu.matrix_as_tf(bf_T_frame * origin)