Example #1
def script():
    rospy.init_node('forward_kin')
    tflistener = tf.TransformListener()
    print 'waiting for transform'
    tflistener.waitForTransform('r_gripper_tool_frame', 'r_wrist_roll_link', rospy.Time(), rospy.Duration(10))
    print 'waiting for services'
    rospy.wait_for_service('pr2_right_arm_kinematics/get_fk_solver_info')
    rospy.wait_for_service('pr2_right_arm_kinematics/get_fk')
    print 'done init'
    
    r_fk_info = rospy.ServiceProxy('pr2_right_arm_kinematics/get_fk_solver_info', ks.GetKinematicSolverInfo )
    r_fk      = rospy.ServiceProxy('pr2_right_arm_kinematics/get_fk',             ks.GetPositionFK)
    
    resp = r_fk_info()
    print 'get_fk_solver_info returned', resp.kinematic_solver_info.joint_names
    print 'get_fk_solver_info returned', resp.kinematic_solver_info.limits
    print 'get_fk_solver_info returned', resp.kinematic_solver_info.link_names
    
    fk_req = ks.GetPositionFKRequest()
    fk_req.header.frame_id = 'torso_lift_link'
    fk_req.fk_link_names = ['r_wrist_roll_link']
    fk_req.robot_state.joint_state.name = resp.kinematic_solver_info.joint_names
    fk_req.robot_state.joint_state.position = [.5 for i in range(len(resp.kinematic_solver_info.joint_names))]
    fk_resp = r_fk(fk_req)
    
    rtip_T_wr = tfu.transform('r_gripper_tool_frame', 'r_wrist_roll_link', tflistener)
    right_wr = tfu.pose_as_matrix(fk_resp.pose_stamped[0].pose)
    rtip_pose = rtip_T_wr * right_wr
    print tfu.matrix_as_tf(rtip_pose)
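Note: every example on this page leans on a small tfu helper module (tf utilities from the hrl_lib package) whose source is not shown here. Below is a minimal sketch of the three conversions used throughout, assuming they are thin wrappers around tf.transformations; the semantics are inferred from how the snippets use them, not confirmed against the original module.

import numpy as np
import rospy
import tf.transformations as tr

def tf_as_matrix(tf_pose):
    # (translation, quaternion) pair -> 4x4 homogeneous matrix
    t, q = tf_pose
    return np.matrix(tr.translation_matrix(t)) * np.matrix(tr.quaternion_matrix(q))

def matrix_as_tf(mat):
    # 4x4 homogeneous matrix -> (translation, quaternion) pair
    return tr.translation_from_matrix(mat), tr.quaternion_from_matrix(mat)

def transform(to_frame, from_frame, listener):
    # Look up the from_frame -> to_frame transform as a homogeneous matrix,
    # so that p_in_to = transform(to_frame, from_frame, listener) * p_in_from.
    t, q = listener.lookupTransform(to_frame, from_frame, rospy.Time(0))
    return tf_as_matrix((t, q))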
Example #2
def kin_class():
    tflistener = tf.TransformListener()
    right = pr2k.PR2ArmKinematics('right', tflistener)
    rtip_pose = right.fk('torso_lift_link', 'r_wrist_roll_link', 'r_gripper_tool_frame',  [.5 for i in range(len(right.joint_names))])
    print tfu.matrix_as_tf(rtip_pose)

    left = pr2k.PR2ArmKinematics('left', tflistener)
    ltip_pose = left.fk('torso_lift_link', 'l_wrist_roll_link', 'l_gripper_tool_frame',  [.5 for i in range(len(left.joint_names))])
    print tfu.matrix_as_tf(ltip_pose)
Example #3
def kin_class():
    tflistener = tf.TransformListener()
    right = pr2k.PR2ArmKinematics('right', tflistener)
    rtip_pose = right.fk('torso_lift_link', 'r_wrist_roll_link',
                         'r_gripper_tool_frame',
                         [.5 for i in range(len(right.joint_names))])
    print tfu.matrix_as_tf(rtip_pose)

    left = pr2k.PR2ArmKinematics('left', tflistener)
    ltip_pose = left.fk('torso_lift_link', 'l_wrist_roll_link',
                        'l_gripper_tool_frame',
                        [.5 for i in range(len(left.joint_names))])
    print tfu.matrix_as_tf(ltip_pose)
Example #4
    def transform_point(self, point_stamped):
        point_head = point_stamped.point
        #Transform into base link
        base_T_head = tfu.transform(self.target_frame, point_stamped.header.frame_id, self.tf_listener)
        point_mat_head = tfu.translation_matrix([point_head.x, point_head.y, point_head.z])
        point_mat_base = base_T_head * point_mat_head
        t_base, _ = tfu.matrix_as_tf(point_mat_base)
        return np.matrix(t_base).T
Example #5
    def record_diff(self, loc_mat):
        cur_t = time.time() - self.start_time
        if self.last_t == None or (cur_t - self.last_t) > (1./self.hz):
            pos, rot = tfu.matrix_as_tf(loc_mat)
            if self.last_pos != None:
                self.pos_diff.append(np.linalg.norm(np.matrix(pos) - np.matrix(self.last_pos[0])))

                lp = np.array(self.last_pos[1]) / np.linalg.norm(self.last_pos[1])
                r  = np.array(rot) / np.linalg.norm(rot)
                #pdb.set_trace()
                self.rot_diff.append(np.linalg.norm(lp - r))
                self.times.append(cur_t)

                sidx = len(self.pos_diff) - self.n_step
                self.pos_diff = self.pos_diff[sidx:]
                self.rot_diff = self.rot_diff[sidx:]

            self.last_pos = tfu.matrix_as_tf(loc_mat)
            self.last_t = cur_t
Example #6
    def omni_T_torso(self, torso_mat):
        l0_mat = tfu.transform(self.omni_name + '_center', '/torso_lift_link',
                               self.tflistener) * torso_mat
        l0_t = (np.array(tr.translation_from_matrix(l0_mat)) /
                np.array(self.scale_omni_l0)).tolist()
        l0_q = tr.quaternion_from_matrix(l0_mat)
        omni_pt_mat = (tfu.transform(self.omni_name, self.omni_name + '_center',
                                     self.tflistener)
                       * tfu.tf_as_matrix((l0_t, l0_q)))
        return tfu.matrix_as_tf(omni_pt_mat)
Example #7
    def project(self, p, tf_listener=None, from_frame=None):
        if not(from_frame == None or from_frame == self.frame):
            p_cam = tfu.transform(self.frame, from_frame, tf_listener) \
                           * tfu.tf_as_matrix((p.A1.tolist(), tr.quaternion_from_euler(0,0,0)))
            trans, q = tfu.matrix_as_tf(p_cam)
            p = np.matrix(trans).T

        p = np.row_stack((p, np.matrix([1.])))
        pp = self.P * p
        pp = pp / pp[2,:]
        return pp[0:2,:]
Example #8
    def project(self, p, tf_listener=None, from_frame=None):
        if not(from_frame == None or from_frame == self.frame):
            p_cam = tfu.transform(self.frame, from_frame, tf_listener) \
                           * tfu.tf_as_matrix((p.A1.tolist(), tr.quaternion_from_euler(0,0,0)))
            trans, q = tfu.matrix_as_tf(p_cam)
            p = np.matrix(trans).T

        p = np.row_stack((p, np.matrix([1.])))
        pp = self.P * p
        pp = pp / pp[2,0]
        return pp[0:2,0]
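The two project variants above differ only in batching: Example #7 projects a matrix of column-vector points (normalizing with pp[2,:]), while Example #8 handles a single point (pp[2,0]). The underlying pinhole math as a standalone sketch; the 3x4 camera matrix P below is illustrative, not taken from a real CameraInfo message.

import numpy as np

P = np.matrix([[525.,   0., 319.5, 0.],
               [  0., 525., 239.5, 0.],
               [  0.,   0.,    1., 0.]])  # made-up intrinsics, K padded to 3x4
p = np.matrix([1., 0., 2.]).T             # point in the camera frame (meters)
p_h = np.row_stack((p, np.matrix([1.])))  # homogeneous coordinates
pp = P * p_h
pp = pp / pp[2, 0]                        # divide out depth
print pp[0:2, 0]                          # pixel coordinates (u, v)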
Example #9
    def print_pose(self):
        try:
            tfu.wait('map', 'base_footprint', self.tflistener, 1.)
            m_T_b = tfu.transform('map', 'base_footprint', self.tflistener)
            t, q = tfu.matrix_as_tf(m_T_b)

            print m_T_b
            print 't', t, 'q', q

        except Exception, e:
            print e
Example #10
    def _move_cartesian_ik(self, position, orientation, \
            stop_funcs=[], timeout = 30., settling_time = 0.25, \
            frame='base_link', vel=.15):
        #pdb.set_trace()
        #self.arm_obj.set_cart_pose_ik(cart_pose, total_time=motion_length, frame=frame, block=False)
        #cart_pose = tfu.tf_as_matrix((position.A1.tolist(), orientation.A1.tolist()))

        self.tf_listener.waitForTransform(self.ik_frame, self.tool_frame, rospy.Time(), rospy.Duration(10))
        toolframe_T_ikframe = tfu.transform(self.tool_frame, self.ik_frame, self.tf_listener)
        cart_pose = tfu.tf_as_matrix((position.A1.tolist(), orientation.A1.tolist()))
        cart_pose = cart_pose * toolframe_T_ikframe
        position, orientation = tfu.matrix_as_tf(cart_pose)

        #goal_pose_ps = create_pose_stamped(position.A1.tolist() + orientation.A1.tolist(), frame)
        goal_pose_ps = cf.create_pose_stamped(position.tolist() + orientation.tolist(), frame)
        r = self.reactive_gr.cm.move_cartesian_ik(goal_pose_ps, blocking=0, step_size=.005, \
                pos_thres=.02, rot_thres=.1, timeout=rospy.Duration(timeout),
                settling_time=rospy.Duration(settling_time), vel=vel)
        if not (r == 'sent goal' or r == 'success'):
            return r

        #move_cartesian_ik(self, goal_pose, collision_aware = 0, blocking = 1,                            
        #                  step_size = .005, pos_thres = .02, rot_thres = .1,                                
        #                  timeout = rospy.Duration(30.),                                                    
        #                  settling_time = rospy.Duration(0.25), vel = .15):   

        stop_trigger = None
        done_time = None
        start_time = rospy.get_rostime()
        while stop_trigger == None:
            for f, name in stop_funcs:
                if f():
                    self.arm_obj.stop_trajectory_execution()
                    stop_trigger = name
                    rospy.loginfo('"%s" requested that motion should be stopped.' % (name))
                    break

            if timeout != 0. and rospy.get_rostime() - start_time > rospy.Duration(timeout):
                rospy.loginfo("_move_cartesian_ik: motion timed out")
                break

            if (not done_time) and (not self.arm_obj.has_active_goal()):
                #rospy.loginfo("check_cartesian_done returned 1")
                done_time = rospy.get_rostime()

            if done_time and rospy.get_rostime() - done_time > rospy.Duration(settling_time):
                rospy.loginfo("_move_cartesian_ik: done settling")
                break

        if stop_trigger == 'pressure_safety':
            print 'ROBOT SAFETY ERROR'
            #raise RobotSafetyError(stop_trigger)
        return stop_trigger
Example #11
    def record_diff(self, loc_mat):
        cur_t = time.time() - self.start_time
        if self.last_t == None or (cur_t - self.last_t) > (1. / self.hz):
            pos, rot = tfu.matrix_as_tf(loc_mat)
            if self.last_pos != None:
                self.pos_diff.append(
                    np.linalg.norm(
                        np.matrix(pos) - np.matrix(self.last_pos[0])))

                lp = np.array(self.last_pos[1]) / np.linalg.norm(
                    self.last_pos[1])
                r = np.array(rot) / np.linalg.norm(rot)
                #pdb.set_trace()
                self.rot_diff.append(np.linalg.norm(lp - r))
                self.times.append(cur_t)

                sidx = len(self.pos_diff) - self.n_step
                self.pos_diff = self.pos_diff[sidx:]
                self.rot_diff = self.rot_diff[sidx:]

            self.last_pos = tfu.matrix_as_tf(loc_mat)
            self.last_t = cur_t
Example #12
def script():
    rospy.init_node('forward_kin')
    tflistener = tf.TransformListener()
    print 'waiting for transform'
    tflistener.waitForTransform('r_gripper_tool_frame', 'r_wrist_roll_link',
                                rospy.Time(), rospy.Duration(10))
    print 'waiting for services'
    rospy.wait_for_service('pr2_right_arm_kinematics/get_fk_solver_info')
    rospy.wait_for_service('pr2_right_arm_kinematics/get_fk')
    print 'done init'

    r_fk_info = rospy.ServiceProxy(
        'pr2_right_arm_kinematics/get_fk_solver_info',
        ks.GetKinematicSolverInfo)
    r_fk = rospy.ServiceProxy('pr2_right_arm_kinematics/get_fk',
                              ks.GetPositionFK)

    resp = r_fk_info()
    print 'get_fk_solver_info returned', resp.kinematic_solver_info.joint_names
    print 'get_fk_solver_info returned', resp.kinematic_solver_info.limits
    print 'get_fk_solver_info returned', resp.kinematic_solver_info.link_names

    fk_req = ks.GetPositionFKRequest()
    fk_req.header.frame_id = 'torso_lift_link'
    fk_req.fk_link_names = ['r_wrist_roll_link']
    fk_req.robot_state.joint_state.name = resp.kinematic_solver_info.joint_names
    fk_req.robot_state.joint_state.position = [
        .5 for i in range(len(resp.kinematic_solver_info.joint_names))
    ]
    fk_resp = r_fk(fk_req)

    rtip_T_wr = tfu.transform('r_gripper_tool_frame', 'r_wrist_roll_link',
                              tflistener)
    right_wr = tfu.pose_as_matrix(fk_resp.pose_stamped[0].pose)
    rtip_pose = rtip_T_wr * right_wr
    print tfu.matrix_as_tf(rtip_pose)
Example #13
    def transform_point(self, point_stamped):
        point_head = point_stamped.point

        #Transform into base link
        target_link = '/base_link'
        base_T_head = tfu.transform(target_link, point_stamped.header.frame_id, self.tflistener)
        point_mat_head = tfu.translation_matrix([point_head.x, point_head.y, point_head.z])
        point_mat_base = base_T_head * point_mat_head
        t_base, o_base = tfu.matrix_as_tf(point_mat_base)

        #Calculate angle robot should face
        angle = math.atan2(t_base[1], t_base[0])
        q_base = tf.transformations.quaternion_from_euler(0, 0, angle)
        return (t_base, q_base, target_link)
Example #14
    def transform_point(self, point_stamped):
        point_head = point_stamped.point

        #Transform into base link
        target_link = '/base_link'
        base_T_head = tfu.transform(target_link, point_stamped.header.frame_id, self.tflistener)
        point_mat_head = tfu.translation_matrix([point_head.x, point_head.y, point_head.z])
        point_mat_base = base_T_head * point_mat_head
        t_base, o_base = tfu.matrix_as_tf(point_mat_base)

        #Calculate angle robot should face
        angle = math.atan2(t_base[1], t_base[0])
        q = tf.transformations.quaternion_from_euler(0, 0, angle)
        return (t_base, q, target_link)
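A hypothetical call to the method above; the instance name nav and the source frame are illustrative, and transform_point only reads header.frame_id and point from the message.

from geometry_msgs.msg import PointStamped

ps = PointStamped()
ps.header.frame_id = 'head_pan_link'  # illustrative source frame
ps.point.x, ps.point.y, ps.point.z = 1.0, 0.5, 1.2
t_base, q, target_link = nav.transform_point(ps)  # nav: assumed class instance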
Example #15
    def ik(self, cart_pose, frame_of_pose='torso_lift_link', seed=None):
        #if frame_of_pose == self.tool_frame or frame_of_pose == None:

        self.tflistener.waitForTransform(self.ik_frame, self.tool_frame, rospy.Time(), rospy.Duration(10))
        #wr_T_toolframe = tfu.transform(sol_link, self.tool_frame, self.tflistener)
        #solframe_T_wr * wr_T_toolframe
        #print 'undoing'
        toolframe_T_ikframe = tfu.transform(self.tool_frame, self.ik_frame, self.tflistener)
        cart_pose = cart_pose * toolframe_T_ikframe 
        #frame_of_pose = self.tool_frame

        trans, rot = tfu.matrix_as_tf(cart_pose)

        ik_req = ks.GetPositionIKRequest()
        ik_req.timeout = rospy.Duration(5.0)
        ik_req.ik_request.ik_link_name = self.ik_frame

        #set pose
        ik_req.ik_request.pose_stamped.header.frame_id = frame_of_pose
        ik_req.ik_request.pose_stamped.pose.position.x = trans[0]
        ik_req.ik_request.pose_stamped.pose.position.y = trans[1]
        ik_req.ik_request.pose_stamped.pose.position.z = trans[2]

        ik_req.ik_request.pose_stamped.pose.orientation.x = rot[0]
        ik_req.ik_request.pose_stamped.pose.orientation.y = rot[1]
        ik_req.ik_request.pose_stamped.pose.orientation.z = rot[2]
        ik_req.ik_request.pose_stamped.pose.orientation.w = rot[3]

        #seed solver
        ik_req.ik_request.ik_seed_state.joint_state.name = self.ik_joint_names
        if seed == None:
            p = []
            for i in range(len(self.ik_joint_names)):
                minp = self.ik_info_resp.kinematic_solver_info.limits[i].min_position
                maxp = self.ik_info_resp.kinematic_solver_info.limits[i].max_position
                p.append((minp + maxp) / 2.0)
            ik_req.ik_request.ik_seed_state.joint_state.position = p
        else:
            if seed.__class__ == np.matrix:
                seed = seed.T.A1.tolist()
            ik_req.ik_request.ik_seed_state.joint_state.position = seed

        response = self._ik(ik_req)
        if response.error_code.val == response.error_code.SUCCESS:
            #print 'success'
            return np.matrix(response.solution.joint_state.position).T
        else:
            #print 'fail', response.__class__, response
            print response
            return None
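A hypothetical call to ik() above, assuming arm is an instance of the surrounding class and tfu is the helper module sketched after Example #1; the target pose is arbitrary.

import numpy as np

cart_pose = tfu.tf_as_matrix(([0.6, -0.2, 0.0],   # tool position in torso_lift_link
                              [0., 0., 0., 1.]))  # identity orientation quaternion
q_soln = arm.ik(cart_pose, frame_of_pose='torso_lift_link')
if q_soln is not None:
    print 'joint solution:', q_soln.T.A1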
Example #16
    def project(self, p, tf_listener=None, from_frame=None):
        if not self.has_msg:
            raise RuntimeError('Has not been initialized with a CameraInfo message (call camera_info).')
        if not(from_frame == None or from_frame == self.frame):
            p_cam = tfu.transform(self.frame, from_frame, tf_listener) \
                           * tfu.tf_as_matrix((p.A1.tolist(), tr.quaternion_from_euler(0,0,0)))
            trans, q = tfu.matrix_as_tf(p_cam)
            p = np.matrix(trans).T

        hrow = np.matrix(np.zeros((1,p.shape[1])))
        p = np.row_stack((p, hrow))
        pp = self.P * p
        pp = pp / pp[2,:]
        return pp[0:2,:]
Example #17
    def project(self, p, tf_listener=None, from_frame=None):
        if not self.has_msg:
            raise RuntimeError(
                'Has not been initialized with a CameraInfo message (call camera_info).'
            )
        if not (from_frame == None or from_frame == self.frame):
            p_cam = tfu.transform(self.frame, from_frame, tf_listener) \
                           * tfu.tf_as_matrix((p.A1.tolist(), tr.quaternion_from_euler(0,0,0)))
            trans, q = tfu.matrix_as_tf(p_cam)
            p = np.matrix(trans).T

        hrow = np.matrix(np.zeros((1, p.shape[1])))
        p = np.row_stack((p, hrow))
        pp = self.P * p
        pp = pp / pp[2, :]
        return pp[0:2, :]
Example #18
    def go_angle(self, target_base, tolerance=math.radians(5.), max_ang_vel=math.radians(20.), func=None):
        self.tl.waitForTransform('odom_combined', 'base_footprint', rospy.Time(), rospy.Duration(10))
        rate = rospy.Rate(100)
        k = math.radians(80)

        od_T_bf = tfu.transform('odom_combined', 'base_footprint', self.tl)
        target_odom_mat = od_T_bf * tfu.tf_as_matrix( ([0, 0, 0.], tr.quaternion_from_euler(0, 0, target_base)) )
        target_odom = tr.euler_from_quaternion(tfu.matrix_as_tf(target_odom_mat)[1])[2]
        #target_odom = (od_T_bf * np.row_stack([target_base, np.matrix([0,1.]).T]))[0:2,0]

        #current_ang_odom = tr.euler_from_matrix(tfu.transform('base_footprint', 'odom_combined', self.tl)[0:3, 0:3], 'sxyz')[2]
        #target_odom = current_ang_odom + delta_ang

        robot_odom = tfu.transform('odom_combined', 'base_footprint', self.tl)
        current_ang_odom = tr.euler_from_matrix(robot_odom[0:3, 0:3], 'sxyz')[2]
        diff = ut.standard_rad(current_ang_odom - target_odom)
        rospy.loginfo('go_angle: target %.2f' %  np.degrees(target_odom))
        rospy.loginfo('go_angle: current %.2f' %  np.degrees(current_ang_odom))
        rospy.loginfo('go_angle: diff %.2f' %  np.degrees(diff))

        while not rospy.is_shutdown():
            robot_odom = tfu.transform('odom_combined', 'base_footprint', self.tl)
            current_ang_odom = tr.euler_from_matrix(robot_odom[0:3, 0:3], 'sxyz')[2]
            diff = ut.standard_rad(target_odom - current_ang_odom)
            rospy.loginfo('go_angle: current %.2f diff %.2f' %  (np.degrees(current_ang_odom), np.degrees(diff)))
            p = k * diff
            if func != None:
                func(diff)
            if np.abs(diff) < tolerance:
                break

            if self.go_ang_server.is_preempt_requested():
                self.go_ang_server.set_preempted()
                break

            tw = gm.Twist()
            vels = [p, np.sign(p) * max_ang_vel]
            tw.angular.z = vels[np.argmin(np.abs(vels))]
            #rospy.loginfo('diff %.3f vel %.3f' % (math.degrees(diff), math.degrees(tw.angular.z)))
            self.tw_pub.publish(tw)
            #rospy.loginfo('commanded %s' % str(tw))
            rate.sleep()
        rospy.loginfo('go_angle: finished %.3f' % math.degrees(diff))
        return diff
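go_angle relies on ut.standard_rad to wrap the heading error, but that helper is not shown on this page. A plausible implementation (the actual hrl_lib version may differ) that maps any angle to its equivalent in [-pi, pi]:

import math

def standard_rad(t):
    # Wrap an arbitrary angle in radians onto [-pi, pi].
    if t > 0:
        return ((t + math.pi) % (math.pi * 2)) - math.pi
    else:
        return ((t - math.pi) % (math.pi * -2)) + math.pi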
Example #19
    def torso_lift_link_T_omni(self, tip_omni, msg_frame):
        center_T_6 = tfu.transform(self.omni_name+'_center', msg_frame, self.tflistener)
        tip_center = center_T_6*tip_omni
        tip_center_t = (np.array(tr.translation_from_matrix(tip_center))*np.array(self.scale_omni_l0)).tolist()
        tip_center_q = tr.quaternion_from_matrix(tip_center)

        # z_T_6 = tfu.transform(self.omni_name+'_link0', msg_frame, self.tflistener)
        # tip_0 = z_T_6*tip_omni
        # tip_0t = (np.array(tr.translation_from_matrix(tip_0))*np.array(self.scale_omni_l0)).tolist()
        # tip_0q = tr.quaternion_from_matrix(tip_0)

        # Transform into torso frame so we can bound arm workspace
        tll_T_0 = tfu.transform('/torso_lift_link', self.omni_name + '_center', self.tflistener)
        tip_torso_mat = tll_T_0 * tfu.tf_as_matrix([tip_center_t, tip_center_q])
        tip_tt, tip_tq = tfu.matrix_as_tf(tip_torso_mat)

        #don't need to bound, arm controller should do this
    #     tip_tt[0] = limit_range(tip_tt[0], self.X_BOUNDS[0], self.X_BOUNDS[1])
        self.tip_tt = tip_tt
        self.tip_tq = tip_tq
Example #20
    def follow_point_cb(self, point_stamped):
        point_head = point_stamped.point

        base_T_head = tfu.transform('/base_link', point_stamped.header.frame_id, self.tflistener)
        point_mat_head = tfu.translation_matrix([point_head.x, point_head.y, point_head.z])
        point_mat_base = base_T_head * point_mat_head
        t_base, o_base = tfu.matrix_as_tf(point_mat_base)
        x = t_base[0]
        y = t_base[1]
        angle = math.atan2(y, x)

        ps = PoseStamped()
        ps.header.frame_id = '/base_link'
        ps.pose.position.x = x
        ps.pose.position.y = y
        ps.pose.position.z = 0.

        # yaw about z so the robot faces the point (cf. Examples #13/#14)
        q = tf.transformations.quaternion_from_euler(0, 0, angle)
        ps.pose.orientation.x = q[0]
        ps.pose.orientation.y = q[1]
        ps.pose.orientation.z = q[2]
        ps.pose.orientation.w = q[3]
        self.move_pub.publish(ps)
Example #21
    def torso_lift_link_T_omni(self, tip_omni, msg_frame):
        center_T_6 = tfu.transform(self.omni_name + '_center', msg_frame,
                                   self.tflistener)
        tip_center = center_T_6 * tip_omni
        tip_center_t = (np.array(tr.translation_from_matrix(tip_center)) *
                        np.array(self.scale_omni_l0)).tolist()
        tip_center_q = tr.quaternion_from_matrix(tip_center)

        # z_T_6 = tfu.transform(self.omni_name+'_link0', msg_frame, self.tflistener)
        # tip_0 = z_T_6*tip_omni
        # tip_0t = (np.array(tr.translation_from_matrix(tip_0))*np.array(self.scale_omni_l0)).tolist()
        # tip_0q = tr.quaternion_from_matrix(tip_0)

        # Transform into torso frame so we can bound arm workspace
        tll_T_0 = tfu.transform('/torso_lift_link', self.omni_name + '_center',
                                self.tflistener)
        tip_torso_mat = tll_T_0 * tfu.tf_as_matrix(
            [tip_center_t, tip_center_q])
        tip_tt, tip_tq = tfu.matrix_as_tf(tip_torso_mat)

        #don't need to bound, arm controller should do this
        #     tip_tt[0] = limit_range(tip_tt[0], self.X_BOUNDS[0], self.X_BOUNDS[1])
        self.tip_tt = tip_tt
        self.tip_tq = tip_tq
Example #22
    def ik(self, cart_pose, frame_of_pose='torso_lift_link', seed=None):
        #if frame_of_pose == self.tool_frame or frame_of_pose == None:

        self.tflistener.waitForTransform(self.ik_frame, self.tool_frame,
                                         rospy.Time(), rospy.Duration(10))
        #wr_T_toolframe = tfu.transform(sol_link, self.tool_frame, self.tflistener)
        #solframe_T_wr * wr_T_toolframe
        #print 'undoing'
        toolframe_T_ikframe = tfu.transform(self.tool_frame, self.ik_frame,
                                            self.tflistener)
        cart_pose = cart_pose * toolframe_T_ikframe
        #frame_of_pose = self.tool_frame

        trans, rot = tfu.matrix_as_tf(cart_pose)

        ik_req = ks.GetPositionIKRequest()
        ik_req.timeout = rospy.Duration(5.0)
        ik_req.ik_request.ik_link_name = self.ik_frame

        #set pose
        ik_req.ik_request.pose_stamped.header.frame_id = frame_of_pose
        ik_req.ik_request.pose_stamped.pose.position.x = trans[0]
        ik_req.ik_request.pose_stamped.pose.position.y = trans[1]
        ik_req.ik_request.pose_stamped.pose.position.z = trans[2]

        ik_req.ik_request.pose_stamped.pose.orientation.x = rot[0]
        ik_req.ik_request.pose_stamped.pose.orientation.y = rot[1]
        ik_req.ik_request.pose_stamped.pose.orientation.z = rot[2]
        ik_req.ik_request.pose_stamped.pose.orientation.w = rot[3]

        #seed solver
        ik_req.ik_request.ik_seed_state.joint_state.name = self.ik_joint_names
        if seed == None:
            p = []
            for i in range(len(self.ik_joint_names)):
                minp = self.ik_info_resp.kinematic_solver_info.limits[i].min_position
                maxp = self.ik_info_resp.kinematic_solver_info.limits[i].max_position
                p.append((minp + maxp) / 2.0)
            ik_req.ik_request.ik_seed_state.joint_state.position = p
        else:
            if seed.__class__ == np.matrix:
                seed = seed.T.A1.tolist()
            ik_req.ik_request.ik_seed_state.joint_state.position = seed

        response = self._ik(ik_req)
        if response.error_code.val == response.error_code.SUCCESS:
            #print 'success'
            return np.matrix(response.solution.joint_state.position).T
        else:
            #print 'fail', response.__class__, response
            print response
            return None
Example #23
    def _move_cartesian_cart(self, position, orientation, \
            stop_funcs=[], timeout = 3.0, settling_time = 0.5, frame='base_link'):

        # TODO: Test this function...
        # Transform the pose from 'frame' to 'base_link'
        self.tf_listener.waitForTransform('base_link', frame, rospy.Time(),
                                          rospy.Duration(10))
        frame_T_base = tfu.transform('base_link', frame, self.tf_listener)
        init_cart_pose = tfu.tf_as_matrix(
            (position.A1.tolist(), orientation.A1.tolist()))
        base_cart_pose = frame_T_base * init_cart_pose

        # Get IK from tool frame to wrist frame for control input
        self.tf_listener.waitForTransform(self.ik_frame, self.tool_frame,
                                          rospy.Time(), rospy.Duration(10))
        toolframe_T_ikframe = tfu.transform(self.tool_frame, self.ik_frame,
                                            self.tf_listener)
        #base_cart_pose = tfu.tf_as_matrix((base_position.A1.tolist(), base_orientation.A1.tolist()))
        base_cart_pose = base_cart_pose * toolframe_T_ikframe
        base_position, base_orientation = tfu.matrix_as_tf(base_cart_pose)

        pose_stamped = cf.create_pose_stamped(base_position.tolist() +
                                              base_orientation.tolist())
        rg = self.reactive_gr
        rg.check_preempt()

        #send the goal to the Cartesian controllers
        #rospy.loginfo("sending goal to Cartesian controllers")
        (pos, rot) = cf.pose_stamped_to_lists(rg.cm.tf_listener, pose_stamped,
                                              'base_link')
        rg.move_cartesian_step(pos + rot, timeout, settling_time)

        #watch the fingertip/palm sensors until the controllers are done and then some
        start_time = rospy.get_rostime()
        done_time = None
        #stopped = False
        stop_trigger = None
        #print 'entering loop'
        stop_detector = ArmStoppedDetector()
        while (1):

            rg.check_preempt()
            if len(stop_funcs) > 0:
                for f, name in stop_funcs:
                    if f():
                        rg.cm.switch_to_joint_mode()
                        rg.cm.freeze_arm()
                        #stopped = True
                        stop_trigger = name
                        rospy.loginfo(
                            '"%s" requested that motion should be stopped.' %
                            (name))
                        break
                if stop_trigger != None:
                    break

            #if stop_func != None and stop_func():
            #    rg.cm.switch_to_joint_mode()
            #    rg.cm.freeze_arm()
            #    stopped = True
            #    break

            #check if we're actually there
            if rg.cm.check_cartesian_near_pose(pose_stamped, .0025, .05):
                rospy.loginfo("_move_cartesian_cart: reached pose")
                #stop_trigger = 'at_pose'
                break

            stop_detector.record_diff(self.arm_obj.pose_cartesian())
            if stop_detector.is_stopped():
                rospy.loginfo("_move_cartesian_cart: arm stopped")
                #stop_trigger = 'stopped'
                break

            # if rg.cm.check_cartesian_really_done(pose_stamped, .0025, .05):
            #     #rospy.loginfo("actually got there")
            #     break
            #
            # #check if the controllers think we're done
            # if not done_time and rg.cm.check_cartesian_done():
            #     #rospy.loginfo("check_cartesian_done returned 1")
            #     done_time = rospy.get_rostime()

            # #done settling
            # if done_time and rospy.get_rostime() - done_time > rospy.Duration(settling_time):
            #     rospy.loginfo("done settling")
            #     break

            #timed out
            #if timeout != 0. and rospy.get_rostime() - start_time > rospy.Duration(timeout):
            #    rospy.loginfo("_move_cartesian_cart: timed out")
            #    break

        #if stop_trigger == 'pressure_safety' or stop_trigger == 'self_collision':
        if stop_trigger == 'pressure_safety':
            print 'ROBOT SAFETY ERROR'
            #raise RobotSafetyError(stop_trigger)
        #name = ut.formatted_time() + '_stop_detector.pkl'
        #print 'saved', name
        #ut.save_pickle(stop_detector, name)
        return stop_trigger
Example #24
    def frame_loc(from_frame):
        p_base = tfu.transform('base_footprint', from_frame, self.tflistener) \
                       * tfu.tf_as_matrix(([0., 0., 0., 1.], tr.quaternion_from_euler(0,0,0)))
        return tfu.matrix_as_tf(p_base)
Example #25
def process_bag(full_bag_name,
                prosilica_image_file,
                model_image_file,
                experiment_start_condition_pkl,
                arm_used='left'):
    bag_path, bag_name_ext = os.path.split(full_bag_name)
    filename, ext = os.path.splitext(bag_name_ext)

    ###############################################################################
    # Playback the bag
    bag_playback = Process(target=playback_bag, args=(full_bag_name, ))
    bag_playback.start()

    ###############################################################################
    ## Listen for transforms using rosbag
    rospy.init_node('bag_processor')

    tl = tf.TransformListener()

    print 'waiting for transform'
    tl.waitForTransform('map', 'base_footprint', rospy.Time(),
                        rospy.Duration(20))
    # Extract the starting location map_T_bf
    p_base = tfu.transform('map', 'base_footprint', tl) \
            * tfu.tf_as_matrix(([0., 0., 0., 1.], tr.quaternion_from_euler(0,0,0)))
    t, r = tfu.matrix_as_tf(p_base)
    pose_base = (t, r)
    print 'done with tf'

    ##########################################################
    ## Find contact locations
    start_conditions = ut.load_pickle(experiment_start_condition_pkl)
    start_conditions['highdef_image'] = prosilica_image_file
    start_conditions['model_image'] = model_image_file
    rospy.loginfo('extracting object localization features')
    start_conditions['pose_parameters'] = extract_object_localization_features2(
        start_conditions, tl, arm_used, p_base)

    if bag_playback.is_alive():
        rospy.loginfo('Terminating playback process')
        bag_playback.terminate()
        time.sleep(1)
        bag_playback.terminate()
        time.sleep(1)
        rospy.loginfo('Playback process terminated? %s' %
                      str(not bag_playback.is_alive()))

    ###############################################################################
    #Read bag using programmatic API
    pr2_kinematics = pr2k.PR2Kinematics(tl)
    converter = JointMsgConverter()
    rospy.loginfo('opening bag, reading state topics')
    topics_dict = ru.bag_sel(full_bag_name, [
        '/joint_states', '/l_cart/command_pose', '/r_cart/command_pose',
        '/torso_controller/state', '/pressure/l_gripper_motor',
        '/pressure/r_gripper_motor'
    ])

    ## Select the arm that has been moving, segment joint states based on contact states.
    if arm_used == 'left':
        pressures = topics_dict['/pressure/l_gripper_motor']
    elif arm_used == 'right':
        pressures = topics_dict['/pressure/r_gripper_motor']
    else:
        raise RuntimeError('arm_used invalid')

    ## find contact times
    rospy.loginfo('Finding contact times')
    left_f, right_f, ptimes = hpr2.pressure_state_to_mat(pressures['msg'])

    ## create segments based on contacts
    # TODO: make this accept more contact stages
    contact_times = find_contact_times(left_f, right_f, ptimes, 250)
    if len(contact_times) > 2:
        time_segments = [['start', contact_times[0]],
                         [contact_times[0], contact_times[-1]],
                         [contact_times[-1], 'end']]
    else:
        time_segments = [['start', 'end']]

    rospy.loginfo('Splitting messages based on contact times')
    ## split pressure readings based on contact times
    pressure_lseg = segment_msgs(
        time_segments, topics_dict['/pressure/l_gripper_motor']['msg'])
    pressure_rseg = segment_msgs(
        time_segments, topics_dict['/pressure/r_gripper_motor']['msg'])

    ## split cartesian commands based on contact times
    lcart_seg = segment_msgs(time_segments,
                             topics_dict['/l_cart/command_pose']['msg'])
    rcart_seg = segment_msgs(time_segments,
                             topics_dict['/r_cart/command_pose']['msg'])

    ## split joint states
    joint_states = topics_dict['/joint_states']['msg']
    print 'there are %d joint state messages in bag' % len(joint_states)

    j_segs = segment_msgs(time_segments, topics_dict['/joint_states']['msg'])
    jseg_dicts = [converter.msgs_to_dict(seg) for seg in j_segs]
    # find the first set of joint states
    j0_dict = jseg_dicts[0][0]

    ## perform FK
    rospy.loginfo('Performing FK to find tip locations')
    bf_T_obj = htf.composeHomogeneousTransform(
        start_conditions['pose_parameters']['frame_bf'],
        start_conditions['pose_parameters']['center_bf'])
    obj_T_bf = np.linalg.inv(bf_T_obj)
    for jseg_dict in jseg_dicts:
        for d in jseg_dict:
            rtip_bf = pr2_kinematics.right.fk('base_footprint',
                                              'r_wrist_roll_link',
                                              'r_gripper_tool_frame',
                                              d['poses']['rarm'].A1.tolist())
            ltip_bf = pr2_kinematics.left.fk('base_footprint',
                                             'l_wrist_roll_link',
                                             'l_gripper_tool_frame',
                                             d['poses']['larm'].A1.tolist())
            rtip_obj = obj_T_bf * rtip_bf
            ltip_obj = obj_T_bf * ltip_bf

            d['rtip_obj'] = tfu.matrix_as_tf(rtip_obj)
            d['ltip_obj'] = tfu.matrix_as_tf(ltip_obj)

            d['rtip_bf'] = tfu.matrix_as_tf(rtip_bf)
            d['ltip_bf'] = tfu.matrix_as_tf(ltip_bf)

    ###############################################################################
    # make movement state dictionaries, one for each state
    movement_states = []
    for i, seg in enumerate(time_segments):
        name = "state_%d" % i
        start_times = [
            lcart_seg[i][0].header.stamp.to_time(),
            rcart_seg[i][0].header.stamp.to_time(), jseg_dicts[i][0]['time'],
            pressure_lseg[i][0].header.stamp.to_time(),
            pressure_rseg[i][0].header.stamp.to_time()
        ]

        sdict = {
            'name': name,
            'start_time': np.min(start_times),
            'cartesian': [[ru.ros_to_dict(ps) for ps in lcart_seg[i]],
                          [ru.ros_to_dict(ps) for ps in rcart_seg[i]]],
            'joint_states': jseg_dicts[i]
            #'pressure': [pressure_lseg[i], pressure_rseg[i]]
        }

        movement_states.append(sdict)

    # store in a dict
    data = {
        # start_conditions holds ['camera_info', 'map_T_bf', 'pro_T_bf',
        # 'points' (in base_frame), 'highdef_image', 'model_image',
        # 'pose_parameters' ('descriptors', 'directions' (wrt canonical
        # orientation), 'closest_feature')]
        'start_conditions': start_conditions,
        'base_pose': pose_base,
        'robot_pose': j0_dict,
        'arm': arm_used,
        'movement_states': movement_states
    }

    # save dicts to pickles
    processed_bag_name = '%s_processed.pkl' % os.path.join(bag_path, filename)
    rospy.loginfo('saving to %s' % processed_bag_name)
    ut.save_pickle(data, processed_bag_name)
    bag_playback.join()
    rospy.loginfo('finished!')
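find_contact_times is called above but not shown on this page. Below is a rough sketch consistent with its call signature: threshold the per-timestep fingertip pressure sums and return the crossing times. The matrix layout (one column per time sample, as np.matrix) is an assumption about what hpr2.pressure_state_to_mat returns.

import numpy as np

def find_contact_times(left_f, right_f, ptimes, threshold):
    # Flag samples where either gripper pad's summed pressure magnitude
    # exceeds the threshold, and return the corresponding time stamps.
    contact = (np.sum(np.abs(left_f), 0).A1 > threshold) | \
              (np.sum(np.abs(right_f), 0).A1 > threshold)
    return np.array(ptimes)[contact]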
Example #26
    def frame_loc(from_frame):
        p_base = tfu.transform(self.base_frame, from_frame, self.tflistener) \
                       * tfu.tf_as_matrix(([0., 0., 0., 1.], tr.quaternion_from_euler(0,0,0)))
        return tfu.matrix_as_tf(p_base)
Example #27
    def pose_cartesian_tf(self, frame='base_link'):
        p, r = tfu.matrix_as_tf(self.pose_cartesian(frame))
        return np.matrix(p).T, np.matrix(r).T
Example #28
    def omni_T_torso(self, torso_mat):
        l0_mat = tfu.transform(self.omni_name + '_center', '/torso_lift_link', self.tflistener) * torso_mat
        l0_t = (np.array(tr.translation_from_matrix(l0_mat)) / np.array(self.scale_omni_l0)).tolist()
        l0_q = tr.quaternion_from_matrix(l0_mat)
        omni_pt_mat = tfu.transform(self.omni_name, self.omni_name + '_center', self.tflistener) * tfu.tf_as_matrix((l0_t, l0_q))
        return tfu.matrix_as_tf(omni_pt_mat)
Example #29
        rospy.init_node('test_linear_move')
        arm = 'r'
        tflistener = tf.TransformListener()
        robot = pr2.PR2(tflistener)
        movement = LinearReactiveMovement(arm, robot, tflistener)

        if mode == 'save':
            poses = []
            print 'going.....'
            while True:
                print 'waiting for input'
                r = raw_input()
                if r != 's':
                    print 'getting pose.'
                    p = movement.arm_obj.pose_cartesian()
                    print 'pose is', p
                    poses.append(p)
                else:
                    break

            ut.save_pickle(poses, 'saved_poses.pkl')

        elif mode == 'run':
            poses = ut.load_pickle('saved_poses.pkl')
            for p in poses:
                print 'hit enter to move'
                r = raw_input()
                pos, rot = tfu.matrix_as_tf(p)
                movement.set_movement_mode_cart()
                movement.move_absolute2((np.matrix(pos), np.matrix(rot)))
Example #30
    def _move_cartesian_ik(self, position, orientation, \
            stop_funcs=[], timeout = 30., settling_time = 0.25, \
            frame='base_link', vel=.15):
        #pdb.set_trace()
        #self.arm_obj.set_cart_pose_ik(cart_pose, total_time=motion_length, frame=frame, block=False)
        #cart_pose = tfu.tf_as_matrix((position.A1.tolist(), orientation.A1.tolist()))

        self.tf_listener.waitForTransform(self.ik_frame, self.tool_frame,
                                          rospy.Time(), rospy.Duration(10))
        toolframe_T_ikframe = tfu.transform(self.tool_frame, self.ik_frame,
                                            self.tf_listener)
        cart_pose = tfu.tf_as_matrix(
            (position.A1.tolist(), orientation.A1.tolist()))
        cart_pose = cart_pose * toolframe_T_ikframe
        position, orientation = tfu.matrix_as_tf(cart_pose)

        #goal_pose_ps = create_pose_stamped(position.A1.tolist() + orientation.A1.tolist(), frame)
        goal_pose_ps = cf.create_pose_stamped(
            position.tolist() + orientation.tolist(), frame)
        r = self.reactive_gr.cm.move_cartesian_ik(goal_pose_ps, blocking=0, step_size=.005, \
                pos_thres=.02, rot_thres=.1, timeout=rospy.Duration(timeout),
                settling_time=rospy.Duration(settling_time), vel=vel)
        if not (r == 'sent goal' or r == 'success'):
            return r

        #move_cartesian_ik(self, goal_pose, collision_aware = 0, blocking = 1,
        #                  step_size = .005, pos_thres = .02, rot_thres = .1,
        #                  timeout = rospy.Duration(30.),
        #                  settling_time = rospy.Duration(0.25), vel = .15):

        stop_trigger = None
        done_time = None
        start_time = rospy.get_rostime()
        while stop_trigger == None:
            for f, name in stop_funcs:
                if f():
                    self.arm_obj.stop_trajectory_execution()
                    stop_trigger = name
                    rospy.loginfo(
                        '"%s" requested that motion should be stopped.' %
                        (name))
                    break

            if timeout != 0. and rospy.get_rostime() - start_time > rospy.Duration(timeout):
                rospy.loginfo("_move_cartesian_ik: motion timed out")
                break

            if (not done_time) and (not self.arm_obj.has_active_goal()):
                #rospy.loginfo("check_cartesian_done returned 1")
                done_time = rospy.get_rostime()

            if done_time and rospy.get_rostime() - done_time > rospy.Duration(
                    settling_time):
                rospy.loginfo("_move_cartesian_ik: done settling")
                break

        if stop_trigger == 'pressure_safety':
            print 'ROBOT SAFETY ERROR'
            #raise RobotSafetyError(stop_trigger)
        return stop_trigger
Example #31
    def _move_cartesian_cart(self, position, orientation, \
            stop_funcs=[], timeout = 3.0, settling_time = 0.5, frame='base_link'):

        # TODO: Test this function...
        # Transform the pose from 'frame' to 'base_link'
        self.tf_listener.waitForTransform('base_link', frame, rospy.Time(),
                                          rospy.Duration(10))
        frame_T_base = tfu.transform('base_link', frame, self.tf_listener)
        init_cart_pose = tfu.tf_as_matrix((position.A1.tolist(),
                                           orientation.A1.tolist()))
        base_cart_pose = frame_T_base*init_cart_pose

        # Get IK from tool frame to wrist frame for control input
        self.tf_listener.waitForTransform(self.ik_frame, self.tool_frame, rospy.Time(), rospy.Duration(10))
        toolframe_T_ikframe = tfu.transform(self.tool_frame, self.ik_frame, self.tf_listener)
        #base_cart_pose = tfu.tf_as_matrix((base_position.A1.tolist(), base_orientation.A1.tolist()))
        base_cart_pose = base_cart_pose * toolframe_T_ikframe
        base_position, base_orientation = tfu.matrix_as_tf(base_cart_pose)

        pose_stamped = cf.create_pose_stamped(base_position.tolist() + base_orientation.tolist())
        rg = self.reactive_gr
        rg.check_preempt()

        #send the goal to the Cartesian controllers
        #rospy.loginfo("sending goal to Cartesian controllers")
        (pos, rot) = cf.pose_stamped_to_lists(rg.cm.tf_listener, pose_stamped, 'base_link')
        rg.move_cartesian_step(pos+rot, timeout, settling_time)

        #watch the fingertip/palm sensors until the controllers are done and then some
        start_time = rospy.get_rostime()
        done_time = None
        #stopped = False
        stop_trigger = None
        #print 'entering loop'
        stop_detector = ArmStoppedDetector()
        while(1):

            rg.check_preempt()
            if len(stop_funcs) > 0:
                for f, name in stop_funcs:
                    if f():
                        rg.cm.switch_to_joint_mode()
                        rg.cm.freeze_arm()
                        #stopped = True
                        stop_trigger = name
                        rospy.loginfo('"%s" requested that motion should be stopped.' % (name))
                        break
                if stop_trigger != None:
                    break

            #if stop_func != None and stop_func():
            #    rg.cm.switch_to_joint_mode()
            #    rg.cm.freeze_arm()
            #    stopped = True
            #    break

            #check if we're actually there
            if rg.cm.check_cartesian_near_pose(pose_stamped, .0025, .05):
                rospy.loginfo("_move_cartesian_cart: reached pose")
                #stop_trigger = 'at_pose'
                break

            stop_detector.record_diff(self.arm_obj.pose_cartesian())
            if stop_detector.is_stopped():
                rospy.loginfo("_move_cartesian_cart: arm stopped")
                #stop_trigger = 'stopped'
                break

            # if rg.cm.check_cartesian_really_done(pose_stamped, .0025, .05):
            #     #rospy.loginfo("actually got there")
            #     break
            #
            # #check if the controllers think we're done
            # if not done_time and rg.cm.check_cartesian_done():
            #     #rospy.loginfo("check_cartesian_done returned 1")
            #     done_time = rospy.get_rostime()

            # #done settling
            # if done_time and rospy.get_rostime() - done_time > rospy.Duration(settling_time):
            #     rospy.loginfo("done settling")
            #     break

            #timed out
            #if timeout != 0. and rospy.get_rostime() - start_time > rospy.Duration(timeout):
            #    rospy.loginfo("_move_cartesian_cart: timed out")
            #    break

        #if stop_trigger == 'pressure_safety' or stop_trigger == 'self_collision':
        if stop_trigger == 'pressure_safety':
            print 'ROBOT SAFETY ERROR'
            #raise RobotSafetyError(stop_trigger)
        #name = ut.formatted_time() + '_stop_detector.pkl'
        #print 'saved', name
        #ut.save_pickle(stop_detector, name)
        return stop_trigger
Example #32
def process_bag(full_bag_name, prosilica_image_file, model_image_file, experiment_start_condition_pkl, arm_used='left'):
    bag_path, bag_name_ext = os.path.split(full_bag_name)
    filename, ext = os.path.splitext(bag_name_ext)
    
    ###############################################################################
    # Playback the bag
    bag_playback = Process(target=playback_bag, args=(full_bag_name,))
    bag_playback.start()

    ###############################################################################
    ## Listen for transforms using rosbag
    rospy.init_node('bag_processor')

    tl = tf.TransformListener()

    print 'waiting for transform'
    tl.waitForTransform('map', 'base_footprint', rospy.Time(), rospy.Duration(20))
    # Extract the starting location map_T_bf
    p_base = tfu.transform('map', 'base_footprint', tl) \
            * tfu.tf_as_matrix(([0., 0., 0., 1.], tr.quaternion_from_euler(0,0,0)))
    t, r = tfu.matrix_as_tf(p_base)
    pose_base = (t, r)
    print 'done with tf'

    ##########################################################
    ## Find contact locations
    start_conditions = ut.load_pickle(experiment_start_condition_pkl)
    start_conditions['highdef_image'] = prosilica_image_file
    start_conditions['model_image'] = model_image_file
    rospy.loginfo('extracting object localization features')
    start_conditions['pose_parameters'] = extract_object_localization_features2(start_conditions, tl, arm_used, p_base)

    if bag_playback.is_alive():
        rospy.loginfo('Terminating playback process')
        bag_playback.terminate()
        time.sleep(1)
        bag_playback.terminate()
        time.sleep(1)
        rospy.loginfo('Playback process terminated? %s' % str(not bag_playback.is_alive()))


    ###############################################################################
    #Read bag using programmatic API
    pr2_kinematics = pr2k.PR2Kinematics(tl)
    converter = JointMsgConverter()
    rospy.loginfo('opening bag, reading state topics')
    topics_dict = ru.bag_sel(full_bag_name, ['/joint_states', '/l_cart/command_pose', 
                                             '/r_cart/command_pose', '/torso_controller/state',
                                             '/pressure/l_gripper_motor', '/pressure/r_gripper_motor'])

    ## Select the arm that has been moving, segment joint states based on contact states.
    if arm_used == 'left':
        pressures = topics_dict['/pressure/l_gripper_motor']
    elif arm_used == 'right':
        pressures = topics_dict['/pressure/r_gripper_motor']
    else:
        raise RuntimeError('arm_used invalid')

    ## find contact times
    rospy.loginfo('Finding contact times')
    left_f, right_f, ptimes = hpr2.pressure_state_to_mat(pressures['msg'])

    ## create segments based on contacts
    # TODO: make this accept more contact stages
    contact_times = find_contact_times(left_f, right_f, ptimes, 250)
    if len(contact_times) > 2:
        time_segments = [['start', contact_times[0]], [contact_times[0], contact_times[-1]], [contact_times[-1], 'end']]
    else:
        time_segments = [['start', 'end']]

    rospy.loginfo('Splitting messages based on contact times')
    ## split pressure readings based on contact times
    pressure_lseg = segment_msgs(time_segments, topics_dict['/pressure/l_gripper_motor']['msg'])
    pressure_rseg = segment_msgs(time_segments, topics_dict['/pressure/r_gripper_motor']['msg'])

    ## split cartesian commands based on contact times
    lcart_seg = segment_msgs(time_segments, topics_dict['/l_cart/command_pose']['msg'])
    rcart_seg = segment_msgs(time_segments, topics_dict['/r_cart/command_pose']['msg'])

    ## split joint states
    joint_states = topics_dict['/joint_states']['msg']
    print 'there are %d joint state messages in bag' % len(joint_states)

    j_segs     = segment_msgs(time_segments, topics_dict['/joint_states']['msg'])
    jseg_dicts = [converter.msgs_to_dict(seg) for seg in j_segs]
    # find the first set of joint states
    j0_dict    = jseg_dicts[0][0]

    ## perform FK
    rospy.loginfo('Performing FK to find tip locations')
    bf_T_obj = htf.composeHomogeneousTransform(start_conditions['pose_parameters']['frame_bf'], 
                                               start_conditions['pose_parameters']['center_bf'])
    obj_T_bf = np.linalg.inv(bf_T_obj)
    for jseg_dict in jseg_dicts:
        for d in jseg_dict:
            rtip_bf = pr2_kinematics.right.fk('base_footprint',
                    'r_wrist_roll_link', 'r_gripper_tool_frame',
                    d['poses']['rarm'].A1.tolist())
            ltip_bf =  pr2_kinematics.left.fk('base_footprint',
                    'l_wrist_roll_link', 'l_gripper_tool_frame',
                    d['poses']['larm'].A1.tolist())
            rtip_obj = obj_T_bf * rtip_bf
            ltip_obj = obj_T_bf * ltip_bf

            d['rtip_obj'] = tfu.matrix_as_tf(rtip_obj)
            d['ltip_obj'] = tfu.matrix_as_tf(ltip_obj)

            d['rtip_bf'] = tfu.matrix_as_tf(rtip_bf)
            d['ltip_bf'] = tfu.matrix_as_tf(ltip_bf)
    
    ###############################################################################
    # make movement state dictionaries, one for each state
    movement_states = []
    for i, seg in enumerate(time_segments):
        name = "state_%d" % i
        start_times = [lcart_seg[i][0].header.stamp.to_time(), 
                       rcart_seg[i][0].header.stamp.to_time(), 
                       jseg_dicts[i][0]['time'],
                       pressure_lseg[i][0].header.stamp.to_time(), 
                       pressure_rseg[i][0].header.stamp.to_time()]

        sdict = {'name': name,
                 'start_time': np.min(start_times),
                 'cartesian': [[ru.ros_to_dict(ps) for ps in lcart_seg[i]], 
                               [ru.ros_to_dict(ps) for ps in rcart_seg[i]]],
                 'joint_states': jseg_dicts[i]
                 #'pressure': [pressure_lseg[i], pressure_rseg[i]]
                 } 

        movement_states.append(sdict)

    # store in a dict
    data = {'start_conditions': start_conditions, # ['camera_info', 'map_T_bf', 'pro_T_bf', 'points' (in base_frame), 
                                                  # 'highdef_image', 'model_image',
                                                    ## 'pose_parameters'
                                                        ## 'descriptors'
                                                        ## 'directions' (wrt canonical orientation)
                                                        ## 'closest_feature'
            'base_pose': pose_base, 
            'robot_pose': j0_dict,
            'arm': arm_used,
            'movement_states': movement_states}

    # save dicts to pickles
    processed_bag_name = '%s_processed.pkl' % os.path.join(bag_path, filename)
    rospy.loginfo('saving to %s' % processed_bag_name)
    ut.save_pickle(data, processed_bag_name)
    bag_playback.join()
    rospy.loginfo('finished!')
Example #33
    def get_pose(self):
        p_base = tfu.transform('map', 'base_footprint', self.tflistener) \
                * tfu.tf_as_matrix(([0., 0., 0., 1.], tr.quaternion_from_euler(0,0,0)))
        return tfu.matrix_as_tf(p_base)