def __init__(self, optical_frame, tf_listener=None):
    if tf_listener is None:
        tf_listener = tf.TransformListener()
    self.tf_listener = tf_listener
    self.optical_frame = optical_frame

    # Robot interfaces: arms/base, manipulation behaviors, and sensors.
    self.robot = pr2.PR2(self.tf_listener, base=True)
    self.behaviors = ManipulationBehaviors('l', self.robot,
                                           tf_listener=self.tf_listener)
    self.laser_scan = hd.LaserScanner('point_cloud_srv')
    self.prosilica = rc.Prosilica('prosilica', 'polled')
    self.wide_angle_camera_left = rc.ROSCamera('/wide_stereo/left/image_rect_color')
    self.wide_angle_configure = dr.Client('wide_stereo_both')

    # Start locations are (3x1 position, quaternion) tuples.
    # TODO: define start locations in a frame attached to the torso instead of base_link.
    self.start_location_light_switch = (np.matrix([0.35, 0.30, 1.1]).T,
                                        np.matrix([0., 0., 0., 0.1]))
    self.start_location_drawer = (np.matrix([0.20, 0.40, 0.8]).T,
                                  np.matrix(tr.quaternion_from_euler(np.radians(90.), 0, 0)))
    self.folded_pose = np.matrix([0.10134791, -0.29295995, 0.41193769]).T

    # Per-task driving parameters: coarse/fine distances and 'voi' (volume of interest) size.
    self.driving_param = {'light_switch_up':   {'coarse': .9, 'fine': .6, 'voi': .4},
                          'light_switch_down': {'coarse': .9, 'fine': .6, 'voi': .4},
                          'light_rocker_down': {'coarse': .9, 'fine': .6, 'voi': .4},
                          'light_rocker_up':   {'coarse': .9, 'fine': .6, 'voi': .4},
                          'pull_drawer':       {'coarse': .9, 'fine': .5, 'voi': .4},
                          'push_drawer':       {'coarse': .9, 'fine': .5, 'voi': .4}}

    self.create_arm_poses()
    self.driving_posture('light_switch_down')
    # Turn the texture projector off while driving/manipulating.
    self.robot.projector.set(False)
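# Illustrative sketch (not part of the original class): the start locations above are
# (3x1 np.matrix position, quaternion) tuples. Assuming they are expressed in 'base_link'
# (as the TODO suggests), a hypothetical helper like this could convert one into a
# geometry_msgs/PoseStamped for use with TF or the arm behaviors.
import numpy as np
import rospy
from geometry_msgs.msg import PoseStamped

def start_location_to_pose_stamped(loc, frame_id='base_link'):
    pos, quat = loc
    ps = PoseStamped()
    ps.header.frame_id = frame_id
    ps.header.stamp = rospy.Time(0)  # "latest available" when handed to TF
    ps.pose.position.x = float(pos[0, 0])
    ps.pose.position.y = float(pos[1, 0])
    ps.pose.position.z = float(pos[2, 0])
    q = np.array(quat).flatten()
    ps.pose.orientation.x, ps.pose.orientation.y = q[0], q[1]
    ps.pose.orientation.z, ps.pose.orientation.w = q[2], q[3]
    return ps

# e.g. start_location_to_pose_stamped(self.start_location_drawer) from inside the class above.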
def __init__(self):
    rospy.init_node('tabletop_push_node', log_level=rospy.DEBUG)
    self.torso_z_offset = rospy.get_param('~torso_z_offset', 0.15)
    self.look_pt_x = rospy.get_param('~look_point_x', 0.45)
    self.head_pose_cam_frame = rospy.get_param('~head_pose_cam_frame',
                                               'openni_rgb_frame')
    self.default_torso_height = rospy.get_param('~default_torso_height', 0.2)
    self.gripper_raise_dist = rospy.get_param('~gripper_post_push_raise_dist', 0.05)
    use_slip = rospy.get_param('~use_slip_detection', 1)
    self.tf_listener = tf.TransformListener()

    # Set joint gains
    prefix = roslib.packages.get_pkg_dir('hrl_pr2_arms') + '/params/'
    cs = ControllerSwitcher()
    rospy.loginfo(cs.switch("r_arm_controller", "r_arm_controller",
                            prefix + "pr2_arm_controllers_push.yaml"))
    rospy.loginfo(cs.switch("l_arm_controller", "l_arm_controller",
                            prefix + "pr2_arm_controllers_push.yaml"))

    # Setup arms
    rospy.loginfo('Creating pr2 object')
    self.robot = pr2.PR2(self.tf_listener, arms=True, base=False)
    rospy.loginfo('Setting up left arm move')
    self.left_arm_move = lm.LinearReactiveMovement('l', self.robot,
                                                   self.tf_listener,
                                                   use_slip, use_slip)
    rospy.loginfo('Setting up right arm move')
    self.right_arm_move = lm.LinearReactiveMovement('r', self.robot,
                                                    self.tf_listener,
                                                    use_slip, use_slip)

    # Service proxy and servers for the push/sweep behaviors.
    self.push_pose_proxy = rospy.ServiceProxy('get_push_pose', PushPose)
    self.gripper_push_service = rospy.Service('gripper_push', GripperPush,
                                              self.gripper_push_action)
    self.gripper_pre_push_service = rospy.Service('gripper_pre_push', GripperPush,
                                                  self.gripper_pre_push_action)
    self.gripper_post_push_service = rospy.Service('gripper_post_push', GripperPush,
                                                   self.gripper_post_push_action)
    self.gripper_sweep_service = rospy.Service('gripper_sweep', GripperPush,
                                               self.gripper_sweep_action)
    self.gripper_pre_sweep_service = rospy.Service('gripper_pre_sweep', GripperPush,
                                                   self.gripper_pre_sweep_action)
    self.gripper_post_sweep_service = rospy.Service('gripper_post_sweep', GripperPush,
                                                    self.gripper_post_sweep_action)
    self.overhead_push_service = rospy.Service('overhead_push', GripperPush,
                                               self.overhead_push_action)
    self.overhead_pre_push_service = rospy.Service('overhead_pre_push', GripperPush,
                                                   self.overhead_pre_push_action)
    self.overhead_post_push_service = rospy.Service('overhead_post_push', GripperPush,
                                                    self.overhead_post_push_action)
    self.raise_and_look_service = rospy.Service('raise_and_look', RaiseAndLook,
                                                self.raise_and_look_action)
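# Illustrative sketch (hypothetical helper, not in the original node): the two cs.switch()
# calls above re-spawn each arm controller under its own name with the push-specific gain
# file. Wrapping that pattern makes it easy to load a different YAML from
# hrl_pr2_arms/params/ later, e.g. to restore the stock gains.
import roslib.packages
import rospy

def reload_arm_gains(cs, param_file):
    """Reload both arm controllers with gains from a YAML in hrl_pr2_arms/params/."""
    prefix = roslib.packages.get_pkg_dir('hrl_pr2_arms') + '/params/'
    for controller in ('r_arm_controller', 'l_arm_controller'):
        rospy.loginfo(cs.switch(controller, controller, prefix + param_file))

# e.g. reload_arm_gains(cs, 'pr2_arm_controllers_push.yaml')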
    # Plot a previously recorded pickle of position differences and exit.
    import sys
    a = ut.load_pickle(sys.argv[1])
    print len(a.pos_diff)
    pb.plot(a.pos_diff)
    pb.show()
    exit(0)
else:
    # Otherwise bring up the robot interfaces and run the requested mode.
    import hrl_pr2_lib.pr2 as pr2
    rospy.init_node('test_linear_move')
    arm = 'r'
    tflistener = tf.TransformListener()
    robot = pr2.PR2(tflistener)
    movement = LinearReactiveMovement(arm, robot, tflistener)

    if mode == 'save':
        # Interactively record Cartesian arm poses until the user enters 's'.
        poses = []
        print 'going.....'
        while True:
            print 'waiting for input'
            r = raw_input()
            if r != 's':
                print 'getting pose.'
                p = movement.arm_obj.pose_cartesian()
                print 'pose is', p
                poses.append(p)
            else:
                break
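        # Illustrative continuation (not in the original script): assuming ut is
        # hrl_lib.util, which provides save_pickle alongside the load_pickle used in the
        # plotting branch above, the recorded poses could be written out once the loop
        # breaks so that a later run can load and examine them.
        import time
        fname = 'saved_poses_%d.pkl' % int(time.time())  # hypothetical file name
        ut.save_pickle(poses, fname)
        print 'saved %d poses to %s' % (len(poses), fname)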