Example #1
 def omni_T_torso(self, torso_mat):
     l0_mat = tfu.transform(self.omni_name + '_center', '/torso_lift_link',
                            self.tflistener) * torso_mat
     l0_t = (np.array(tr.translation_from_matrix(l0_mat)) /
             np.array(self.scale_omni_l0)).tolist()
     l0_q = tr.quaternion_from_matrix(l0_mat)
     omni_pt_mat = tfu.transform(self.omni_name, self.omni_name + '_center',
                                 self.tflistener) * tfu.tf_as_matrix(
                                     (l0_t, l0_q))
     return tfu.matrix_as_tf(omni_pt_mat)
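
All of the examples in this listing lean on the same three helpers from a small tf utility module (tfu) plus tf.transformations (tr). The snippets are excerpts, so as a reading aid here is a minimal sketch of what those helpers plausibly do, assuming they are thin wrappers over the standard tf API; treat this as an approximation, not the canonical implementation:

import numpy as np
import rospy
import tf.transformations as tr

def transform(to_frame, from_frame, listener):
    # Latest from_frame -> to_frame transform as a 4x4 np.matrix.
    trans, quat = listener.lookupTransform(to_frame, from_frame, rospy.Time(0))
    return tf_as_matrix((trans, quat))

def tf_as_matrix(tup):
    # (translation, quaternion) pair -> homogeneous 4x4 np.matrix.
    t, q = tup
    return np.matrix(tr.translation_matrix(t)) * np.matrix(tr.quaternion_matrix(q))

def matrix_as_tf(mat):
    # Homogeneous 4x4 matrix -> (translation, quaternion) pair.
    return tr.translation_from_matrix(mat), tr.quaternion_from_matrix(mat)
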
Example #2
    def project(self, p, tf_listener=None, from_frame=None):
        if not(from_frame == None or from_frame == self.frame):
            p_cam = tfu.transform(self.frame, from_frame, tf_listener) \
                           * tfu.tf_as_matrix((p.A1.tolist(), tr.quaternion_from_euler(0,0,0)))
            trans, q = tfu.matrix_as_tf(p_cam)
            p = np.matrix(trans).T

        p = np.row_stack((p, np.matrix([1.])))
        pp = self.P * p
        pp = pp / pp[2,:]
        return pp[0:2,:]
Example #3
    def project(self, p, tf_listener=None, from_frame=None):
        if not(from_frame == None or from_frame == self.frame):
            p_cam = tfu.transform(self.frame, from_frame, tf_listener) \
                           * tfu.tf_as_matrix((p.A1.tolist(), tr.quaternion_from_euler(0,0,0)))
            trans, q = tfu.matrix_as_tf(p_cam)
            p = np.matrix(trans).T

        p = np.row_stack((p, np.matrix([1.])))
        pp = self.P * p
        pp = pp / pp[2,0]
        return pp[0:2,0]
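
Examples 2 and 3 differ only in the perspective divide: pp[2,:] keeps the code vectorized over a 3xN matrix of points, while pp[2,0] assumes a single column vector. Both implement standard pinhole projection with the 3x4 matrix P from CameraInfo. A self-contained numeric sketch (the intrinsics are made up for illustration):

import numpy as np

fx, fy, cx, cy = 525., 525., 319.5, 239.5        # made-up intrinsics
P = np.matrix([[fx, 0., cx, 0.],
               [0., fy, cy, 0.],
               [0., 0., 1., 0.]])

p = np.matrix([0.1, -0.05, 2.0]).T               # point in the camera frame
pp = P * np.row_stack((p, np.matrix([1.])))      # homogeneous projection
pp = pp / pp[2, 0]                               # perspective divide
print pp[0:2, 0]                                 # pixel coordinates (u, v)
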
Example #4
    def _move_cartesian_ik(self, position, orientation, \
            stop_funcs=[], timeout = 30., settling_time = 0.25, \
            frame='base_link', vel=.15):
        #pdb.set_trace()
        #self.arm_obj.set_cart_pose_ik(cart_pose, total_time=motion_length, frame=frame, block=False)
        #cart_pose = tfu.tf_as_matrix((position.A1.tolist(), orientation.A1.tolist()))

        self.tf_listener.waitForTransform(self.ik_frame, self.tool_frame, rospy.Time(), rospy.Duration(10))
        toolframe_T_ikframe = tfu.transform(self.tool_frame, self.ik_frame, self.tf_listener)
        cart_pose = tfu.tf_as_matrix((position.A1.tolist(), orientation.A1.tolist()))
        cart_pose = cart_pose * toolframe_T_ikframe
        position, orientation = tfu.matrix_as_tf(cart_pose)

        #goal_pose_ps = create_pose_stamped(position.A1.tolist() + orientation.A1.tolist(), frame)
        goal_pose_ps = cf.create_pose_stamped(position.tolist() + orientation.tolist(), frame)
        r = self.reactive_gr.cm.move_cartesian_ik(goal_pose_ps, blocking=0, step_size=.005, \
                pos_thres=.02, rot_thres=.1, timeout=rospy.Duration(timeout),
                settling_time=rospy.Duration(settling_time), vel=vel)
        if not (r == 'sent goal' or r == 'success'):
            return r

        #move_cartesian_ik(self, goal_pose, collision_aware = 0, blocking = 1,                            
        #                  step_size = .005, pos_thres = .02, rot_thres = .1,                                
        #                  timeout = rospy.Duration(30.),                                                    
        #                  settling_time = rospy.Duration(0.25), vel = .15):   

        stop_trigger = None
        done_time = None
        start_time = rospy.get_rostime()
        while stop_trigger == None:
            for f, name in stop_funcs:
                if f():
                    self.arm_obj.stop_trajectory_execution()
                    stop_trigger = name
                    rospy.loginfo('"%s" requested that motion should be stopped.' % (name))
                    break

            if timeout != 0. and rospy.get_rostime() - start_time > rospy.Duration(timeout):
                rospy.loginfo("_move_cartesian_ik: motion timed out")
                break

            if (not done_time) and (not self.arm_obj.has_active_goal()):
                #rospy.loginfo("check_cartesian_done returned 1")
                done_time = rospy.get_rostime()

            if done_time and rospy.get_rostime() - done_time > rospy.Duration(settling_time):
                rospy.loginfo("_move_cartesian_ik: done settling")
                break

        if stop_trigger == 'pressure_safety':
            print 'ROBOT SAFETY ERROR'
            #raise RobotSafetyError(stop_trigger)
        return stop_trigger
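
The pivotal step in Example 4 is the right-multiplication cart_pose * toolframe_T_ikframe: the caller specifies where the tool frame should end up, but move_cartesian_ik drives the wrist (IK) frame, so the goal is composed with the static tool-to-IK offset before being sent. The same idiom in isolation, with made-up numbers (here the tool tip is assumed to sit 18 cm ahead of the wrist along x):

import numpy as np
import tf.transformations as tr

base_T_tool = np.matrix(tr.translation_matrix([0.6, -0.2, 0.9]))  # desired tool pose
tool_T_ik = np.matrix(tr.translation_matrix([-0.18, 0., 0.]))     # static tool->ik offset
base_T_ik = base_T_tool * tool_T_ik    # the pose handed to the IK controller
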
Example #5
    def project(self, p, tf_listener=None, from_frame=None):
        if not self.has_msg:
            raise RuntimeError('Has not been initialized with a CameraInfo message (call camera_info).')
        if not(from_frame == None or from_frame == self.frame):
            p_cam = tfu.transform(self.frame, from_frame, tf_listener) \
                           * tfu.tf_as_matrix((p.A1.tolist(), tr.quaternion_from_euler(0,0,0)))
            trans, q = tfu.matrix_as_tf(p_cam)
            p = np.matrix(trans).T

        # Homogeneous coordinate for points must be 1; a row of zeros would
        # drop P's translation column (only harmless for a monocular P).
        hrow = np.matrix(np.ones((1,p.shape[1])))
        p = np.row_stack((p, hrow))
        pp = self.P * p
        pp = pp / pp[2,:]
        return pp[0:2,:]
Example #6
    def project(self, p, tf_listener=None, from_frame=None):
        if not self.has_msg:
            raise RuntimeError(
                'Has not been initialized with a CameraInfo message (call camera_info).'
            )
        if not (from_frame == None or from_frame == self.frame):
            p_cam = tfu.transform(self.frame, from_frame, tf_listener) \
                           * tfu.tf_as_matrix((p.A1.tolist(), tr.quaternion_from_euler(0,0,0)))
            trans, q = tfu.matrix_as_tf(p_cam)
            p = np.matrix(trans).T

        # Homogeneous coordinate for points must be 1; a row of zeros would
        # drop P's translation column (only harmless for a monocular P).
        hrow = np.matrix(np.ones((1, p.shape[1])))
        p = np.row_stack((p, hrow))
        pp = self.P * p
        pp = pp / pp[2, :]
        return pp[0:2, :]
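
Note on Examples 5 and 6: the homogeneous row must be ones, not zeros. For a monocular P (fourth column all zeros) either choice yields the same pixels, but for a stereo or otherwise offset camera a zero row silently discards P's translation column.
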
Example #7
    def go_angle(self, target_base, tolerance=math.radians(5.), max_ang_vel=math.radians(20.), func=None):
        self.tl.waitForTransform('odom_combined', 'base_footprint', rospy.Time(), rospy.Duration(10))
        rate = rospy.Rate(100)
        k = math.radians(80)

        od_T_bf = tfu.transform('odom_combined', 'base_footprint', self.tl)
        target_odom_mat = od_T_bf * tfu.tf_as_matrix( ([0, 0, 0.], tr.quaternion_from_euler(0, 0, target_base)) )
        target_odom = tr.euler_from_quaternion(tfu.matrix_as_tf(target_odom_mat)[1])[2]
        #target_odom = (od_T_bf * np.row_stack([target_base, np.matrix([0,1.]).T]))[0:2,0]

        #current_ang_odom = tr.euler_from_matrix(tfu.transform('base_footprint', 'odom_combined', self.tl)[0:3, 0:3], 'sxyz')[2]
        #target_odom = current_ang_odom + delta_ang

        robot_odom = tfu.transform('odom_combined', 'base_footprint', self.tl)
        current_ang_odom = tr.euler_from_matrix(robot_odom[0:3, 0:3], 'sxyz')[2]
        diff = ut.standard_rad(current_ang_odom - target_odom)
        rospy.loginfo('go_angle: target %.2f' %  np.degrees(target_odom))
        rospy.loginfo('go_angle: current %.2f' %  np.degrees(current_ang_odom))
        rospy.loginfo('go_angle: diff %.2f' %  np.degrees(diff))

        while not rospy.is_shutdown():
            robot_odom = tfu.transform('odom_combined', 'base_footprint', self.tl)
            current_ang_odom = tr.euler_from_matrix(robot_odom[0:3, 0:3], 'sxyz')[2]
            diff = ut.standard_rad(target_odom - current_ang_odom)
            rospy.loginfo('go_angle: current %.2f diff %.2f' %  (np.degrees(current_ang_odom), np.degrees(diff)))
            p = k * diff
            if func != None:
                func(diff)
            if np.abs(diff) < tolerance:
                break

            if self.go_ang_server.is_preempt_requested():
                self.go_ang_server.set_preempted()
                break

            tw = gm.Twist()
            vels = [p, np.sign(p) * max_ang_vel]
            tw.angular.z = vels[np.argmin(np.abs(vels))]
            #rospy.loginfo('diff %.3f vel %.3f' % (math.degrees(diff), math.degrees(tw.angular.z)))
            self.tw_pub.publish(tw)
            #rospy.loginfo('commanded %s' % str(tw))
            rate.sleep()
        rospy.loginfo('go_angle: finished %.3f' % math.degrees(diff))
        return diff
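
Example 7 is a proportional controller on heading error: the command is k * diff, saturated at max_ang_vel without losing its sign by picking whichever of the two candidate velocities is smaller in magnitude. That clamp idiom in isolation (the error value is illustrative):

import math
import numpy as np

k, max_ang_vel = math.radians(80), math.radians(20)
diff = math.radians(-3.)                      # illustrative heading error
p = k * diff
vels = [p, np.sign(p) * max_ang_vel]
cmd = vels[np.argmin(np.abs(vels))]           # min magnitude, sign preserved
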
Example #8
    def torso_lift_link_T_omni(self, tip_omni, msg_frame):
        center_T_6 = tfu.transform(self.omni_name+'_center', msg_frame, self.tflistener)
        tip_center = center_T_6*tip_omni
        tip_center_t = (np.array(tr.translation_from_matrix(tip_center))*np.array(self.scale_omni_l0)).tolist()
        tip_center_q = tr.quaternion_from_matrix(tip_center)

        # z_T_6 = tfu.transform(self.omni_name+'_link0', msg_frame, self.tflistener)
        # tip_0 = z_T_6*tip_omni
        # tip_0t = (np.array(tr.translation_from_matrix(tip_0))*np.array(self.scale_omni_l0)).tolist()
        # tip_0q = tr.quaternion_from_matrix(tip_0)

    #     #Transform into torso frame so we can bound arm workspace
        tll_T_0 = tfu.transform('/torso_lift_link', self.omni_name + '_center', self.tflistener)
        tip_torso_mat = tll_T_0 * tfu.tf_as_matrix([tip_center_t, tip_center_q])
        tip_tt, tip_tq = tfu.matrix_as_tf(tip_torso_mat)

        #don't need to bound, arm controller should do this
    #     tip_tt[0] = limit_range(tip_tt[0], self.X_BOUNDS[0], self.X_BOUNDS[1])
        self.tip_tt = tip_tt
        self.tip_tq = tip_tq
Example #9
    def torso_lift_link_T_omni(self, tip_omni, msg_frame):
        center_T_6 = tfu.transform(self.omni_name + '_center', msg_frame,
                                   self.tflistener)
        tip_center = center_T_6 * tip_omni
        tip_center_t = (np.array(tr.translation_from_matrix(tip_center)) *
                        np.array(self.scale_omni_l0)).tolist()
        tip_center_q = tr.quaternion_from_matrix(tip_center)

        # z_T_6 = tfu.transform(self.omni_name+'_link0', msg_frame, self.tflistener)
        # tip_0 = z_T_6*tip_omni
        # tip_0t = (np.array(tr.translation_from_matrix(tip_0))*np.array(self.scale_omni_l0)).tolist()
        # tip_0q = tr.quaternion_from_matrix(tip_0)

        #     #Transform into torso frame so we can bound arm workspace
        tll_T_0 = tfu.transform('/torso_lift_link', self.omni_name + '_center',
                                self.tflistener)
        tip_torso_mat = tll_T_0 * tfu.tf_as_matrix(
            [tip_center_t, tip_center_q])
        tip_tt, tip_tq = tfu.matrix_as_tf(tip_torso_mat)

        #don't need to bound, arm controller should do this
        #     tip_tt[0] = limit_range(tip_tt[0], self.X_BOUNDS[0], self.X_BOUNDS[1])
        self.tip_tt = tip_tt
        self.tip_tq = tip_tq
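
Examples 8 and 9 are the inverse mapping of Examples 1 and 14: going from the haptic device (omni) to the torso frame the translation is multiplied by scale_omni_l0, and going back it is divided, so a small master workspace maps onto a larger slave workspace. Only the translation is scaled; the rotation passes through unchanged.
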
Example #10
    def _move_cartesian_cart(self, position, orientation, \
            stop_funcs=[], timeout = 3.0, settling_time = 0.5, frame='base_link'):

        # TODO: Test this function...
        # Transform the pose from 'frame' to 'base_link'
        self.tf_listener.waitForTransform('base_link', frame, rospy.Time(),
                                          rospy.Duration(10))
        frame_T_base = tfu.transform('base_link', frame, self.tf_listener)
        init_cart_pose = tfu.tf_as_matrix((position.A1.tolist(),
                                           orientation.A1.tolist()))
        base_cart_pose = frame_T_base*init_cart_pose

        # Get IK from tool frame to wrist frame for control input
        self.tf_listener.waitForTransform(self.ik_frame, self.tool_frame, rospy.Time(), rospy.Duration(10))
        toolframe_T_ikframe = tfu.transform(self.tool_frame, self.ik_frame, self.tf_listener)
        #base_cart_pose = tfu.tf_as_matrix((base_position.A1.tolist(), base_orientation.A1.tolist()))
        base_cart_pose = base_cart_pose * toolframe_T_ikframe
        base_position, base_orientation = tfu.matrix_as_tf(base_cart_pose)

        pose_stamped = cf.create_pose_stamped(base_position.tolist() + base_orientation.tolist())
        rg = self.reactive_gr
        rg.check_preempt()

        #send the goal to the Cartesian controllers
        #rospy.loginfo("sending goal to Cartesian controllers")
        (pos, rot) = cf.pose_stamped_to_lists(rg.cm.tf_listener, pose_stamped, 'base_link')
        rg.move_cartesian_step(pos+rot, timeout, settling_time)

        #watch the fingertip/palm sensors until the controllers are done and then some
        start_time = rospy.get_rostime()
        done_time = None
        #stopped = False
        stop_trigger = None
        #print 'entering loop'
        stop_detector = ArmStoppedDetector()
        while(1):

            rg.check_preempt()
            if len(stop_funcs) > 0:
                for f, name in stop_funcs:
                    if f():
                        rg.cm.switch_to_joint_mode()
                        rg.cm.freeze_arm()
                        #stopped = True
                        stop_trigger = name
                        rospy.loginfo('"%s" requested that motion should be stopped.' % (name))
                        break
                if stop_trigger != None:
                    break

            #if stop_func != None and stop_func():
            #    rg.cm.switch_to_joint_mode()
            #    rg.cm.freeze_arm()
            #    stopped = True
            #    break

            #check if we're actually there
            if rg.cm.check_cartesian_near_pose(pose_stamped, .0025, .05):
                rospy.loginfo("_move_cartesian_cart: reached pose")
                #stop_trigger = 'at_pose'
                break

            stop_detector.record_diff(self.arm_obj.pose_cartesian())
            if stop_detector.is_stopped():
                rospy.loginfo("_move_cartesian_cart: arm stopped")
                #stop_trigger = 'stopped'
                break

            # if rg.cm.check_cartesian_really_done(pose_stamped, .0025, .05):
            #     #rospy.loginfo("actually got there")
            #     break
            #
            # #check if the controllers think we're done
            # if not done_time and rg.cm.check_cartesian_done():
            #     #rospy.loginfo("check_cartesian_done returned 1")
            #     done_time = rospy.get_rostime()

            # #done settling
            # if done_time and rospy.get_rostime() - done_time > rospy.Duration(settling_time):
            #     rospy.loginfo("done settling")
            #     break

            #timed out
            #if timeout != 0. and rospy.get_rostime() - start_time > rospy.Duration(timeout):
            #    rospy.loginfo("_move_cartesian_cart: timed out")
            #    break

        #if stop_trigger == 'pressure_safety' or stop_trigger == 'self_collision':
        if stop_trigger == 'pressure_safety':
            print 'ROBOT SAFETY ERROR'
            #raise RobotSafetyError(stop_trigger)
        #name = ut.formatted_time() + '_stop_detector.pkl'
        #print 'saved', name
        #ut.save_pickle(stop_detector, name)
        return stop_trigger
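
Both _move_cartesian_cart and _move_cartesian_ik accept stop_funcs as a list of (callable, name) pairs polled on every loop iteration; the first callable to return True freezes the arm and its name becomes the returned trigger, with 'pressure_safety' handled specially. A hypothetical registration sketch; pressure_exceeded, its readings, and the threshold are all invented for illustration:

readings = {'l_fingertip': 3200, 'r_fingertip': 6800}   # made-up pad pressures

def pressure_exceeded():
    # Invented guard: trip when any pad exceeds an invented threshold.
    return max(readings.values()) > 6000

stop_funcs = [(pressure_exceeded, 'pressure_safety')]
# then: trigger = arm._move_cartesian_cart(position, orientation, stop_funcs=stop_funcs)
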
Example #11
 def frame_loc(from_frame):
     p_base = tfu.transform('base_footprint', from_frame, self.tflistener) \
                    * tfu.tf_as_matrix(([0., 0., 0., 1.], tr.quaternion_from_euler(0,0,0)))
     #* tfu.tf_as_matrix((p.A1.tolist(), tr.quaternion_from_euler(0,0,0)))
     return tfu.matrix_as_tf(p_base)
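
A quirk shared by Examples 11, 13, 16, 18, and 19: the translation passed to tfu.tf_as_matrix is [0., 0., 0., 1.], one element longer than a tf translation. This presumably works only because translation_matrix reads just the first three components (an assumption about the underlying implementation); a plain [0., 0., 0.] would be the cleaner call. Either way, the effect is to recover a frame's pose by pushing its origin through the transform.
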
Example #12
    def _move_cartesian_ik(self, position, orientation, \
            stop_funcs=[], timeout = 30., settling_time = 0.25, \
            frame='base_link', vel=.15):
        #pdb.set_trace()
        #self.arm_obj.set_cart_pose_ik(cart_pose, total_time=motion_length, frame=frame, block=False)
        #cart_pose = tfu.tf_as_matrix((position.A1.tolist(), orientation.A1.tolist()))

        self.tf_listener.waitForTransform(self.ik_frame, self.tool_frame,
                                          rospy.Time(), rospy.Duration(10))
        toolframe_T_ikframe = tfu.transform(self.tool_frame, self.ik_frame,
                                            self.tf_listener)
        cart_pose = tfu.tf_as_matrix(
            (position.A1.tolist(), orientation.A1.tolist()))
        cart_pose = cart_pose * toolframe_T_ikframe
        position, orientation = tfu.matrix_as_tf(cart_pose)

        #goal_pose_ps = create_pose_stamped(position.A1.tolist() + orientation.A1.tolist(), frame)
        goal_pose_ps = cf.create_pose_stamped(
            position.tolist() + orientation.tolist(), frame)
        r = self.reactive_gr.cm.move_cartesian_ik(goal_pose_ps, blocking=0, step_size=.005, \
                pos_thres=.02, rot_thres=.1, timeout=rospy.Duration(timeout),
                settling_time=rospy.Duration(settling_time), vel=vel)
        if not (r == 'sent goal' or r == 'success'):
            return r

        #move_cartesian_ik(self, goal_pose, collision_aware = 0, blocking = 1,
        #                  step_size = .005, pos_thres = .02, rot_thres = .1,
        #                  timeout = rospy.Duration(30.),
        #                  settling_time = rospy.Duration(0.25), vel = .15):

        stop_trigger = None
        done_time = None
        start_time = rospy.get_rostime()
        while stop_trigger == None:
            for f, name in stop_funcs:
                if f():
                    self.arm_obj.stop_trajectory_execution()
                    stop_trigger = name
                    rospy.loginfo(
                        '"%s" requested that motion should be stopped.' %
                        (name))
                    break

            if timeout != 0. and rospy.get_rostime(
            ) - start_time > rospy.Duration(timeout):
                rospy.loginfo("_move_cartesian_ik: motion timed out")
                break

            if (not done_time) and (not self.arm_obj.has_active_goal()):
                #rospy.loginfo("check_cartesian_done returned 1")
                done_time = rospy.get_rostime()

            if done_time and rospy.get_rostime() - done_time > rospy.Duration(
                    settling_time):
                rospy.loginfo("_move_cartesian_ik: done settling")
                break

        if stop_trigger == 'pressure_safety':
            print 'ROBOT SAFETY ERROR'
            #raise RobotSafetyError(stop_trigger)
        return stop_trigger
Example #13
 def get_pose(self):
     p_base = tfu.transform('map', 'base_footprint', self.tflistener) \
             * tfu.tf_as_matrix(([0., 0., 0., 1.], tr.quaternion_from_euler(0,0,0)))
     return tfu.matrix_as_tf(p_base)
Example #14
 def omni_T_torso(self, torso_mat):
     l0_mat = tfu.transform(self.omni_name + '_center', '/torso_lift_link', self.tflistener) * torso_mat
     l0_t = (np.array(tr.translation_from_matrix(l0_mat)) / np.array(self.scale_omni_l0)).tolist()
     l0_q = tr.quaternion_from_matrix(l0_mat)
     omni_pt_mat = tfu.transform(self.omni_name, self.omni_name + '_center', self.tflistener) * tfu.tf_as_matrix((l0_t, l0_q))
     return tfu.matrix_as_tf(omni_pt_mat)
Example #15
def process_bag(full_bag_name, prosilica_image_file, model_image_file, experiment_start_condition_pkl, arm_used='left'):
    bag_path, bag_name_ext = os.path.split(full_bag_name)
    filename, ext = os.path.splitext(bag_name_ext)
    
    ###############################################################################
    # Playback the bag
    bag_playback = Process(target=playback_bag, args=(full_bag_name,))
    bag_playback.start()

    ###############################################################################
    ## Listen for transforms using rosbag
    rospy.init_node('bag_processor')

    tl = tf.TransformListener()

    print 'waiting for transform'
    tl.waitForTransform('map', 'base_footprint', rospy.Time(), rospy.Duration(20))
    # Extract the starting location map_T_bf
    p_base = tfu.transform('map', 'base_footprint', tl) \
            * tfu.tf_as_matrix(([0., 0., 0., 1.], tr.quaternion_from_euler(0,0,0)))
    t, r = tfu.matrix_as_tf(p_base)
    pose_base = (t, r)
    print 'done with tf'

    ##########################################################
    ## Find contact locations
    start_conditions = ut.load_pickle(experiment_start_condition_pkl)
    start_conditions['highdef_image'] = prosilica_image_file
    start_conditions['model_image'] = model_image_file
    rospy.loginfo('extracting object localization features')
    start_conditions['pose_parameters'] = extract_object_localization_features2(start_conditions, tl, arm_used, p_base)

    if bag_playback.is_alive():
        rospy.loginfo('Terminating playback process')
        bag_playback.terminate()
        time.sleep(1)
        bag_playback.terminate()
        time.sleep(1)
        rospy.loginfo('Playback process terminated? %s' % str(not bag_playback.is_alive()))


    ###############################################################################
    #Read bag using programmatic API
    pr2_kinematics = pr2k.PR2Kinematics(tl)
    converter = JointMsgConverter()
    rospy.loginfo('opening bag, reading state topics')
    topics_dict = ru.bag_sel(full_bag_name, ['/joint_states', '/l_cart/command_pose', 
                                             '/r_cart/command_pose', '/torso_controller/state',
                                             '/pressure/l_gripper_motor', '/pressure/r_gripper_motor'])

    ## Select the arm that has been moving, segment joint states based on contact states.
    if arm_used == 'left':
        pressures = topics_dict['/pressure/l_gripper_motor']
    elif arm_used == 'right':
        pressures = topics_dict['/pressure/r_gripper_motor']
    else:
        raise RuntimeError('arm_used invalid')

    ## find contact times
    rospy.loginfo('Finding contact times')
    left_f, right_f, ptimes = hpr2.pressure_state_to_mat(pressures['msg'])

    ## create segments based on contacts
    # TODO: make this accept more contact stages
    contact_times = find_contact_times(left_f, right_f, ptimes, 250)
    if len(contact_times) > 2:
        time_segments = [['start', contact_times[0]], [contact_times[0], contact_times[-1]], [contact_times[-1], 'end']]
    else:
        time_segments = [['start', 'end']]

    rospy.loginfo('Splitting messages based on contact times')
    ## split pressure readings based on contact times
    pressure_lseg = segment_msgs(time_segments, topics_dict['/pressure/l_gripper_motor']['msg'])
    pressure_rseg = segment_msgs(time_segments, topics_dict['/pressure/r_gripper_motor']['msg'])

    ## split cartesian commands based on contact times
    lcart_seg = segment_msgs(time_segments, topics_dict['/l_cart/command_pose']['msg'])
    rcart_seg = segment_msgs(time_segments, topics_dict['/r_cart/command_pose']['msg'])

    ## split joint states
    joint_states = topics_dict['/joint_states']['msg']
    print 'there are %d joint state messages in bag' % len(joint_states)

    j_segs     = segment_msgs(time_segments, topics_dict['/joint_states']['msg'])
    jseg_dicts = [converter.msgs_to_dict(seg) for seg in j_segs]
    # find the first set of joint states
    j0_dict    = jseg_dicts[0][0]

    ## perform FK
    rospy.loginfo('Performing FK to find tip locations')
    bf_T_obj = htf.composeHomogeneousTransform(start_conditions['pose_parameters']['frame_bf'], 
                                               start_conditions['pose_parameters']['center_bf'])
    obj_T_bf = np.linalg.inv(bf_T_obj)
    for jseg_dict in jseg_dicts:
        for d in jseg_dict:
            rtip_bf = pr2_kinematics.right.fk('base_footprint',
                    'r_wrist_roll_link', 'r_gripper_tool_frame',
                    d['poses']['rarm'].A1.tolist())
            ltip_bf =  pr2_kinematics.left.fk('base_footprint',
                    'l_wrist_roll_link', 'l_gripper_tool_frame',
                    d['poses']['larm'].A1.tolist())
            rtip_obj = obj_T_bf * rtip_bf
            ltip_obj = obj_T_bf * ltip_bf

            d['rtip_obj'] = tfu.matrix_as_tf(rtip_obj)
            d['ltip_obj'] = tfu.matrix_as_tf(ltip_obj)

            d['rtip_bf'] = tfu.matrix_as_tf(rtip_bf)
            d['ltip_bf'] = tfu.matrix_as_tf(ltip_bf)
    
    ###############################################################################
    # make movement state dictionaries, one for each state
    movement_states = []
    for i, seg in enumerate(time_segments):
        name = "state_%d" % i
        start_times = [lcart_seg[i][0].header.stamp.to_time(), 
                       rcart_seg[i][0].header.stamp.to_time(), 
                       jseg_dicts[i][0]['time'],
                       pressure_lseg[i][0].header.stamp.to_time(), 
                       pressure_rseg[i][0].header.stamp.to_time()]

        sdict = {'name': name,
                 'start_time': np.min(start_times),
                 'cartesian': [[ru.ros_to_dict(ps) for ps in lcart_seg[i]], 
                               [ru.ros_to_dict(ps) for ps in rcart_seg[i]]],
                 'joint_states': jseg_dicts[i]
                 #'pressure': [pressure_lseg[i], pressure_rseg[i]]
                 } 

        movement_states.append(sdict)

    # store in a dict
    data = {'start_conditions': start_conditions, # ['camera_info', 'map_T_bf', 'pro_T_bf', 'points' (in base_frame), 
                                                  # 'highdef_image', 'model_image',
                                                    ## 'pose_parameters'
                                                        ## 'descriptors'
                                                        ## 'directions' (wrt canonical orientation)
                                                        ## 'closest_feature'
            'base_pose': pose_base, 
            'robot_pose': j0_dict,
            'arm': arm_used,
            'movement_states': movement_states}

    # save dicts to pickles
    processed_bag_name = '%s_processed.pkl' % os.path.join(bag_path, filename)
    rospy.loginfo('saving to %s' % processed_bag_name)
    ut.save_pickle(data, processed_bag_name)
    bag_playback.join()
    rospy.loginfo('finished!')
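
The heart of Example 15 is turning the detected contact timestamps into three windows (before contact, in contact, after contact) and binning every message stream by header stamp. The real segment_msgs is not shown in this listing; a hedged sketch of what it could look like, matching the 'start'/'end' sentinels used above:

def segment_msgs(time_segments, msgs):
    # Bin messages into [start, end) windows keyed by header stamp.
    segments = []
    for start, end in time_segments:
        seg = []
        for m in msgs:
            t = m.header.stamp.to_time()
            if (start == 'start' or t >= start) and (end == 'end' or t < end):
                seg.append(m)
        segments.append(seg)
    return segments
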
Example #16
 def frame_loc(from_frame):
     p_base = tfu.transform(self.base_frame, from_frame, self.tflistener) \
                    * tfu.tf_as_matrix(([0., 0., 0., 1.], tr.quaternion_from_euler(0,0,0)))
     return tfu.matrix_as_tf(p_base)
Example #17
    def _move_cartesian_cart(self, position, orientation, \
            stop_funcs=[], timeout = 3.0, settling_time = 0.5, frame='base_link'):

        # TODO: Test this function...
        # Transform the pose from 'frame' to 'base_link'
        self.tf_listener.waitForTransform('base_link', frame, rospy.Time(),
                                          rospy.Duration(10))
        frame_T_base = tfu.transform('base_link', frame, self.tf_listener)
        init_cart_pose = tfu.tf_as_matrix(
            (position.A1.tolist(), orientation.A1.tolist()))
        base_cart_pose = frame_T_base * init_cart_pose

        # Get IK from tool frame to wrist frame for control input
        self.tf_listener.waitForTransform(self.ik_frame, self.tool_frame,
                                          rospy.Time(), rospy.Duration(10))
        toolframe_T_ikframe = tfu.transform(self.tool_frame, self.ik_frame,
                                            self.tf_listener)
        #base_cart_pose = tfu.tf_as_matrix((base_position.A1.tolist(), base_orientation.A1.tolist()))
        base_cart_pose = base_cart_pose * toolframe_T_ikframe
        base_position, base_orientation = tfu.matrix_as_tf(base_cart_pose)

        pose_stamped = cf.create_pose_stamped(base_position.tolist() +
                                              base_orientation.tolist())
        rg = self.reactive_gr
        rg.check_preempt()

        #send the goal to the Cartesian controllers
        #rospy.loginfo("sending goal to Cartesian controllers")
        (pos, rot) = cf.pose_stamped_to_lists(rg.cm.tf_listener, pose_stamped,
                                              'base_link')
        rg.move_cartesian_step(pos + rot, timeout, settling_time)

        #watch the fingertip/palm sensors until the controllers are done and then some
        start_time = rospy.get_rostime()
        done_time = None
        #stopped = False
        stop_trigger = None
        #print 'entering loop'
        stop_detector = ArmStoppedDetector()
        while (1):

            rg.check_preempt()
            if len(stop_funcs) > 0:
                for f, name in stop_funcs:
                    if f():
                        rg.cm.switch_to_joint_mode()
                        rg.cm.freeze_arm()
                        #stopped = True
                        stop_trigger = name
                        rospy.loginfo(
                            '"%s" requested that motion should be stopped.' %
                            (name))
                        break
                if stop_trigger != None:
                    break

            #if stop_func != None and stop_func():
            #    rg.cm.switch_to_joint_mode()
            #    rg.cm.freeze_arm()
            #    stopped = True
            #    break

            #check if we're actually there
            if rg.cm.check_cartesian_near_pose(pose_stamped, .0025, .05):
                rospy.loginfo("_move_cartesian_cart: reached pose")
                #stop_trigger = 'at_pose'
                break

            stop_detector.record_diff(self.arm_obj.pose_cartesian())
            if stop_detector.is_stopped():
                rospy.loginfo("_move_cartesian_cart: arm stopped")
                #stop_trigger = 'stopped'
                break

            # if rg.cm.check_cartesian_really_done(pose_stamped, .0025, .05):
            #     #rospy.loginfo("actually got there")
            #     break
            #
            # #check if the controllers think we're done
            # if not done_time and rg.cm.check_cartesian_done():
            #     #rospy.loginfo("check_cartesian_done returned 1")
            #     done_time = rospy.get_rostime()

            # #done settling
            # if done_time and rospy.get_rostime() - done_time > rospy.Duration(settling_time):
            #     rospy.loginfo("done settling")
            #     break

            #timed out
            #if timeout != 0. and rospy.get_rostime() - start_time > rospy.Duration(timeout):
            #    rospy.loginfo("_move_cartesian_cart: timed out")
            #    break

        #if stop_trigger == 'pressure_safety' or stop_trigger == 'self_collision':
        if stop_trigger == 'pressure_safety':
            print 'ROBOT SAFETY ERROR'
            #raise RobotSafetyError(stop_trigger)
        #name = ut.formatted_time() + '_stop_detector.pkl'
        #print 'saved', name
        #ut.save_pickle(stop_detector, name)
        return stop_trigger
Example #18
 def frame_loc(from_frame):
     p_base = tfu.transform(self.base_frame, from_frame, self.tflistener) \
                    * tfu.tf_as_matrix(([0., 0., 0., 1.], tr.quaternion_from_euler(0,0,0)))
     return tfu.matrix_as_tf(p_base)
Example #19
 def get_pose(self):
     p_base = tfu.transform('map', 'base_footprint', self.tflistener) \
             * tfu.tf_as_matrix(([0., 0., 0., 1.], tr.quaternion_from_euler(0,0,0)))
     return tfu.matrix_as_tf(p_base)
Example #20
def process_bag(full_bag_name,
                prosilica_image_file,
                model_image_file,
                experiment_start_condition_pkl,
                arm_used='left'):
    bag_path, bag_name_ext = os.path.split(full_bag_name)
    filename, ext = os.path.splitext(bag_name_ext)

    ###############################################################################
    # Playback the bag
    bag_playback = Process(target=playback_bag, args=(full_bag_name, ))
    bag_playback.start()

    ###############################################################################
    ## Listen for transforms using rosbag
    rospy.init_node('bag_processor')

    tl = tf.TransformListener()

    print 'waiting for transform'
    tl.waitForTransform('map', 'base_footprint', rospy.Time(),
                        rospy.Duration(20))
    # Extract the starting location map_T_bf
    p_base = tfu.transform('map', 'base_footprint', tl) \
            * tfu.tf_as_matrix(([0., 0., 0., 1.], tr.quaternion_from_euler(0,0,0)))
    t, r = tfu.matrix_as_tf(p_base)
    pose_base = (t, r)
    print 'done with tf'

    ##########################################################
    ## Find contact locations
    start_conditions = ut.load_pickle(experiment_start_condition_pkl)
    start_conditions['highdef_image'] = prosilica_image_file
    start_conditions['model_image'] = model_image_file
    rospy.loginfo('extracting object localization features')
    start_conditions[
        'pose_parameters'] = extract_object_localization_features2(
            start_conditions, tl, arm_used, p_base)

    if bag_playback.is_alive():
        rospy.loginfo('Terminating playback process')
        bag_playback.terminate()
        time.sleep(1)
        bag_playback.terminate()
        time.sleep(1)
        rospy.loginfo('Playback process terminated? %s' %
                      str(not bag_playback.is_alive()))

    ###############################################################################
    #Read bag using programmatic API
    pr2_kinematics = pr2k.PR2Kinematics(tl)
    converter = JointMsgConverter()
    rospy.loginfo('opening bag, reading state topics')
    topics_dict = ru.bag_sel(full_bag_name, [
        '/joint_states', '/l_cart/command_pose', '/r_cart/command_pose',
        '/torso_controller/state', '/pressure/l_gripper_motor',
        '/pressure/r_gripper_motor'
    ])

    ## Select the arm that has been moving, segment joint states based on contact states.
    if arm_used == 'left':
        pressures = topics_dict['/pressure/l_gripper_motor']
    elif arm_used == 'right':
        pressures = topics_dict['/pressure/r_gripper_motor']
    else:
        raise RuntimeError('arm_used invalid')

    ## find contact times
    rospy.loginfo('Finding contact times')
    left_f, right_f, ptimes = hpr2.pressure_state_to_mat(pressures['msg'])

    ## create segments based on contacts
    # TODO: make this accept more contact stages
    contact_times = find_contact_times(left_f, right_f, ptimes, 250)
    if len(contact_times) > 2:
        time_segments = [['start', contact_times[0]],
                         [contact_times[0], contact_times[-1]],
                         [contact_times[-1], 'end']]
    else:
        time_segments = [['start', 'end']]

    rospy.loginfo('Splitting messages based on contact times')
    ## split pressure readings based on contact times
    pressure_lseg = segment_msgs(
        time_segments, topics_dict['/pressure/l_gripper_motor']['msg'])
    pressure_rseg = segment_msgs(
        time_segments, topics_dict['/pressure/r_gripper_motor']['msg'])

    ## split cartesian commands based on contact times
    lcart_seg = segment_msgs(time_segments,
                             topics_dict['/l_cart/command_pose']['msg'])
    rcart_seg = segment_msgs(time_segments,
                             topics_dict['/r_cart/command_pose']['msg'])

    ## split joint states
    joint_states = topics_dict['/joint_states']['msg']
    print 'there are %d joint state messages in bag' % len(joint_states)

    j_segs = segment_msgs(time_segments, topics_dict['/joint_states']['msg'])
    jseg_dicts = [converter.msgs_to_dict(seg) for seg in j_segs]
    # find the first set of joint states
    j0_dict = jseg_dicts[0][0]

    ## perform FK
    rospy.loginfo('Performing FK to find tip locations')
    bf_T_obj = htf.composeHomogeneousTransform(
        start_conditions['pose_parameters']['frame_bf'],
        start_conditions['pose_parameters']['center_bf'])
    obj_T_bf = np.linalg.inv(bf_T_obj)
    for jseg_dict in jseg_dicts:
        for d in jseg_dict:
            rtip_bf = pr2_kinematics.right.fk('base_footprint',
                                              'r_wrist_roll_link',
                                              'r_gripper_tool_frame',
                                              d['poses']['rarm'].A1.tolist())
            ltip_bf = pr2_kinematics.left.fk('base_footprint',
                                             'l_wrist_roll_link',
                                             'l_gripper_tool_frame',
                                             d['poses']['larm'].A1.tolist())
            rtip_obj = obj_T_bf * rtip_bf
            ltip_obj = obj_T_bf * ltip_bf

            d['rtip_obj'] = tfu.matrix_as_tf(rtip_obj)
            d['ltip_obj'] = tfu.matrix_as_tf(ltip_obj)

            d['rtip_bf'] = tfu.matrix_as_tf(rtip_bf)
            d['ltip_bf'] = tfu.matrix_as_tf(ltip_bf)

    ###############################################################################
    # make movement state dictionaries, one for each state
    movement_states = []
    for i, seg in enumerate(time_segments):
        name = "state_%d" % i
        start_times = [
            lcart_seg[i][0].header.stamp.to_time(),
            rcart_seg[i][0].header.stamp.to_time(), jseg_dicts[i][0]['time'],
            pressure_lseg[i][0].header.stamp.to_time(),
            pressure_rseg[i][0].header.stamp.to_time()
        ]

        sdict = {
            'name':
            name,
            'start_time':
            np.min(start_times),
            'cartesian': [[ru.ros_to_dict(ps) for ps in lcart_seg[i]],
                          [ru.ros_to_dict(ps) for ps in rcart_seg[i]]],
            'joint_states':
            jseg_dicts[i]
            #'pressure': [pressure_lseg[i], pressure_rseg[i]]
        }

        movement_states.append(sdict)

    # store in a dict
    data = {
        'start_conditions':
        start_conditions,  # ['camera_info', 'map_T_bf', 'pro_T_bf', 'points' (in base_frame), 
        # 'highdef_image', 'model_image',
        ## 'pose_parameters'
        ## 'descriptors'
        ## 'directions' (wrt canonical orientation)
        ## 'closest_feature'
        'base_pose': pose_base,
        'robot_pose': j0_dict,
        'arm': arm_used,
        'movement_states': movement_states
    }

    # save dicts to pickles
    processed_bag_name = '%s_processed.pkl' % os.path.join(bag_path, filename)
    rospy.loginfo('saving to %s' % processed_bag_name)
    ut.save_pickle(data, processed_bag_name)
    bag_playback.join()
    rospy.loginfo('finished!')