示例#1
0
def extract_object_localization_features(start_conditions, tflistener):
    """Build a SURF-based localization model of a manipulated object.

    Blocks until the contact-detection helper reports contact has stopped,
    extracts SURF features from the recorded model image, assigns each
    feature a 3D location from the recorded point cloud, and derives a
    canonical object frame plus a per-feature orientation offset.

    start_conditions: dict -- usage here implies keys 'model_image' (image
        file path), 'points' (point cloud message in the base frame),
        'pro_T_bf' (homogeneous base-frame -> prosilica-frame transform) and
        'camera_info' (object with a .project() method); TODO confirm schema
        against the recorder.
    tflistener: TF listener handed to ExtractTFData for contact tracking.

    Returns a dict with keys 'descriptors', 'directions', 'contact_bf',
    'closest_feature' and 'object_frame_pro'.
    """
    ## Find contacts: poll at 10 Hz until the helper flags contact_stopped
    find_contact_locs = ExtractTFData(tflistener)
    r = rospy.Rate(10)
    while not rospy.is_shutdown() and not find_contact_locs.contact_stopped:
        r.sleep()
    contact_locs = find_contact_locs.contact_locs

    ## Detect features, get 3d location for each feature
    model_file_name = start_conditions['model_image']
    model_surf_loc, model_surf_descriptors = fea.surf_color(cv.LoadImage(model_file_name))

    ## Assign 3d location to surf features
    point_cloud_bf = ru.pointcloud_to_np(start_conditions['points'])
    # Homogenize (append a row of ones), then map the cloud into the
    # prosilica frame and project it into image coordinates.
    point_cloud_pro = start_conditions['pro_T_bf'] * np.row_stack((point_cloud_bf, 1+np.zeros((1, point_cloud_bf.shape[1]))))
    point_cloud_2d_pro = start_conditions['camera_info'].project(point_cloud_pro[0:3,:])
    # One 3D base-frame position per SURF keypoint, looked up via the
    # projected cloud; KD-tree over those positions for nearest queries.
    surf_loc3d_arr_bf = np.array(assign_3d_to_surf(model_surf_loc, point_cloud_bf, point_cloud_2d_pro))
    surf_loc_tree_bf = sp.KDTree(surf_loc3d_arr_bf.T)

    #################################################
    # not needed right now but can be useful later..
    #################################################
    # Get SURF features closest to contact locs
    # r[1][2] / r[1][3] are presumably the left/right fingertip positions
    # recorded with each contact -- TODO confirm against ExtractTFData.
    left_contact, right_contact = zip(*[(np.matrix(r[1][2]).T, np.matrix(r[1][3]).T) for r in contact_locs])
    left_contact = np.column_stack(left_contact)
    right_contact = np.column_stack(right_contact)
    # Midpoint of the first recorded left/right contact pair (base frame).
    mid_contact_bf = (left_contact[:,0] + right_contact[:,0]) / 2.
    #data_dict['pro_T_bf']  * np.row_stack((mid_contact_bf, np

    surf_closest_idx = surf_loc_tree_bf.query(np.array(mid_contact_bf.T))[1] #Get surf feature at mid point
    # NOTE(review): surf_closest_idx comes back as an index *array* from
    # KDTree.query on a 1xN point; indexing the Python list model_surf_loc
    # with it may only work as a scalar -- confirm intended behavior.
    surf_closest3d   = surf_loc3d_arr_bf[:, surf_closest_idx]
    surf_closest_fea = model_surf_loc[surf_closest_idx]

    #Create a frame for this group of features
    surf_loc_3d_pro = (start_conditions['pro_T_bf'] * np.row_stack([surf_loc3d_arr_bf, 1 + np.zeros((1, surf_loc3d_arr_bf.shape[1]))]))[0:3,:]
    object_frame_pro = create_frame(np.matrix(surf_loc_3d_pro))

    #Find out what the SURF features point to in this new frame
    surf_directions = []
    for loc, lap, size, direction, hess in model_surf_loc:
        # SURF reports orientation in degrees; convert before projecting.
        drad = np.radians(direction)
        #project direction into the canonical object frame
        surf_dir_obj = object_frame_pro * np.matrix([np.cos(drad), np.sin(drad), 0.]).T

        #measure angle between SURF feature and x axis of object frame, store this as delta theta
        delta_theta = math.atan2(surf_dir_obj[1,0], surf_dir_obj[0,0])
        surf_directions.append(delta_theta)

    return {
            'descriptors': model_surf_descriptors, 
            'directions': surf_directions, 
            'contact_bf': mid_contact_bf,
            'closest_feature': surf_closest_fea[0],
            #'object_frame_bf': [np.mean(np.matrix(surf_loc3d_arr_bf), 1), create_frame(surf_loc3d_arr_bf)],
            'object_frame_pro': [np.mean(np.matrix(surf_loc_3d_pro), 1), object_frame_pro, surf_loc_3d_pro]
            }
示例#2
0
    def approach_perpendicular_to_surface(self, point_bl, voi_radius,
                                          dist_approach):
        """Drive the base to a stand-off point in front of a surface,
        approaching along the estimated surface normal, then face the point.

        point_bl: target point in the 'base_link' frame.
        voi_radius: half-extent (m) of the box of laser points used to
            estimate the surface normal around the target.
        dist_approach: stand-off distance (m) from the surface.

        Returns the result code from robot.base.set_pose; a value of 3 is
        treated as success here -- TODO confirm against the base API.
        """
        # Record the target in the fixed map frame before any motion, so it
        # can be re-derived in base_link after the robot moves.
        map_T_base_link0 = tfu.transform('map', 'base_link', self.tf_listener)
        point_map0 = tfu.transform_points(map_T_base_link0, point_bl)
        #pdb.set_trace()
        self.turn_to_point(point_bl, block=False)

        # Refresh the target in base_link coordinates after starting the turn.
        point_bl = tfu.transform_points(tfu.transform('base_link', 'map', self.tf_listener), \
                                        point_map0)
        # Full sweep from +180 to -180 degrees; 2.5 is presumably the scan
        # duration in seconds -- confirm against the laser_scan service.
        point_cloud_bl = self.laser_scan.scan(math.radians(180.),
                                              math.radians(-180.), 2.5)
        point_cloud_np_bl = ru.pointcloud_to_np(point_cloud_bl)
        rospy.loginfo('approach_perpendicular_to_surface: pointcloud size %d' \
                % point_cloud_np_bl.shape[1])
        # Volume of interest: box of half-extent voi_radius around the target.
        voi_points_bl, limits_bl = i3d.select_rect(point_bl, voi_radius,
                                                   voi_radius, voi_radius,
                                                   point_cloud_np_bl)
        #TODO: use closest plane instead of closest points determined with KDTree
        normal_bl = i3d.calc_normal(voi_points_bl)
        # Stand-off point: back off from the surface along its normal.
        point_in_front_mechanism_bl = point_bl + normal_bl * dist_approach
        map_T_base_link = tfu.transform('map', 'base_link', self.tf_listener)
        point_in_front_mechanism_map = tfu.transform_points(
            map_T_base_link, point_in_front_mechanism_bl)

        #Navigate to point (TODO: check for collisions)
        point_map = tfu.transform_points(map_T_base_link, point_bl)
        t_current_map, r_current_map = self.robot.base.get_pose()
        rospy.loginfo('approach_perpendicular_to_surface: driving for %.3f m to front of surface' \
                % np.linalg.norm(t_current_map[0:2] - point_in_front_mechanism_map[0:2,0].T))
        #pdb.set_trace()
        rvalue = self.robot.base.set_pose(
            point_in_front_mechanism_map.T.A1.tolist(), r_current_map, 'map')
        if rvalue != 3:
            # Navigation failed: propagate the error code without turning.
            return rvalue

        t1_current_map, r1_current_map = self.robot.base.get_pose()
        # Fixed log typo: "from from of surface" -> "from front of surface".
        rospy.loginfo(
            'approach_perpendicular_to_surface: %.3f m away from front of surface'
            % np.linalg.norm(t1_current_map[0:2] -
                             point_in_front_mechanism_map[0:2, 0].T))

        #Rotate to face point (TODO: check for collisions)
        base_link_T_map = tfu.transform('base_link', 'map', self.tf_listener)
        point_bl = tfu.transform_points(base_link_T_map, point_map)
        #pdb.set_trace()
        self.turn_to_point(point_bl, block=False)
        time.sleep(2.)

        return rvalue
示例#3
0
def find3d_surf(start_conditions):
    """Locate SURF features from the model image in 3D (prosilica frame).

    Returns (surf_locations, surf_descriptors, surf_locations_3d_pro,
    point_cloud_2d_pro).
    """
    # Move the recorded cloud from the base frame into the prosilica frame,
    # then project it into the image plane, keeping only in-bounds points.
    cloud_bf = ru.pointcloud_to_np(start_conditions['points'])
    cloud_pro = tfu.transform_points(start_conditions['pro_T_bf'], cloud_bf)
    cloud_2d_pro, cloud_in_bounds_pro = project_2d_bounded(
        start_conditions['camera_info'], cloud_pro)

    # Detect SURF keypoints on the stored model image and assign each one a
    # 3D location from the nearest projected cloud point.
    model_image = cv.LoadImage(start_conditions['model_image'])
    surf_locs, surf_descriptors = fea.surf_color(model_image)
    surf_locs_3d_pro = np.matrix(
        assign_3d_to_surf(surf_locs, cloud_in_bounds_pro, cloud_2d_pro))
    return surf_locs, surf_descriptors, surf_locs_3d_pro, cloud_2d_pro
    def approach_perpendicular_to_surface(self, point_bl, voi_radius, dist_approach):
        """Drive the base to a stand-off point in front of a surface,
        approaching along the estimated surface normal, then face the point.

        point_bl: target point in the 'base_link' frame.
        voi_radius: half-extent (m) of the box of laser points used to
            estimate the surface normal around the target.
        dist_approach: stand-off distance (m) from the surface.

        Returns the result code from robot.base.set_pose; a value of 3 is
        treated as success here -- TODO confirm against the base API.
        """
        # Record the target in the fixed map frame before any motion.
        map_T_base_link0 = tfu.transform('map', 'base_link', self.tf_listener)
        point_map0 = tfu.transform_points(map_T_base_link0, point_bl)
        #pdb.set_trace()
        self.turn_to_point(point_bl, block=False)

        # Refresh the target in base_link coordinates after starting the turn.
        point_bl = tfu.transform_points(tfu.transform('base_link', 'map', self.tf_listener), \
                                        point_map0)
        # Full sweep from +180 to -180 degrees; 2.5 is presumably the scan
        # duration in seconds -- confirm against the laser_scan service.
        point_cloud_bl = self.laser_scan.scan(math.radians(180.), math.radians(-180.), 2.5)
        point_cloud_np_bl = ru.pointcloud_to_np(point_cloud_bl)
        rospy.loginfo('approach_perpendicular_to_surface: pointcloud size %d' \
                % point_cloud_np_bl.shape[1])
        # Volume of interest: box of half-extent voi_radius around the target.
        voi_points_bl, limits_bl = i3d.select_rect(point_bl, voi_radius, voi_radius, voi_radius, point_cloud_np_bl)
        #TODO: use closest plane instead of closest points determined with KDTree
        normal_bl = i3d.calc_normal(voi_points_bl)
        # Stand-off point: back off from the surface along its normal.
        point_in_front_mechanism_bl = point_bl + normal_bl * dist_approach
        map_T_base_link = tfu.transform('map', 'base_link', self.tf_listener)
        point_in_front_mechanism_map = tfu.transform_points(map_T_base_link, point_in_front_mechanism_bl)

        #Navigate to point (TODO: check for collisions)
        point_map = tfu.transform_points(map_T_base_link, point_bl)
        t_current_map, r_current_map = self.robot.base.get_pose()
        rospy.loginfo('approach_perpendicular_to_surface: driving for %.3f m to front of surface' \
                % np.linalg.norm(t_current_map[0:2] - point_in_front_mechanism_map[0:2,0].T))
        #pdb.set_trace()
        rvalue = self.robot.base.set_pose(point_in_front_mechanism_map.T.A1.tolist(), r_current_map, 'map')
        if rvalue != 3:
            # Navigation failed: propagate the error code without turning.
            return rvalue

        t1_current_map, r1_current_map = self.robot.base.get_pose()
        # Fixed log typo: "from from of surface" -> "from front of surface".
        rospy.loginfo('approach_perpendicular_to_surface: %.3f m away from front of surface' % np.linalg.norm(t1_current_map[0:2] - point_in_front_mechanism_map[0:2,0].T))

        #Rotate to face point (TODO: check for collisions)
        base_link_T_map = tfu.transform('base_link', 'map', self.tf_listener)
        point_bl = tfu.transform_points(base_link_T_map, point_map)
        #pdb.set_trace()
        self.turn_to_point(point_bl, block=False)
        time.sleep(2.)

        return rvalue
示例#5
0
def find3d_surf(start_conditions):
    """Compute 3D locations (prosilica frame) for SURF features of the
    model image.

    Returns a 4-tuple: (surf_locations, surf_descriptors,
    surf_locations_3d_pro, point_cloud_2d_pro).
    """
    ## Project the recorded point cloud into the image
    base_cloud = ru.pointcloud_to_np(start_conditions['points'])
    # base frame -> prosilica frame
    pro_cloud = tfu.transform_points(start_conditions['pro_T_bf'], base_cloud)
    # keep only points that land inside the image bounds
    pro_cloud_2d, pro_cloud_kept = project_2d_bounded(
        start_conditions['camera_info'], pro_cloud)

    ## SURF detection on the model image, then per-keypoint 3D assignment
    keypoints, descriptors = fea.surf_color(
        cv.LoadImage(start_conditions['model_image']))
    keypoints_3d_pro = np.matrix(
        assign_3d_to_surf(keypoints, pro_cloud_kept, pro_cloud_2d))

    return keypoints, descriptors, keypoints_3d_pro, pro_cloud_2d
示例#6
0
 def scan_np(self, start, end, duration):
     """Request a laser scan and return the resulting cloud as a numpy array."""
     return ru.pointcloud_to_np(self.sp(start, end, duration).cloud)
示例#7
0
def extract_object_localization_features(start_conditions, tflistener):
    """Build a SURF-based localization model of a manipulated object.

    Blocks until the contact-detection helper reports contact has stopped,
    extracts SURF features from the recorded model image, assigns each
    feature a 3D location from the recorded point cloud, and derives a
    canonical object frame plus a per-feature orientation offset.

    start_conditions: dict -- usage here implies keys 'model_image' (image
        file path), 'points' (point cloud message in the base frame),
        'pro_T_bf' (homogeneous base-frame -> prosilica-frame transform) and
        'camera_info' (object with a .project() method); TODO confirm schema
        against the recorder.
    tflistener: TF listener handed to ExtractTFData for contact tracking.

    Returns a dict with keys 'descriptors', 'directions', 'contact_bf',
    'closest_feature' and 'object_frame_pro'.
    """
    ## Find contacts: poll at 10 Hz until the helper flags contact_stopped
    find_contact_locs = ExtractTFData(tflistener)
    r = rospy.Rate(10)
    while not rospy.is_shutdown() and not find_contact_locs.contact_stopped:
        r.sleep()
    contact_locs = find_contact_locs.contact_locs

    ## Detect features, get 3d location for each feature
    model_file_name = start_conditions['model_image']
    model_surf_loc, model_surf_descriptors = fea.surf_color(
        cv.LoadImage(model_file_name))

    ## Assign 3d location to surf features
    point_cloud_bf = ru.pointcloud_to_np(start_conditions['points'])
    # Homogenize (append a row of ones), then map the cloud into the
    # prosilica frame and project it into image coordinates.
    point_cloud_pro = start_conditions['pro_T_bf'] * np.row_stack(
        (point_cloud_bf, 1 + np.zeros((1, point_cloud_bf.shape[1]))))
    point_cloud_2d_pro = start_conditions['camera_info'].project(
        point_cloud_pro[0:3, :])
    # One 3D base-frame position per SURF keypoint, looked up via the
    # projected cloud; KD-tree over those positions for nearest queries.
    surf_loc3d_arr_bf = np.array(
        assign_3d_to_surf(model_surf_loc, point_cloud_bf, point_cloud_2d_pro))
    surf_loc_tree_bf = sp.KDTree(surf_loc3d_arr_bf.T)

    #################################################
    # not needed right now but can be useful later..
    #################################################
    # Get SURF features closest to contact locs
    # r[1][2] / r[1][3] are presumably the left/right fingertip positions
    # recorded with each contact -- TODO confirm against ExtractTFData.
    left_contact, right_contact = zip(*[(np.matrix(r[1][2]).T,
                                         np.matrix(r[1][3]).T)
                                        for r in contact_locs])
    left_contact = np.column_stack(left_contact)
    right_contact = np.column_stack(right_contact)
    # Midpoint of the first recorded left/right contact pair (base frame).
    mid_contact_bf = (left_contact[:, 0] + right_contact[:, 0]) / 2.
    #data_dict['pro_T_bf']  * np.row_stack((mid_contact_bf, np

    surf_closest_idx = surf_loc_tree_bf.query(np.array(
        mid_contact_bf.T))[1]  #Get surf feature at mid point
    # NOTE(review): surf_closest_idx comes back as an index *array* from
    # KDTree.query on a 1xN point; indexing the Python list model_surf_loc
    # with it may only work as a scalar -- confirm intended behavior.
    surf_closest3d = surf_loc3d_arr_bf[:, surf_closest_idx]
    surf_closest_fea = model_surf_loc[surf_closest_idx]

    #Create a frame for this group of features
    surf_loc_3d_pro = (start_conditions['pro_T_bf'] * np.row_stack(
        [surf_loc3d_arr_bf, 1 + np.zeros(
            (1, surf_loc3d_arr_bf.shape[1]))]))[0:3, :]
    object_frame_pro = create_frame(np.matrix(surf_loc_3d_pro))

    #Find out what the SURF features point to in this new frame
    surf_directions = []
    for loc, lap, size, direction, hess in model_surf_loc:
        # SURF reports orientation in degrees; convert before projecting.
        drad = np.radians(direction)
        #project direction into the canonical object frame
        surf_dir_obj = object_frame_pro * np.matrix(
            [np.cos(drad), np.sin(drad), 0.]).T

        #measure angle between SURF feature and x axis of object frame, store this as delta theta
        delta_theta = math.atan2(surf_dir_obj[1, 0], surf_dir_obj[0, 0])
        surf_directions.append(delta_theta)

    return {
        'descriptors':
        model_surf_descriptors,
        'directions':
        surf_directions,
        'contact_bf':
        mid_contact_bf,
        'closest_feature':
        surf_closest_fea[0],
        #'object_frame_bf': [np.mean(np.matrix(surf_loc3d_arr_bf), 1), create_frame(surf_loc3d_arr_bf)],
        'object_frame_pro': [
            np.mean(np.matrix(surf_loc_3d_pro), 1), object_frame_pro,
            surf_loc_3d_pro
        ]
    }
示例#8
0
    # Command-line arguments: processed prosilica image path and the pickle
    # holding the recorded snapshot data.
    proc_img_name = sys.argv[1]
    pickle_name = sys.argv[2]

    data_dict = ut.load_pickle(
        pickle_name)  # ['camera_info', 'map_T_bf', 'pro_T_bf', 'points']
    proc_cam_info = ut.load_pickle('prosilica_caminfo.pkl')

    # Publishers that replay the recorded snapshot onto the live topics.
    rospy.init_node('prosilica_set_view')
    points_pub = rospy.Publisher('/pc_snap_shot', sm.PointCloud)
    touchll_pub = rospy.Publisher('touch_ll', sm.PointCloud)
    touchlr_pub = rospy.Publisher('touch_lr', sm.PointCloud)
    proc_pub = rospy.Publisher('/prosilica/image_rect_color', sm.Image)
    cam_info = rospy.Publisher('/prosilica/camera_info', sm.CameraInfo)

    print 'pointcloud frame', data_dict['points'].header.frame_id
    point_cloud = ru.pointcloud_to_np(data_dict['points'])

    # Skip contact listening if a prerecorded pickle of contact locations
    # exists; otherwise listen (polling at 10 Hz) until contact stops, then
    # cache the result for the next run.
    try:
        print 'loading contact_locs_proc.pkl'
        contact_locs = ut.load_pickle('contact_locs_proc.pkl')
    except Exception, e:
        # Python 2 except syntax; any load failure falls back to listening.
        print e
        print 'listening'
        et = ListenAndFindContactLocs()
        r = rospy.Rate(10)
        while not rospy.is_shutdown() and not et.contact_stopped:
            r.sleep()
        contact_locs = et.contact_locs
        ut.save_pickle(et.contact_locs, 'contact_locs_proc.pkl')
示例#9
0
    # Command-line arguments: processed prosilica image path and the pickle
    # holding the recorded snapshot data.
    proc_img_name = sys.argv[1]
    pickle_name = sys.argv[2]

    data_dict = ut.load_pickle(pickle_name) # ['camera_info', 'map_T_bf', 'pro_T_bf', 'points']
    proc_cam_info = ut.load_pickle('prosilica_caminfo.pkl')

    # Publishers that replay the recorded snapshot onto the live topics.
    rospy.init_node('prosilica_set_view')
    points_pub = rospy.Publisher('/pc_snap_shot', sm.PointCloud)
    touchll_pub = rospy.Publisher('touch_ll', sm.PointCloud)
    touchlr_pub = rospy.Publisher('touch_lr', sm.PointCloud)
    proc_pub = rospy.Publisher('/prosilica/image_rect_color', sm.Image)
    cam_info = rospy.Publisher('/prosilica/camera_info', sm.CameraInfo)

    print 'pointcloud frame', data_dict['points'].header.frame_id
    point_cloud = ru.pointcloud_to_np(data_dict['points'])

    # Skip contact listening if a prerecorded pickle of contact locations
    # exists; otherwise listen (polling at 10 Hz) until contact stops, then
    # cache the result for the next run.
    try:
        print 'loading contact_locs_proc.pkl'
        contact_locs = ut.load_pickle('contact_locs_proc.pkl')
    except Exception, e:
        # Python 2 except syntax; any load failure falls back to listening.
        print e
        print 'listening'
        et = ListenAndFindContactLocs()
        r = rospy.Rate(10)
        while not rospy.is_shutdown() and not et.contact_stopped:
            r.sleep()
        contact_locs = et.contact_locs
        ut.save_pickle(et.contact_locs, 'contact_locs_proc.pkl')
示例#10
0
 def scan_np(self, start, end, duration):
     """Perform a scan via the scan service and convert the returned
     cloud to a numpy array."""
     response = self.sp(start, end, duration)
     cloud_np = ru.pointcloud_to_np(response.cloud)
     return cloud_np
示例#11
0
 def subsample(self, points3d, frame='base_link'):
     """Subsample *points3d* through the subsampling service.

     points3d: numpy array of points, converted to a cloud in *frame*.
     Returns the subsampled cloud as a numpy array.
     """
     request = fsrv.SubsampleCalcRequest()
     request.input = ru.np_to_pointcloud(points3d, frame)
     return ru.pointcloud_to_np(self.proxy(request).output)
示例#12
0
 def subsample(self, points3d, frame="base_link"):
     """Subsample *points3d* via the subsampling service proxy and return
     the result as a numpy array."""
     # Wrap the numpy points in a service request expressed in *frame*.
     subsample_request = fsrv.SubsampleCalcRequest()
     subsample_request.input = ru.np_to_pointcloud(points3d, frame)
     subsample_response = self.proxy(subsample_request)
     return ru.pointcloud_to_np(subsample_response.output)