Example #1
0
def extract_object_localization_features(start_conditions, tflistener):
    """Build a SURF-feature localization model for a manipulated object.

    Blocks until the contact detector (ExtractTFData) reports contact has
    stopped, then detects SURF features on the stored model image, assigns
    each feature a 3D location from the recorded point cloud, and derives an
    object-centric frame plus a per-feature direction in that frame.

    Args:
        start_conditions: dict; keys read here are 'model_image' (image file
            path), 'points' (ROS point cloud message), 'pro_T_bf'
            (homogeneous transform, base frame -> prosilica frame), and
            'camera_info' (camera model with a .project method).
            # assumed from usage below -- TODO confirm with callers
        tflistener: tf listener handed to ExtractTFData.

    Returns:
        dict with keys 'descriptors' (SURF descriptors), 'directions'
        (radians; angle of each SURF direction w.r.t. the object frame's x
        axis), 'contact_bf' (midpoint of left/right contact, base frame),
        'closest_feature' (SURF keypoint location nearest the contact
        midpoint), and 'object_frame_pro' ([mean 3D position, frame matrix,
        3D feature locations] in the prosilica frame).
    """
    ## Find contacts: poll at 10 Hz until contact detection finishes
    find_contact_locs = ExtractTFData(tflistener)
    r = rospy.Rate(10)
    while not rospy.is_shutdown() and not find_contact_locs.contact_stopped:
        r.sleep()
    contact_locs = find_contact_locs.contact_locs

    ## Detect features, get 3d location for each feature
    model_file_name = start_conditions['model_image']
    model_surf_loc, model_surf_descriptors = fea.surf_color(cv.LoadImage(model_file_name))

    ## Assign 3d location to surf features
    point_cloud_bf = ru.pointcloud_to_np(start_conditions['points'])
    # Homogenize the cloud (append a row of ones), move it into the prosilica
    # frame, and project it onto the image plane.
    point_cloud_pro = start_conditions['pro_T_bf'] * np.row_stack((point_cloud_bf, 1+np.zeros((1, point_cloud_bf.shape[1]))))
    point_cloud_2d_pro = start_conditions['camera_info'].project(point_cloud_pro[0:3,:])
    surf_loc3d_arr_bf = np.array(assign_3d_to_surf(model_surf_loc, point_cloud_bf, point_cloud_2d_pro))
    # KDTree over the features' base-frame 3D positions for the
    # nearest-neighbor query below.
    surf_loc_tree_bf = sp.KDTree(surf_loc3d_arr_bf.T)

    #################################################
    # not needed right now but can be useful later..
    #################################################
    # Get SURF features closest to contact locs.
    # NOTE(review): loop variable `r` shadows the rospy.Rate above; harmless
    # here since the rate is no longer used, but worth renaming.
    # r[1][2] / r[1][3] are taken to be the left and right contact points;
    # schema of contact_locs comes from ExtractTFData -- not visible here.
    left_contact, right_contact = zip(*[(np.matrix(r[1][2]).T, np.matrix(r[1][3]).T) for r in contact_locs])
    left_contact = np.column_stack(left_contact)
    right_contact = np.column_stack(right_contact)
    mid_contact_bf = (left_contact[:,0] + right_contact[:,0]) / 2.
    #data_dict['pro_T_bf']  * np.row_stack((mid_contact_bf, np

    surf_closest_idx = surf_loc_tree_bf.query(np.array(mid_contact_bf.T))[1] #Get surf feature at mid point
    surf_closest3d   = surf_loc3d_arr_bf[:, surf_closest_idx]
    surf_closest_fea = model_surf_loc[surf_closest_idx]

    #Create a frame for this group of features
    surf_loc_3d_pro = (start_conditions['pro_T_bf'] * np.row_stack([surf_loc3d_arr_bf, 1 + np.zeros((1, surf_loc3d_arr_bf.shape[1]))]))[0:3,:]
    object_frame_pro = create_frame(np.matrix(surf_loc_3d_pro))

    #Find out what the SURF features point to in this new frame
    surf_directions = []
    for loc, lap, size, direction, hess in model_surf_loc:
        # SURF reports direction in degrees; convert to radians.
        drad = np.radians(direction)
        #project direction into the cannonical object frame
        surf_dir_obj = object_frame_pro * np.matrix([np.cos(drad), np.sin(drad), 0.]).T

        #measure angle between SURF feature and x axis of object frame, store this as delta theta
        delta_theta = math.atan2(surf_dir_obj[1,0], surf_dir_obj[0,0])
        surf_directions.append(delta_theta)

    return {
            'descriptors': model_surf_descriptors, 
            'directions': surf_directions, 
            'contact_bf': mid_contact_bf,
            'closest_feature': surf_closest_fea[0],
            #'object_frame_bf': [np.mean(np.matrix(surf_loc3d_arr_bf), 1), create_frame(surf_loc3d_arr_bf)],
            'object_frame_pro': [np.mean(np.matrix(surf_loc_3d_pro), 1), object_frame_pro, surf_loc_3d_pro]
            }
Example #2
0
def find3d_surf(start_conditions):
    """Detect SURF features on the model image and locate them in 3D.

    The recorded point cloud is moved from the base frame into the
    prosilica frame and projected into the image; each SURF keypoint is
    then assigned the 3D position of a matching projected cloud point.

    Returns:
        (surf_locations, surf_descriptors, locations_3d_pro, cloud_2d_pro)
        where locations_3d_pro is an np.matrix of 3D feature positions in
        the prosilica frame and cloud_2d_pro the projected 2D cloud.
    """
    ## Point cloud: base frame -> prosilica frame -> bounded 2D projection
    cloud_bf = ru.pointcloud_to_np(start_conditions['points'])
    cloud_pro = tfu.transform_points(start_conditions['pro_T_bf'], cloud_bf)
    cloud_2d_pro, cloud_in_bounds_pro = project_2d_bounded(
        start_conditions['camera_info'], cloud_pro)

    ## SURF keypoints of the stored model image, lifted to 3D
    surf_locations, surf_descriptors = fea.surf_color(
        cv.LoadImage(start_conditions['model_image']))
    locations_3d_pro = np.matrix(
        assign_3d_to_surf(surf_locations, cloud_in_bounds_pro, cloud_2d_pro))
    return surf_locations, surf_descriptors, locations_3d_pro, cloud_2d_pro
Example #3
0
def find3d_surf(start_conditions):
    """Detect SURF features on the model image and locate them in 3D.

    Args:
        start_conditions: dict; keys read here are 'points' (ROS point
            cloud), 'pro_T_bf' (transform, base frame -> prosilica frame),
            'camera_info' (camera model), and 'model_image' (image path).

    Returns:
        (model_surf_loc, model_surf_descriptors, surf_loc3d_pro,
        point_cloud_2d_pro) -- keypoints, descriptors, their 3D positions
        in the prosilica frame (np.matrix), and the projected 2D cloud.
    """
    ## Project pointcloud into 2d
    point_cloud_bf = ru.pointcloud_to_np(start_conditions['points'])
    # from base_frame to prosilica frame
    point_cloud_pro = tfu.transform_points(start_conditions['pro_T_bf'],
                                           point_cloud_bf)
    # project_2d_bounded also returns the subset of 3D points whose
    # projection falls inside the image bounds.
    point_cloud_2d_pro, point_cloud_reduced_pro = project_2d_bounded(
        start_conditions['camera_info'], point_cloud_pro)
    #point_cloud_2d_pro = .project(point_cloud_pro)
    ## only count points in image bounds (should be in cam info)
    #cam_info = start_conditions['camera_info']
    #_, in_bounds = np.where(np.invert((point_cloud_2d_pro[0,:] >= (cam_info.w-.6)) + (point_cloud_2d_pro[0,:] < 0) \
    #                                + (point_cloud_2d_pro[1,:] >= (cam_info.h-.6)) + (point_cloud_2d_pro[1,:] < 0)))
    #point_cloud_2d_pro = point_cloud_2d_pro[:, in_bounds.A1]
    #point_cloud_reduced_pro = point_cloud_pro[:, in_bounds.A1]

    ## Find 3D SURF features
    model_file_name = start_conditions['model_image']
    model_surf_loc, model_surf_descriptors = fea.surf_color(
        cv.LoadImage(model_file_name))
    # Each keypoint gets the 3D position of a nearby projected cloud point.
    surf_loc3d_pro = np.matrix(
        assign_3d_to_surf(model_surf_loc, point_cloud_reduced_pro,
                          point_cloud_2d_pro))
    return model_surf_loc, model_surf_descriptors, surf_loc3d_pro, point_cloud_2d_pro
import roslib
roslib.load_manifest('hai_sandbox')
import cv
import hai_sandbox.features as fea
import hrl_camera.ros_camera as rc
import rospy

# Live SURF visualization: grab frames from the wide stereo camera, draw
# detected SURF keypoints, and show them until ROS shuts down.
# Alternative sources: rc.Prosilica('prosilica', 'streaming') or
# rc.ROSCamera('/narrow_stereo/right/image_rect').
camera = rc.ROSCamera('/wide_stereo/right/image_rect_color')
cv.NamedWindow('surf', 1)
while not rospy.is_shutdown():
    frame = camera.get_frame()
    keypoints, descriptors = fea.surf_color(frame, params=(0, 3000, 3, 4))
    annotated = fea.draw_surf(frame, keypoints, (0, 255, 0))
    cv.ShowImage('surf', annotated)
    cv.WaitKey(33)
Example #5
0
def extract_object_localization_features(start_conditions, tflistener):
    """Build a SURF-feature localization model for a manipulated object.

    Blocks until the contact detector (ExtractTFData) reports contact has
    stopped, then detects SURF features on the stored model image, assigns
    each feature a 3D location from the recorded point cloud, and derives
    an object-centric frame plus a per-feature direction in that frame.

    Returns:
        dict with keys 'descriptors', 'directions' (radians, w.r.t. the
        object frame's x axis), 'contact_bf' (contact midpoint, base
        frame), 'closest_feature', and 'object_frame_pro' ([mean 3D
        position, frame matrix, 3D feature locations], prosilica frame).
    """
    # Wait (polling at 10 Hz) for the contact detector to finish.
    contact_watcher = ExtractTFData(tflistener)
    rate = rospy.Rate(10)
    while not (rospy.is_shutdown() or contact_watcher.contact_stopped):
        rate.sleep()
    contact_locs = contact_watcher.contact_locs

    # SURF keypoints and descriptors of the stored model image.
    surf_locs, surf_descs = fea.surf_color(
        cv.LoadImage(start_conditions['model_image']))

    # Homogenize the base-frame cloud, move it into the prosilica frame,
    # and project onto the image plane; lift each keypoint to 3D.
    cloud_bf = ru.pointcloud_to_np(start_conditions['points'])
    homog_bf = np.row_stack((cloud_bf, np.ones((1, cloud_bf.shape[1]))))
    cloud_pro = start_conditions['pro_T_bf'] * homog_bf
    cloud_2d_pro = start_conditions['camera_info'].project(cloud_pro[0:3, :])
    surf_loc3d_bf = np.array(
        assign_3d_to_surf(surf_locs, cloud_bf, cloud_2d_pro))
    # Nearest-neighbor index over the features' base-frame positions.
    surf_tree_bf = sp.KDTree(surf_loc3d_bf.T)

    # Midpoint of the left/right contact locations (base frame); the
    # record schema comes from ExtractTFData.
    left_pts = []
    right_pts = []
    for rec in contact_locs:
        left_pts.append(np.matrix(rec[1][2]).T)
        right_pts.append(np.matrix(rec[1][3]).T)
    left_contact = np.column_stack(left_pts)
    right_contact = np.column_stack(right_pts)
    mid_contact_bf = (left_contact[:, 0] + right_contact[:, 0]) / 2.

    # SURF feature nearest the contact midpoint.
    closest_idx = surf_tree_bf.query(np.array(mid_contact_bf.T))[1]
    surf_closest3d = surf_loc3d_bf[:, closest_idx]
    surf_closest_fea = surf_locs[closest_idx]

    # Canonical object frame from the feature cloud (prosilica frame).
    homog_feats = np.row_stack(
        [surf_loc3d_bf, np.ones((1, surf_loc3d_bf.shape[1]))])
    surf_loc_3d_pro = (start_conditions['pro_T_bf'] * homog_feats)[0:3, :]
    object_frame_pro = create_frame(np.matrix(surf_loc_3d_pro))

    # Express each SURF direction (degrees) in the object frame and keep
    # its angle to the frame's x axis.
    surf_directions = []
    for loc, lap, size, direction, hess in surf_locs:
        drad = np.radians(direction)
        dir_obj = object_frame_pro * np.matrix(
            [np.cos(drad), np.sin(drad), 0.]).T
        surf_directions.append(math.atan2(dir_obj[1, 0], dir_obj[0, 0]))

    return {
        'descriptors': surf_descs,
        'directions': surf_directions,
        'contact_bf': mid_contact_bf,
        'closest_feature': surf_closest_fea[0],
        'object_frame_pro': [
            np.mean(np.matrix(surf_loc_3d_pro), 1), object_frame_pro,
            surf_loc_3d_pro
        ]
    }
import roslib

roslib.load_manifest("hai_sandbox")
import cv
import hai_sandbox.features as fea
import hrl_camera.ros_camera as rc
import rospy

# Live SURF visualization: grab frames from the wide stereo camera, draw
# detected SURF keypoints, and show them until ROS shuts down.
# prosilica = rc.Prosilica('prosilica', 'streaming')
# prosilica = rc.ROSCamera('/narrow_stereo/right/image_rect')
prosilica = rc.ROSCamera("/wide_stereo/right/image_rect_color")
cv.NamedWindow("surf", 1)
while not rospy.is_shutdown():
    f = prosilica.get_frame()
    # params=(0, 3000, 3, 4): SURF detector settings passed through to
    # fea.surf_color -- presumably (extended, hessianThreshold, nOctaves,
    # nOctaveLayers); verify against hai_sandbox.features.
    loc, desc = fea.surf_color(f, params=(0, 3000, 3, 4))
    fdrawn = fea.draw_surf(f, loc, (0, 255, 0))
    cv.ShowImage("surf", fdrawn)
    # ~30 fps display; WaitKey also pumps the HighGUI event loop.
    cv.WaitKey(33)