def match_images(model_img, cand_img, threshold=.8):
    #pdb.set_trace()
    mgray = fea.grayscale(model_img)
    cgray = fea.grayscale(cand_img)
    
    m_loc, m_desc = fea.surf(mgray)
    dirs = [direction for loc, lap, size, direction, hess in m_loc]
    print 'min, max dirs:', np.min(dirs), np.max(dirs)

    c_loc, c_desc = fea.surf(cgray)
    
    features_db = sp.KDTree(np.array(m_desc))
    matched = []
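    # Query each candidate descriptor against the KD-tree of model descriptors
    # built above; keep a match only when the nearest model descriptor is clearly
    # closer than the second nearest (Lowe-style ratio test).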
    for i, desc in enumerate(c_desc):
        dists, idxs = features_db.query(np.array(desc), 2)
        ratio = dists[0] / dists[1]
        #print "%d %.4f" % (i, ratio),
        if ratio < threshold:
            matched.append((i, idxs[0]))
            #print 'matched!', idxs[0]
        #else:
        #    print 'X|'
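    # Shift candidate keypoints right by the model image's width so they land on
    # the candidate's half of the side-by-side image produced by concat_images,
    # letting matched pairs be joined with lines below.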
    
    c_loc_moved = []
    for loc, lap, size, d, hess in c_loc:
        x, y = loc
        nloc = (x + model_img.width, y)
        c_loc_moved.append((nloc, lap, size, d, hess))
    
    c_loc_matched, m_loc_matched = zip(*[[c_loc_moved[i], m_loc[j]] for i, j in matched])
    joint = concat_images(model_img, cand_img)
    
    joint_viz = joint
    #joint_viz = fea.draw_surf(joint, c_loc_moved, (255,0,0))
    #joint_viz = fea.draw_surf(joint_viz, c_loc_matched, (0,255,0))
    #joint_viz = fea.draw_surf(joint_viz, m_loc, (255,0,0))
    #joint_viz = fea.draw_surf(joint_viz, m_loc_matched, (0,255,0))
    for cloc, mloc in zip(c_loc_matched, m_loc_matched):
        cloc2d, _, _, _, _ = cloc
        mloc2d, _, _, _, _ = mloc
        cv.Line(joint_viz, tuple(map(int, cloc2d)), tuple(map(int, mloc2d)),
                (0, 255, 0), 1, cv.CV_AA)
    print '%d matches found' % len(matched)
    
    return joint_viz
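
# Usage sketch (not part of the original snippet): exercises match_images on two
# image files. The file names are placeholders, and the code assumes this
# module's imports (cv, np, sp, fea -- not shown in the snippet) plus the
# concat_images helper used above.
model_img = cv.LoadImage('model.png')
cand_img = cv.LoadImage('candidate.png')
match_viz = match_images(model_img, cand_img, threshold=.8)
cv.NamedWindow('matches', 1)
cv.ShowImage('matches', match_viz)
cv.WaitKey(0)
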
def find_image_features(bagname, topic):
    features_list = []
    bridge = CvBridge()
    i = 0
    for _, msg, _ in ru.bag_iter(bagname, [topic]):
        t = msg.header.stamp.to_time()
        image = bridge.imgmsg_to_cv(msg, 'bgr8')
        image_gray = fea.grayscale(image)
        surf_keypoints, surf_descriptors = fea.surf(image_gray)
        features_list.append((t, (surf_keypoints, surf_descriptors)))
        rospy.loginfo("%.3f frame %d found %d points" % (t, i, len(surf_keypoints)))
        i = i + 1
    return features_list
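
# Usage sketch (not in the original snippet): the bag file name is a placeholder;
# the topic is one of the camera topics used elsewhere in this package.
feats = find_image_features('example.bag', '/wide_stereo/left/image_rect_color')
print 'extracted SURF features from %d frames' % len(feats)
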
Example #4
    def run(self):
        cv.NamedWindow('surf', 1)
        while not rospy.is_shutdown():
            image = self.camera.get_frame()
            image_gray = fea.grayscale(image)
            surf_keypoints, surf_descriptors = fea.surf(image_gray)
            vis_img = fea.draw_surf(image, surf_keypoints, (255, 0, 0))

            #Project the tip of the gripper (both of them) into the image frame
            img_ll = self.camera_geo.project(
                np.matrix([0, 0, 0.]).T, self.tflistener, self.finger_tips[2])
            img_lr = self.camera_geo.project(
                np.matrix([0, 0, 0.]).T, self.tflistener, self.finger_tips[3])

            cv.Circle(
                vis_img,
                tuple(np.matrix(np.round(img_ll), dtype='int').A1.tolist()),
                30, (0, 255, 0), 1, cv.CV_AA)
            cv.Circle(
                vis_img,
                tuple(np.matrix(np.round(img_lr), dtype='int').A1.tolist()),
                30, (0, 255, 0), 1, cv.CV_AA)
            cv.ShowImage('surf', vis_img)
            cv.WaitKey(10)
Example #5
import roslib; roslib.load_manifest('hai_sandbox')
import cv
import sys
import hai_sandbox.features as fea

if __name__ == '__main__':
    fname = sys.argv[1]
    image = cv.LoadImage(fname)
    image_gray = cv.CreateImage(cv.GetSize(image), cv.IPL_DEPTH_8U, 1)
    cv.CvtColor(image, image_gray, cv.CV_BGR2GRAY)
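    # Run three detectors on the same frame: STAR on the color image, SURF and
    # Harris on the grayscale copy.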

    star_keypoints = fea.star(image) 
    surf_keypoints, surf_descriptors = fea.surf(image_gray)
    harris_keypoints = fea.harris(image_gray)

    cv.NamedWindow('surf', 1)
    cv.NamedWindow('harris', 1)
    cv.NamedWindow('star', 1)
    while True:
        cv.ShowImage('surf', fea.draw_surf(image, surf_keypoints, (255, 0, 0)))
        cv.ShowImage('harris', fea.draw_harris(image, harris_keypoints, (0, 255, 0)))
        cv.ShowImage('star', fea.draw_star(image, star_keypoints, (0, 0, 255)))
        k = cv.WaitKey(33)
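        # ESC (key code 27) exits the display loop.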
        if k == 27:
            break

    #Canny(image, edges, threshold1, threshold2, aperture_size=3) => None
if __name__ == '__main__':
    mode = 'image'
    #if mode = 'image':
    #    find pose of model
    #    find normal of model
    #    record angles of features.

    if mode == 'image':
        test_thresholds()

    if mode == 'live':
        model_name = sys.argv[1]
        model_img = cv.LoadImage(model_name)
        model_gray = fea.grayscale(model_img)
        msurf_loc, msurf_desc = fea.surf(model_gray)
        prosilica = rc.Prosilica('prosilica', 'streaming')
        cv.NamedWindow('surf', 1)
        si = ShowImage('surf')
        si.start()

        #Each feature is a row in db
        features_db = sp.KDTree(np.array(msurf_desc))
        #pdb.set_trace()
 
        while not rospy.is_shutdown():
            print '..'
            image = prosilica.get_frame()
            print 'saving image'
            cv.SaveImage('frame.png', image)
            print '>'
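            # Sketch of a presumable next step (not in the original snippet):
            # extract SURF features from the live frame and run the same ratio
            # test used in match_images against the model's features_db.
            frame_gray = fea.grayscale(image)
            csurf_loc, csurf_desc = fea.surf(frame_gray)
            matched = []
            for i, desc in enumerate(csurf_desc):
                dists, idxs = features_db.query(np.array(desc), 2)
                if dists[0] / dists[1] < .8:
                    matched.append((i, idxs[0]))
            print '%d live descriptors matched the model' % len(matched)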
Example #7
import roslib; roslib.load_manifest('hai_sandbox')
from cv_bridge.cv_bridge import CvBridge, CvBridgeError
import cv
import sys
import hrl_lib.rutils as ru
import hai_sandbox.features as fea

forearm_cam_l = '/l_forearm_cam/image_rect_color'
ws_l = '/wide_stereo/left/image_rect_color'
ws_r = '/wide_stereo/right/image_rect_color'

fname = sys.argv[1]
bridge = CvBridge()

cv.NamedWindow('surf', 1)
cv.NamedWindow('harris', 1)
cv.NamedWindow('star', 1)

for topic, msg, t in ru.bag_iter(fname, [ws_l]):
    image = bridge.imgmsg_to_cv(msg, 'bgr8')
    image_gray = fea.grayscale(image)

    surf_keypoints, surf_descriptors = fea.surf(image_gray)
    cv.ShowImage('surf', fea.draw_surf(image, surf_keypoints, (255, 0, 0)))

    harris_keypoints = fea.harris(image_gray)
    cv.ShowImage('harris', fea.draw_harris(image, harris_keypoints, (0, 255, 0)))

    cv.WaitKey(10)
    
Example #9
    try:
        contact_locs = ut.load_pickle('contact_locs_proc.pkl')
    except Exception, e:
        print e
        print 'listening'         
        et = ListenAndFindContactLocs()
        r = rospy.Rate(10)
        while not rospy.is_shutdown() and not et.contact_stopped:
            r.sleep()
        contact_locs = et.contact_locs
        ut.save_pickle(et.contact_locs, 'contact_locs_proc.pkl')

    #Detect features, get 3d location for each feature
    print 'detecting features'
    proc_img = cv.LoadImage(proc_img_name)
    proc_gray = fea.grayscale(proc_img)
    sloc, sdesc = fea.surf(proc_gray)
    proc_surfed = fea.draw_surf(proc_img, sloc, (200, 0, 0))

    ######################################################################################
    # get 3d locations of surf features, get closest surf feature to gripper tips
    ######################################################################################
    #import pdb
    #pdb.set_trace()
    point_cloud_pro = data_dict['pro_T_bf'] * np.row_stack(
        (point_cloud, 1 + np.zeros((1, point_cloud.shape[1]))))
    #project 3d points to 2d
    point_cloud_2d = data_dict['camera_info'].project(point_cloud_pro[0:3, :])
    point_cloud_2d_tree = sp.KDTree(np.array(point_cloud_2d.T))
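    # Each SURF keypoint's 2D location can now be matched to its nearest projected
    # cloud point; that point's index recovers the 3D coordinates in point_cloud_pro.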

    #2d surf => 3d loc
    surf_loc3d = []
    for loc, lap, size, d, hess in sloc:
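        # Hedged completion (the original snippet is truncated here): take the
        # projected cloud point nearest to this keypoint and keep its 3D location.
        dist, idx = point_cloud_2d_tree.query(np.array(loc))
        surf_loc3d.append(point_cloud_pro[0:3, idx])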