Example #1
def demo(net, image_name, anchor_file):
    # Load the demo image
    im_file = os.path.join(cfg.ROOT_DIR, 'data', 'demo', image_name)
    if not os.path.exists(im_file):
        print 'Image `{:s}` not found!'.format(image_name)
        return
    im = cv2.imread(im_file)

    # Detect all object classes and regress object bounds
    timer = Timer()
    timer.tic()
    scores, boxes = im_detect(net, im, anchor_file)
    timer.toc()
    print('Detection took {:.3f}s for '
          '{:d} object proposals'.format(timer.total_time, boxes.shape[0]))

    # Visualize detections for each class
    cls = 'obj'
    dets = np.hstack((boxes, scores)).astype(np.float32, copy=False)
    dets = boxes_filter(
        dets,
        2000,  # NMS_CONFIG['PRE_NMS_TOPN'], 
        0.3,  # NMS_CONFIG['NMS_THRESH'], 
        10  #NMS_CONFIG['POST_NMS_TOPN']
    )
    CONF_THRESH = 0.99
    print 'All {} detections with p({} | box) >= {:.1f}'.format(
        cls, cls, CONF_THRESH)
    # vis_detections(im, cls, dets, thresh=CONF_THRESH)
    res_im_file = os.path.join(cfg.ROOT_DIR, 'data', 'demo',
                               'res_' + image_name)
    save_detection_res(im, res_im_file, dets, CONF_THRESH)
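The boxes_filter helper used above is not shown in this example. As a rough sketch only (not the project's implementation), a proposal filter with these three parameters typically keeps the top PRE_NMS_TOPN boxes by score, runs greedy NMS at NMS_THRESH, and keeps the top POST_NMS_TOPN survivors; the names boxes_filter_sketch and _nms below are hypothetical.

import numpy as np

def _nms(dets, thresh):
    """Plain-NumPy greedy NMS; dets is (N, 5) as [x1, y1, x2, y2, score]."""
    x1, y1, x2, y2 = dets[:, 0], dets[:, 1], dets[:, 2], dets[:, 3]
    scores = dets[:, 4]
    areas = (x2 - x1 + 1) * (y2 - y1 + 1)
    order = scores.argsort()[::-1]
    keep = []
    while order.size > 0:
        i = order[0]
        keep.append(i)
        xx1 = np.maximum(x1[i], x1[order[1:]])
        yy1 = np.maximum(y1[i], y1[order[1:]])
        xx2 = np.minimum(x2[i], x2[order[1:]])
        yy2 = np.minimum(y2[i], y2[order[1:]])
        w = np.maximum(0.0, xx2 - xx1 + 1)
        h = np.maximum(0.0, yy2 - yy1 + 1)
        inter = w * h
        iou = inter / (areas[i] + areas[order[1:]] - inter)
        order = order[np.where(iou <= thresh)[0] + 1]
    return keep

def boxes_filter_sketch(dets, pre_nms_topn, nms_thresh, post_nms_topn):
    """Sort by score, keep pre_nms_topn boxes, apply NMS, keep post_nms_topn."""
    order = dets[:, 4].argsort()[::-1][:pre_nms_topn]
    dets = dets[order, :]
    keep = _nms(dets, nms_thresh)[:post_nms_topn]
    return dets[keep, :]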
Example #2
def demo(net, image_name, anchor_file):
    # Load the demo image
    im_file = os.path.join(cfg.ROOT_DIR, 'data', 'demo', image_name)
    if not os.path.exists(im_file):
        print 'Image `{:s}` not found!'.format(image_name)
        return 
    im = cv2.imread(im_file)

    # Detect all object classes and regress object bounds
    timer = Timer()
    timer.tic()
    scores, boxes = im_detect(net, im, anchor_file)
    timer.toc()
    print ('Detection took {:.3f}s for '
           '{:d} object proposals'.format(timer.total_time, boxes.shape[0]))

    # Visualize detections for each class
    cls = 'obj'
    dets = np.hstack((boxes, scores)).astype(np.float32, copy=False)
    dets = boxes_filter(dets, 2000, # NMS_CONFIG['PRE_NMS_TOPN'], 
                              0.3,  # NMS_CONFIG['NMS_THRESH'], 
                              10 #NMS_CONFIG['POST_NMS_TOPN']
                        )
    CONF_THRESH = 0.99
    print 'All {} detections with p({} | box) >= {:.1f}'.format(cls, cls,
                                                                CONF_THRESH)
    # vis_detections(im, cls, dets, thresh=CONF_THRESH)
    res_im_file = os.path.join(cfg.ROOT_DIR, 'data', 'demo', 'res_'+image_name)
    save_detection_res(im, res_im_file, dets, CONF_THRESH)
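A hypothetical entry point for this example, assuming a pycaffe build; the prototxt, caffemodel, anchor-file, and image names below are placeholders that do not appear in the example itself.

import caffe

if __name__ == '__main__':
    caffe.set_mode_gpu()  # or caffe.set_mode_cpu() on a CPU-only build
    net = caffe.Net('rpn_test.prototxt', 'rpn_final.caffemodel', caffe.TEST)
    # Writes data/demo/res_000001.jpg next to the input image.
    demo(net, '000001.jpg', 'anchors.txt')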
Example #3
def demo(net, im_path, anchor_file, des_dir='demo'):
    # Load the demo image
    if not os.path.exists(im_path):
        print 'Image `{:s}` not found!'.format(im_path)
        return 
    #im = cv2.imread(im_path)
    im = exif.load_exif_jpg(im_path)
    #width = 1280
    #height = 720
    #multi = im.shape[0]*im.shape[1]*1.0/(width*height)

    # Detect all object classes and regress object bounds
    timer = Timer()
    timer.tic()
    scores, boxes = im_detect(net, im, anchor_file)
    #print scores
    #print boxes
    #print len(scores)
    timer.toc()
    print ('Detection took {:.3f}s for '
           '{:d} object proposals'.format(timer.total_time, boxes.shape[0]))

    # Visualize detections for each class
    cls = 'obj'
    dets = np.hstack((boxes, scores)).astype(np.float32, copy=False)
    dets = boxes_filter(dets, NMS_CONFIG['PRE_NMS_TOPN'], 
                              NMS_CONFIG['NMS_THRESH'], 
                              NMS_CONFIG['POST_NMS_TOPN'],
                              CONF_THRESH
                        )
    print 'All {} detections with p({} | box) >= {:.1f}'.format(cls, cls, CONF_THRESH)
    #vis_detections(im, cls, dets, thresh=CONF_THRESH)
    #print dets
    # save result images
    output_dir = os.path.join(cfg.ROOT_DIR, 'data', des_dir)
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    # image_name = os.path.splitext(os.path.basename(im_path))[0]
    # res_im_file = os.path.join(output_dir, 'res_'+image_name+'.jpg')
    image_name = '_'.join(im_path.split('/')[-3:])
    res_im_file = os.path.join(output_dir, image_name)

    #save_detection_res(im, res_im_file, dets)
    return dets
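The module-level NMS_CONFIG and CONF_THRESH referenced here are not shown. Judging from the literals hard-coded in examples #1 and #2, they plausibly look like the following; the exact values are assumptions.

NMS_CONFIG = {
    'PRE_NMS_TOPN': 2000,  # keep this many top-scoring boxes before NMS
    'NMS_THRESH': 0.3,     # IoU threshold for greedy NMS
    'POST_NMS_TOPN': 10,   # keep this many boxes after NMS
}
CONF_THRESH = 0.99         # minimum score for a detection to be kept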
Example #4
def demo(net, im_path, anchor_file, des_dir='demo'):
    # Load the demo image
    if not os.path.exists(im_path):
        print 'Image `{:s}` not found!'.format(im_path)
        return
    #im = cv2.imread(im_path)
    im = exif.load_exif_jpg(im_path)
    #width = 1280
    #height = 720
    #multi = im.shape[0]*im.shape[1]*1.0/(width*height)

    # Detect all object classes and regress object bounds
    timer = Timer()
    timer.tic()
    scores, boxes = im_detect(net, im, anchor_file)
    #print scores
    #print boxes
    #print len(scores)
    timer.toc()
    print('Detection took {:.3f}s for '
          '{:d} object proposals'.format(timer.total_time, boxes.shape[0]))

    # Visualize detections for each class
    cls = 'obj'
    dets = np.hstack((boxes, scores)).astype(np.float32, copy=False)
    dets = boxes_filter(dets, NMS_CONFIG['PRE_NMS_TOPN'],
                        NMS_CONFIG['NMS_THRESH'], NMS_CONFIG['POST_NMS_TOPN'],
                        CONF_THRESH)
    print 'All {} detections with p({} | box) >= {:.1f}'.format(
        cls, cls, CONF_THRESH)
    #vis_detections(im, cls, dets, thresh=CONF_THRESH)
    #print dets
    # save result images
    output_dir = os.path.join(cfg.ROOT_DIR, 'data', des_dir)
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    # image_name = os.path.splitext(os.path.basename(im_path))[0]
    # res_im_file = os.path.join(output_dir, 'res_'+image_name+'.jpg')
    image_name = '_'.join(im_path.split('/')[-3:])
    res_im_file = os.path.join(output_dir, image_name)

    #save_detection_res(im, res_im_file, dets)
    return dets
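Because this variant returns dets instead of saving an annotated image, a caller can sweep a whole directory and collect the results. A minimal sketch, assuming net and anchor_file are already set up and that the demo images live under data/demo; the glob pattern and the all_dets dict are assumptions.

import glob
import os

all_dets = {}
for im_path in sorted(glob.glob(os.path.join(cfg.ROOT_DIR, 'data', 'demo', '*.jpg'))):
    dets = demo(net, im_path, anchor_file, des_dir='demo_out')
    if dets is not None:  # demo() returns None when the image is missing
        all_dets[im_path] = dets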
Example #5
def demo(rpn_net, fast_rcnn_net, anchor_file, image_name, classes=CLASSES, 
        share_conv=False, last_shared_blob_name=None):
    """
    Detect object classes in an image.
    The proposals are generated by a region proposal network.
    """
    # Load the demo image
    im_file = os.path.join(rpn_config.cfg.ROOT_DIR, 'data', 'demo', image_name)
    im = cv2.imread(im_file)

    # timers
    _t = {'im_detect' : Timer(), 'misc' : Timer()}
    
    # ####### RPN 
    # Generate the proposal boxes
    _t['im_detect'].tic()
    scores, boxes = rpn_test.im_detect(rpn_net, im, anchor_file)
    _t['im_detect'].toc()
    print 'image: {:s} num proposal: {:d}'.format(image_name, boxes.shape[0])
    
    # Filter the proposal boxes
    _t['misc'].tic()
    obj_proposals = np.hstack((boxes, scores)).astype(np.float32, copy=False)
    obj_proposals = test_rpn_net.boxes_filter(obj_proposals, 6000, 0.7, 300)
    _t['misc'].toc()
    print 'image: {:s} num proposal filtered: {:d}'.format(image_name, 
                                                    obj_proposals.shape[0])

    print ('Actions took {:.3f}s for generating '
           '{:d} proposal boxes, {:.3f}s for '
           'filtering proposals.'.format(_t['im_detect'].total_time,
                                         boxes.shape[0], _t['misc'].total_time))


    # ###### Fast-RCNN
    # Detect all object classes and regress object bounds
    _t['im_detect'].tic()
    if share_conv:
        # conv_feat_blob = rpn_net.blobs['conv5'].data
        scores, boxes = fast_rcnn_test.im_detect(fast_rcnn_net, im, obj_proposals[:, 0:4],
                                            rpn_net.blobs[last_shared_blob_name].data)
    else:
        scores, boxes = fast_rcnn_test.im_detect(fast_rcnn_net, im, obj_proposals[:, 0:4])
    _t['im_detect'].toc()
    print 'image: {:s} num obj boxes: {:d}'.format(image_name, boxes.shape[0])
    
    # Visualize detections for each class
    CONF_THRESH = 0.6
    NMS_THRESH = 0.3
    _t['misc'].tic()

    plt.figure()
    for cls in classes:
        cls_ind = CLASSES.index(cls)
        cls_boxes = boxes[:, 4*cls_ind:4*(cls_ind + 1)]
        cls_scores = scores[:, cls_ind]
        dets = np.hstack((cls_boxes,
                          cls_scores[:, np.newaxis])).astype(np.float32)
        keep = nms(dets, NMS_THRESH)
        dets = dets[keep, :]
        print 'All {} detections with p({} | box) >= {:.1f}'.format(cls, cls, CONF_THRESH)
        vis_detections(im, cls, dets, thresh=CONF_THRESH)
    _t['misc'].toc()
    
    print 'Actions took {:.3f}s for detection, {:.3f}s for nms.' \
              .format(_t['im_detect'].total_time, _t['misc'].total_time)
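A hypothetical call sketch for this two-stage demo, assuming pycaffe; the model paths, the class names, and 'conv5' as the last shared blob are assumptions that must match the networks actually loaded.

import caffe
import matplotlib.pyplot as plt

caffe.set_mode_gpu()
rpn_net = caffe.Net('rpn_test.prototxt', 'rpn.caffemodel', caffe.TEST)
fast_rcnn_net = caffe.Net('fast_rcnn_test.prototxt', 'fast_rcnn.caffemodel', caffe.TEST)
demo(rpn_net, fast_rcnn_net, 'anchors.txt', '000456.jpg',
     classes=('car', 'person'), share_conv=True, last_shared_blob_name='conv5')
plt.show()  # vis_detections draws onto the figure created inside demo()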
Example #6
def demo(rpn_net,
         fast_rcnn_net,
         anchor_file,
         image_name,
         classes=CLASSES,
         share_conv=False,
         last_shared_blob_name=None):
    """
    Detect object classes in an image.
    The proposals are generated by a region proposal network.
    """
    # Load the demo image
    im_file = os.path.join(rpn_config.cfg.ROOT_DIR, 'data', 'demo', image_name)
    im = cv2.imread(im_file)

    # timers
    _t = {'im_detect': Timer(), 'misc': Timer()}

    # ####### RPN
    # Generate the proposal boxes
    _t['im_detect'].tic()
    scores, boxes = rpn_test.im_detect(rpn_net, im, anchor_file)
    _t['im_detect'].toc()
    print 'image: {:s} num proposal: {:d}'.format(image_name, boxes.shape[0])

    # Filter the proposal boxes
    _t['misc'].tic()
    obj_proposals = np.hstack((boxes, scores)).astype(np.float32, copy=False)
    obj_proposals = test_rpn_net.boxes_filter(obj_proposals, 6000, 0.7, 300)
    _t['misc'].toc()
    print 'image: {:s} num proposal filtered: {:d}'.format(
        image_name, obj_proposals.shape[0])

    print('Actions took {:.3f}s for generating '
          '{:d} proposal boxes, {:.3f}s for '
          'filtering proposals.'.format(_t['im_detect'].total_time,
                                        boxes.shape[0], _t['misc'].total_time))

    # ###### Fast-RCNN
    # Detect all object classes and regress object bounds
    _t['im_detect'].tic()
    if share_conv:
        # conv_feat_blob = rpn_net.blobs['conv5'].data
        scores, boxes = fast_rcnn_test.im_detect(
            fast_rcnn_net, im, obj_proposals[:, 0:4],
            rpn_net.blobs[last_shared_blob_name].data)
    else:
        scores, boxes = fast_rcnn_test.im_detect(fast_rcnn_net, im,
                                                 obj_proposals[:, 0:4])
    _t['im_detect'].toc()
    print 'image: {:s} num obj boxes: {:d}'.format(image_name, boxes.shape[0])

    # Visualize detections for each class
    CONF_THRESH = 0.6
    NMS_THRESH = 0.3
    _t['misc'].tic()

    plt.figure()
    for cls in classes:
        cls_ind = CLASSES.index(cls)
        cls_boxes = boxes[:, 4 * cls_ind:4 * (cls_ind + 1)]
        cls_scores = scores[:, cls_ind]
        dets = np.hstack(
            (cls_boxes, cls_scores[:, np.newaxis])).astype(np.float32)
        keep = nms(dets, NMS_THRESH)
        dets = dets[keep, :]
        print 'All {} detections with p({} | box) >= {:.1f}'.format(
            cls, cls, CONF_THRESH)
        vis_detections(im, cls, dets, thresh=CONF_THRESH)
    _t['misc'].toc()

    print 'Actions took {:.3f}s for detection, {:.3f}s for nms.' \
              .format(_t['im_detect'].total_time, _t['misc'].total_time)
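vis_detections is also not shown in these examples; in a py-faster-rcnn-style demo it draws every detection above the threshold onto the current matplotlib figure. A rough, hypothetical sketch (not the project's implementation):

import numpy as np
import matplotlib.pyplot as plt

def vis_detections_sketch(im, class_name, dets, thresh=0.5):
    """Draw detections with score >= thresh; dets is (N, 5) as [x1, y1, x2, y2, score]."""
    inds = np.where(dets[:, -1] >= thresh)[0]
    if len(inds) == 0:
        return
    plt.imshow(im[:, :, (2, 1, 0)])  # BGR (OpenCV) -> RGB for matplotlib
    ax = plt.gca()
    for i in inds:
        x1, y1, x2, y2, score = dets[i]
        ax.add_patch(plt.Rectangle((x1, y1), x2 - x1, y2 - y1,
                                   fill=False, edgecolor='red', linewidth=2))
        ax.text(x1, y1 - 2, '{:s} {:.3f}'.format(class_name, score),
                color='white', backgroundcolor='red', fontsize=8)
    ax.set_title('{} detections (p >= {:.2f})'.format(class_name, thresh))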