Example #1
def test_imdb(net, imdb, anchors):
    """ Test a region proposal network on a image dataset  """
    output_dir = get_output_dir(imdb, net)
    cache_file = os.path.join(output_dir, 'res_boxes.pkl')

    # load cached result boxes (filtered)
    if os.path.exists(cache_file):
        with open(cache_file, 'rb') as f:
            proposal_boxes = cPickle.load(f)
        print 'load res boxes from \'{}\''.format(cache_file)
        return proposal_boxes

    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    print 'Generating proposal boxes by rpn model...'
    proposal_boxes = test_net(net, imdb, anchors)
    print 'Get proposal boxes done!'

    print 'Current NMS configuration:'
    print NMS_CONFIG

    expand_val = lambda boxes: np.array([
        boxes[:, 0] - boxes[:, 2], boxes[:, 1] - boxes[:, 3], boxes[:, 2] -
        boxes[:, 0], boxes[:, 3] - boxes[:, 1],
        np.zeros(boxes.shape[0])
    ]).T * EXPAND_RATIO

    # filter boxes
    print 'Filtering proposal boxes...'
    for i in xrange(len(proposal_boxes)):
        proposal_boxes[i] = boxes_filter(
            proposal_boxes[i],
            PRE_NMS_TOPN=NMS_CONFIG['PRE_NMS_TOPN'],
            NMS_THRESH=NMS_CONFIG['NMS_THRESH'],
            POST_NMS_TOPN=NMS_CONFIG['POST_NMS_TOPN'],
            CONF_THRESH=CONF_THRESH,
            USE_GPU=NMS_CONFIG['USE_GPU'])

        # expand bounding box
        if len(proposal_boxes[i]) > 0:
            proposal_boxes[i] = proposal_boxes[i] + expand_val(
                proposal_boxes[i])
        print 'filter proposal box: {:d}/{:d}'.format(i + 1,
                                                      len(proposal_boxes))
    print 'Filter proposal boxes done!'

    # save file
    with open(cache_file, 'wb') as f:
        cPickle.dump(proposal_boxes, f, cPickle.HIGHEST_PROTOCOL)
        print 'save result boxes to `{:s}`'.format(cache_file)

    return proposal_boxes
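The expand_val lambda above enlarges each box around its center: for every (x1, y1, x2, y2, score) row it adds (-w, -h, +w, +h, 0) scaled by EXPAND_RATIO, where w and h are the box width and height. A minimal sketch of that effect, with a made-up EXPAND_RATIO standing in for the configured value:

import numpy as np

EXPAND_RATIO = 0.1  # hypothetical value for illustration; the real one comes from the project config

expand_val = lambda boxes: np.array([
    boxes[:, 0] - boxes[:, 2], boxes[:, 1] - boxes[:, 3],
    boxes[:, 2] - boxes[:, 0], boxes[:, 3] - boxes[:, 1],
    np.zeros(boxes.shape[0])
]).T * EXPAND_RATIO

boxes = np.array([[10., 20., 50., 80., 0.9]])  # one (x1, y1, x2, y2, score) row
print(boxes + expand_val(boxes))
# x1,y1,x2,y2 become 6, 14, 54, 86 (each side pushed out by 10% of the width/height); the score is unchanged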
Example #2
def test_imdb(net, imdb, anchors):
    """ Test a region proposal network on a image dataset  """
    output_dir = get_output_dir(imdb, net)
    cache_file = os.path.join(output_dir, 'res_boxes.pkl')
    
    # load cached result boxes (filtered)
    if os.path.exists(cache_file):
        with open(cache_file, 'rb') as f:
            proposal_boxes = cPickle.load(f)
        print 'load res boxes from \'{}\''.format(cache_file)
        return proposal_boxes
    
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)
   
    print 'Generating proposal boxes by rpn model...'
    proposal_boxes = test_net(net, imdb, anchors)
    print 'Get proposal boxes done!'
    
    print 'Current NMS configuration:'
    print NMS_CONFIG

    expand_val = lambda boxes: np.array([boxes[:,0] - boxes[:,2], boxes[:,1] - boxes[:,3],
                                     boxes[:,2] - boxes[:,0], boxes[:,3] - boxes[:,1],
                                     np.zeros(boxes.shape[0])]).T * EXPAND_RATIO    
    
    # filter boxes
    print 'Filtering proposal boxes...'
    for i in xrange(len(proposal_boxes)):
        proposal_boxes[i] = boxes_filter(proposal_boxes[i], 
                PRE_NMS_TOPN=NMS_CONFIG['PRE_NMS_TOPN'], 
                NMS_THRESH=NMS_CONFIG['NMS_THRESH'], 
                POST_NMS_TOPN=NMS_CONFIG['POST_NMS_TOPN'],
                CONF_THRESH=CONF_THRESH,
                USE_GPU=NMS_CONFIG['USE_GPU'])

        # expand bounding box
        if len(proposal_boxes[i]) > 0:
            proposal_boxes[i] = proposal_boxes[i] + expand_val(proposal_boxes[i])
        print 'filter proposal box: {:d}/{:d}'.format(i+1, len(proposal_boxes))
    print 'Filter proposal boxes done!'
    
    # save file
    with open(cache_file, 'wb') as f:
        cPickle.dump(proposal_boxes, f, cPickle.HIGHEST_PROTOCOL)
        print 'save result boxes to `{:s}`'.format(cache_file)
 
    return proposal_boxes
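boxes_filter itself is not shown on this page; its arguments suggest the usual proposal-filtering chain (confidence threshold, keep the PRE_NMS_TOPN highest-scoring boxes, non-maximum suppression, then keep POST_NMS_TOPN). A minimal CPU-only sketch of that chain, assuming (x1, y1, x2, y2, score) rows; this is an illustration, not the project's actual implementation:

import numpy as np

def boxes_filter_sketch(dets, PRE_NMS_TOPN, NMS_THRESH, POST_NMS_TOPN, CONF_THRESH):
    # drop low-confidence detections
    dets = dets[dets[:, 4] >= CONF_THRESH]
    # sort by score (descending) and keep the top PRE_NMS_TOPN before NMS
    dets = dets[dets[:, 4].argsort()[::-1][:PRE_NMS_TOPN]]
    areas = (dets[:, 2] - dets[:, 0] + 1) * (dets[:, 3] - dets[:, 1] + 1)
    suppressed = np.zeros(len(dets), dtype=bool)
    keep = []
    for i in range(len(dets)):
        if suppressed[i]:
            continue
        keep.append(i)
        # IoU of box i with every lower-scoring box
        xx1 = np.maximum(dets[i, 0], dets[i + 1:, 0])
        yy1 = np.maximum(dets[i, 1], dets[i + 1:, 1])
        xx2 = np.minimum(dets[i, 2], dets[i + 1:, 2])
        yy2 = np.minimum(dets[i, 3], dets[i + 1:, 3])
        inter = np.maximum(0.0, xx2 - xx1 + 1) * np.maximum(0.0, yy2 - yy1 + 1)
        iou = inter / (areas[i] + areas[i + 1:] - inter)
        suppressed[i + 1:] |= iou > NMS_THRESH
    return dets[keep][:POST_NMS_TOPN]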
Example #3
def test_net(net, imdb, base_anchors_file):
    """ Test a region proposal network on a image database. """
    output_dir = get_output_dir(imdb, net)
    cache_file = os.path.join(output_dir, 'proposal_boxes.pkl')

    # load cached proposal boxes
    if os.path.exists(cache_file):
        with open(cache_file, 'rb') as f:
            proposal_boxes = cPickle.load(f)
        print 'load proposal boxes from \'{}\''.format(cache_file)
        return proposal_boxes

    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    ####### no cached proposal boxes; generate them below
    num_images = len(imdb.image_index)

    # all detections are collected into:
    #    all_boxes[image] = N x 5 array of detections in
    #    (x1, y1, x2, y2, score)
    all_boxes = [[] for _ in xrange(num_images)]

    # timers
    _t = {'im_detect': Timer(), 'misc': Timer()}

    # generate proposal boxes
    for i in xrange(num_images):
        img_path = imdb.image_path_at(i)
        _t['im_detect'].tic()
        im = cv2.imread(img_path)
        scores, boxes = im_detect(net, im, base_anchors_file)
        _t['im_detect'].toc()

        all_boxes[i] = np.hstack((boxes, scores)).astype(np.float32,
                                                         copy=False)

        print 'gen_proposal: {:d}/{:d} {:.3f}s' \
              .format(i + 1, num_images, _t['im_detect'].average_time)

    # save file
    with open(cache_file, 'wb') as f:
        cPickle.dump(all_boxes, f, cPickle.HIGHEST_PROTOCOL)

    return all_boxes
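For the hstack above to produce the documented N x 5 (x1, y1, x2, y2, score) rows, im_detect has to return boxes as an N x 4 array and scores as an N x 1 column. A quick shape check with dummy arrays standing in for the network outputs:

import numpy as np

boxes = np.zeros((3, 4), dtype=np.float32)   # N x 4 proposal coordinates (dummy)
scores = np.ones((3, 1), dtype=np.float32)   # N x 1 objectness scores (dummy)
dets = np.hstack((boxes, scores)).astype(np.float32, copy=False)
print(dets.shape)                            # (3, 5)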
Example #4
def test_net(net, imdb, base_anchors_file):
    """ Test a region proposal network on a image database. """
    output_dir = get_output_dir(imdb, net)
    cache_file = os.path.join(output_dir, 'proposal_boxes.pkl')
    
    # load cached proposal boxes
    if os.path.exists(cache_file):
        with open(cache_file, 'rb') as f:
            proposal_boxes = cPickle.load(f)
        print 'load proposal boxes from \'{}\''.format(cache_file)
        return proposal_boxes
    
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    ####### no cached proposal boxes; generate them below
    num_images = len(imdb.image_index)

    # all detections are collected into:
    #    all_boxes[image] = N x 5 array of detections in
    #    (x1, y1, x2, y2, score)
    all_boxes = [[] for _ in xrange(num_images)]
    
    # timers
    _t = {'im_detect' : Timer(), 'misc' : Timer()}
    
    # generate proposal boxes
    for i in xrange(num_images):
        img_path = imdb.image_path_at(i)
        _t['im_detect'].tic()
        im = cv2.imread(img_path)
        scores, boxes = im_detect(net, im, base_anchors_file)
        _t['im_detect'].toc()
        
        all_boxes[i] = np.hstack((boxes, scores)).astype(np.float32, copy=False)
        
        print 'gen_proposal: {:d}/{:d} {:.3f}s' \
              .format(i + 1, num_images, _t['im_detect'].average_time)
    
    # save file
    with open(cache_file, 'wb') as f:
        cPickle.dump(all_boxes, f, cPickle.HIGHEST_PROTOCOL)
    
    return all_boxes
Example #5
    if args.cpu_mode:
        caffe.set_mode_cpu()
    else:
        caffe.set_mode_gpu()
        caffe.set_device(args.gpu_id)

    net = caffe.Net(prototxt, caffemodel, caffe.TEST)
    net.name = os.path.splitext(os.path.basename(caffemodel))[0]

    print '\n\nLoaded network {:s}'.format(caffemodel)

    if args.imdb_name:
        imdb = get_imdb(args.imdb_name)
        # res_boxes = test_imdb(net, imdb, anchors)
        res_boxes = test_imdb_comp(net, imdb, anchors)
        output_dir = get_output_dir(imdb, net)
        imdb.evaluate_detections(res_boxes, output_dir)
    else:
        # img_list = ['000012.jpg', '003681.png', '000008.png', '000010.png', '000013.png',
        #             '000004.jpg', '000018.png', '000022.png', '000047.png', '000056.png',
        #             '001111.jpg']
        img_list = ['1.jpg', '2.jpg', '3.jpg', '4.jpg', '5.jpg']

        for img_name in img_list:
            print '~' * 20
            print 'Demo for image: data/demo/' + img_name
            demo(net, img_name, anchors)

        plt.show()
Example #6
    anchordb = {
        'anchors': anchors,
        'output_width_map': output_w,
        'output_height_map': output_h
    }

    print 'stage-pre done!'

    # =============== stage-1 training rpn with imagenet parameters
    if current_stage <= 1:
        print 'start stage-1...'
        imdb = get_imdb(args.imdb_name)
        print 'Loaded dataset `{:s}` for training'.format(imdb.name)

        roidb = rpn_train.get_training_roidb(imdb)
        output_dir = rpn_config.get_output_dir(imdb, None)
        print 'Output will be saved to `{:s}`'.format(output_dir)

        ### mix in another dataset
        if args.mix_imdb_name is not None:
            imdb_mix = get_imdb(args.mix_imdb_name)
            print 'Loaded dataset `{:s}` for training'.format(imdb_mix.name)
            roidb_mix = rpn_train.get_training_roidb(imdb_mix)
            roidb.extend(roidb_mix)
        ###

        stage1_model = rpn_train.train_net(
            train_cfg.stage1.solver,
            roidb,
            anchordb,
            output_dir,
            args.test_def)
Example #7
    anchors, anchors_file = rpn_train.proposal_generate_anchors(args.imdb_name)
    anchordb = {'anchors': anchors, 
                'output_width_map': output_w,
                'output_height_map': output_h
                } 
    
    print 'stage-pre done!'
    
# =============== stage-1 training rpn with imagenet parameters 
    print 'start train rpn model...'
    imdb = get_imdb(args.imdb_name)
    print 'Loaded dataset `{:s}` for training'.format(imdb.name)

    roidb = rpn_train.get_training_roidb(imdb)
    output_dir = rpn_config.get_output_dir(imdb, None)
    print 'Output will be saved to `{:s}`'.format(output_dir)
    
    ### mix in another dataset
    if args.mix_imdb_name is not None:
        imdb_mix = get_imdb(args.mix_imdb_name)
        roidb_mix = rpn_train.get_training_roidb(imdb_mix)
        roidb.extend(roidb_mix)
    ### 

    rpn_model = rpn_train.train_net(args.solver, 
              roidb, anchordb, output_dir, final_name=imdb.name,
              pretrained_model=args.pretrained_model,
              max_iters=args.max_iters)
    print 'training rpn finished!'
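The argument parsing for these training scripts is not included on this page; the attributes used above (args.imdb_name, args.mix_imdb_name, args.solver, args.pretrained_model, args.max_iters) suggest an argparse setup roughly like the following. The flag names are guesses; only the attribute names are taken from the snippet:

import argparse

parser = argparse.ArgumentParser(description='Train an RPN model')
parser.add_argument('--imdb_name', help='dataset to train on')
parser.add_argument('--mix_imdb_name', default=None, help='optional second dataset to mix in')
parser.add_argument('--solver', help='path to the solver prototxt')
parser.add_argument('--pretrained_model', help='initial weights, e.g. an ImageNet caffemodel')
parser.add_argument('--max_iters', type=int, help='number of training iterations')
args = parser.parse_args()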
    
Example #8
    
    if args.cpu_mode:
        caffe.set_mode_cpu()
    else:
        caffe.set_mode_gpu()
        caffe.set_device(args.gpu_id)

    net = caffe.Net(prototxt, caffemodel, caffe.TEST)
    net.name = os.path.splitext(os.path.basename(caffemodel))[0]
    print '\n\nLoaded network {:s}'.format(caffemodel)
    
    if args.imdb_name:
        imdb = get_imdb(args.imdb_name)
        res_boxes = test_imdb(net, imdb, anchors)
        calc_precision_recall(res_boxes, imdb)
        output_dir = get_output_dir(imdb, net)
        imdb.evaluate_detections(res_boxes, output_dir)
    elif args.bad_list:
        bad_list_file = args.bad_list

        if not os.path.exists(bad_list_file):
            print 'Can\'t open bad case list file: `{:s}`'.format(bad_list_file)
            sys.exit()

        new_res_dir_name = 'res_' + os.path.splitext(os.path.basename(bad_list_file))[0]

        with open(bad_list_file, 'r') as f:
            img_list = [x.strip() for x in f.readlines()]
        
        for img_name in img_list:
            print '~' * 20