Example #1
def from_dets(imdb_name, output_dir, args):
    imdb = get_imdb(imdb_name)
    imdb.competition_mode(args.comp_mode)
    imdb.config['matlab_eval'] = args.matlab_eval
    with open(os.path.join(output_dir, 'detections.pkl'), 'rb') as f:
        dets = cPickle.load(f)
    #pose_file = os.path.join(output_dir, 'detections_pose.txt')
    #with open(pose_file, 'w') as f:
    #    for i in xrange(0, len(dets[0])):
    #        for j in xrange(1, len(dets)):
    #             det = dets[j][i]
    #             for d in det:
    #                  f.write("{:d} {:d} {:.1f} {:.1f} {:.1f} {:.1f} {:.3f} {:.3f} {:.3f}\n".format(i, j,
    #                           d[0], d[1], d[2], d[3], 15.0*d[4], 15.0*d[5], d[6]))
    
    #matlab_cmd = "test_pose_avp('%s');" % (pose_file)
    #print matlab_cmd
    #os.system('matlab -nodisplay -r "addpath(\'tools\');%s;quit"' % (matlab_cmd)) 
    
    if args.apply_nms:
        print 'Applying NMS to all detections'
        nms_dets = apply_nms(dets, cfg.TEST.NMS)
    else:
        nms_dets = dets

    print 'Evaluating detections'
    imdb.evaluate_detections(nms_dets, output_dir)
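
The args object consumed by from_dets() above is normally built by an argparse front end in the surrounding reval.py script. The sketch below shows one way such a driver could look; the flag names and the default imdb name are assumptions for illustration, not taken from any of the projects listed here.

import argparse
import os

def parse_args():
    # Hypothetical command-line interface for the from_dets() variant above.
    parser = argparse.ArgumentParser(description='Re-evaluate saved detections')
    parser.add_argument('output_dir', help='directory containing detections.pkl')
    parser.add_argument('--imdb', dest='imdb_name', default='voc_2007_test',
                        help='dataset to evaluate on (placeholder default)')
    parser.add_argument('--comp', dest='comp_mode', action='store_true',
                        help='competition mode')
    parser.add_argument('--matlab', dest='matlab_eval', action='store_true',
                        help='use the MATLAB evaluation code')
    parser.add_argument('--nms', dest='apply_nms', action='store_true',
                        help='re-apply NMS before evaluating')
    return parser.parse_args()

if __name__ == '__main__':
    args = parse_args()
    from_dets(args.imdb_name, os.path.abspath(args.output_dir), args)
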
Example #2
def from_dets(imdb_name, output_dir, args):
    imdb = get_imdb(imdb_name)
    imdb.competition_mode(args.comp_mode)
    imdb.config['matlab_eval'] = args.matlab_eval
    with open(os.path.join(output_dir, 'detections.pkl'), 'rb') as f:
        dets = cPickle.load(f)
    #pose_file = os.path.join(output_dir, 'detections_pose.txt')
    #with open(pose_file, 'w') as f:
    #    for i in xrange(0, len(dets[0])):
    #        for j in xrange(1, len(dets)):
    #             det = dets[j][i]
    #             for d in det:
    #                  f.write("{:d} {:d} {:.1f} {:.1f} {:.1f} {:.1f} {:.3f} {:.3f} {:.3f}\n".format(i, j,
    #                           d[0], d[1], d[2], d[3], 15.0*d[4], 15.0*d[5], d[6]))

    #matlab_cmd = "test_pose_avp('%s');" % (pose_file)
    #print matlab_cmd
    #os.system('matlab -nodisplay -r "addpath(\'tools\');%s;quit"' % (matlab_cmd))

    if args.apply_nms:
        print 'Applying NMS to all detections'
        nms_dets = apply_nms(dets, cfg.TEST.NMS)
    else:
        nms_dets = dets

    print 'Evaluating detections'
    imdb.evaluate_detections(nms_dets, output_dir)
Example #3
def from_dets(imdb_name, output_dir, comp_mode):
    imdb = get_imdb(imdb_name)
    imdb.competition_mode(comp_mode)
    with open(os.path.join(output_dir, 'detections.pkl'), 'rb') as f:
        dets = cPickle.load(f)

    print 'Applying NMS to all detections'
    nms_dets = apply_nms(dets, cfg.TEST.NMS)

    print 'Evaluating detections'
    imdb.evaluate_detections(nms_dets, output_dir)
Example #4
File: reval.py  Project: usepavlov/spot
def from_dets(imdb_name, output_dir, comp_mode):
    imdb = get_imdb(imdb_name)
    imdb.competition_mode(comp_mode)
    with open(os.path.join(output_dir, 'detections.pkl'), 'rb') as f:
        dets = cPickle.load(f)

    print 'Applying NMS to all detections'
    nms_dets = apply_nms(dets, cfg.TEST.NMS)

    print 'Evaluating detections'
    imdb.evaluate_detections(nms_dets, output_dir)
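
apply_nms() itself is not shown in these examples. As a rough sketch, assuming the usual Fast R-CNN layout where all_boxes[cls][image] is an N x 5 array of (x1, y1, x2, y2, score) rows, it runs greedy non-maximum suppression independently for every class/image pair; the pure-NumPy version below is an illustration, not the implementation any of these projects actually call.

import numpy as np

def nms(dets, thresh):
    # Greedy NMS over one (N, 5) array of (x1, y1, x2, y2, score) rows.
    x1, y1, x2, y2 = dets[:, 0], dets[:, 1], dets[:, 2], dets[:, 3]
    scores = dets[:, 4]
    areas = (x2 - x1 + 1) * (y2 - y1 + 1)
    order = scores.argsort()[::-1]
    keep = []
    while order.size > 0:
        i = order[0]
        keep.append(i)
        # Intersection of the current top box with every remaining box.
        xx1 = np.maximum(x1[i], x1[order[1:]])
        yy1 = np.maximum(y1[i], y1[order[1:]])
        xx2 = np.minimum(x2[i], x2[order[1:]])
        yy2 = np.minimum(y2[i], y2[order[1:]])
        w = np.maximum(0.0, xx2 - xx1 + 1)
        h = np.maximum(0.0, yy2 - yy1 + 1)
        inter = w * h
        ovr = inter / (areas[i] + areas[order[1:]] - inter)
        # Keep only boxes whose overlap with the kept box is below the threshold.
        order = order[np.where(ovr <= thresh)[0] + 1]
    return keep

def apply_nms(all_boxes, thresh):
    # Returns the same nesting as the input: one filtered array per class per image.
    num_classes, num_images = len(all_boxes), len(all_boxes[0])
    nms_boxes = [[[] for _ in range(num_images)] for _ in range(num_classes)]
    for cls_ind in range(num_classes):
        for im_ind in range(num_images):
            dets = all_boxes[cls_ind][im_ind]
            if len(dets) == 0:
                continue
            keep = nms(dets, thresh)
            nms_boxes[cls_ind][im_ind] = dets[keep, :]
    return nms_boxes
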
Example #5
def from_dets(imdb_name, output_dir, args):
    imdb = get_imdb(imdb_name)
    imdb.competition_mode(args.comp_mode)
    imdb.config['matlab_eval'] = args.matlab_eval
    with open(os.path.join(output_dir, 'detections.pkl'), 'rb') as f:
        dets = cPickle.load(f)

    print 'Applying NMS to all detections'
    cfg.USE_GPU_NMS = False  # much faster than GPU NMS for small number of dets
    nms_dets = apply_nms(dets, cfg.TEST.NMS)

    print 'Evaluating detections'
    imdb.evaluate_detections(nms_dets, output_dir)
Example #6
def from_dets(imdb_name, output_dir, args):
    imdb = get_imdb(imdb_name)
    imdb.competition_mode(args.comp_mode)
    imdb.config['matlab_eval'] = args.matlab_eval
    with open(os.path.join(output_dir, 'detections.pkl'), 'rb') as f:
        dets = cPickle.load(f)

    print 'Applying NMS to all detections'
    cfg.USE_GPU_NMS = False # much faster than GPU NMS for small number of dets
    nms_dets = apply_nms(dets, cfg.TEST.NMS)

    print 'Evaluating detections'
    imdb.evaluate_detections(nms_dets, output_dir)
Example #7
def from_dets(imdb_name, output_dir, args):
    imdb = get_imdb(imdb_name)
    imdb.competition_mode(args.comp_mode)
    imdb.config['matlab_eval'] = args.matlab_eval
    with open(os.path.join(output_dir, 'detections.pkl'), 'rb') as f:
        dets = cPickle.load(f)

    if args.apply_nms:
        print 'Applying NMS to all detections'
        nms_dets = apply_nms(dets, cfg.TEST.NMS)
    else:
        nms_dets = dets

    print 'Evaluating detections'
    imdb.evaluate_detections(nms_dets, output_dir)
Example #8
def from_dets(imdb_name, output_dir, args):
    imdb = get_imdb(imdb_name)
    imdb.competition_mode(args.comp_mode)
    imdb.config['matlab_eval'] = args.matlab_eval
    with open(os.path.join(output_dir, 'detections.pkl'), 'rb') as f:
        dets = cPickle.load(f)

    if args.apply_nms:
        print('Applying NMS to all detections')
        nms_dets = apply_nms(dets, cfg.TEST.NMS)
    else:
        nms_dets = dets

    print('Evaluating detections')
    imdb.evaluate_detections(nms_dets, output_dir)
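
Example #8 is the same function written with the print() function, so it also runs under Python 3 as long as cPickle is aliased (for instance via six.moves, or import pickle as cPickle). One related caveat, noted here as a common workaround rather than something these scripts do: a detections.pkl written by the Python 2 variants above usually needs an explicit encoding when loaded back under Python 3.

import pickle

# Reading a pickle produced by Python 2 cPickle under Python 3; the path is a
# placeholder, and encoding='latin1' is the usual choice when the file holds
# NumPy arrays.
with open('detections.pkl', 'rb') as f:
    dets = pickle.load(f, encoding='latin1')
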
Example #9
def test_net_train(net, imdb, run_name):
    """Test on an image database, 
    and generate pseudo ground truths for training fast rcnn."""
    num_images = len(imdb.image_index)
    # heuristic: keep an average of 40 detections per class per image prior
    # to NMS
    max_per_set = 40 * num_images
    # heuristic: keep at most 100 detections per class per image prior to NMS
    max_per_image = 100
    # detection threshold for each class (this is adaptively set based on the
    # max_per_set constraint)
    thresh = -np.inf * np.ones(imdb.num_classes)
    # thresh = 0.1 * np.ones(imdb.num_classes)
    # top_scores will hold one minheap of scores per class (used to enforce
    # the max_per_set constraint)
    top_scores = [[] for _ in xrange(imdb.num_classes)]
    # all detections are collected into:
    #    all_boxes[cls][image] = N x 5 array of detections in
    #    (x1, y1, x2, y2, score)
    all_boxes = [[[] for _ in xrange(num_images)]
                 for _ in xrange(imdb.num_classes)]

    output_dir = get_output_dir(imdb, net, run_name)
    print 'Output will be saved to `{:s}`'.format(output_dir)

    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    # timers
    _t = {'im_detect' : Timer(), 'misc' : Timer()}

    images_real = np.zeros((num_images,), dtype=object)
    gt = np.zeros((num_images, ), dtype=object)
    roidb = imdb.roidb
    
    scores_all = []
    boxes_all = []
    for i in xrange(num_images):
        im = cv2.imread(imdb.image_path_at(i))
        _t['im_detect'].tic()
        scores, boxes = im_detect(net, im, roidb[i]['boxes'])
        _t['im_detect'].toc()
        scores_all.append(scores)
        boxes_all.append(boxes)

        _t['misc'].tic()
        for j in xrange(imdb.num_classes):
            index = np.argmax(scores[:, j])
            all_boxes[j][i] = \
                np.hstack((boxes[index, j*4:(j+1)*4].reshape(1, -1), 
                           np.array([[scores[index, j]]])))

        gt_tmp = {'aeroplane' : np.empty((0, 4), dtype=np.float32), 
                  'bicycle' : np.empty((0, 4), dtype=np.float32), 
                  'bird' : np.empty((0, 4), dtype=np.float32), 
                  'boat' : np.empty((0, 4), dtype=np.float32), 
                  'bottle' : np.empty((0, 4), dtype=np.float32), 
                  'bus' : np.empty((0, 4), dtype=np.float32), 
                  'car' : np.empty((0, 4), dtype=np.float32), 
                  'cat' : np.empty((0, 4), dtype=np.float32), 
                  'chair' : np.empty((0, 4), dtype=np.float32), 
                  'cow' : np.empty((0, 4), dtype=np.float32), 
                  'diningtable' : np.empty((0, 4), dtype=np.float32), 
                  'dog' : np.empty((0, 4), dtype=np.float32), 
                  'horse' : np.empty((0, 4), dtype=np.float32), 
                  'motorbike' : np.empty((0, 4), dtype=np.float32), 
                  'person' : np.empty((0, 4), dtype=np.float32), 
                  'pottedplant' : np.empty((0, 4), dtype=np.float32), 
                  'sheep' : np.empty((0, 4), dtype=np.float32), 
                  'sofa' : np.empty((0, 4), dtype=np.float32), 
                  'train' : np.empty((0, 4), dtype=np.float32), 
                  'tvmonitor':np.empty((0, 4), dtype=np.float32)}
        tmp_idx = np.where(roidb[i]['labels'][0][:imdb.num_classes])[0]

        for j in xrange(len(tmp_idx)):
            idx_real = np.argmax(scores[:, tmp_idx[j]])
            gt_tmp[imdb.classes[tmp_idx[j]]] = np.array([boxes[idx_real, tmp_idx[j]*4+1], 
                                                         boxes[idx_real, tmp_idx[j]*4], 
                                                         boxes[idx_real, tmp_idx[j]*4+3],
                                                         boxes[idx_real, tmp_idx[j]*4+2]], dtype=np.float32)
            gt_tmp[imdb.classes[tmp_idx[j]]] += 1

        gt[i] = {'gt' : gt_tmp}

        images_real[i] = imdb.image_index[i]
        
        _t['misc'].toc()

        print 'im_detect: {:d}/{:d} {:.3f}s {:.3f}s' \
              .format(i + 1, num_images, _t['im_detect'].average_time,
                      _t['misc'].average_time)
        
    model_save_gt = {'images' : images_real, 'gt' : gt}
    sio.savemat('{}_gt.mat'.format(imdb.name), model_save_gt)

    dis_file = os.path.join(output_dir, 'discovery.pkl')
    with open(dis_file, 'wb') as f:
        cPickle.dump(all_boxes, f, cPickle.HIGHEST_PROTOCOL)
        
    dis_file_all = os.path.join(output_dir, 'discovery_all.pkl')
    results_all = {'scores_all' : scores_all, 'boxes_all' : boxes_all}
    with open(dis_file_all, 'wb') as f:
        cPickle.dump(results_all, f, cPickle.HIGHEST_PROTOCOL)

    try:
        print 'Applying NMS to all discovery', cfg.TEST.NMS
        nms_dets = apply_nms(all_boxes, cfg.TEST.NMS)

        nms_file = os.path.join(output_dir, 'nms_discovery.pkl')
        with open(nms_file, 'wb') as f:
            cPickle.dump(nms_dets, f, cPickle.HIGHEST_PROTOCOL)
    except Exception as e:
        print e

    print 'Evaluating discovery'
    imdb.evaluate_discovery(all_boxes, output_dir)
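
The hard-coded gt_tmp dictionary above lists the 20 PASCAL VOC class names one by one. Assuming imdb.classes holds exactly those names in the same order, which the imdb.classes[tmp_idx[j]] lookups a few lines later already rely on, the block could be written as a drop-in comprehension inside the loop:

# Sketch of an equivalent, shorter initialisation (assumes imdb.classes are the
# 20 VOC names used as keys above).
gt_tmp = {cls: np.empty((0, 4), dtype=np.float32) for cls in imdb.classes}
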
Example #10
def gogo():
    print 'starting ensemble'

    thresh = 0.01
    #data_type = 'val'
    data_type = 'test'
    
    exclude_worst = True

    num_classes = 201
    
    imdb_name = 'imagenet_' + data_type
    base_dir = '/home/dj/big/workspace/fast-rcnn/output/ensemble/'
    data = []
    result = []
    
    # 1. vgg16 frcnn    
    data.append('vgg16_imagenet_fast_rcnn_with_ss_iter_500000')
    result.append('result_comp4-1648.txt')

    # 2. vgg16 step 2_1   
    #data.append('vgg16_imagenet_fast_rcnn_step2_with_rpn_iter_520000')
    #result.append('result_comp4-13436.txt')

    # 3. vgg16 step 2_2    
    #data.append('vgg16_imagenet_fast_rcnn2_step2_with_rpn_iter_520000')
    #result.append('result_comp4-31392.txt')
    
    # (2, 3) vgg16 avg (step 2_1, step 2_2)    
    data.append('vgg16_imagenet_fast_rcnn_avg_2_3')
    result.append('result_comp4-3153.txt')
    
    # (2, 3, 5) vgg16 avg (step 2_1, step 2_2, step 2_4)    
    #data.append('vgg16_imagenet_fast_rcnn_with_rpn_avg_2_3_5')
    #result.append('result_comp4-33831.txt')
    
    # (2, 3, 6) vgg16 avg (step 2_1, step 2_2, step 4)    
    #data.append('vgg16_imagenet_fast_rcnn_avg_2_3_6')
    #result.append('result_comp4-22284.txt')
    
    # (2, 3, 4, 5) vgg16 avg (step 2_1, step 2_2, step 2_3, step 2_4)    
    #data.append('vgg16_imagenet_fast_rcnn_with_rpn_avg_2_3_4_5')
    #result.append('result_comp4-6730.txt')
    
    # (2, 3, 5, 6) vgg16 avg (step 2_1, step 2_2, step 2_4, step 4)    
    #data.append('vgg16_imagenet_fast_rcnn_avg_2_3_5_6')
    #result.append('result_comp4-6456.txt')

    # 6. vgg16 step 4    
    data.append('vgg16_imagenet_fast_rcnn_step4_with_rpn_iter_360000')
    result.append('result_comp4-16205.txt')

    # 7. vgg19 frcnn    
    data.append('vgg19_imagenet_fast_rcnn_with_ss_iter_470000')
    result.append('result_comp4-37160.txt')

    # 8. googlenet frcnn    
    #data.append('googlenet_imagenet_fast_rcnn_with_ss_iter_480000')
    #result.append('result_comp4-42391.txt')

    # 9. vgg16 step 3
    #data.append('vgg16_imagenet_fast_rcnn_step2_with_rpn_step3_iter_520000')
    #result.append('result_comp4-25665.txt')
    
    output_dir = '%s/results' % base_dir 
    
    all_boxes = None

    total_result = np.zeros((num_classes, len(data)))
    data_no = 0
    for one_result, one_data in zip(result, data):
        result_file = base_dir + one_data + '/val/' + one_result
        with open(result_file, 'rt') as f:
            line_no = 0
            for one_line in f.readlines():
                try:
                    one_number = float(one_line.rstrip())
                except:
                    continue
                line_no += 1
                total_result[line_no, data_no] = one_number                 
                if line_no >= num_classes - 1:
                    break
        data_no += 1
        
    min_data_index_per_class = np.argmin(total_result, axis=1)
    
    data_no = 0
    for one_data in data:
        det_file = base_dir + one_data + '/' + data_type + '/detections.pkl'
        if data_type == 'test':
            submission_file = base_dir + one_data + '/' + data_type + '/submission.txt'
        else:
            submission_file = ''
        
        print '[%s] processing %s' % (data_no + 1, one_data)
        
        with open(det_file, 'rb') as f:
            det = cPickle.load(f)

            num_images = len(det[0])

            # all_boxes[cls][image] = N x 5 array of detections in (x1, y1, x2, y2, score)
            if all_boxes is None:
                all_boxes = [[[] for _ in xrange(num_images)]
                             for _ in xrange(num_classes)]
            
            for cls_no in xrange(num_classes):
                if exclude_worst and cls_no > 0 and min_data_index_per_class[cls_no] == data_no:
                    continue
                
                for img_no in xrange(num_images):
                    det_value = det[cls_no][img_no]

                    if len(det_value) > 0:
                        inds = np.where((det_value[:, 4] >= thresh))[0]
                        det_value = det_value[inds]
                    
                    if len(all_boxes[cls_no][img_no]) == 0:
                        all_boxes[cls_no][img_no] = det_value
                    else:
                        all_boxes[cls_no][img_no] = np.vstack((all_boxes[cls_no][img_no], det_value))

        data_no += 1

    print ''
    print 'Applying NMS to all detections'
    nms_dets = apply_nms(all_boxes, cfg.TEST.NMS)
    
    all_boxes = None

    print 'Evaluating detections'
    imdb = get_imdb(imdb_name)    
    imdb.evaluate_detections(nms_dets, output_dir, submission_file)
Example #11
def gogo():
    print 'starting ensemble'

    thresh = 0.01
    #data_type = 'val'
    data_type = 'test'

    exclude_worst = True

    num_classes = 201

    imdb_name = 'imagenet_' + data_type
    base_dir = '/home/dj/big/workspace/fast-rcnn/output/ensemble/'
    data = []
    result = []

    # 1. vgg16 frcnn
    data.append('vgg16_imagenet_fast_rcnn_with_ss_iter_500000')
    result.append('result_comp4-1648.txt')

    # 2. vgg16 step 2_1
    #data.append('vgg16_imagenet_fast_rcnn_step2_with_rpn_iter_520000')
    #result.append('result_comp4-13436.txt')

    # 3. vgg16 step 2_2
    #data.append('vgg16_imagenet_fast_rcnn2_step2_with_rpn_iter_520000')
    #result.append('result_comp4-31392.txt')

    # (2, 3) vgg16 avg (step 2_1, step 2_2)
    data.append('vgg16_imagenet_fast_rcnn_avg_2_3')
    result.append('result_comp4-3153.txt')

    # (2, 3, 5) vgg16 avg (step 2_1, step 2_2, step 2_4)
    #data.append('vgg16_imagenet_fast_rcnn_with_rpn_avg_2_3_5')
    #result.append('result_comp4-33831.txt')

    # (2, 3, 6) vgg16 avg (step 2_1, step 2_2, step 4)
    #data.append('vgg16_imagenet_fast_rcnn_avg_2_3_6')
    #result.append('result_comp4-22284.txt')

    # (2, 3, 4, 5) vgg16 avg (step 2_1, step 2_2, step 2_3, step 2_4)
    #data.append('vgg16_imagenet_fast_rcnn_with_rpn_avg_2_3_4_5')
    #result.append('result_comp4-6730.txt')

    # (2, 3, 5, 6) vgg16 avg (step 2_1, step 2_2, step 2_4, step 4)
    #data.append('vgg16_imagenet_fast_rcnn_avg_2_3_5_6')
    #result.append('result_comp4-6456.txt')

    # 6. vgg16 step 4
    data.append('vgg16_imagenet_fast_rcnn_step4_with_rpn_iter_360000')
    result.append('result_comp4-16205.txt')

    # 7. vgg19 frcnn
    data.append('vgg19_imagenet_fast_rcnn_with_ss_iter_470000')
    result.append('result_comp4-37160.txt')

    # 8. googlenet frcnn
    #data.append('googlenet_imagenet_fast_rcnn_with_ss_iter_480000')
    #result.append('result_comp4-42391.txt')

    # 9. vgg16 step 3
    #data.append('vgg16_imagenet_fast_rcnn_step2_with_rpn_step3_iter_520000')
    #result.append('result_comp4-25665.txt')

    output_dir = '%s/results' % base_dir

    all_boxes = None

    total_result = np.zeros((num_classes, len(data)))
    data_no = 0
    for one_result, one_data in zip(result, data):
        result_file = base_dir + one_data + '/val/' + one_result
        with open(result_file, 'rt') as f:
            line_no = 0
            for one_line in f.readlines():
                try:
                    one_number = float(one_line.rstrip())
                except:
                    continue
                line_no += 1
                total_result[line_no, data_no] = one_number
                if line_no >= num_classes - 1:
                    break
        data_no += 1

    min_data_index_per_class = np.argmin(total_result, axis=1)

    data_no = 0
    for one_data in data:
        det_file = base_dir + one_data + '/' + data_type + '/detections.pkl'
        if data_type == 'test':
            submission_file = base_dir + one_data + '/' + data_type + '/submission.txt'
        else:
            submission_file = ''

        print '[%s] processing %s' % (data_no + 1, one_data)

        with open(det_file, 'rb') as f:
            det = cPickle.load(f)

            num_images = len(det[0])

            # all_boxes[cls][image] = N x 5 array of detections in (x1, y1, x2, y2, score)
            if all_boxes is None:
                all_boxes = [[[] for _ in xrange(num_images)]
                             for _ in xrange(num_classes)]

            for cls_no in xrange(num_classes):
                if exclude_worst and cls_no > 0 and min_data_index_per_class[
                        cls_no] == data_no:
                    continue

                for img_no in xrange(num_images):
                    det_value = det[cls_no][img_no]

                    if len(det_value) > 0:
                        inds = np.where((det_value[:, 4] >= thresh))[0]
                        det_value = det_value[inds]

                    if len(all_boxes[cls_no][img_no]) == 0:
                        all_boxes[cls_no][img_no] = det_value
                    else:
                        all_boxes[cls_no][img_no] = np.vstack(
                            (all_boxes[cls_no][img_no], det_value))

        data_no += 1

    print ''
    print 'Applying NMS to all detections'
    nms_dets = apply_nms(all_boxes, cfg.TEST.NMS)

    all_boxes = None

    print 'Evaluating detections'
    imdb = get_imdb(imdb_name)
    imdb.evaluate_detections(nms_dets, output_dir, submission_file)
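
The exclude_worst branch in the two ensemble examples drops, for each class, the contribution of the model with the lowest per-class validation result before merging detections. Isolated from the file parsing, the selection step amounts to an argmin over a classes-by-models score matrix; the toy numbers below are made up purely for illustration.

import numpy as np

# Rows are classes (row 0 = background, never excluded), columns are models.
# The values stand in for whatever per-class validation numbers the result
# files contain.
val_scores = np.array([[0.00, 0.00, 0.00],
                       [0.61, 0.55, 0.63],
                       [0.40, 0.47, 0.44]])
worst_model_per_class = np.argmin(val_scores, axis=1)

num_models = val_scores.shape[1]
for cls_no in range(1, val_scores.shape[0]):
    kept = [m for m in range(num_models) if m != worst_model_per_class[cls_no]]
    print('class %d merges models %s' % (cls_no, kept))
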
Example #12
def StageOne(file_,
             prototxt,
             model,
             classes,
             THRESHOLD=1.0 / 3,
             num_images=1,
             output_dir='/home/ubuntu/py-faster-rcnn/output'):
    '''
    Run one image through the object detector to classify each cell as
    background, rbc, or other.
    Return: all boxes with score above THRESHOLD.
    '''
    net = caffe.Net(prototxt, model, caffe.TEST)
    net.name = os.path.splitext(os.path.basename(model))[0]

    _t = {'im_detect': Timer(), 'misc': Timer()}
    # top_scores will hold one minheap of scores per class (used to enforce
    # the max_per_set constraint)
    num_classes = len(classes)
    top_scores = [[] for _ in xrange(num_classes)]
    # all detections are collected into:
    #    all_boxes[cls][image] = N x 5 array of detections in
    #    (x1, y1, x2, y2, score)
    all_boxes = [[[] for _ in xrange(num_images)] for _ in xrange(num_classes)]

    for i in xrange(num_images):
        # filter out any ground truth boxes
        if cfg.TEST.HAS_RPN:
            box_proposals = None
        else:
            raise Exception("HAS_RPN is False")
        im = cv2.imread(file_)
        _t['im_detect'].tic()
        scores, boxes = im_detect(net, im, box_proposals)
        _t['im_detect'].toc()

        _t['misc'].tic()
        for j in xrange(1, num_classes):
            inds = np.where(scores[:, j] > THRESHOLD)[0]
            cls_scores = scores[inds, j]
            cls_boxes = boxes[inds, j * 4:(j + 1) * 4]
            top_inds = np.argsort(-cls_scores)
            cls_scores = cls_scores[top_inds]
            cls_boxes = cls_boxes[top_inds, :]
            # push new scores onto the minheap
            for val in cls_scores:
                heapq.heappush(top_scores[j], val)
            # if we've collected more than the max number of detection,
            # then pop items off the minheap and update the class threshold
            #if len(top_scores[j]) > max_per_set:
            #    while len(top_scores[j]) > max_per_set:
            #        heapq.heappop(top_scores[j])
            #    thresh[j] = top_scores[j][0]

            all_boxes[j][i] = \
                    np.hstack((cls_boxes, cls_scores[:, np.newaxis])) \
                    .astype(np.float32, copy=False)

    _t['misc'].toc()

    #print 'im_detect: {:d}/{:d} {:.3f}s {:.3f}s' \
    #          .format(i + 1, num_images, _t['im_detect'].average_time,
    #                  _t['misc'].average_time)

    #only keep boxes with scores above the threshold
    for j in xrange(1, num_classes):
        for i in xrange(num_images):
            inds = np.where(all_boxes[j][i][:, -1] > THRESHOLD)[0]
            all_boxes[j][i] = all_boxes[j][i][inds, :]

    det_file = os.path.join(output_dir, 'detections.pkl')
    with open(det_file, 'wb') as f:
        cPickle.dump(all_boxes, f, cPickle.HIGHEST_PROTOCOL)
    #Apply NMS to all detections
    nms_dets = apply_nms(all_boxes, cfg.TEST.NMS)
    with open(det_file, 'wb') as f:
        cPickle.dump(nms_dets, f, cPickle.HIGHEST_PROTOCOL)
    return nms_dets
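
A hypothetical call to StageOne() could look like the following; the image path, model files and class tuple are placeholders chosen only to show the expected argument types, and the return value follows the all_boxes layout built inside the function.

# Placeholder paths and class names, for illustration only.
classes = ('__background__', 'rbc', 'other')
nms_dets = StageOne('/path/to/cell_image.jpg',
                    '/path/to/test.prototxt',
                    '/path/to/model.caffemodel',
                    classes,
                    THRESHOLD=1.0 / 3,
                    num_images=1,
                    output_dir='/tmp/stage_one_output')
# nms_dets[j][0] is an N x 5 array of (x1, y1, x2, y2, score) for class j > 0.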