def rpn_generate(queue=None, imdb_name=None, rpn_model_path=None, cfg=None,
                 rpn_test_prototxt=None):
    """Use a trained RPN to generate proposals.

    Runs the RPN over every image in `imdb_name`, pickles the resulting
    proposals next to the model, and sends the pickle path back to the
    parent process via `queue`.
    """
    # No pre-NMS filtering; keep at most 2000 boxes after NMS.
    cfg.TEST.RPN_PRE_NMS_TOP_N = -1
    cfg.TEST.RPN_POST_NMS_TOP_N = 2000
    print('RPN model: {}'.format(rpn_model_path))
    print('Using config:')
    pprint.pprint(cfg)

    import caffe
    _init_caffe(cfg)

    # NOTE: the matlab implementation computes proposals on flipped images,
    # too. We compute them on the image once and then flip the already
    # computed proposals. This might cause a minor loss in mAP (less
    # proposal jittering).
    imdb = get_imdb(imdb_name)
    print('Loaded dataset `{:s}` for proposal generation'.format(imdb.name))

    # Load the RPN and pick the output directory.
    # NOTE(review): `args` is a module-level global here, not a parameter —
    # this function breaks if the module's `args` is not set; confirm callers.
    net = caffe.Net(rpn_test_prototxt, rpn_model_path, caffe.TEST)
    output_dir = get_output_dir(cfg, imdb, args.run_name)
    print('Output will be saved to `{:s}`'.format(output_dir))

    # Generate proposals on the imdb
    proposals = imdb_proposals(net, imdb)

    # Persist the proposals and hand the path to the parent process.
    model_stem = os.path.splitext(os.path.basename(rpn_model_path))[0]
    proposals_path = os.path.join(output_dir, model_stem + '_proposals.pkl')
    with open(proposals_path, 'wb') as fh:
        cPickle.dump(proposals, fh, cPickle.HIGHEST_PROTOCOL)
    print('Wrote RPN proposals to {}'.format(proposals_path))
    queue.put({'proposal_path': proposals_path})
def get_roidb(imdb_name):
    """Load the named imdb and return its training roidb.

    Uses the proposal method configured in cfg.TRAIN.PROPOSAL_METHOD.
    """
    dataset = get_imdb(imdb_name)
    print('Loaded dataset `{:s}` for training'.format(dataset.name))
    dataset.set_proposal_method(cfg.TRAIN.PROPOSAL_METHOD)
    print('Set proposal method: {:s}'.format(cfg.TRAIN.PROPOSAL_METHOD))
    return get_training_roidb(dataset)
def get_roidb(imdb_name, rpn_file=None):
    """Load the named imdb and return (roidb, imdb) for training.

    If `rpn_file` is given, it is stored in the imdb config so precomputed
    RPN proposals are used.
    """
    dataset = get_imdb(imdb_name)
    print('Loaded dataset `{:s}` for training'.format(dataset.name))
    dataset.set_proposal_method(cfg.TRAIN.PROPOSAL_METHOD)
    print('Set proposal method: {:s}'.format(cfg.TRAIN.PROPOSAL_METHOD))
    if rpn_file is not None:
        dataset.config['rpn_file'] = rpn_file
    training_roidb = get_training_roidb(dataset)
    return training_roidb, dataset
def load_database(args):
    """Set up the training (and optional validation) image databases.

    Returns (imdb, roidb, imdb_val, roidb_val, data_layer, data_layer_val);
    the three validation values are None when args.dataset_validation == "no".
    """
    print("Setting up image database: " + args.dataset)
    imdb = get_imdb(args.dataset)
    print('Loaded dataset `{:s}` for training'.format(imdb.name))
    # args.use_flipped is a string flag; only the literal "True" enables flipping.
    roidb = get_training_roidb(imdb, args.use_flipped == "True")
    print('{:d} roidb entries'.format(len(roidb)))

    if args.dataset_validation != "no":
        print("Setting up validation image database: " + args.dataset_validation)
        imdb_val = get_imdb(args.dataset_validation)
        print('Loaded dataset `{:s}` for validation'.format(imdb_val.name))
        # Validation roidb is never flipped.
        roidb_val = get_training_roidb(imdb_val, False)
        print('{:d} roidb entries'.format(len(roidb_val)))
    else:
        imdb_val = None
        roidb_val = None

    data_layer = RoIDataLayer(roidb, imdb.num_classes)
    # BUG FIX: data_layer_val was previously unbound when no validation
    # dataset was configured, so the return below raised NameError.
    data_layer_val = None
    if roidb_val is not None:
        data_layer_val = RoIDataLayer(roidb_val, imdb_val.num_classes,
                                      random=True)
    return imdb, roidb, imdb_val, roidb_val, data_layer, data_layer_val
def from_dets(imdb_name, output_dir, args):
    """Re-run evaluation of detections previously written to `output_dir`.

    Removed a block of commented-out dead code that used to load
    detections.pkl and optionally apply NMS before evaluating; the live
    path only runs the Python evaluation on existing result files.
    """
    imdb = get_imdb(imdb_name)
    imdb.competition_mode(args.comp_mode)
    imdb.config['matlab_eval'] = args.matlab_eval
    print('Evaluating detections')
    # NOTE(review): calls the imdb's private helper directly instead of the
    # public evaluate_detections(), which would require the detections pkl.
    imdb._do_python_eval(output_dir)
def combined_roidb(imdb_names):
    """Build a roidb from one or more '+'-separated dataset names.

    Returns (imdb, roidb). For a single dataset the real imdb is returned;
    for multiple datasets the roidbs are concatenated and wrapped in a
    generic imdb shell carrying the combined name.
    """
    def _build(name):
        # Load one dataset and turn it into a training roidb.
        dataset = get_imdb(name)
        print('Loaded dataset `{:s}` for training'.format(dataset.name))
        dataset.set_proposal_method(cfg.TRAIN.PROPOSAL_METHOD)
        print('Set proposal method: {:s}'.format(cfg.TRAIN.PROPOSAL_METHOD))
        return get_training_roidb(dataset)

    roidbs = [_build(name) for name in imdb_names.split('+')]
    roidb = roidbs[0]
    if len(roidbs) == 1:
        imdb = get_imdb(imdb_names)
    else:
        # Concatenate the remaining roidbs onto the first.
        for extra in roidbs[1:]:
            roidb.extend(extra)
        imdb = datasets.imdb.imdb(imdb_names)
    return imdb, roidb
return args if __name__ == "__main__": args = parse_args() t = Timer() with open(args.labels_file, 'r') as lf: label_lines = lf.readlines() classes = [x.strip().replace(" ", "_") for x in label_lines] mean = np.load(args.meanfile).mean(1).mean(1) g = GoogLeNet( args.prototxt, args.caffemodel, classes, mean, gpu_id = args.gpu_id) print(mean) traffic_imdb = factory.get_imdb(args.imdb_name) with open(args.detfile, 'r') as df: det_lines = df.readlines() splitlines = [x.strip().split(' ') for x in det_lines] det_by_img = {x[0]: x[1:] for x in splitlines} det_by_img = {} for x in splitlines: if not x[0] in det_by_img: det_by_img[x[0]] = [] det_by_img[x[0]].append(x[1:]) count = 0 all_boxes = [ [[] for _ in xrange(len(traffic_imdb.image_index))] for _ in xrange(len(g.classes) + 1)] # +1 due to background
# Driver script: load a trained Fast/Faster R-CNN snapshot and evaluate it
# on the requested imdb.
args = parse_args()

# Echo the exact invocation for reproducibility in logs.
print('Called with args:')
print(args)

# Layer optional config overrides: file first, then key=value pairs.
if args.cfg_file is not None:
    cfg_from_file(args.cfg_file)
if args.set_cfgs is not None:
    cfg_from_list(args.set_cfgs)

cfg.GPU_ID = args.gpu_id

print('Using config:')
pprint.pprint(cfg)

# Optionally block until the snapshot exists (e.g. while training is still
# running in another process); polls every 10 seconds.
while not os.path.exists(args.caffemodel) and args.wait:
    print('Waiting for {} to exist...'.format(args.caffemodel))
    time.sleep(10)

caffe.set_mode_gpu()
caffe.set_device(args.gpu_id)
net = caffe.Net(args.prototxt, args.caffemodel, caffe.TEST)
# Name the net after the snapshot file stem (used in output paths).
net.name = os.path.splitext(os.path.basename(args.caffemodel))[0]

imdb = get_imdb(args.imdb_name)
imdb.competition_mode(args.comp_mode)
if not cfg.TEST.HAS_RPN:
    # Without an RPN, test-time proposals come from a precomputed method.
    imdb.set_proposal_method(cfg.TEST.PROPOSAL_METHOD)

test_net(net, imdb, max_per_image=args.max_per_image, vis=args.vis)