def eval_all(args):
    """Evaluate one checkpoint across multiple devices in parallel.

    Splits the evaluation records evenly over ``args.devices`` worker
    processes, each running ``inference`` on its shard, then gathers one
    result per record from a shared queue and dumps everything as JSON
    lines under ``config.eval_dir``.

    Args:
        args: parsed CLI namespace; reads ``args.resume_weights`` (epoch
            tag naming the checkpoint to load) and ``args.devices``
            (number of worker processes).
    """
    # Resolve the checkpoint path; fail fast if the weights are missing.
    saveDir = config.model_dir
    evalDir = config.eval_dir
    misc_utils.ensure_dir(evalDir)
    model_file = os.path.join(
        saveDir, 'epoch_{}.pkl'.format(args.resume_weights))
    assert os.path.exists(model_file)
    # Load evaluation records (one JSON object per line).
    records = misc_utils.load_json_lines(config.eval_source)
    # Shard records as evenly as possible across devices.
    num_records = len(records)
    num_devs = args.devices
    num_image = math.ceil(num_records / num_devs)
    result_queue = Queue(1000)
    procs = []
    all_results = []
    for i in range(num_devs):
        start = i * num_image
        end = min(start + num_image, num_records)
        split_records = records[start:end]
        # Fix: when num_devs > num_records some shards are empty —
        # don't spawn an idle worker process for them.
        if not split_records:
            continue
        proc = Process(target=inference,
                       args=(model_file, i, split_records, result_queue))
        proc.start()
        procs.append(proc)
    # Drain the queue BEFORE joining so workers never block on a full
    # queue; we expect exactly one result per record.
    pbar = tqdm(total=num_records, ncols=50)
    for _ in range(num_records):
        all_results.append(result_queue.get())
        pbar.update(1)
    # Fix: close the progress bar explicitly instead of leaking it.
    pbar.close()
    for p in procs:
        p.join()
    fpath = os.path.join(
        evalDir, 'dump-{}.json'.format(args.resume_weights))
    misc_utils.save_json_lines(all_results, fpath)
def run_test():
    """CLI entry point: evaluate every saved checkpoint in an epoch range.

    Parses ``--start_epoch``/``--end_epoch``/``--devices`` from the
    command line, then for each epoch in ``[start_epoch, end_epoch)``
    whose checkpoint file exists, evaluates the records from
    ``config.eval_source`` and writes the results to ``config.eval_dir``.

    NOTE(review): a second ``run_test`` defined later in this file
    shadows this one at import time — confirm which variant is intended.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--start_epoch', '-s', default=30, type=int)
    parser.add_argument('--end_epoch', '-e', default=50, type=int)
    parser.add_argument('--devices', '-d', default=1, type=int)
    args = parser.parse_args()
    model_dir = config.model_dir
    eval_dir = config.eval_dir
    ensure_dir(config.eval_dir)
    records = load_json_lines(config.eval_source)
    start_epoch, end_epoch = args.start_epoch, args.end_epoch
    # NOTE(review): range() excludes end_epoch, so that epoch itself is
    # never evaluated — confirm the exclusive upper bound is intended.
    for epoch in range(start_epoch, end_epoch):
        model_file = osp.join(model_dir, 'epoch-{}.pkl'.format(epoch))
        # Checkpoints may be saved sparsely; silently skip missing ones.
        if not osp.exists(model_file):
            continue
        results = eval_all(model_file, records, args)
        fpath = osp.join(eval_dir, 'epoch-{}.human'.format(epoch))
        save_json_lines(results, fpath)
def inference(args):
    """Run single-image detection inference with a given checkpoint.

    Loads the checkpoint named by ``args.resume_weights``, runs the
    network on the image at ``args.img_path``, filters detections by
    score and NMS, and writes them as a one-line JSON file named after
    the image.

    NOTE(review): relies on names not visible in this chunk —
    ``top_k``, ``if_set_nms``, ``get_data`` and ``boxes_dump`` — confirm
    they are defined at module level.
    """
    # Traced forward pass. `net` is resolved lazily when val_func is
    # CALLED, which happens only after the network is built below.
    @jit.trace(symbolic=False)
    def val_func():
        pred_boxes = net(net.inputs)
        return pred_boxes
    # model path
    saveDir = config.model_dir
    evalDir = config.eval_dir
    misc_utils.ensure_dir(evalDir)
    model_file = os.path.join(saveDir, 'epoch_{}.pkl'.format(args.resume_weights))
    assert os.path.exists(model_file)
    # load model
    net = network.Network()
    net.eval()
    check_point = mge.load(model_file)
    net.load_state_dict(check_point['state_dict'])
    # Feed the preprocessed image and its metadata into the network's
    # input placeholders, then run the traced forward pass.
    image, im_info = get_data(args.img_path)
    net.inputs["image"].set_value(image.astype(np.float32))
    net.inputs["im_info"].set_value(im_info)
    pred_boxes = val_func().numpy()
    # Build a class tag (1..num_tag) for each predicted box; assumes
    # pred_boxes holds top_k boxes per class per instance group —
    # TODO confirm against the network's output layout.
    num_tag = config.num_classes - 1
    target_shape = (pred_boxes.shape[0] // num_tag // top_k, top_k)
    pred_tags = (np.arange(num_tag) + 1).reshape(-1, 1)
    pred_tags = np.tile(pred_tags, target_shape).reshape(-1, 1)
    # nms
    if if_set_nms:
        # Set-NMS path: append a per-group identity column so boxes
        # from the same proposal set are never suppressed together.
        from set_nms_utils import set_cpu_nms
        n = pred_boxes.shape[0] // top_k
        idents = np.tile(np.arange(n)[:, None], (1, top_k)).reshape(-1, 1)
        pred_boxes = np.hstack((pred_boxes, idents))
        # Score is now the second-to-last column (identity was appended).
        keep = pred_boxes[:, -2] > 0.05
        pred_boxes = pred_boxes[keep]
        pred_tags = pred_tags[keep]
        keep = set_cpu_nms(pred_boxes, 0.5)
        # Drop the identity column before reporting.
        pred_boxes = pred_boxes[keep][:, :-1]
        pred_tags = pred_tags[keep].flatten()
    else:
        # Plain NMS path: score is the last column here.
        from set_nms_utils import cpu_nms
        keep = pred_boxes[:, -1] > 0.05
        pred_boxes = pred_boxes[keep]
        pred_tags = pred_tags[keep]
        keep = cpu_nms(pred_boxes, 0.5)
        pred_boxes = pred_boxes[keep]
        pred_tags = pred_tags[keep].flatten()
    # im_info row carries the image size; presumably (…, height, width)
    # in the last two columns — verify against get_data.
    result_dict = dict(height=int(im_info[0, -2]), width=int(im_info[0, -1]), dtboxes=boxes_dump(pred_boxes, pred_tags))
    # NOTE(review): [-2] assumes the filename contains a dot (extension);
    # a dot-free name would raise IndexError — confirm inputs.
    name = args.img_path.split('/')[-1].split('.')[-2]
    misc_utils.save_json_lines([result_dict], '{}.json'.format(name))
def run_test():
    """Evaluate every available checkpoint within a CLI-chosen epoch range.

    Reads the epoch range and device count from the command line; for
    each epoch whose checkpoint file exists under ``config.model_dir``,
    runs ``eval_all`` over the records from ``config.eval_source`` and
    stores the per-epoch results in ``config.eval_dir``.
    """
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('--start_epoch', '-s', default=30, type=int)
    arg_parser.add_argument('--end_epoch', '-e', default=50, type=int)
    arg_parser.add_argument('--devices', '-d', default=1, type=int)
    cli_args = arg_parser.parse_args()
    # model_path
    model_dir = config.model_dir
    eval_dir = config.eval_dir
    ensure_dir(config.eval_dir)
    records = load_json_lines(config.eval_source)
    for epoch in range(cli_args.start_epoch, cli_args.end_epoch):
        model_file = osp.join(model_dir, 'epoch-{}.pkl'.format(epoch))
        # Checkpoints may be saved sparsely; skip epochs without one.
        if not osp.exists(model_file):
            continue
        print('Processing {}'.format(osp.basename(model_file)))
        results = eval_all(model_file, records, cli_args)
        fpath = osp.join(eval_dir, 'epoch-{}.human'.format(epoch))
        save_json_lines(results, fpath)