def worker(current_network, weight_file, dataset_dir):
    cfg = current_network.Cfg()
    cfg.backbone_pretrained = False
    model = current_network.Net(cfg)
    model.eval()

    state_dict = mge.load(weight_file)
    if "state_dict" in state_dict:
        state_dict = state_dict["state_dict"]
    model.load_state_dict(state_dict)

    evaluator = DetEvaluator(model)

    test_loader = build_dataloader(dataset_dir, model.cfg)
    if dist.get_rank() == 0:  # only rank 0 shows a progress bar
        test_loader = tqdm(test_loader)

    result_list = []
    for data in test_loader:
        image, im_info = DetEvaluator.process_inputs(
            data[0][0],
            model.cfg.test_image_short_size,
            model.cfg.test_image_max_size,
        )
        pred_res = evaluator.predict(
            image=mge.tensor(image), im_info=mge.tensor(im_info)
        )
        result = {
            "det_res": pred_res,
            # recover the numeric image id from the file name
            "image_id": int(data[1][2][0].split(".")[0].split("_")[-1]),
        }
        result_list.append(result)
    return result_list
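# Minimal sketch of how this worker pairs with MegEngine's distributed
# launcher, mirroring the multi-GPU branch of the newer main() further
# below; the weight path and dataset directory here are hypothetical:
#
#   dist_worker = dist.launcher(n_gpus=2)(worker)
#   per_rank_results = dist_worker(current_network, "epoch_17.pkl", "/data/datasets")
#   result_list = sum(per_rank_results, [])  # flatten the per-rank lists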
def main():
    parser = make_parser()
    args = parser.parse_args()

    current_network = import_from_file(args.file)
    cfg = current_network.Cfg()
    cfg.backbone_pretrained = False
    model = current_network.Net(cfg)
    model.eval()

    state_dict = mge.load(args.weight_file)
    if "state_dict" in state_dict:
        state_dict = state_dict["state_dict"]
    model.load_state_dict(state_dict)

    evaluator = DetEvaluator(model)

    ori_img = cv2.imread(args.image)
    image, im_info = DetEvaluator.process_inputs(
        ori_img.copy(),
        model.cfg.test_image_short_size,
        model.cfg.test_image_max_size,
    )
    pred_res = evaluator.predict(
        image=mge.tensor(image), im_info=mge.tensor(im_info)
    )
    res_img = DetEvaluator.vis_det(
        ori_img,
        pred_res,
        is_show_label=True,
        classes=data_mapper[cfg.test_dataset["name"]].class_names,
    )
    cv2.imwrite("results.jpg", res_img)
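# `make_parser` is referenced by the entry points in this section but not
# defined here; below is a minimal sketch consistent with the flags they
# read. The short option names and defaults are assumptions; the
# evaluation entry points additionally use --dataset_dir, --devices,
# --start_epoch, and --end_epoch.
def make_parser():
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument("-f", "--file", type=str, help="network description file")
    parser.add_argument("-w", "--weight_file", type=str, default=None, help="dumped weights file")
    parser.add_argument("-i", "--image", type=str, help="image to run inference on")
    parser.add_argument("-d", "--dataset_dir", type=str, help="dataset root directory")
    parser.add_argument("-n", "--devices", type=int, default=1, help="number of gpus for evaluation")
    parser.add_argument("-se", "--start_epoch", type=int, default=-1)
    parser.add_argument("-de", "--end_epoch", type=int, default=-1)
    return parser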
def worker(
    current_network,
    model_file,
    data_dir,
    worker_id,
    total_worker,
    result_queue,
):
    """
    :param current_network: imported network description module
    :param model_file: file of dumped weights
    :param data_dir: the dataset directory
    :param worker_id: the index of the worker
    :param total_worker: number of gpus used for evaluation
    :param result_queue: queue for collecting per-image results
    """
    # pin this worker process to its own gpu
    os.environ["CUDA_VISIBLE_DEVICES"] = str(worker_id)

    cfg = current_network.Cfg()
    cfg.backbone_pretrained = False
    model = current_network.Net(cfg, batch_size=1)
    model.eval()
    evaluator = DetEvaluator(model)

    state_dict = mge.load(model_file)
    if "state_dict" in state_dict:
        state_dict = state_dict["state_dict"]
    model.load_state_dict(state_dict)

    # trace the forward pass in graph (symbolic) mode
    @jit.trace(symbolic=True)
    def val_func():
        pred = model(model.inputs)
        return pred

    loader = build_dataloader(worker_id, total_worker, data_dir, model.cfg)
    for data_dict in loader:
        data, im_info = DetEvaluator.process_inputs(
            data_dict[0][0],
            model.cfg.test_image_short_size,
            model.cfg.test_image_max_size,
        )
        model.inputs["im_info"].set_value(im_info)
        model.inputs["image"].set_value(data.astype(np.float32))
        pred_res = evaluator.predict(val_func)
        result_queue.put_nowait({
            "det_res": pred_res,
            "image_id": int(data_dict[1][2][0].split(".")[0].split("_")[-1]),
        })
def worker(current_network, weight_file, dataset_dir, result_list,
           master_ip=None, port=None, world_size=1, rank=0):
    if world_size > 1:
        dist.init_process_group(
            master_ip=master_ip,
            port=port,
            world_size=world_size,
            rank=rank,
            device=rank,
        )

    cfg = current_network.Cfg()
    cfg.backbone_pretrained = False
    model = current_network.Net(cfg)
    model.eval()

    state_dict = mge.load(weight_file)
    if "state_dict" in state_dict:
        state_dict = state_dict["state_dict"]
    model.load_state_dict(state_dict)

    evaluator = DetEvaluator(model)

    test_loader = build_dataloader(dataset_dir, model.cfg)
    if dist.get_world_size() == 1:
        test_loader = tqdm(test_loader)

    for data in test_loader:
        image, im_info = DetEvaluator.process_inputs(
            data[0][0],
            model.cfg.test_image_short_size,
            model.cfg.test_image_max_size,
        )
        pred_res = evaluator.predict(
            image=mge.tensor(image), im_info=mge.tensor(im_info)
        )
        result = {
            "det_res": pred_res,
            "image_id": int(data[1][2][0].split(".")[0].split("_")[-1]),
        }
        # result_list is a multiprocessing queue in the distributed case,
        # a plain python list otherwise
        if dist.get_world_size() > 1:
            result_list.put_nowait(result)
        else:
            result_list.append(result)
def main():
    parser = make_parser()
    args = parser.parse_args()

    sys.path.insert(0, os.path.dirname(args.file))
    current_network = importlib.import_module(
        os.path.basename(args.file).split(".")[0]
    )

    cfg = current_network.Cfg()
    cfg.backbone_pretrained = False
    model = current_network.Net(cfg, batch_size=1)
    model.eval()

    state_dict = mge.load(args.weight_file)
    if "state_dict" in state_dict:
        state_dict = state_dict["state_dict"]
    model.load_state_dict(state_dict)
    logger.info("Load Model : %s completed", args.weight_file)

    evaluator = DetEvaluator(model)

    # trace the forward pass in graph (symbolic) mode
    @jit.trace(symbolic=True)
    def val_func():
        pred = model(model.inputs)
        return pred

    ori_img = cv2.imread(args.image)
    data, im_info = DetEvaluator.process_inputs(
        ori_img.copy(),
        model.cfg.test_image_short_size,
        model.cfg.test_image_max_size,
    )
    model.inputs["im_info"].set_value(im_info)
    model.inputs["image"].set_value(data.astype(np.float32))
    pred_res = evaluator.predict(val_func)

    res_img = DetEvaluator.vis_det(
        ori_img,
        pred_res,
        is_show_label=True,
        classes=COCO.class_names,
    )
    cv2.imwrite("results.jpg", res_img)
def detect_persons(self, image):
    data, im_info = DetEvaluator.process_inputs(
        image.copy(),
        self.detector.cfg.test_image_short_size,
        self.detector.cfg.test_image_max_size,
    )
    self.detector.inputs["im_info"].set_value(im_info)
    self.detector.inputs["image"].set_value(data.astype(np.float32))

    evaluator = DetEvaluator(self.detector)
    det_res = evaluator.predict(self.det_func)

    persons = []
    for d in det_res:
        # each detection row is [x0, y0, x1, y1, score, class_id];
        # predicted ids are 0-based, +1 maps to dataset categories
        cls_id = int(d[5] + 1)
        if cls_id == 1:  # person category
            bbox = d[:4]
            persons.append(bbox)
    return persons
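# `self.detector` and `self.det_func` are set up elsewhere in the owning
# class; a minimal sketch of that setup inside __init__, following the
# same traced graph-mode pattern as val_func above (the
# `detector_network` and `det_weights` names are hypothetical):
#
#   det_cfg = detector_network.Cfg()
#   det_cfg.backbone_pretrained = False
#   self.detector = detector_network.Net(det_cfg, batch_size=1)
#   self.detector.eval()
#   self.detector.load_state_dict(mge.load(det_weights))
#
#   @jit.trace(symbolic=True)
#   def det_func():
#       return self.detector(self.detector.inputs)
#   self.det_func = det_func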
def detect_persons(self, image):
    data, im_info = DetEvaluator.process_inputs(
        image.copy(),
        self.detector.cfg.test_image_short_size,
        self.detector.cfg.test_image_max_size,
    )

    evaluator = DetEvaluator(self.detector)
    det_res = evaluator.predict(
        image=mge.tensor(data), im_info=mge.tensor(im_info)
    )

    persons = []
    for d in det_res:
        # each detection row is [x0, y0, x1, y1, score, class_id]
        cls_id = int(d[5] + 1)
        if cls_id == 1:  # person category
            bbox = d[:5]  # keep the score for NMS
            persons.append(bbox)

    persons = np.array(persons).reshape(-1, 5)
    keep = py_cpu_nms(persons, cfg.nms_thr)
    return persons[keep]
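# `py_cpu_nms` is referenced above but not defined in this section; below
# is a sketch of standard CPU non-maximum suppression over rows of
# [x0, y0, x1, y1, score], which is what the caller expects it to do.
def py_cpu_nms(dets, thresh):
    x0, y0, x1, y1 = dets[:, 0], dets[:, 1], dets[:, 2], dets[:, 3]
    scores = dets[:, 4]
    areas = (x1 - x0 + 1) * (y1 - y0 + 1)
    order = scores.argsort()[::-1]  # indices sorted by descending score

    keep = []
    while order.size > 0:
        i = order[0]
        keep.append(i)
        # intersection of the current top box with the remaining boxes
        xx0 = np.maximum(x0[i], x0[order[1:]])
        yy0 = np.maximum(y0[i], y0[order[1:]])
        xx1 = np.minimum(x1[i], x1[order[1:]])
        yy1 = np.minimum(y1[i], y1[order[1:]])
        w = np.maximum(0.0, xx1 - xx0 + 1)
        h = np.maximum(0.0, yy1 - yy0 + 1)
        inter = w * h
        iou = inter / (areas[i] + areas[order[1:]] - inter)
        # drop boxes overlapping the kept box above the threshold
        order = order[np.where(iou <= thresh)[0] + 1]
    return keep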
def main():
    # pylint: disable=import-outside-toplevel,too-many-branches,too-many-statements
    from pycocotools.coco import COCO
    from pycocotools.cocoeval import COCOeval

    parser = make_parser()
    args = parser.parse_args()

    current_network = import_from_file(args.file)
    cfg = current_network.Cfg()

    if args.weight_file:
        args.start_epoch = args.end_epoch = -1
    else:
        if args.start_epoch == -1:
            args.start_epoch = cfg.max_epoch - 1
        if args.end_epoch == -1:
            args.end_epoch = args.start_epoch
        assert 0 <= args.start_epoch <= args.end_epoch < cfg.max_epoch

    for epoch_num in range(args.start_epoch, args.end_epoch + 1):
        if args.weight_file:
            weight_file = args.weight_file
        else:
            weight_file = "log-of-{}/epoch_{}.pkl".format(
                os.path.basename(args.file).split(".")[0], epoch_num
            )

        result_list = []
        if args.devices > 1:
            result_queue = Queue(2000)
            master_ip = "localhost"
            server = dist.Server()
            port = server.py_server_port

            procs = []
            for i in range(args.devices):
                proc = Process(
                    target=worker,
                    args=(
                        current_network,
                        weight_file,
                        args.dataset_dir,
                        result_queue,
                        master_ip,
                        port,
                        args.devices,
                        i,
                    ),
                )
                proc.start()
                procs.append(proc)

            # drain the queue until every test image has been processed
            num_imgs = dict(coco=5000, objects365=30000)
            for _ in tqdm(range(num_imgs[cfg.test_dataset["name"]])):
                result_list.append(result_queue.get())
            for p in procs:
                p.join()
        else:
            worker(current_network, weight_file, args.dataset_dir, result_list)

        all_results = DetEvaluator.format(result_list, cfg)
        json_path = "log-of-{}/epoch_{}.json".format(
            os.path.basename(args.file).split(".")[0], epoch_num
        )
        all_results = json.dumps(all_results)
        with open(json_path, "w") as fo:
            fo.write(all_results)
        logger.info("Save to %s finished, start evaluation!", json_path)

        eval_gt = COCO(
            os.path.join(
                args.dataset_dir,
                cfg.test_dataset["name"],
                cfg.test_dataset["ann_file"],
            )
        )
        eval_dt = eval_gt.loadRes(json_path)
        cocoEval = COCOeval(eval_gt, eval_dt, iouType="bbox")
        cocoEval.evaluate()
        cocoEval.accumulate()
        cocoEval.summarize()

        metrics = [
            "AP",
            "AP@0.5",
            "AP@0.75",
            "APs",
            "APm",
            "APl",
            "AR@1",
            "AR@10",
            "AR@100",
            "ARs",
            "ARm",
            "ARl",
        ]
        logger.info("mmAP".center(32, "-"))
        for i, m in enumerate(metrics):
            logger.info("|\t%s\t|\t%.03f\t|", m, cocoEval.stats[i])
        logger.info("-" * 32)
def main():
    # pylint: disable=import-outside-toplevel,too-many-branches,too-many-statements
    from pycocotools.coco import COCO
    from pycocotools.cocoeval import COCOeval

    parser = make_parser()
    args = parser.parse_args()

    current_network = import_from_file(args.file)
    cfg = current_network.Cfg()

    if args.weight_file:
        args.start_epoch = args.end_epoch = -1
    else:
        if args.start_epoch == -1:
            args.start_epoch = cfg.max_epoch - 1
        if args.end_epoch == -1:
            args.end_epoch = args.start_epoch
        assert 0 <= args.start_epoch <= args.end_epoch < cfg.max_epoch

    for epoch_num in range(args.start_epoch, args.end_epoch + 1):
        if args.weight_file:
            weight_file = args.weight_file
        else:
            weight_file = "log-of-{}/epoch_{}.pkl".format(
                os.path.basename(args.file).split(".")[0], epoch_num
            )

        if args.devices > 1:
            # each rank returns its own result list; flatten them
            dist_worker = dist.launcher(n_gpus=args.devices)(worker)
            result_list = dist_worker(current_network, weight_file, args.dataset_dir)
            result_list = sum(result_list, [])
        else:
            result_list = worker(current_network, weight_file, args.dataset_dir)

        all_results = DetEvaluator.format(result_list, cfg)
        if args.weight_file:
            json_path = "{}_{}.json".format(
                os.path.basename(args.file).split(".")[0],
                os.path.basename(args.weight_file).split(".")[0],
            )
        else:
            json_path = "log-of-{}/epoch_{}.json".format(
                os.path.basename(args.file).split(".")[0], epoch_num
            )
        all_results = json.dumps(all_results)
        with open(json_path, "w") as fo:
            fo.write(all_results)
        logger.info("Save results to %s, start evaluation!", json_path)

        eval_gt = COCO(
            os.path.join(
                args.dataset_dir,
                cfg.test_dataset["name"],
                cfg.test_dataset["ann_file"],
            )
        )
        eval_dt = eval_gt.loadRes(json_path)
        cocoEval = COCOeval(eval_gt, eval_dt, iouType="bbox")
        cocoEval.evaluate()
        cocoEval.accumulate()
        cocoEval.summarize()

        metrics = [
            "AP",
            "AP@0.5",
            "AP@0.75",
            "APs",
            "APm",
            "APl",
            "AR@1",
            "AR@10",
            "AR@100",
            "ARs",
            "ARm",
            "ARl",
        ]
        logger.info("mmAP".center(32, "-"))
        for i, m in enumerate(metrics):
            logger.info("|\t%s\t|\t%.03f\t|", m, cocoEval.stats[i])
        logger.info("-" * 32)
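# `import_from_file` is used by several entry points above but not defined
# in this section; a minimal sketch based on importlib (the real helper
# may differ in detail, e.g. in the module name it registers):
def import_from_file(path):
    import importlib.util

    spec = importlib.util.spec_from_file_location("config_module", path)
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)
    return module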