def MaskRcnn_eval(dataset_path, ckpt_path, ann_file):
    """MaskRcnn evaluation."""
    ds = create_maskrcnn_dataset(dataset_path, batch_size=config.test_batch_size, is_training=False)

    # Build the network and load the trained checkpoint.
    net = Mask_Rcnn_Resnet50(config)
    param_dict = load_checkpoint(ckpt_path)
    load_param_into_net(net, param_dict)
    net.set_train(False)

    eval_iter = 0
    total = ds.get_dataset_size()
    outputs = []
    dataset_coco = COCO(ann_file)

    print("\n========================================\n")
    print("total images num: ", total)
    print("Processing, please wait a moment.")
    max_num = 128
    for data in ds.create_dict_iterator(output_numpy=True, num_epochs=1):
        eval_iter = eval_iter + 1

        img_data = data['image']
        img_metas = data['image_shape']
        gt_bboxes = data['box']
        gt_labels = data['label']
        gt_num = data['valid_num']
        gt_mask = data["mask"]

        start = time.time()
        # run net
        output = net(Tensor(img_data), Tensor(img_metas), Tensor(gt_bboxes), Tensor(gt_labels),
                     Tensor(gt_num), Tensor(gt_mask))
        end = time.time()
        print("Iter {} cost time {}".format(eval_iter, end - start))

        # output: predicted boxes, labels, validity mask and per-box foreground masks
        all_bbox = output[0]
        all_label = output[1]
        all_mask = output[2]
        all_mask_fb = output[3]

        for j in range(config.test_batch_size):
            all_bbox_squee = np.squeeze(all_bbox.asnumpy()[j, :, :])
            all_label_squee = np.squeeze(all_label.asnumpy()[j, :, :])
            all_mask_squee = np.squeeze(all_mask.asnumpy()[j, :, :])
            all_mask_fb_squee = np.squeeze(all_mask_fb.asnumpy()[j, :, :, :])

            # keep only the valid detections
            all_bboxes_tmp_mask = all_bbox_squee[all_mask_squee, :]
            all_labels_tmp_mask = all_label_squee[all_mask_squee]
            all_mask_fb_tmp_mask = all_mask_fb_squee[all_mask_squee, :, :]

            # keep at most max_num detections with the highest scores
            if all_bboxes_tmp_mask.shape[0] > max_num:
                inds = np.argsort(-all_bboxes_tmp_mask[:, -1])
                inds = inds[:max_num]
                all_bboxes_tmp_mask = all_bboxes_tmp_mask[inds]
                all_labels_tmp_mask = all_labels_tmp_mask[inds]
                all_mask_fb_tmp_mask = all_mask_fb_tmp_mask[inds]

            bbox_results = bbox2result_1image(all_bboxes_tmp_mask, all_labels_tmp_mask, config.num_classes)
            segm_results = get_seg_masks(all_mask_fb_tmp_mask, all_bboxes_tmp_mask, all_labels_tmp_mask,
                                         img_metas[j], True, config.num_classes)
            outputs.append((bbox_results, segm_results))

    # convert the collected results to COCO format and run COCO evaluation
    eval_types = ["bbox", "segm"]
    result_files = results2json(dataset_coco, outputs, "./results.pkl")
    coco_eval(result_files, eval_types, dataset_coco, single_result=False)
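# Invocation sketch (an assumption, not code from the repo): the surrounding
# script is expected to supply these arguments, for example from the command
# line; the paths below are placeholders only.
#
#   MaskRcnn_eval(dataset_path="/path/to/MaskRcnn_eval.mindrecord",
#                 ckpt_path="/path/to/mask_rcnn.ckpt",
#                 ann_file="/path/to/annotations/instances_val2017.json")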
time.sleep(5)

if not args_opt.only_create_dataset:
    loss_scale = float(config.loss_scale)

    # When creating the MindDataset, use the first mindrecord file, such as MaskRcnn.mindrecord0.
    dataset = create_maskrcnn_dataset(mindrecord_file, batch_size=config.batch_size,
                                      device_num=device_num, rank_id=rank)

    dataset_size = dataset.get_dataset_size()
    print("total images num: ", dataset_size)
    print("Create dataset done!")

    net = Mask_Rcnn_Resnet50(config=config)
    net = net.set_train()

    # Optionally load pre-trained weights; when pretrain_epoch_size is 0, keep only
    # the backbone and mask-head parameters from the checkpoint.
    load_path = args_opt.pre_trained
    if load_path != "":
        param_dict = load_checkpoint(load_path)
        if config.pretrain_epoch_size == 0:
            for item in list(param_dict.keys()):
                if not (item.startswith('backbone') or item.startswith('rcnn_mask')):
                    param_dict.pop(item)
        load_param_into_net(net, param_dict)

    loss = LossNet()
    lr = Tensor(dynamic_lr(config,
def create_network(name, *args, **kwargs):
    if name == "maskrcnn":
        return Mask_Rcnn_Resnet50(config=config)
    raise NotImplementedError(f"{name} is not implemented in the repo")
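# Usage sketch (an assumption, not code from the repo): the factory above can be
# paired with MindSpore's standard checkpoint APIs to obtain an inference-ready
# network; the checkpoint path is a placeholder.
if __name__ == "__main__":
    from mindspore import load_checkpoint, load_param_into_net

    net = create_network("maskrcnn")
    param_dict = load_checkpoint("/path/to/mask_rcnn.ckpt")  # placeholder path
    load_param_into_net(net, param_dict)
    net.set_train(False)  # switch to evaluation behaviour (e.g. frozen BatchNorm statistics)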