def _get_child_configer_transform(self, child_config_file):
    """Build a per-dataset configer plus its matching CV2 augmentation pipeline.

    The child config file is merged onto a clone of the current configer: its
    'data' section always overrides the parent's, while its transform sections
    are taken only for the validation split or when 'use_adaptive_transform'
    is present in the parent config.
    """
    per_dataset_cfg = Configer(configs=child_config_file)
    merged_cfg = self.configer.clone()

    # The dataset-specific 'data' section always wins over the parent's.
    merged_cfg.params_root['data'].update(per_dataset_cfg.get('data'))

    adopt_dataset_transforms = (
        self.configer.exists('use_adaptive_transform') or self.dataset == 'val'
    )
    if adopt_dataset_transforms:
        merged_cfg.params_root['train_trans'] = per_dataset_cfg.params_root['train_trans']
        merged_cfg.params_root['val_trans'] = per_dataset_cfg.params_root['val_trans']

    transform = CV2AugCompose(split=self.dataset, configer=merged_cfg)
    return merged_cfg, transform
def __init__(self, args, device='cuda'):
    """Load a deploy model from a checkpoint and prepare it for inference.

    Args:
        args: parsed CLI arguments; ``args.model_path`` points to the checkpoint file.
        device: requested device ('cuda' by default); silently falls back to
            'cpu' when no CUDA device is visible.
    """
    if torch.cuda.device_count() == 0:
        device = 'cpu'
    self.device = torch.device(device)
    Log.info('Resuming from {}...'.format(args.model_path))
    # Fix: pass map_location so the CPU fallback above actually works — a
    # checkpoint saved from GPU tensors would otherwise fail to deserialize
    # on a CUDA-less host.
    checkpoint_dict = torch.load(args.model_path, map_location=self.device)
    self.configer = Configer(config_dict=checkpoint_dict['config_dict'],
                             args_parser=args, valid_flag="deploy")
    self.net = ModelManager(self.configer).get_deploy_model()
    # NOTE(review): the trailing False presumably disables strict key
    # matching — confirm against RunnerHelper.load_state_dict.
    RunnerHelper.load_state_dict(self.net, checkpoint_dict['state_dict'], False)
    if device == 'cuda':
        self.net = DataParallelModel(self.net, gather_=True)
    self.net = self.net.to(self.device).eval()
    self.test_loader = DataLoader(self.configer)
# NOTE(review): this chunk begins mid-function — the assignment below is the
# body of a per-label-id loop (presumably ``for i in ...`` over label ids)
# whose header lies outside this view; confirm against the full file.
color_dst[label_map == i] = color_list[i % len(color_list)]
# Pack the accumulated per-pixel colors into a uint8 RGB image, then convert
# to BGR for OpenCV drawing/blending.
color_img_rgb = np.array(color_dst, dtype=np.uint8)
color_img_bgr = cv2.cvtColor(color_img_rgb, cv2.COLOR_RGB2BGR)
if image_canvas is not None:
    # Overlay the colorized labels onto the caller-supplied canvas
    # (weights: 0.6 canvas / 0.4 labels).
    image_canvas = cv2.addWeighted(image_canvas, 0.6, color_img_bgr, 0.4, 0)
    return image_canvas
else:
    return color_img_bgr


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument('--configs', default=None, type=str,
                        dest='configs', help='The file of the hyper parameters.')
    parser.add_argument('--image_file', default=None, type=str,
                        dest='image_file', help='The image file of Seg Parser.')
    parser.add_argument('--label_file', default=None, type=str,
                        dest='label_file', help='The label file of Seg Parser.')
    parser.add_argument('--image_dir', default=None, type=str,
                        dest='image_dir', help='The image directory of Seg Parser.')
    parser.add_argument('--label_dir', default=None, type=str,
                        dest='label_dir', help='The label directory of Seg Parser.')
    args_parser = parser.parse_args()

    seg_parser = SegParser(Configer(configs=args_parser.configs))
    # Parse a single image/label pair, then a whole directory of pairs.
    seg_parser.parse_img_seg(args_parser.image_file, args_parser.label_file)
    seg_parser.parse_dir_seg(args_parser.image_dir, args_parser.label_dir)
# NOTE(review): chunk begins mid-call — the line below closes an
# ``add_argument`` started outside this view.
dest='deploy.norm_type', help='The pool type.')
parser.add_argument('--gpu_id', default=[0, 1], type=int,
                    dest='gpu_id', help='The gpu id.')
args = parser.parse_args()

# Hard-coded checkpoint paths for the two classifiers being merged.
model_path1 = '../../checkpoints/cls/resnet152/google_landmark_2020_resnet152_v2cluster_448_GPU8_final.pth'
model_path2 = '../../checkpoints/cls/resnest200/google_landmark_2020_resnest200_v2cluster_448_GPU8_final.pth'

# Rebuild each deploy model from its own embedded config, then load weights.
checkpoint_dict1 = torch.load(model_path1)
configer1 = Configer(config_dict=checkpoint_dict1['config_dict'], args_parser=args, valid_flag="deploy")
net1 = ModelManager(configer1).get_deploy_model()
# NOTE(review): trailing False presumably disables strict key matching — confirm.
RunnerHelper.load_state_dict(net1, checkpoint_dict1['state_dict'], False)

checkpoint_dict2 = torch.load(model_path2)
configer2 = Configer(config_dict=checkpoint_dict2['config_dict'], args_parser=args, valid_flag="deploy")
net2 = ModelManager(configer2).get_deploy_model()
RunnerHelper.load_state_dict(net2, checkpoint_dict2['state_dict'], False)

# Fuse both classifiers into one module on CPU and build a dummy input —
# presumably for tracing/export further down; confirm against the full file.
net = MergeClsModel(net1, net2)
device = torch.device('cpu')
net = net.to(device).eval()
dummy_input = torch.randn(1, 3, 512, 512).to(device)
parser.add_argument('REMAIN', nargs='*')  # swallow leftover CLI tokens for config overrides
args_parser = parser.parse_args()

# Deferred import: distributed bootstrap needs the parsed args first —
# NOTE(review): confirm against lib.utils.distributed.
from lib.utils.distributed import handle_distributed
handle_distributed(args_parser, os.path.expanduser(os.path.abspath(__file__)))

# Seed RNGs for reproducibility when requested.
if args_parser.seed is not None:
    random.seed(args_parser.seed)
    torch.manual_seed(args_parser.seed)

cudnn.enabled = True
cudnn.benchmark = args_parser.cudnn

configer = Configer(args_parser=args_parser)

# 'data.data_dir' may be a single directory (str) or a list of directories;
# normalize to a list of user-expanded paths.
data_dir = configer.get('data', 'data_dir')
if isinstance(data_dir, str):
    data_dir = [data_dir]
abs_data_dir = [os.path.expanduser(x) for x in data_dir]
configer.update(['data', 'data_dir'], abs_data_dir)

project_dir = os.path.dirname(os.path.realpath(__file__))
configer.add(['project_dir'], project_dir)

if configer.get('logging', 'log_to_file'):
    # Timestamp the log file so successive runs do not overwrite each other.
    log_file = configer.get('logging', 'log_file')
    new_log_file = '{}_{}'.format(
        log_file, time.strftime("%Y-%m-%d_%X", time.localtime()))
    configer.update(['logging', 'log_file'], new_log_file)
# NOTE(review): chunk ends mid-conditional — the else-suite continues outside this view.
else:
parser.add_argument('REMAIN', nargs='*') args_parser = parser.parse_args() from lib.utils.distributed import handle_distributed handle_distributed(args_parser, os.path.expanduser(os.path.abspath(__file__))) if args_parser.seed is not None: random.seed(args_parser.seed) torch.manual_seed(args_parser.seed) cudnn.enabled = True cudnn.benchmark = args_parser.cudnn configer = Configer(args_parser=args_parser) abs_data_dir = os.path.expanduser(configer.get('data', 'data_dir')) configer.update(['data', 'data_dir'], abs_data_dir) project_dir = os.path.dirname(os.path.realpath(__file__)) configer.add(['project_dir'], project_dir) if configer.get('logging', 'log_to_file'): log_file = configer.get('logging', 'log_file') new_log_file = '{}_{}'.format( log_file, time.strftime("%Y-%m-%d_%X", time.localtime())) configer.update(['logging', 'log_file'], new_log_file) else: configer.update(['logging', 'logfile_level'], None) Log.init(logfile_level=configer.get('logging', 'logfile_level'),
# NOTE(review): chunk begins mid-call — the line below closes an
# ``add_argument`` started outside this view; its help text ('Use CUDNN.')
# looks copy-pasted from the --cudnn flag — confirm intent.
type=str2bool, nargs='?', default=False,
dest='distributed', help='Use CUDNN.')
args_parser = parser.parse_args()

if args_parser.seed is not None:
    # Offset the seed by local_rank so each distributed worker gets a
    # distinct but reproducible random stream.
    random.seed(args_parser.seed + args_parser.local_rank)
    torch.manual_seed(args_parser.seed + args_parser.local_rank)
    # NOTE(review): nesting reconstructed from a collapsed source line —
    # confirm this gpu guard really sits inside the seed check.
    if args_parser.gpu is not None:
        torch.cuda.manual_seed_all(args_parser.seed + args_parser.local_rank)

configer = Configer(args_parser=args_parser)

cudnn.enabled = True
# Benchmark mode only helps with fixed input sizes; disable it when
# multi-scale data is configured.
if configer.get('data', 'multiscale') is None:
    cudnn.benchmark = args_parser.cudnn
else:
    cudnn.benchmark = False

# Pin visible GPUs only in the single-process (non-distributed) case.
if configer.get('gpu') is not None and not configer.get('distributed', default=False):
    os.environ["CUDA_VISIBLE_DEVICES"] = ','.join(
        str(gpu_id) for gpu_id in configer.get('gpu'))

# Default the norm layer when the config leaves it unset.
if configer.get('network', 'norm_type') is None:
    configer.update('network.norm_type', 'batchnorm')

if torch.cuda.device_count() <= 1 or configer.get('distributed',
# NOTE(review): this condition continues outside this view.
# NOTE(review): chunk begins mid-method — the statements below read
# ``self.seg_running_score`` and are presumably the reporting tail of an
# evaluate() method whose definition lies outside this view.
Log.info('mIoU (Class-wise)')
iou_dict = self.seg_running_score.get_cls_iu()
for cid, miou in iou_dict.items():
    Log.info('\t{}\t{}'.format(cid, miou))
# Table-style ' & '-joined row: per-class IoUs followed by the mean IoU,
# all expressed in percent.
print(' & '.join('{:.1f}'.format(x * 100)
                 for x in list(iou_dict.values()) + [self.seg_running_score.get_mean_iou()]))


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument('--configs', default=None, type=str,
                        dest='configs', help='The configs file of pose.')
    parser.add_argument('--gt_dir', default=None, type=str,
                        dest='gt_dir', help='The groundtruth annotations.')
    parser.add_argument('--pred_dir', default=None, type=str,
                        dest='pred_dir', help='The label dir of predict annotations.')
    # NOTE(review): --file_list is parsed but not passed to evaluate() here —
    # confirm whether it is read elsewhere or dead.
    parser.add_argument('--file_list', default='all')
    args = parser.parse_args()

    ade20k_evaluator = ADE20KEvaluator(Configer(configs=args.configs))
    ade20k_evaluator.evaluate(args.pred_dir, args.gt_dir)
# NOTE(review): chunk begins mid-script — ``parser`` is built outside this view.
args_parser = parser.parse_args()

# Deferred import: distributed bootstrap needs the parsed args first —
# NOTE(review): confirm against lib.utils.distributed.
from lib.utils.distributed import handle_distributed
handle_distributed(args_parser, os.path.expanduser(os.path.abspath(__file__)))

# Seed RNGs for reproducibility when requested.
if args_parser.seed is not None:
    random.seed(args_parser.seed)
    torch.manual_seed(args_parser.seed)

cudnn.enabled = True
cudnn.benchmark = args_parser.cudnn

print(args_parser)  # echo the effective CLI options
configer = Configer(args_parser=args_parser)

# 'data.data_dir' may be a single directory (str) or a list of directories;
# normalize to a list of user-expanded paths.
data_dir = configer.get('data', 'data_dir')
if isinstance(data_dir, str):
    data_dir = [data_dir]
abs_data_dir = [os.path.expanduser(x) for x in data_dir]
configer.update(['data', 'data_dir'], abs_data_dir)

project_dir = os.path.dirname(os.path.realpath(__file__))
configer.add(['project_dir'], project_dir)

if configer.get('logging', 'log_to_file'):
    # Timestamp the log file so successive runs do not overwrite each other.
    log_file = configer.get('logging', 'log_file')
    new_log_file = '{}_{}'.format(
        log_file, time.strftime("%Y-%m-%d_%X", time.localtime()))
    configer.update(['logging', 'log_file'], new_log_file)
# NOTE(review): chunk ends mid-conditional — the else-suite continues outside this view.
else:
# NOTE(review): chunk begins mid-call — the line below closes an
# ``add_argument`` started outside this view.
dest='test:out_dir', help='The test out directory of images.')

# *********** Params for env. **********
parser.add_argument('--seed', default=None, type=int, help='manual seed')
parser.add_argument('--cudnn', type=str2bool, nargs='?', default=True,
                    help='Use CUDNN.')
args_parser = parser.parse_args()

# Seed RNGs for reproducibility when requested.
if args_parser.seed is not None:
    random.seed(args_parser.seed)
    torch.manual_seed(args_parser.seed)

cudnn.enabled = True
cudnn.benchmark = args_parser.cudnn

configer = Configer(args_parser=args_parser)
# NOTE(review): sibling entry scripts accept a list for 'data.data_dir', but
# this one assumes a plain string — a list would raise inside expanduser;
# confirm which configs reach this script.
abs_data_dir = os.path.expanduser(configer.get('data', 'data_dir'))
configer.update(['data', 'data_dir'], abs_data_dir)

# Restrict CUDA to the configured devices.
if configer.get('gpu') is not None:
    os.environ["CUDA_VISIBLE_DEVICES"] = ','.join(str(gpu_id) for gpu_id in configer.get('gpu'))

project_dir = os.path.dirname(os.path.realpath(__file__))
configer.add(['project_dir'], project_dir)

if configer.get('logging', 'log_to_file'):
    # Timestamp the log file so repeated runs do not clobber earlier logs.
    log_file = configer.get('logging', 'log_file')
    new_log_file = '{}_{}'.format(log_file, time.strftime("%Y-%m-%d_%X", time.localtime()))
    configer.update(['logging', 'log_file'], new_log_file)
else:
    configer.update(['logging', 'logfile_level'], None)
# NOTE(review): chunk begins mid-loop — ``filename`` comes from an enclosing
# iteration over the prediction directory, outside this view.
print(filename)
pred_path = os.path.join(pred_dir, filename)
gt_path = os.path.join(gt_dir, filename)
# Read both maps as palette ('P') images so pixel values are label ids.
predmap = ImageHelper.img2np(ImageHelper.read_image(pred_path, tool='pil', mode='P'))
gtmap = ImageHelper.img2np(ImageHelper.read_image(gt_path, tool='pil', mode='P'))
predmap = self.relabel(predmap)
gtmap = self.relabel(gtmap)
# Remap ground-truth label 0 to 255 — presumably the ignore index for the
# running score; confirm against seg_running_score.
gtmap[gtmap == 0] = 255
self.seg_running_score.update(predmap[np.newaxis, :, :], gtmap[np.newaxis, :, :])
img_cnt += 1

Log.info('Evaluate {} images'.format(img_cnt))
Log.info('mIOU: {}'.format(self.seg_running_score.get_mean_iou()))
Log.info('Pixel ACC: {}'.format(self.seg_running_score.get_pixel_acc()))


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument('--configs', default=None, type=str,
                        dest='configs', help='The configs file of pose.')
    parser.add_argument('--gt_dir', default=None, type=str,
                        dest='gt_dir', help='The groundtruth annotations.')
    parser.add_argument('--pred_dir', default=None, type=str,
                        dest='pred_dir', help='The label dir of predict annotations.')
    args = parser.parse_args()

    cocostuff_evaluator = COCOStuffEvaluator(Configer(configs=args.configs))
    cocostuff_evaluator.evaluate(args.pred_dir, args.gt_dir)
def forward(self, x_):
    """Run the OCR segmentation head.

    Returns a (aux_logits, main_logits) pair, both bilinearly upsampled to
    the spatial size of the input ``x_``.
    """
    backbone_feats = self.backbone(x_)
    # Auxiliary (DSN) branch from the penultimate feature map.
    aux_out = self.dsn_head(backbone_feats[-2])
    # Main branch: object-contextual head conditioned on the aux prediction.
    main_out = self.asp_ocr_head(backbone_feats[-1], aux_out)
    main_out = self.head(main_out)
    out_size = (x_.size(2), x_.size(3))
    aux_out = F.interpolate(aux_out, size=out_size, mode="bilinear", align_corners=True)
    main_out = F.interpolate(main_out, size=out_size, mode="bilinear", align_corners=True)
    return aux_out, main_out


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    # *********** Params for data. **********
    parser.add_argument('--configs', default=None, type=str, dest='configs',
                        help='The file of the hyper parameters.')
    parser.add_argument('--num_classes', default=19, type=int,
                        dest='data:num_classes', help='Classes nb.')
    parser.add_argument('--backbone', type=str, dest='network:backbone',
                        default="deepbase_resnet101_dilated8", help='The backbone.')
    parser.add_argument('--bn_type', type=str, dest='network:bn_type',
                        default="inplace_abn", help='The BN type.')
    parser.add_argument('REMAIN', nargs='*')
    cli_opts = parser.parse_args()

    # Smoke-construct the network from the parsed configuration.
    SpatialOCRNet(Configer(args_parser=cli_opts))
# NOTE(review): chunk begins mid-loop — ``filename`` comes from an enclosing
# iteration over the prediction directory, outside this view.
print(filename)
pred_path = os.path.join(pred_dir, filename)
gt_path = os.path.join(gt_dir, filename)
# Read both maps as palette ('P') images so pixel values are label ids.
predmap = ImageHelper.img2np(ImageHelper.read_image(pred_path, tool='pil', mode='P'))
gtmap = ImageHelper.img2np(ImageHelper.read_image(gt_path, tool='pil', mode='P'))
predmap = self.relabel(predmap)
gtmap = self.relabel(gtmap)
self.seg_running_score.update(predmap[np.newaxis, :, :], gtmap[np.newaxis, :, :])
img_cnt += 1

Log.info('Evaluate {} images'.format(img_cnt))
Log.info('mIOU: {}'.format(self.seg_running_score.get_mean_iou()))
Log.info('Pixel ACC: {}'.format(self.seg_running_score.get_pixel_acc()))


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument('--configs', default=None, type=str,
                        dest='configs', help='The configs file of pose.')
    parser.add_argument('--gt_dir', default=None, type=str,
                        dest='gt_dir', help='The groundtruth annotations.')
    parser.add_argument('--pred_dir', default=None, type=str,
                        dest='pred_dir', help='The label dir of predict annotations.')
    args = parser.parse_args()

    pcontext_evaluator = PascalContextEvaluator(Configer(configs=args.configs))
    pcontext_evaluator.evaluate(args.pred_dir, args.gt_dir)