Example 1
 def __init__(self, args):
     self.args = args
     # data transforms
     input_transform = transform.Compose([
         transform.ToTensor(),
         transform.Normalize([.485, .456, .406], [.229, .224, .225])])
     # dataset
     data_kwargs = {'transform': input_transform, 'base_size': args.base_size, 'crop_size': args.crop_size}
     trainset = get_segmentation_dataset(args.dataset, split=args.train_split, mode='train', root=args.data_folder, **data_kwargs)
     testset = get_segmentation_dataset(args.dataset, split='val', mode='val', root=args.data_folder, **data_kwargs)
     # dataloader
     kwargs = {'num_workers': args.workers, 'pin_memory': True} 
     self.trainloader = data.DataLoader(trainset, batch_size=args.batch_size,
                                        drop_last=True, shuffle=True, **kwargs)
     self.valloader = data.DataLoader(testset, batch_size=args.batch_size,
                                      drop_last=False, shuffle=False, **kwargs)
     self.nclass = args.num_classes
     self.best_pred = 0.0 
     # model
     if args.model == "unet":
         from code.models import UNet
         model = UNet(n_channels=3, n_classes=args.num_classes, bilinear=False)
         optimizer = torch.optim.Adam(model.parameters(), lr=args.lr, weight_decay=args.weight_decay)
     else:     
         model = fpn.get_fpn(nclass=args.num_classes, backbone=args.backbone, pretrained=False)
         # differential learning rates: the pretrained backbone keeps the
         # base lr while the randomly initialized head gets 10x
         params_list = [{'params': model.pretrained.parameters(), 'lr': args.lr}]
         if hasattr(model, 'head'):
             params_list.append({'params': model.head.parameters(), 'lr': args.lr*10})
         # optimizer = torch.optim.SGD(params_list, lr=args.lr, momentum=args.momentum, weight_decay=args.weight_decay)
         optimizer = torch.optim.Adam(params_list, lr=args.lr, weight_decay=args.weight_decay)
     # criterion
     self.criterion = torch.nn.CrossEntropyLoss(ignore_index=args.ignore_label)
     self.model, self.optimizer = model, optimizer
     # using cuda
     if args.cuda:
         self.model = self.model.cuda()
         self.criterion = self.criterion.cuda()
     # resuming checkpoint
     if args.weight is not None:
         if not os.path.isfile(args.weight):
             raise RuntimeError("=> no checkpoint found at '{}'".format(args.weight))
         checkpoint = torch.load(args.weight, map_location='cuda:0' if args.cuda else 'cpu')
         # strip the 'base.' prefix from checkpoint keys so they match this model
         checkpoint['state_dict'] = OrderedDict([(k[5:], v) if 'base' in k else (k, v) for k, v in checkpoint['state_dict'].items()])
         args.start_epoch = checkpoint['epoch']
         if args.cuda:
             self.model.load_state_dict(checkpoint['state_dict'], strict=False)
         else:
             self.model.load_state_dict(checkpoint['state_dict'])
             self.optimizer.load_state_dict(checkpoint['optimizer'])
         self.best_pred = checkpoint['best_pred']
         print("=> loaded checkpoint '{}' (epoch {})" \
               .format(args.weight, checkpoint['epoch']))
     # lr scheduler (per-iteration, with warmup)
     self.scheduler = LR_Scheduler(args.lr_scheduler, args.lr,
                                   args.epochs, len(self.trainloader), warmup_epochs=5)
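
For context, here is a minimal training-step sketch showing how the pieces built in __init__ fit together. It assumes the per-iteration calling convention scheduler(optimizer, i, epoch, best_pred) used by LR_Scheduler in PyTorch-Encoding-style codebases; the method name training and the (image, target) batch layout are assumptions, not shown in the snippet above.

 def training(self, epoch):
     self.model.train()
     for i, (image, target) in enumerate(self.trainloader):
         # per-iteration LR adjustment (gradual warmup, then decay)
         self.scheduler(self.optimizer, i, epoch, self.best_pred)
         if self.args.cuda:
             image, target = image.cuda(), target.cuda()
         self.optimizer.zero_grad()
         loss = self.criterion(self.model(image), target)
         loss.backward()
         self.optimizer.step()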
Example 2
    def analyze(self):
        from code.datasets import get_segmentation_dataset
        args = self.args
        input_transform = transform.Compose([
            transform.ToTensor(),
            transform.Normalize([.485, .456, .406], [.229, .224, .225])
        ])

        data_kwargs = {
            'transform': input_transform,
            'base_size': args.base_size,
            'crop_size': args.crop_size
        }
        testset = get_segmentation_dataset(args.dataset,
                                           split='val',
                                           mode='testval',
                                           root=args.data_folder,
                                           **data_kwargs)
        loader_kwargs = {
            'num_workers': args.workers,
            'pin_memory': True
        } if args.cuda else {}
        test_data = data.DataLoader(testset,
                                    batch_size=args.batch_size,
                                    drop_last=False,
                                    shuffle=False,
                                    **loader_kwargs)
        self.pruner.ana(ana_eval_fn, args=(test_data, self.criterion))
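
ana_eval_fn is passed to the pruner but never defined in these snippets. A minimal sketch, assuming the Vitis AI-style convention in which pruner.ana(eval_fn, args=(...)) invokes eval_fn(model, *args) and treats the returned scalar as a score to maximize; the negative-mean-loss metric is an assumption, and any scalar quality measure (e.g. mIoU) would do.

def ana_eval_fn(model, test_data, criterion):
    # invoked by the pruner as eval_fn(model, *args); returns a scalar
    # that the sensitivity analysis treats as "higher is better"
    model.eval()
    total_loss, batches = 0.0, 0
    with torch.no_grad():
        for image, target in test_data:
            if next(model.parameters()).is_cuda:
                image, target = image.cuda(), target.cuda()
            total_loss += criterion(model(image), target).item()
            batches += 1
    return -total_loss / max(batches, 1)  # negate so lower loss scores higher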
Example 3
def build_data(args, subset_len=None, sample_method='random'):
    from code.datasets import get_segmentation_dataset
    input_transform = transform.Compose([
        transform.ToTensor(),
        transform.Normalize([.485, .456, .406], [.229, .224, .225])
    ])

    data_kwargs = {
        'transform': input_transform,
        'base_size': args.base_size,
        'crop_size': args.crop_size
    }
    if args.eval:
        testset = get_segmentation_dataset(args.dataset,
                                           split='val',
                                           mode='testval',
                                           root=args.data_folder,
                                           **data_kwargs)
    else:
        # only the evaluation split is built here
        raise ValueError('build_data requires args.eval to be set')
    loader_kwargs = {
        'num_workers': args.workers,
        'pin_memory': True
    } if args.cuda else {}
    if subset_len:
        assert subset_len <= len(testset)
        if sample_method == 'random':
            # sample subset_len indices without replacement
            testset = torch.utils.data.Subset(
                testset, random.sample(range(len(testset)), subset_len))
        else:
            testset = torch.utils.data.Subset(testset, list(range(subset_len)))
    # dataloader
    test_data = data.DataLoader(testset,
                                batch_size=args.batch_size,
                                drop_last=False,
                                shuffle=False,
                                **loader_kwargs)
    return test_data
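
Typical call sites for build_data (assuming args.eval is set): the full validation loader, plus a small random subset such as one might draw for quantizer calibration; the subset length shown is arbitrary.

val_loader = build_data(args)  # full validation split
# 200 randomly sampled images, e.g. for calibration or pruning analysis
calib_loader = build_data(args, subset_len=200, sample_method='random')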
Example 4
    last_snapshot_name = None
    best_snapshot = None
    if args.eval_pruned:
        from code.datasets import get_segmentation_dataset
        input_transform = transform.Compose([
            transform.ToTensor(),
            transform.Normalize([.485, .456, .406], [.229, .224, .225])
        ])
        data_kwargs = {
            'transform': input_transform,
            'base_size': args.base_size,
            'crop_size': args.crop_size
        }
        testset = get_segmentation_dataset(args.dataset,
                                           split='val',
                                           mode='testval',
                                           root=args.data_folder,
                                           **data_kwargs)
        loader_kwargs = {
            'num_workers': args.workers,
            'pin_memory': True
        } if args.cuda else {}
        test_data = data.DataLoader(testset,
                                    batch_size=args.batch_size,
                                    drop_last=False,
                                    shuffle=False,
                                    **loader_kwargs)
        evaluate(test_data, trainer.model, trainer.criterion)

    if args.prune:
        if args.prune_model_py is None and args.pruned_weights is None:
            trainer.analyze()
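
The evaluate call in the eval_pruned branch above is also not defined in these snippets. A minimal sketch consistent with its argument order (loader, model, criterion), reporting mean loss and pixel accuracy while skipping ignore_index pixels; the exact metrics are an assumption.

def evaluate(test_data, model, criterion):
    model.eval()
    total_loss, correct, labeled = 0.0, 0, 0
    with torch.no_grad():
        for image, target in test_data:
            image, target = image.cuda(), target.cuda()
            output = model(image)
            total_loss += criterion(output, target).item()
            pred = output.argmax(dim=1)
            valid = target != criterion.ignore_index  # mask out ignored pixels
            correct += (pred[valid] == target[valid]).sum().item()
            labeled += valid.sum().item()
    print('loss: {:.4f}, pixAcc: {:.4f}'.format(
        total_loss / len(test_data), correct / max(labeled, 1)))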