def main():
    """Train resnet32 on CIFAR-10 with cosine-annealed SGD.

    Reads a YAML config (path from CLI ``args``), seeds all RNG sources,
    builds the CIFAR-10 train/val loaders, optionally zero-initializes the
    last BN gamma of each residual block, then runs ``train``/``val`` for
    ``config.max_iter`` epochs, logging to TensorBoard.
    """
    global args, config
    args = parser.parse_args()
    with open(args.config) as rPtr:
        # safe_load: yaml.load without an explicit Loader is unsafe on
        # untrusted input and raises TypeError under PyYAML >= 6; a config
        # file needs no arbitrary-object construction.
        config = EasyDict(yaml.safe_load(rPtr))

    # Outputs (events) are written next to the config file.
    config.save_path = os.path.dirname(args.config)

    # Random seed — seed every RNG source for reproducibility.
    torch.manual_seed(config.seed)
    torch.cuda.manual_seed(config.seed)
    np.random.seed(config.seed)
    random.seed(config.seed)

    # Datasets: standard CIFAR-10 augmentation (pad-crop + flip) for
    # training, normalization only for evaluation.
    train_transform = transforms.Compose([
        transforms.RandomCrop((32, 32), padding=4),
        transforms.RandomHorizontalFlip(),
        transforms.ToTensor(),
        transforms.Normalize((0.491, 0.482, 0.447), (0.247, 0.243, 0.262))
    ])
    val_transform = transforms.Compose([
        transforms.ToTensor(),
        transforms.Normalize((0.491, 0.482, 0.447), (0.247, 0.243, 0.262))
    ])
    trainset = Datasets.CIFAR10(root='data', train=True, download=True,
                                transform=train_transform)
    trainloader = Data.DataLoader(trainset, batch_size=config.batch_size,
                                  shuffle=True, num_workers=config.workers)
    testset = Datasets.CIFAR10(root='data', train=False, download=True,
                               transform=val_transform)
    testloader = Data.DataLoader(testset, batch_size=config.batch_size,
                                 shuffle=False, num_workers=config.workers)

    # Model
    model = resnet32()
    if config.zerogamma:
        print('all BN layers weights that sit at the end of a residual block are set 0.0')
        Zerogamma(model, last_bn_name='bn2')
    model = model.cuda()

    # Optimizer
    criterion = nn.CrossEntropyLoss()
    optimizer = optim.SGD(model.parameters(),
                          lr=config.lr_scheduler.base_lr,
                          momentum=config.momentum,
                          weight_decay=config.weight_decay)

    # LR scheduler: cosine annealing over the total number of optimizer
    # steps (epochs * iterations per epoch) — presumably stepped per batch
    # inside train(); verify against the train() implementation.
    lr_scheduler = CosineAnnealing(optimizer, config.max_iter * len(trainloader))

    global PCA, Writer
    PCA = PerClassAccuracy(num_classes=config.num_classes)
    Writer = SummaryWriter(config.save_path + '/events')

    for iter_idx in range(config.max_iter):
        train(model, iter_idx, criterion, lr_scheduler, optimizer, trainloader)
        val(model, iter_idx, criterion, testloader)
    Writer.close()
def main():
    """Train resnet32 on CIFAR-10 with SGD plus an LR warmup phase.

    Reads a YAML config (path from CLI ``args``), seeds all RNG sources,
    builds the CIFAR-10 train/val loaders, then runs ``train``/``val`` for
    ``config.max_iter`` epochs, stepping a warmup scheduler for the first
    ``config.warmup.warup_times`` epochs when warmup type is 'epoch'.
    """
    global args, config
    args = parser.parse_args()
    with open(args.config) as rPtr:
        # safe_load: yaml.load without an explicit Loader is unsafe on
        # untrusted input and raises TypeError under PyYAML >= 6; a config
        # file needs no arbitrary-object construction.
        config = EasyDict(yaml.safe_load(rPtr))

    # Outputs (events) are written next to the config file.
    config.save_path = os.path.dirname(args.config)

    # Random seed — seed every RNG source for reproducibility.
    torch.manual_seed(config.seed)
    torch.cuda.manual_seed(config.seed)
    np.random.seed(config.seed)
    random.seed(config.seed)

    # Datasets: standard CIFAR-10 augmentation (pad-crop + flip) for
    # training, normalization only for evaluation.
    train_transform = transforms.Compose([
        transforms.RandomCrop((32, 32), padding=4),
        transforms.RandomHorizontalFlip(),
        transforms.ToTensor(),
        transforms.Normalize((0.491, 0.482, 0.447), (0.247, 0.243, 0.262))
    ])
    val_transform = transforms.Compose([
        transforms.ToTensor(),
        transforms.Normalize((0.491, 0.482, 0.447), (0.247, 0.243, 0.262))
    ])
    trainset = Datasets.CIFAR10(root='data', train=True, download=True,
                                transform=train_transform)
    trainloader = Data.DataLoader(trainset, batch_size=config.batch_size,
                                  shuffle=True, num_workers=config.workers)
    testset = Datasets.CIFAR10(root='data', train=False, download=True,
                               transform=val_transform)
    testloader = Data.DataLoader(testset, batch_size=config.batch_size,
                                 shuffle=False, num_workers=config.workers)

    # Model
    model = resnet32()
    model = model.cuda()

    # Optimizer. NOTE(review): no weight_decay here, unlike the sibling
    # variants — confirm this omission is intentional.
    criterion = nn.CrossEntropyLoss()
    optimizer = optim.SGD(model.parameters(),
                          lr=config.lr_scheduler.base_lr,
                          momentum=config.momentum)

    # NOTE(review): 'warup_times' is spelled that way in the config schema;
    # kept as-is so existing YAML files keep working — fix both together.
    global lr_warmup_scheduler
    lr_warmup_scheduler = Warmup(optimizer, config.warmup.warup_times)

    # LR scheduler
    lr_scheduler = get_scheduler(optimizer, config.lr_scheduler)

    global PCA, Writer
    PCA = PerClassAccuracy(num_classes=config.num_classes)
    Writer = SummaryWriter(config.save_path + '/events')

    for iter_idx in range(config.max_iter):
        # Epoch-level warmup: ramp the LR once per epoch until warmup ends.
        if config.warmup.type == 'epoch' and iter_idx < config.warmup.warup_times:
            lr_warmup_scheduler.step()
        train(model, iter_idx, criterion, lr_scheduler, optimizer, trainloader)
        val(model, iter_idx, criterion, testloader)
    Writer.close()
def main():
    """Train resnet32 on CIFAR-10 with label smoothing under an NNI trial.

    Builds the CIFAR-10 train/val loaders, trains with label-smoothed loss
    and cosine-annealed SGD for ``config.max_iter`` epochs, reporting the
    per-epoch accuracy to NNI and the best accuracy as the final result.

    NOTE(review): the argparse/YAML config loading and RNG seeding present
    in the sibling variants is disabled here — ``config`` (and the event
    directory ``config.save_path``) must already be populated at module
    scope before this runs; confirm who sets it (presumably the NNI
    harness). Runs are therefore not seeded/reproducible in this variant.
    """
    # Datasets: standard CIFAR-10 augmentation (pad-crop + flip) for
    # training, normalization only for evaluation.
    train_transform = transforms.Compose([
        transforms.RandomCrop((32, 32), padding=4),
        transforms.RandomHorizontalFlip(),
        transforms.ToTensor(),
        transforms.Normalize((0.491, 0.482, 0.447), (0.247, 0.243, 0.262))
    ])
    val_transform = transforms.Compose([
        transforms.ToTensor(),
        transforms.Normalize((0.491, 0.482, 0.447), (0.247, 0.243, 0.262))
    ])
    trainset = Datasets.CIFAR10(root='data', train=True, download=True,
                                transform=train_transform)
    trainloader = Data.DataLoader(trainset, batch_size=config.batch_size,
                                  shuffle=True, num_workers=config.workers)
    testset = Datasets.CIFAR10(root='data', train=False, download=True,
                               transform=val_transform)
    testloader = Data.DataLoader(testset, batch_size=config.batch_size,
                                 shuffle=False, num_workers=config.workers)

    # Model
    model = resnet32()
    model = model.cuda()

    # Optimizer — label-smoothed cross entropy instead of plain CE.
    criterion = LabelSmoothing(config.label_smoothing)
    optimizer = optim.SGD(model.parameters(),
                          lr=config.lr_scheduler.base_lr,
                          momentum=config.momentum,
                          weight_decay=config.weight_decay)

    # LR scheduler: cosine annealing over the total number of optimizer
    # steps (iterations per epoch * epochs).
    lr_scheduler = CosineAnnealing(optimizer, len(trainloader) * config.max_iter)

    global PCA, Writer
    PCA = PerClassAccuracy(num_classes=config.num_classes)
    Writer = SummaryWriter(config.save_path + '/events')

    best_map = 0.0
    for iter_idx in range(config.max_iter):
        train(model, iter_idx, criterion, lr_scheduler, optimizer, trainloader)
        mAP = val(model, iter_idx, criterion, testloader)
        best_map = max(best_map, mAP)
        # Per-epoch metric for NNI's intermediate-result curve.
        nni.report_intermediate_result(mAP)
    # Final trial metric: best validation accuracy seen over all epochs.
    nni.report_final_result(best_map)
    Writer.close()