def mnist(cuda=True, model_root=None):
    """Build an MNIST model initialized from a local checkpoint.

    Args:
        cuda: move the model to GPU when True.
        model_root: directory containing ``mnist.pth``; when ``None``, no
            checkpoint path is built (previously this crashed with
            ``TypeError`` inside ``os.path.join``).

    Returns:
        Tuple of ``(model, dataset.get, False)`` — the model, the dataset
        loader factory, and a flag (meaning defined by the caller).
    """
    print("Building and initializing mnist parameters")
    from mnist import model, dataset
    # BUG FIX: os.path.join(None, ...) raises TypeError, so the default
    # model_root=None was unusable. Only build the path when a root is given;
    # NOTE(review): assumes model.mnist accepts pretrained=None — confirm.
    pretrained = os.path.join(model_root, 'mnist.pth') if model_root else None
    m = model.mnist(pretrained=pretrained)
    if cuda:
        m = m.cuda()
    return m, dataset.get, False
def mnist(cuda=True, model_root=None, **kwargs):
    """Build an MNIST model from a local checkpoint or the model zoo.

    Args:
        cuda: move the model to GPU when True.
        model_root: directory containing ``mnist_paper.pth``; when ``None``,
            the model-zoo weights are requested instead.
        **kwargs: forwarded unchanged to ``model.mnist``.

    Returns:
        Tuple of ``(model, dataset.get, False)`` — the model, the dataset
        loader factory, and a flag (meaning defined by the caller).
    """
    print("Building and initializing mnist parameters")
    from mnist import model, dataset
    # Fall back to the model zoo when no local checkpoint directory is given.
    use_model_zoo = not model_root
    # BUG FIX: the original always called os.path.join(model_root, ...), which
    # raises TypeError when model_root is None — exactly the case use_model_zoo
    # was computed for. Only build the path when a root exists;
    # NOTE(review): assumes model.mnist ignores pretrained=None when
    # use_model_zoo is True — confirm against mnist/model.py.
    if use_model_zoo:
        pretrained = None
    else:
        pretrained = os.path.join(model_root, 'mnist_paper.pth')
    m = model.mnist(pretrained=pretrained, use_model_zoo=use_model_zoo, **kwargs)
    if cuda:
        m = m.cuda()
    return m, dataset.get, False
print_to_log('{}: {}'.format(k, v)) print_to_log("=======================") # seed is_cuda = torch.cuda.is_available() print_to_log("is_cuda: {}".format(is_cuda)) torch.manual_seed(args.seed) if is_cuda and args.to_cuda: torch.cuda.manual_seed(args.seed) # data loader train_loader, test_loader = dataset.get(batch_size=args.batch_size, data_root=args.data_root, num_workers=1) # model model = model.mnist(input_dims=784, n_hiddens=[256, 256], n_class=10) if is_cuda and args.to_cuda: model.cuda() # optimizer optimizer = optim.SGD(model.parameters(), lr=args.lr, weight_decay=args.wd, momentum=0.9) decreasing_lr = list(map(int, args.decreasing_lr.split(','))) print_to_log('decreasing_lr: ' + str(decreasing_lr)) best_acc, old_file = 0, None t_begin = time.time() try: # ready to go for epoch in range(args.epochs): model.train() if epoch in decreasing_lr: