def main():
    """Evaluate a stochastic and a deterministic variant of a saved model.

    Builds both variants of ``args.model``, loads their saved parameters
    from /scratch/bsm92/, and runs ``test`` on each with CrossEntropyLoss.
    """
    args = Parser().parse()
    use_cuda = not args.cpu and torch.cuda.is_available()
    device = torch.device(f"cuda:{args.gpu}" if use_cuda else "cpu")
    torch.manual_seed(args.seed)
    train_loader, val_loader, test_loader = get_data(args)

    # labels should be a whole number from [0, num_classes - 1]
    num_labels = 10  # int(max(max(train_data.targets), max(test_data.targets))) + 1
    output_size = num_labels
    setup_logging(args)

    if 'resnet' in args.model:
        constructor = getattr(resnet, args.model)
        model_stoch = constructor(True, num_labels, device).to(device)
        model_det = constructor(False, num_labels, device).to(device)
    elif 'vgg' in args.model:
        constructor = getattr(vgg, args.model)
        model_stoch = constructor(True, num_labels, device, args.orthogonal).to(device)
        model_det = constructor(False, num_labels, device, args.orthogonal).to(device)
    else:
        stoch_args = [True, True, device]
        det_args = [False, False, device]
        model_stoch = lenet5.LeNet5(*stoch_args).to(device)
        model_det = lenet5.LeNet5(*det_args).to(device)

    # Load saved parameters. The pairing logic below assumes exactly one
    # deterministic ('det' in name) and one stochastic checkpoint; fail
    # loudly instead of raising an opaque IndexError on a bad glob result.
    saved_models = glob(f'/scratch/bsm92/{args.model}_{args.dataset}*.pt')
    if len(saved_models) != 2:
        raise FileNotFoundError(
            f'expected exactly 2 checkpoints for {args.model}_{args.dataset}, '
            f'found {len(saved_models)}: {saved_models}')
    saved_det = saved_models[0] if 'det' in saved_models[0] else saved_models[1]
    saved_stoch = saved_models[1 - saved_models.index(saved_det)]

    it = zip([model_stoch, model_det], [saved_stoch, saved_det])
    for model, param_path in it:
        saved_state = torch.load(param_path, map_location=device)
        # '.tar' checkpoints wrap the weights in a training-state dict.
        if param_path[-4:] == '.tar':
            saved_state = saved_state['model_state_dict']
        model.load_state_dict(saved_state)

    loss = torch.nn.CrossEntropyLoss()
    test(args, model_det, device, test_loader, loss, 10)
    test(args, model_stoch, device, test_loader, loss, 10)
def main():
    """Load a saved VGG16 checkpoint and report its loss/accuracy on STL-10."""
    opts = parse_args()
    # Fall back to CPU when CUDA is unavailable or explicitly disabled.
    if opts.cpu or not torch.cuda.is_available():
        device = torch.device("cpu")
    else:
        device = torch.device(f"cuda:{opts.gpu}")
    torch.manual_seed(opts.seed)

    trainset, testset, trainloader, testloader = stl10(opts.batch_size)
    model = vgg.vgg16(not opts.deterministic, device, False).to(device)

    # A '.tar' checkpoint stores the weights under 'model_state_dict'.
    state = torch.load(opts.saved_model)
    if opts.saved_model.endswith('.tar'):
        state = state['model_state_dict']
    model.load_state_dict(state)

    criterion = torch.nn.CrossEntropyLoss()
    test_loss, pct_right = test(opts, model, device, testloader, criterion, 10)
    print(f'test loss: {test_loss}, correct: {100*pct_right}')
# --- configuration ---
model_name = "model_nn1"
model_file_name = model_name + ".ptm"
n_epochs = 10
l_rate = 0.0000001


class MnistNN1(nn.Module):
    """Two-layer fully connected MNIST classifier with log-softmax output.

    Args:
        input_dim: flattened image size (784 for 28x28 MNIST).
        inter_dim: hidden-layer width.
        labels_dim: number of output classes.
    """

    def __init__(self, input_dim, inter_dim, labels_dim):
        super(MnistNN1, self).__init__()
        self.lin1 = nn.Linear(input_dim, inter_dim)
        self.lin2 = nn.Linear(inter_dim, labels_dim)

    def forward(self, img):
        # Flatten to (batch, input_dim). The original hard-coded a batch of
        # one (view(1, -1)); inferring the batch dimension instead gives the
        # identical result for a single 784-element image while also
        # accepting batched input of shape (B, ...).
        out = self.lin1(img.view(-1, self.lin1.in_features))
        out = self.lin2(out)
        return F.log_softmax(out, dim=1)


# Resume from the saved checkpoint, fine-tune, evaluate, and save again.
model = MnistNN1(784, 800, 10)
model.load_state_dict(ts.load(model_file_name))
run_model.train(model, l_rate, n_epochs, optim.Adam)
run_model.test(model_name, model, n_epochs)
ts.save(model.state_dict(), model_file_name)
# Imports here
import torch
import argparse

import run_model

# Get data from command line
parser = argparse.ArgumentParser()
parser.add_argument('path', action="store", type=str,
                    help='Name of directory where pictures are located, example = flowers')
parser.add_argument('--epochs', type=int, default=2,
                    help='Set epochs for nn model [0-3], default 2')
parser.add_argument('--learning_rate', type=float, default=0.003,
                    help='Set learning rate, default 0.003')
parser.add_argument('--arch', type=str, default='densenet121',
                    help='Set the torchvision model used for prediction, default = densenet121')
parser.add_argument('--hidden_units', type=int, default=256,
                    help='set hidden units')

# Parse once instead of re-parsing sys.argv for every option.
args = parser.parse_args()
path = args.path.strip('/')
epochs = args.epochs
learning_rate = args.learning_rate
arch = args.arch.strip('"')
hidden_units = args.hidden_units

# load data
trainloader, validloader, testloader, class_to_idx = run_model.load_data(path)

# train model
run_model.train(trainloader, validloader, class_to_idx, epochs, learning_rate, arch, hidden_units)

# test model
run_model.test(testloader)

print('Done training')
# and we are done