# NOTE(review): this line is a whitespace-mangled script section — the original
# newlines/indentation were lost, so several top-level statements are collapsed
# onto one physical line. It is NOT valid Python as written; restore line breaks
# and indentation from the upstream source before running.
#
# What the visible statements do:
#   1. Build the model: an LSTM RNNModel configured entirely from
#      helper.params (emsize/nhid/nlayers/dropout/tied) and helper.n_tokens;
#      the dangling `else: net = Net()` belongs to an `if` whose header is
#      BEFORE this fragment — the branch condition is not visible here, so the
#      criterion for choosing RNNModel vs Net() cannot be documented (TODO:
#      confirm against the full file).
#   2. If helper.params['multi_gpu'] is truthy: pick cuda:0 when available
#      (else CPU), wrap the net in nn.DataParallel, and move it to that device.
#   3. If helper.params['resumed_model'] is set: load the checkpoint from
#      saved_models/<name> (torch.load of a dict with 'state_dict' and
#      'epoch'), restore weights via load_state_dict, and set
#      helper.start_epoch from the checkpoint; otherwise start_epoch = 1.
#      The commented-out line shows LR restoration was deliberately disabled.
# NOTE(review): torch.load on an untrusted checkpoint unpickles arbitrary
# objects — only resume from trusted files.
net = RNNModel(rnn_type='LSTM', ntoken=helper.n_tokens, ninp=helper.params['emsize'], nhid=helper.params['nhid'], nlayers=helper.params['nlayers'], dropout=helper.params['dropout'], tie_weights=helper.params['tied']) else: net = Net() if helper.params.get('multi_gpu', False): device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu") logger.info(f"Let's use {torch.cuda.device_count()} GPUs!") net = nn.DataParallel(net) net.to(device) if helper.params.get('resumed_model', False): logger.info('Resuming training...') loaded_params = torch.load( f"saved_models/{helper.params['resumed_model']}") net.load_state_dict(loaded_params['state_dict']) helper.start_epoch = loaded_params['epoch'] # helper.params['lr'] = loaded_params.get('lr', helper.params['lr']) logger.info( f"Loaded parameters from saved model: LR is" f" {helper.params['lr']} and current epoch is {helper.start_epoch}" ) else: helper.start_epoch = 1
# NOTE(review): like the section above, this is a collapsed script fragment
# (newlines lost) and it is truncated at BOTH ends: it opens mid-way through a
# DataLoader(...) call (the train loader's opening is not visible) and ends
# inside an unfinished optim.SGD(..., momentum=args.momentum,  — the remaining
# optimizer kwargs and everything after them are outside this view. Restore
# formatting from the upstream source; do not run as-is.
#
# What the visible statements do (CIFAR-10 DenseNet training setup):
#   1. Finish the train DataLoader, then build test_loader over dataset_test
#      (no shuffling for evaluation; workers from args.num_worker).
#   2. `classes` names the 10 CIFAR-10 categories — presumably for reporting;
#      it is not used within this fragment.
#   3. Build a DenseNet with three dense blocks of 12 layers each
#      (num_layers=[12, 12, 12]), growth rate / compression theta from args,
#      and move it to `device`; on CUDA, wrap in DataParallel and enable
#      cudnn.benchmark for autotuned convolution kernels.
#   4. Count trainable parameters (requires_grad only) and print the total.
#   5. If args.resume is set, load './save_model/<resume>' and restore weights
#      from checkpoint['net']. NOTE(review): torch.load unpickles — only load
#      trusted checkpoints; os.path.join would be more robust than '+' here.
#   6. Set up CrossEntropyLoss and begin constructing the SGD optimizer
#      (lr/momentum from args; call is cut off here).
num_workers=args.num_worker) test_loader = DataLoader(dataset_test, batch_size=args.batch_size_test, shuffle=False, num_workers=args.num_worker) classes = ('plane', 'car', 'bird', 'cat', 'deer', 'dog', 'frog', 'horse', 'ship', 'truck') print('==> Making model..') net = DenseNet(growth_rate=args.growth_rate, theta=args.theta, num_layers=[12, 12, 12], num_classes=10) net = net.to(device) if device == 'cuda': net = torch.nn.DataParallel(net) cudnn.benchmark = True num_params = sum(p.numel() for p in net.parameters() if p.requires_grad) print('The number of parameters of model is', num_params) if args.resume is not None: checkpoint = torch.load('./save_model/' + args.resume) net.load_state_dict(checkpoint['net']) criterion = nn.CrossEntropyLoss() optimizer = optim.SGD(net.parameters(), lr=args.lr, momentum=args.momentum,