Example no. 1
0
def load(path):
    """
    Loads a trained model.

    Parameters
    ----------
    path : string
        Path to folder where model is saved. For example
        './trained_models/mnist/'. Note the path MUST end with a '/'

    Returns
    -------
    VAE
        Model with weights restored from '<path>8356.pt' (loaded onto
        the CPU regardless of where they were saved).

    Raises
    ------
    ValueError
        If the dataset named in the specs file is not one of the
        recognised datasets.
    """
    path_to_specs = path + 'specs.json'
    path_to_model = path + '8356.pt'

    # Open specs file
    with open(path_to_specs) as specs_file:
        specs = json.load(specs_file)

    # Unpack specs
    dataset = specs["dataset"]
    latent_spec = specs["latent_spec"]

    # Get image size as (channels, height, width).
    # Use an if/elif/else chain so an unknown dataset fails loudly here
    # instead of leaving `img_size` unbound and raising a confusing
    # NameError at the VAE() call below.
    if dataset == 'mnist' or dataset == 'fashion_mnist':
        img_size = (1, 32, 32)
    elif dataset == 'chairs' or dataset == 'dsprites':
        img_size = (1, 64, 64)
    elif dataset == 'celeba':
        img_size = (3, 64, 64)
    else:
        raise ValueError("Unrecognised dataset: {}".format(dataset))

    # Get model; map_location keeps CUDA-saved weights loadable on CPU.
    model = VAE(img_size=img_size, latent_spec=latent_spec)
    model.load_state_dict(
        torch.load(path_to_model, map_location=lambda storage, loc: storage))

    return model
Example no. 2
0
def training_process(num, Acc_dic):
    """
    Train one VAE on MNIST and record its accuracy.

    Parameters
    ----------
    num : int
        Index of this training run; used in the saved model filename
        and as the key written into ``Acc_dic``.
    Acc_dic : dict-like
        Mapping filled in-place with ``num -> accuracy`` (presumably a
        multiprocessing.Manager dict shared across workers -- TODO
        confirm against the caller).
    """
    dataset = "mnist"
    viz_on = False

    path = './trained_models/' + dataset + '/'
    model_path = './trained_models/' + dataset + '/model' + str(num) + '.pt'
    spec, img_size = load_param(path)
    print(spec)
    print("Training Start!!! :{}".format(num))

    batch_size = spec['batch_size']
    lr = spec['lr'][0]
    epochs = spec['epochs'][0]

    # Check for cuda
    use_cuda = torch.cuda.is_available()
    print("Use_cuda:%s" % use_cuda)
    # Load data
    data_loader, _ = get_mnist_dataloaders(batch_size=batch_size)

    # Define latent spec and model.
    # NOTE: the original wrote the model via locals()['model_' + str(i)],
    # which raised NameError (`i` is undefined) and, even with a valid
    # key, assigning into locals() inside a function does not create a
    # variable -- a plain local is the correct fix.
    latent_spec = spec['latent_spec']
    model = VAE(img_size=img_size, latent_spec=latent_spec,
                use_cuda=use_cuda)
    if use_cuda:
        model.cuda()

    # Define optimizer
    optimizer = optim.Adam(model.parameters(), lr=lr)

    # Define trainer
    trainer = Trainer(model, optimizer,
                      cont_capacity=spec['cont_capacity'],
                      disc_capacity=spec['disc_capacity'],
                      spec=spec,
                      viz_on=viz_on,
                      use_cuda=use_cuda,
                      num=num)

    # Train, record the run's accuracy, then persist the weights.
    acc = trainer.train(data_loader, epochs)
    Acc_dic[num] = acc
    # Save trained model
    torch.save(trainer.model.state_dict(), model_path)
    print("Training finished!!! :{}".format(num))
Example no. 3
0
from torch import optim

# Training script: fit a joint continuous/discrete VAE on MNIST.
# Hyperparameters for this run.
batch_size = 64
lr = 5e-4       # Adam learning rate
epochs = 100

# Check for cuda
use_cuda = torch.cuda.is_available()

# Load data (second return value -- presumably a test loader -- is unused)
data_loader, _ = get_mnist_dataloaders(batch_size=batch_size)
img_size = (1, 32, 32)  # MNIST images as (channels, height, width)

# Define latent spec and model: 10 continuous latent dims plus one
# 10-way discrete latent variable.
latent_spec = {'cont': 10, 'disc': [10]}
model = VAE(img_size=img_size, latent_spec=latent_spec, use_cuda=use_cuda)
if use_cuda:
    model.cuda()

# Define optimizer
optimizer = optim.Adam(model.parameters(), lr=lr)

# Define trainer.
# Capacity schedules are [min, max, num_iters, gamma] -- NOTE(review):
# inferred from the joint-VAE training convention; confirm against the
# Trainer implementation.
trainer = Trainer(model,
                  optimizer,
                  cont_capacity=[0.0, 5.0, 25000, 30],
                  disc_capacity=[0.0, 5.0, 25000, 30],
                  use_cuda=use_cuda)

# Train model for 100 epochs
trainer.train(data_loader, epochs)