Example #1
    train_dataset = datasets.MNIST(data_dir,
                                   train=True,
                                   download=True,
                                   transform=transform)
    test_dataset = datasets.MNIST(data_dir,
                                  train=False,
                                  download=False,
                                  transform=transform)
    e.setup_dataloader((train_dataset, None, test_dataset))

    # Plot a subset of the training dataset
    utils.plot_data_subset(e.fname("dataset_image.png"), train_dataset)

    # Setup the two models
    e.generator = models.densegan4_generator(e)
    e.discriminator = models.densegan4_discriminator(e)

    # Criterion (or loss function) used
    e.criterion = nn.BCELoss()

    # The optimizer for weight updating
    e.g_optimizer = optim.Adam(e.generator.parameters(),
                               lr=e.params["lr"],
                               betas=e.params["betas"])
    e.d_optimizer = optim.Adam(e.discriminator.parameters(),
                               lr=e.params["lr"],
                               betas=e.params["betas"])

    # Train model and plot results
    gantraining.train_model(
        e, flatten_input=True)  # This network requires flattened images.
    gantraining.plot_all(e)
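
The densegan4 generator and discriminator are fully connected networks, which is why train_model is called with flatten_input=True. As a rough sketch (the helpers above are project-specific and not shown here; the tensor shapes below assume standard MNIST batches), the flattening that this flag implies looks like this:

import torch

# Hypothetical illustration of flatten_input=True: a batch of MNIST images
# with shape (N, 1, 28, 28) is reshaped to (N, 784) so it can be fed to a
# dense (fully connected) discriminator.
images = torch.randn(64, 1, 28, 28)      # stand-in for a dataloader batch
flat = images.view(images.size(0), -1)   # shape: (64, 784)
print(flat.shape)                        # torch.Size([64, 784])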
Example #2
    e.params["betas"]       = (0.5, 0.999) # Betas hyperparameter for Adam optimizers
    e.params["patience"]    = 7 # Number of epochs to wait before early stopping

    # Setup the CelebA dataset
    transform = utils.image_transform(e.params["im_size"])
    data_dir = "data/celeba/"

    # WARNING: the CelebA download is about 1.4 GB.
    # train_dataset = datasets.CelebA(data_dir, split="train", download=False, transform=transform)

    train_dataset = datasets.ImageFolder(data_dir, transform=transform)
    e.setup_dataloader((train_dataset, None, None))
    
    # Plot a subset of the training dataset
    utils.plot_data_subset(e.fname("dataset_image.png"), train_dataset, show_labels=False)

    # Setup the two models
    e.generator = models.dcgan4_generator(e)
    e.discriminator = models.dcgan4_discriminator(e)

    # Criterion (or loss function) used
    e.criterion = nn.BCELoss()

    # The optimizer for weight updating
    e.g_optimizer = optim.Adam(e.generator.parameters(), lr=e.params["lr"], betas=e.params["betas"])
    e.d_optimizer = optim.Adam(e.discriminator.parameters(), lr=e.params["lr"], betas=e.params["betas"])

    # Train model and plot results
    gantraining.train_model(e)
    gantraining.plot_all(e)
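
gantraining.train_model is a project-specific helper whose internals are not shown in these examples. Below is a minimal sketch of the alternating update it presumably performs, given the BCELoss criterion and the two Adam optimizers configured above; every name is a stand-in rather than the project's actual API, and discriminator outputs are assumed to have shape (N, 1).

import torch

def gan_step(generator, discriminator, g_optimizer, d_optimizer,
             criterion, real_images, latent_dim, device):
    # One alternating GAN update: the discriminator learns to score real images
    # as 1 and generated images as 0, then the generator is updated to push its
    # samples toward a discriminator score of 1.
    batch_size = real_images.size(0)
    real_labels = torch.ones(batch_size, 1, device=device)
    fake_labels = torch.zeros(batch_size, 1, device=device)

    # Discriminator update (fakes are detached so only D's weights are updated).
    d_optimizer.zero_grad()
    noise = torch.randn(batch_size, latent_dim, device=device)  # DCGAN generators often expect (N, latent_dim, 1, 1) instead
    fake_images = generator(noise)
    d_loss = (criterion(discriminator(real_images), real_labels)
              + criterion(discriminator(fake_images.detach()), fake_labels))
    d_loss.backward()
    d_optimizer.step()

    # Generator update: push discriminator scores on fakes toward 1.
    g_optimizer.zero_grad()
    g_loss = criterion(discriminator(fake_images), real_labels)
    g_loss.backward()
    g_optimizer.step()
    return d_loss.item(), g_loss.item()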