def main():
    """Train a 9-layer conv Exponential-Spline normalizing flow on MNIST.

    Builds the data loaders, the model, an Adam optimizer (lr 1e-3) and a
    constant-LR schedule, then hands everything to ``Experiment`` and runs
    the training loop.  All hyper-parameters live in ``config`` and are
    also forwarded to ``Experiment`` via ``**config``.
    """
    # Local import so the device check below works regardless of which
    # torch submodules the file-level imports pull in.
    import torch

    config = {
        'name': '9L Conv Exponential Spline MNIST (lr1e-3)',
        'eval_epochs': 1,
        'sample_epochs': 1,
        'log_interval': 100,
        'lr': 1e-3,
        'num_layers': 9,
        'batch_size': 100,
        'modified_grad': False,
        'add_recon_grad': False,
        'sym_recon_grad': False,
        'activation': 'Spline',
        'recon_loss_weight': 1.0,
        'log_timing': True
    }

    train_loader, val_loader, test_loader = load_data(batch_size=config['batch_size'])

    # Fall back to CPU instead of crashing on machines without a CUDA device.
    device = 'cuda' if torch.cuda.is_available() else 'cpu'

    model = create_model(num_layers=config['num_layers'],
                         sym_recon_grad=config['sym_recon_grad'],
                         activation=config['activation'],
                         recon_loss_weight=config['recon_loss_weight']).to(device)

    optimizer = optim.Adam(model.parameters(), lr=config['lr'], betas=(0.9, 0.999))
    # gamma=1.0 keeps the learning rate constant; the scheduler is a placeholder.
    scheduler = StepLR(optimizer, step_size=1, gamma=1.0)

    experiment = Experiment(model, train_loader, val_loader, test_loader,
                            optimizer, scheduler, **config)

    experiment.run()
# --- Example 2 ---
def main():
    """Train a 2-block Glow self-normalizing flow (5x5 kernels) on MNIST with GECO.

    Builds the data loaders (no augmentation), the model, an Adam optimizer
    (lr 1e-3) and a constant-LR schedule, then hands everything to
    ``Experiment`` and runs the training loop.  All hyper-parameters live in
    ``config`` and are also forwarded to ``Experiment`` via ``**config``.
    """
    # Local import so the device check below works regardless of which
    # torch submodules the file-level imports pull in.
    import torch

    config = {
        'name': '2L-4K-16W Glow SNF (5x5 Kernel) MNIST w/ GECO',
        'eval_epochs': 1,
        'sample_epochs': 1,
        'log_interval': 100,
        'lr': 1e-3,
        'num_blocks': 2,
        'block_size': 4,
        'width': 16,
        'batch_size': 100,
        'modified_grad': True,
        'add_recon_grad': True,
        'sym_recon_grad': False,
        'actnorm': True,
        'split_prior': True,
        'activation': 'None',
        'recon_loss_weight': 1.0,
        'recon_loss_lr': 1e-3,
        'recon_alpha': 0.9,
        'sample_true_inv': True,
        'plot_recon': True,
        'vis_epochs': 1,
        'log_timing': False,
        'epochs': 1000
    }

    train_loader, val_loader, test_loader = load_data(
        data_aug=False, batch_size=config['batch_size'])

    # Fall back to CPU instead of crashing on machines without a CUDA device.
    device = 'cuda' if torch.cuda.is_available() else 'cpu'

    model = create_model(num_blocks=config['num_blocks'],
                         block_size=config['block_size'],
                         width=config['width'],
                         sym_recon_grad=config['sym_recon_grad'],
                         actnorm=config['actnorm'],
                         split_prior=config['split_prior'],
                         recon_loss_weight=config['recon_loss_weight'],
                         recon_loss_lr=config['recon_loss_lr'],
                         recon_alpha=config['recon_alpha']).to(device)

    optimizer = optim.Adam(model.parameters(),
                           lr=config['lr'],
                           betas=(0.9, 0.999))
    # gamma=1.0 keeps the learning rate constant; the scheduler is a placeholder.
    scheduler = StepLR(optimizer, step_size=1, gamma=1.0)

    experiment = Experiment(model, train_loader, val_loader, test_loader,
                            optimizer, scheduler, **config)

    experiment.run()
def main():
    """Train a 2-block Glow self-normalizing flow (1x1) with 100x recon weight on MNIST.

    Builds the data loaders, the model, an Adam optimizer (lr 1e-3) and a
    constant-LR schedule, then hands everything to ``Experiment`` and runs
    the training loop.  All hyper-parameters live in ``config`` and are
    also forwarded to ``Experiment`` via ``**config``.
    """
    # Local import so the device check below works regardless of which
    # torch submodules the file-level imports pull in.
    import torch

    config = {
        'name': '2L-16K Glow SNF(1x1) recon 100x MNIST (lr1e-3)',
        'eval_epochs': 1,
        'sample_epochs': 1,
        'log_interval': 100,
        'lr': 1e-3,
        'num_blocks': 2,
        'block_size': 16,
        'batch_size': 100,
        'modified_grad': True,
        'add_recon_grad': True,
        'sym_recon_grad': False,
        'actnorm': True,
        'split_prior': True,
        'activation': 'None',
        'recon_loss_weight': 100.0,
        'sample_true_inv': True,
        'plot_recon': True
    }

    train_loader, val_loader, test_loader = load_data(
        batch_size=config['batch_size'])

    # Fall back to CPU instead of crashing on machines without a CUDA device.
    device = 'cuda' if torch.cuda.is_available() else 'cpu'

    model = create_model(
        num_blocks=config['num_blocks'],
        block_size=config['block_size'],
        sym_recon_grad=config['sym_recon_grad'],
        actnorm=config['actnorm'],
        split_prior=config['split_prior'],
        recon_loss_weight=config['recon_loss_weight']).to(device)

    optimizer = optim.Adam(model.parameters(),
                           lr=config['lr'],
                           betas=(0.9, 0.999))
    # gamma=1.0 keeps the learning rate constant; the scheduler is a placeholder.
    scheduler = StepLR(optimizer, step_size=1, gamma=1.0)

    experiment = Experiment(model, train_loader, val_loader, test_loader,
                            optimizer, scheduler, **config)

    experiment.run()