Example #1
                                                  batch_size=50)

    # Create DenseED model
    denseED = DenseED(in_channels=args.nic,
                      out_channels=args.noc,
                      blocks=args.blocks,
                      growth_rate=args.growth_rate,
                      init_features=args.init_features,
                      bn_size=args.bn_size,
                      drop_rate=args.drop_rate,
                      bottleneck=False,
                      out_activation=None).to(args.device)

    # Bayesian neural network
    bayes_nn = BayesNN(args, denseED)
    swag_nn = SwagNN(args, bayes_nn, full_cov=True, max_models=10)

    # First predict with the deterministic model
    swag_nn.loadModel(100, file_dir='networks')
    with torch.no_grad():
        mse_error = testMSE(args, swag_nn.base, testing_loader, tstep=nsteps)

    # Predict with the Bayesian model
    swag_nn.loadModel(200, file_dir='networks')
    with torch.no_grad():
        mse_error_bayes = testSamplesMSE(args,
                                         swag_nn,
                                         testing_loader,
                                         tstep=nsteps,
                                         n_samples=30)
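The example ends by computing a deterministic MSE from the base network and a Bayesian MSE averaged over 30 SWAG posterior samples. A minimal sketch of how the two results might be summarized follows; it assumes (this is not shown above) that testMSE and testSamplesMSE both return torch tensors of per-time-step errors.

    # Hedged sketch: assumes mse_error and mse_error_bayes are torch tensors of per-step errors
    det_rmse = torch.sqrt(mse_error.mean()).item()
    bayes_rmse = torch.sqrt(mse_error_bayes.mean()).item()
    print('Deterministic RMSE: {:.4e} | Bayesian (30-sample) RMSE: {:.4e}'.format(
        det_rmse, bayes_rmse))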
Example #2
    test_cases = np.arange(1,5+1e-8,1).astype(int) # Some validation/test data
    testing_loader = ksLoader.createTestingLoader(args.data_dir, test_cases, batch_size=args.test_batch_size)

    # Create DenseED model
    denseED = DenseED(in_channels=args.nic, out_channels=args.noc,
                      blocks=args.blocks,
                      growth_rate=args.growth_rate,
                      init_features=args.init_features,
                      bn_size=args.bn_size,
                      drop_rate=args.drop_rate,
                      bottleneck=False,
                      out_activation=None).to(args.device)
    # Bayesian neural network
    bayes_nn = BayesNN(args, denseED)
    # Stochastic weight averaging (SWAG)
    swag_nn = SwagNN(args, bayes_nn, full_cov=True, max_models=args.swag_max)
    
    # Create optimizer and learning rate scheduler
    parameters = [{'params': [bayes_nn.model.log_beta], 'lr': args.lr_beta},
                    {'params': bayes_nn.model.features.parameters()}]
    optimizer = torch.optim.Adam(parameters, lr=args.lr, weight_decay=0.0)
    # Learning rate scheduler
    scheduler = ExponentialLR(optimizer, gamma=0.995)


    # If we are starting from a specific epoch, attempt to load a model
    if(args.epoch_start > 0):
        optimizer, scheduler = swag_nn.loadModel(args.epoch_start, optimizer, scheduler, file_dir=args.ckpt_dir)

    # Create KS time integrator
    # Here we will use 4th-order finite differences for spatial derivatives
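The trailing comment refers to a KS time integrator whose spatial derivatives are computed with 4th-order finite differences. As a hedged illustration (not the repository's actual integrator), the standard 4th-order central-difference stencils for the first and second derivatives on a periodic 1D grid can be written with np.roll; the grid spacing dx and the periodic boundary are assumptions here.

    import numpy as np

    def fd4_derivatives(u, dx):
        # 4th-order central differences on a periodic grid (illustrative sketch)
        # u_x  ~ (-u[i+2] + 8*u[i+1] - 8*u[i-1] + u[i-2]) / (12*dx)
        ux = (-np.roll(u, -2) + 8*np.roll(u, -1)
              - 8*np.roll(u, 1) + np.roll(u, 2)) / (12.0*dx)
        # u_xx ~ (-u[i+2] + 16*u[i+1] - 30*u[i] + 16*u[i-1] - u[i-2]) / (12*dx**2)
        uxx = (-np.roll(u, -2) + 16*np.roll(u, -1) - 30*u
               + 16*np.roll(u, 1) - np.roll(u, 2)) / (12.0*dx**2)
        return ux, uxx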
Example #3
    # Create DenseED model
    denseED = DenseED(in_channels=args.nic,
                      out_channels=1,
                      blocks=args.blocks,
                      growth_rate=args.growth_rate,
                      init_features=args.init_features,
                      bn_size=args.bn_size,
                      drop_rate=args.drop_rate,
                      bottleneck=False,
                      out_activation=None).to(args.device)

    # Bayesian neural network
    bayes_nn = BayesNN(args, denseED)
    # Stochastic weight averaging (SWAG)
    swag_nn = SwagNN(args, bayes_nn, full_cov=True, max_models=30)
    # Load network
    swag_nn.loadModel(200, file_dir="./networks")

    with torch.no_grad():
        uPred, betas, uTarget = testSample(args,
                                           swag_nn,
                                           testing_loader,
                                           tstep=400,
                                           n_samples=30)

    tTest = np.arange(0, 400 * args.dt + 1e-8, args.dt)
    xTest = np.linspace(x0, x1, args.nel + 1)

    # With the neural network predictions computed, now load the numerical simulators
    # Finite element
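With 30 posterior samples returned by testSample, a common next step is to plot the predictive mean with an uncertainty band against the target. The sketch below is illustrative only: it assumes uPred stacks the samples on the first dimension with shape (n_samples, n_cases, n_tsteps, n_nodes), that uTarget is a tensor on the same spatial grid as xTest, and that the time-step index t_idx is a hypothetical choice.

    import matplotlib.pyplot as plt

    u_mean = uPred.mean(dim=0).cpu().numpy()   # predictive mean over samples
    u_std = uPred.std(dim=0).cpu().numpy()     # predictive std over samples

    case, t_idx = 0, 200  # hypothetical test case and time-step index
    plt.plot(xTest, uTarget[case, t_idx].cpu().numpy(), 'k-', label='Target')
    plt.plot(xTest, u_mean[case, t_idx], 'b--', label='Predictive mean')
    plt.fill_between(xTest,
                     u_mean[case, t_idx] - 2*u_std[case, t_idx],
                     u_mean[case, t_idx] + 2*u_std[case, t_idx],
                     alpha=0.3, label='+/- 2 std')
    plt.legend()
    plt.show()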
Example #4
    testing_loader = burgerLoader.createTestingLoader('../solver/fenics_data', test_cases, simdt=0.005, batch_size=2)

    # Create DenseED model
    denseED = DenseED(in_channels=2*args.nic, out_channels=2*args.noc,
                      blocks=args.blocks,
                      growth_rate=args.growth_rate,
                      init_features=args.init_features,
                      bn_size=args.bn_size,
                      drop_rate=args.drop_rate,
                      bottleneck=False,
                      out_activation=None).to(args.device)

    # Bayesian neural network
    bayes_nn = BayesNN(args, denseED)
    # Stochastic weight averaging (SWAG)
    swag_nn = SwagNN(args, bayes_nn, full_cov=True, max_models=args.swag_max)
    # Load the model
    swag_nn.loadModel(200, file_dir='./networks')

    n_test = 150
    with torch.no_grad():
        uPred, uTarget, betas = testSample(args, swag_nn, testing_loader, tstep=n_test, n_samples=30)
    
    step = 20
    pred_steps = np.arange(0, n_test+1, step)
    target_steps = pred_steps//2
    
    plt.close("all")
    case = 0
    plotPred(args, test_cases[case], uPred[case].cpu().numpy(), uTarget[case].cpu().numpy(), betas.cpu().numpy(), target_steps, pred_steps)
    case = 1