def nn_experiments(X_train, Y_train, X_test, Y_test, weight_variance,
                   bias_variance, noise_variance):
    """Run the HMC neural-network regression experiment.

    Builds an identity-activation network with the shared default
    width/depth priors and samples its posterior with
    shared.nn_model_regression, recording predictions for X_test.

    Args:
        X_train, Y_train: numpy arrays of training inputs/targets.
        X_test, Y_test: numpy arrays of test inputs/targets.
        weight_variance, bias_variance: prior variances for the network.
        noise_variance: observation-noise variance for the likelihood.

    Returns:
        The populated shared.ResultsManager.
    """
    H = defaults.hidden_units  # was assigned twice; duplicate removed
    num_layers = defaults.shared_depth
    D_IN = X_train.shape[1]
    D_OUT = 1
    # Wrap the numpy data as torch Variables; no gradients needed for data.
    X_train_var = Variable(torch.from_numpy(X_train).type(defaults.tdtype),
                           requires_grad=False)
    Y_train_var = Variable(torch.from_numpy(Y_train).type(defaults.tdtype),
                           requires_grad=False)
    X_test_var = Variable(torch.from_numpy(X_test).type(defaults.tdtype),
                          requires_grad=False)

    model = shared.get_nn_model(D_IN, H, D_OUT, num_layers, 'identity',
                                weight_variance, bias_variance)
    burn_in = 200
    nthin = 50
    results_manager = shared.ResultsManager(Y_test, burn_in, nthin, True, True,
                                            True, True, True)
    num_samples = 1000000
    shared.nn_model_regression(X_train_var,
                               Y_train_var,
                               X_test_var,
                               model,
                               num_samples=num_samples,
                               epsilon=0.0005,
                               beta=0.1,
                               leap_frog_iters=10,
                               noise_variance=noise_variance,
                               results_manager=results_manager)
    return results_manager
def main():
    """Generate synthetic regression data from a randomly-initialized
    network, run the GP and NN experiments on it, and pickle the
    combined results to results_file_name."""
    torch.manual_seed(2)
    torch.set_num_threads(1)

    num_dim = 4
    num_train = 10
    num_test = num_train

    # Randomly generate the input locations.
    rng = np.random.RandomState(3)
    X_train = rng.randn(num_train, num_dim)
    X_test = rng.randn(num_test, num_dim)

    # Create a random network with the shared default architecture.
    H = defaults.hidden_units
    num_layers = defaults.shared_depth
    D_IN = X_train.shape[1]
    D_OUT = 1
    model = shared.get_nn_model(D_IN, H, D_OUT, num_layers)

    # Torch versions of the inputs; no gradients needed for data.
    X_train_t = Variable(torch.from_numpy(X_train).type(defaults.tdtype),
                         requires_grad=False)
    X_test_t = Variable(torch.from_numpy(X_test).type(defaults.tdtype),
                        requires_grad=False)

    # Noise-free function values at the train and test inputs.
    f_train = model(X_train_t)
    f_test = model(X_test_t)

    # Corrupt with the default level of observation noise.
    Y_train = (f_train + Variable(torch.randn(*f_train.size()).type(
        defaults.tdtype).mul(math.sqrt(defaults.noise_variance)),
                                  requires_grad=False)).detach()
    Y_test = (f_test + Variable(torch.randn(*f_test.size()).type(
        defaults.tdtype).mul(math.sqrt(defaults.noise_variance)),
                                requires_grad=False)).detach()

    densities_gp = gp_experiments(X_train, Y_train.data.numpy(), X_test,
                                  Y_test.data.numpy(), num_layers)

    # NOTE(review): this call passes (H, num_layers) where the visible
    # nn_experiments signature declares (weight_variance, bias_variance,
    # noise_variance) — confirm which nn_experiments definition is the
    # intended target before running.
    results = nn_experiments(X_train_t, Y_train, X_test_t, Y_test, H,
                             num_layers)
    results['log_densities_GP'] = densities_gp
    results['X_train'] = X_train
    results['X_test'] = X_test
    results['Y_train'] = Y_train
    # BUG FIX: previously stored Y_train under the 'Y_test' key.
    results['Y_test'] = Y_test

    # Context manager so the file handle is closed even if dump raises
    # (previously `pickle.dump(results, open(...))` leaked the handle).
    with open(results_file_name, 'wb') as out_file:
        pickle.dump(results, out_file)
    embed()
    plt.show()
# --- 示例#3 / Example #3 — separator artifact from the pasted source; ---
# --- commented out so the stray name and literal are not executed.    ---
def nn_experiments(X, Y, grid_points):
    """Fit the default NN model to (X, Y) via HMC sampling and record
    predictions at grid_points.

    Returns the populated shared.ResultsManager.
    """
    hidden = defaults.hidden_units
    depth = defaults.shared_depth
    in_dim = X.shape[1]
    out_dim = 1

    # Wrap each numpy array as a torch Variable; data needs no gradients.
    def _as_var(arr):
        return Variable(torch.from_numpy(arr).type(defaults.tdtype),
                        requires_grad=False)

    X_var = _as_var(X)
    Y_var = _as_var(Y)
    grid_var = _as_var(grid_points)

    model = shared.get_nn_model(in_dim, hidden, out_dim, depth)
    #model.cuda()

    burn_in = 50
    nthin = 1
    results_manager = shared.ResultsManager(
        None, burn_in, nthin, False, False, True, False, True)
    shared.nn_model_regression(
        X_var, Y_var, grid_var, model,
        num_samples=30000, epsilon=0.05, beta=1., leap_frog_iters=10,
        results_manager=results_manager)

    return results_manager