Example #1
def gp_experiments(X, Y):
    # `shared` and `defaults` are project-local modules supplying the GP
    # constructor and common experiment settings (e.g. the shared depth).
    gp_model = shared.get_gp_model(X,
                                   Y,
                                   input_dim=2,
                                   depth=defaults.shared_depth)
    # Log likelihood of the model on the training data.
    return gp_model.compute_log_likelihood()
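The snippet above only scores the training data. A minimal usage sketch follows; it assumes the project's `shared` and `defaults` modules are importable, that the model accepts NumPy arrays shaped (N, 2) for X and (N, 1) for Y, and the synthetic data is purely illustrative:

import numpy as np

rng = np.random.RandomState(0)
X = rng.uniform(-1.0, 1.0, size=(50, 2))          # 2-D inputs, matching input_dim=2
Y = np.sin(X[:, :1]) + 0.1 * rng.randn(50, 1)     # noisy scalar targets

log_lik = gp_experiments(X, Y)
print('GP log likelihood on the training data:', log_lik)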
Example #2
def gp_experiments(X, Y, grid_points):  #, fig, axes):
    gp_model = shared.get_gp_model(X,
                                   Y,
                                   input_dim=1,
                                   depth=defaults.shared_depth)
    # Predictive mean and variance of the latent function at grid_points.
    gp_mean, gp_var = gp_model.predict_f(grid_points)
    return gp_mean, gp_var
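A sketch of how this 1-D variant might be driven and plotted, assuming `predict_f` returns NumPy arrays of shape (M, 1) and that `matplotlib` is available; the data and grid are illustrative:

import numpy as np
import matplotlib.pyplot as plt

rng = np.random.RandomState(1)
X = rng.uniform(-3.0, 3.0, size=(40, 1))
Y = np.sin(X) + 0.1 * rng.randn(40, 1)
grid_points = np.linspace(-3.0, 3.0, 200)[:, None]   # (M, 1) prediction grid

gp_mean, gp_var = gp_experiments(X, Y, grid_points)
gp_std = np.sqrt(gp_var)

# Predictive mean with a two-standard-deviation band, plus the data.
plt.plot(grid_points[:, 0], gp_mean[:, 0], 'b-')
plt.fill_between(grid_points[:, 0],
                 gp_mean[:, 0] - 2 * gp_std[:, 0],
                 gp_mean[:, 0] + 2 * gp_std[:, 0],
                 alpha=0.3)
plt.scatter(X[:, 0], Y[:, 0], c='k', s=10)
plt.show()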
Example #3
def gp_experiments(X_train, Y_train, X_test, Y_test, num_layers):
    gp_model = shared.get_gp_model(X_train,
                                   Y_train,
                                   input_dim=X_train.shape[1],
                                   depth=num_layers)
    # Per-test-point predictive (log) densities under the GP.
    individual_densities = gp_model.predict_density(X_test, Y_test)
    print('individual GP densities ', individual_densities)
    # Mean over test points is a held-out score; computed here but not returned.
    holdout = individual_densities.mean()
    return individual_densities
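A usage sketch for this held-out-density variant, assuming the `shared` module is on the path, that `predict_density` returns a NumPy array of per-point log densities (as the later snippets suggest), and that `num_layers=2` is an arbitrary illustrative depth:

import numpy as np

rng = np.random.RandomState(2)
X = rng.uniform(-1.0, 1.0, size=(100, 3))
Y = X.sum(axis=1, keepdims=True) + 0.1 * rng.randn(100, 1)

# Simple 80/20 train/test split.
X_train, X_test = X[:80], X[80:]
Y_train, Y_test = Y[:80], Y[80:]

densities = gp_experiments(X_train, Y_train, X_test, Y_test, num_layers=2)
print('mean held-out log density:', densities.mean())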
Example #4
def gp_experiments(X, Y, grid_points):
    gp_model = shared.get_gp_model(X,
                                   Y,
                                   input_dim=2,
                                   depth=defaults.shared_depth)
    # Fit the model before predicting on the grid.
    gp_model.optimize()
    gp_mean, gp_var = gp_model.predict_f(grid_points)
    return gp_mean, gp_var, gp_model
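This variant optimises the model and also returns it. A sketch of calling it on a flattened 2-D grid, with the same caveats as above about the project modules and illustrative data:

import numpy as np

rng = np.random.RandomState(3)
X = rng.uniform(-1.0, 1.0, size=(60, 2))
Y = np.sin(X[:, :1]) * np.cos(X[:, 1:]) + 0.1 * rng.randn(60, 1)

# Flatten a 30x30 mesh into a (900, 2) array of prediction points.
g1, g2 = np.meshgrid(np.linspace(-1, 1, 30), np.linspace(-1, 1, 30))
grid_points = np.column_stack([g1.ravel(), g2.ravel()])

gp_mean, gp_var, gp_model = gp_experiments(X, Y, grid_points)
print('predictive mean shape:', gp_mean.shape)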
def gp_experiments(X_train, Y_train, X_test, Y_test):
    gp_model = shared.get_gp_model(X_train,
                                   Y_train,
                                   input_dim=X_train.shape[1],
                                   depth=defaults.shared_depth)
    gp_model.optimize()
    # Mean of the per-point log predictive densities gives the held-out log likelihood.
    individual_log_densities = gp_model.predict_density(X_test, Y_test)
    holdout_ll = individual_log_densities.mean()
    # Normalised RMSE of the predictive mean on the test set.
    pred_mean, pred_var = gp_model.predict_f(X_test)
    norm_rmse = shared.nrmse(Y_test, pred_mean)
    return holdout_ll, norm_rmse, gp_model, individual_log_densities
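A matching usage sketch for this train/test variant, again assuming the project's `shared` and `defaults` modules are importable and using illustrative synthetic data:

import numpy as np

rng = np.random.RandomState(4)
X = rng.uniform(-1.0, 1.0, size=(120, 4))
Y = X[:, :1] ** 2 + 0.1 * rng.randn(120, 1)

X_train, Y_train = X[:100], Y[:100]
X_test, Y_test = X[100:], Y[100:]

holdout_ll, norm_rmse, gp_model, log_densities = gp_experiments(
    X_train, Y_train, X_test, Y_test)
print('held-out log likelihood:', holdout_ll)
print('normalised RMSE:', norm_rmse)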
# Scatter of per-point log predictive densities: GP (x-axis) vs neural network (y-axis).
axes.plot(log_pred_densities_gp, log_pred_densities_nn, 'bo')
axes.set_xlabel('Gaussian process log density')
axes.set_ylabel('Neural network log density')
axes.axis('square')
axes.set_xlim(shared_limits)
axes.set_ylim(shared_limits)
random_point_index = 9

plt.savefig('../figures/comparison_bias_four_a.pdf')

fig, axesB = plt.subplots(1, 1, figsize=(4.5, 4.5))
# Histogram of the tracked predictions at one test point (first 50 iterations dropped).
# Note: matplotlib removed `normed`; `density=True` is the current argument.
axesB.hist(pred_trackers[50:, random_point_index], 30, density=True)
test_point = np.atleast_2d(X_test[random_point_index, :])

# Overlay the GP predictive density at the same test point
# (`norm` here is presumably scipy.stats.norm).
gp_model = shared.get_gp_model(X_train,
                               Y_train,
                               input_dim=X_train.shape[1],
                               depth=defaults.shared_depth)
pred_mean, pred_var = gp_model.predict_f(test_point)
pred_std = np.sqrt(pred_var)
plot_range = [-3., 2.]
x_points = np.linspace(*plot_range, 100)
densities = norm.pdf(x_points, loc=pred_mean, scale=pred_std)
axesB.plot(x_points, densities.flatten())
axesB.set_xlabel('Function value')
axesB.set_ylabel('Density')
plt.savefig('../figures/comparison_bias_four_b.pdf')
embed()  # drop into an interactive shell (likely IPython's embed) to inspect results
plt.show()