Example #1
        plot(mod, i)

    return opt_update(i, gradients, state)


print('optimising the hyperparameters ...')
t0 = time.time()
for j in range(2):
    opt_state = gradient_step(j, opt_state, model)
t1 = time.time()
print('optimisation time: %2.2f secs' % (t1-t0))

# calculate posterior predictive distribution via filtering and smoothing at train & test locations:
print('calculating the posterior predictive distribution ...')
t0 = time.time()
posterior_mean, posterior_cov, inducing_mean, inducing_cov, nlpd = model.predict()
mu = np.squeeze(posterior_mean)
t1 = time.time()
print('prediction time: %2.2f secs' % (t1-t0))
print('NLPD: %1.2f' % nlpd)

# with open("output/" + str(method) + "_" + str(fold) + "_nlpd.txt", "wb") as fp:
#     pickle.dump(nlpd, fp)

# with open("output/" + str(method) + "_" + str(fold) + "_nlpd.txt", "rb") as fp:
#     nlpd_show = pickle.load(fp)
# print(nlpd_show)

test_id = model.test_id
t_test = model.t_all[test_id]
link_fn = model.likelihood.link_fn
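
# Context sketch (an assumption, not part of the scraped example): the
# fragments in this listing rely on JAX's optimizer triple and a gradient_step
# helper whose tail (`return opt_update(i, gradients, state)`) is visible in
# each snippet. A minimal reconstruction, assuming an Adam optimizer and a
# model whose run_model() method (hypothetical name) returns the negative log
# marginal likelihood and its gradients:
from jax.example_libraries.optimizers import adam

opt_init, opt_update, get_params = adam(step_size=0.1)
opt_state = opt_init(init_params)  # init_params: the model's initial hyperparameters (assumed)

def gradient_step(i, state, mod):
    params = get_params(state)                     # current hyperparameters
    mod.prior.hyp = params[0]                      # assumed parameter layout
    neg_log_marg_lik, gradients = mod.run_model()  # hypothetical method name
    if plot_intermediate:                          # flag assumed from the fragments
        plot(mod, i)
    return opt_update(i, gradients, state)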
Example #2
    return opt_update(i, gradients, state)


print('optimising the hyperparameters ...')
t0 = time.time()
for j in range(250):
    opt_state = gradient_step(j, opt_state, model)
t1 = time.time()
print('optimisation time: %2.2f secs' % (t1-t0))

x_plot = np.linspace(np.min(Xall)-0.2, np.max(Xall)+0.2, 200)
# calculate posterior predictive distribution via filtering and smoothing at train & test locations:
print('calculating the posterior predictive distribution ...')
t0 = time.time()
nlpd = model.negative_log_predictive_density(t=XT, y=YT)
posterior_mean, posterior_cov = model.predict(t=x_plot)
t1 = time.time()
print('prediction time: %2.2f secs' % (t1-t0))
print('NLPD: %1.2f' % nlpd)

if save_result:
    with open("output/" + str(method) + "_" + str(fold) + "_nlpd.txt", "wb") as fp:
        pickle.dump(nlpd, fp)

# with open("output/" + str(method) + "_" + str(fold) + "_nlpd.txt", "rb") as fp:
#     nlpd_show = pickle.load(fp)
# print(nlpd_show)

if plot_final:
    link = model.likelihood.link_fn
    lb = posterior_mean[:, 0] - np.sqrt(posterior_cov[:, 0, 0] + link(posterior_mean[:, 1]) ** 2) * 1.96
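    # assumed continuation, mirroring the lower bound (the same symmetric
    # pattern appears in Example #3 below):
    ub = posterior_mean[:, 0] + np.sqrt(posterior_cov[:, 0, 0] + link(posterior_mean[:, 1]) ** 2) * 1.96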
Example #3
    return opt_update(i, gradients, state)


print('optimising the hyperparameters ...')
t0 = time.time()
for j in range(250):
    opt_state = gradient_step(j, opt_state, model)
t1 = time.time()
print('optimisation time: %2.2f secs' % (t1 - t0))

x_plot = np.linspace(np.min(x_test) - 5, np.max(x_test) + 5, 200)
# calculate posterior predictive distribution via filtering and smoothing at train & test locations:
print('calculating the posterior predictive distribution ...')
t0 = time.time()
nlpd = model.negative_log_predictive_density(t=x_test, y=y_test)
posterior_mean, posterior_var = model.predict(t=x_plot)
t1 = time.time()
print('prediction time: %2.2f secs' % (t1 - t0))
print('NLPD: %1.2f' % nlpd)

if save_result:
    with open("output/" + str(method) + "_" + str(fold) + "_nlpd.txt",
              "wb") as fp:
        pickle.dump(nlpd, fp)

# with open("output/" + str(method) + "_" + str(fold) + "_nlpd.txt", "rb") as fp:
#     nlpd_show = pickle.load(fp)
# print(nlpd_show)

if plot_final:
    disaster_timings = pd.read_csv('../../../data/coal.txt',
                                   header=None).values[:, 0]  # assumed completion: load coal disaster timings
Example #4
        plot(mod, i)

    return opt_update(i, gradients, state)


print('optimising the hyperparameters ...')
t0 = time.time()
for j in range(250):
    opt_state = gradient_step(j, opt_state, model)
t1 = time.time()
print('optimisation time: %2.2f secs' % (t1 - t0))

# calculate posterior predictive distribution via filtering and smoothing at train & test locations:
print('calculating the posterior predictive distribution ...')
t0 = time.time()
posterior_mean, posterior_var, _, _ = model.predict(compute_nlpd=False)  # nlpd not computed here
t1 = time.time()
print('prediction time: %2.2f secs' % (t1 - t0))

# x_pred = xrange * (model.t_all[:, 0] + xshift)
x_pred = X_scaler.inverse_transform(model.t_all[:, 0])
# X_rescale = xrange * (X + xshift)
link = model.likelihood.link_fn
lb = posterior_mean[:, 0, 0] - np.sqrt(posterior_var[:, 0, 0] +
                                       link(posterior_mean[:, 1, 0])**2) * 1.96
ub = posterior_mean[:, 0, 0] + np.sqrt(posterior_var[:, 0, 0] +
                                       link(posterior_mean[:, 1, 0])**2) * 1.96
post_mean = y_scaler.inverse_transform(posterior_mean[:, 0, 0])
lb = y_scaler.inverse_transform(lb)
ub = y_scaler.inverse_transform(ub)
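
# If X_scaler and y_scaler are scikit-learn scalers (an assumption; their
# construction is not shown), inverse_transform expects 2D input, so a safer
# form of the conversions above is:
post_mean = y_scaler.inverse_transform(posterior_mean[:, 0, 0].reshape(-1, 1)).squeeze()
lb = y_scaler.inverse_transform(lb.reshape(-1, 1)).squeeze()
ub = y_scaler.inverse_transform(ub.reshape(-1, 1)).squeeze()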
Example #5
        plot(mod, i)

    return opt_update(i, gradients, state)


print('optimising the hyperparameters ...')
t0 = time.time()
for j in range(100):
    opt_state = gradient_step(j, opt_state, model)
t1 = time.time()
print('optimisation time: %2.2f secs' % (t1 - t0))

# calculate posterior predictive distribution via filtering and smoothing at train & test locations:
print('calculating the posterior predictive distribution ...')
t0 = time.time()
posterior_mean, posterior_cov, _, nlpd = model.predict()
t1 = time.time()
print('prediction time: %2.2f secs' % (t1 - t0))
print('test NLPD: %1.2f' % nlpd)

lb = posterior_mean[:, 0, 0] - 1.96 * posterior_cov[:, 0, 0]**0.5
ub = posterior_mean[:, 0, 0] + 1.96 * posterior_cov[:, 0, 0]**0.5
x_pred = model.t_all[:, 0]
test_id = model.test_id
t_test = model.t_all[test_id]

print('sampling from the posterior ...')
t0 = time.time()
posterior_samp = model.posterior_sample(20)
t1 = time.time()
print('sampling time: %2.2f secs' % (t1 - t0))
Example #6
# hypothetical reconstruction of the truncated call head (method name assumed):
neg_log_marg_lik, dlZ = sde_gp_model.kalman_filter(
    sde_gp_model.y_train, sde_gp_model.dt_train, params, False, False, None,
    sde_gp_model.sites.site_params)
print(neg_log_marg_lik)
print(dlZ)

# print('optimising the hyperparameters ...')
# t0 = time.time()
# for j in range(20):
#     opt_state = gradient_step(j, opt_state, sde_gp_model)
# t1 = time.time()
# print('optimisation time: %2.2f secs' % (t1-t0))

# calculate posterior predictive distribution via filtering and smoothing at train & test locations:
print('calculating the posterior predictive distribution ...')
t0 = time.time()
posterior_mean, posterior_var, _ = sde_gp_model.predict()
t1 = time.time()
print('prediction time: %2.2f secs' % (t1 - t0))

lb = posterior_mean[:, 0] - 1.96 * posterior_var[:, 0]**0.5
ub = posterior_mean[:, 0] + 1.96 * posterior_var[:, 0]**0.5
x_pred = sde_gp_model.t_all
test_id = sde_gp_model.test_id

print('sampling from the posterior ...')
t0 = time.time()
posterior_samp = sde_gp_model.posterior_sample(20)
t1 = time.time()
print('sampling time: %2.2f secs' % (t1 - t0))

print('plotting ...')
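# Plotting sketch (assumptions: matplotlib is imported as plt, and the sample
# array squeezes to shape (time, num_samples)):
x_flat = np.squeeze(x_pred)
plt.figure(1)
plt.fill_between(x_flat, lb, ub, alpha=0.3, label='95% interval')
plt.plot(x_flat, posterior_mean[:, 0], 'b', label='posterior mean')
plt.plot(x_flat, np.squeeze(posterior_samp), 'b', alpha=0.15, linewidth=0.5)
plt.legend()
plt.show()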
Example #7

plot_num = 0
mu_prev = None
print('optimising the hyperparameters ...')
t0 = time.time()
for j in range(50):
    opt_state, plot_num, mu_prev = gradient_step(j, opt_state, model, plot_num,
                                                 mu_prev)
t1 = time.time()
print('optimisation time: %2.2f secs' % (t1 - t0))

# calculate posterior predictive distribution via filtering and smoothing at train & test locations:
print('calculating the posterior predictive distribution ...')
t0 = time.time()
mu, var, _, nlpd_test = model.predict(return_full=True)
mu = np.squeeze(mu)
t1 = time.time()
print('prediction time: %2.2f secs' % (t1 - t0))
# print('test NLPD: %1.2f' % nlpd_test)

x_pred = model.t_all
link_fn = model.likelihood.link_fn

# print('sampling from the posterior ...')
# t0 = time.time()
# posterior_samp = model.posterior_sample(20)
# t1 = time.time()
# print('sampling time: %2.2f secs' % (t1-t0))

print('plotting ...')
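# Plotting sketch (assumptions: matplotlib is imported as plt, and mu squeezes
# to a 1D latent mean that the link function maps to the observation scale):
plt.figure(1)
plt.plot(np.squeeze(x_pred), link_fn(mu), 'b')
plt.xlabel('time')
plt.ylabel('link_fn(posterior mean)')
plt.show()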
Example #8
        plot(mod, i)

    return opt_update(i, gradients, state)


print('optimising the hyperparameters ...')
t0 = time.time()
for j in range(250):
    opt_state = gradient_step(j, opt_state, model)
t1 = time.time()
print('optimisation time: %2.2f secs' % (t1-t0))

# calculate posterior predictive distribution via filtering and smoothing at train & test locations:
print('calculating the posterior predictive distribution ...')
t0 = time.time()
posterior_mean, posterior_var, _, nlpd = model.predict()
t1 = time.time()
print('prediction time: %2.2f secs' % (t1-t0))
print('NLPD: %1.2f' % nlpd)

with open("output/" + str(method) + "_" + str(fold) + "_nlpd.txt", "wb") as fp:
    pickle.dump(nlpd, fp)

# with open("output/" + str(method) + "_" + str(fold) + "_nlpd.txt", "rb") as fp:
#     nlpd_show = pickle.load(fp)
# print(nlpd_show)

# plt.figure(1)
# plt.plot(posterior_mean)
# plt.show()
Example #9
plot_num = 0
mu_prev = None
print('optimising the hyperparameters ...')
t0 = time.time()
for j in range(50):
    opt_state, plot_num, mu_prev = gradient_step(j, opt_state, model, plot_num,
                                                 mu_prev)
t1 = time.time()
print('optimisation time: %2.2f secs' % (t1 - t0))

# calculate posterior predictive distribution via filtering and smoothing at train & test locations:
print('calculating the posterior predictive distribution ...')
t0 = time.time()
# nlpd = model.negative_log_predictive_density(t=t, r=r, y=Y)
mu, var = model.predict(t=t, r=r)
t1 = time.time()
print('prediction time: %2.2f secs' % (t1 - t0))
# print('test NLPD: %1.2f' % nlpd)

link_fn = model.likelihood.link_fn

print('plotting ...')
plt.figure(1, figsize=(10, 5))
# im = plt.imshow(mu.T, extent=[0, 1000, 0, 500], origin='lower')
im = plt.imshow(link_fn(mu).T / scale,
                extent=[0, 1000, 0, 500],
                origin='lower')
plt.colorbar(im, fraction=0.0235, pad=0.04)
plt.xlim(0, 1000)
plt.ylim(0, 500)
Example #10

plot_num = 0
mu_prev = None
print('optimising the hyperparameters ...')
t0 = time.time()
for j in range(50):
    opt_state, plot_num, mu_prev = gradient_step(j, opt_state, model, plot_num,
                                                 mu_prev)
t1 = time.time()
print('optimisation time: %2.2f secs' % (t1 - t0))

# calculate posterior predictive distribution via filtering and smoothing at train & test locations:
print('calculating the posterior predictive distribution ...')
t0 = time.time()
mu, var = model.predict(t=Xtest, r=Rtest)
t1 = time.time()
print('prediction time: %2.2f secs' % (t1 - t0))

link_fn = model.likelihood.link_fn

print('plotting ...')
plt.figure(1)
for label, mark in [[1, 'o'], [0, 'o']]:
    ind = Y[:, 0] == label
    # ax.plot(X[ind, 0], X[ind, 1], mark)
    plt.scatter(X[ind], R[ind], s=50, alpha=.5)
# ax.imshow(mu.T)
plt.contour(Xtest, Rtest, mu, levels=[.0], colors='k', linewidths=4.)
# plt.axis('equal')
plt.tick_params(axis='x', labelbottom=False)  # assumed completion: hide x tick labels
Example #11
    prior_params = softplus_list(params[0])
    print('iter %2d: var=%1.2f len_time=%1.2f len_space=%1.2f, nlml=%2.2f' %
          (i, prior_params[0], prior_params[1], prior_params[2], neg_log_marg_lik))

    if plot_intermediate:
        plot_2d_classification(mod, i)
        # plot_num_, mu_prev_ = plot_2d_classification_filtering(mod, i, plot_num_, mu_prev_)

    return opt_update(i, gradients, state), plot_num_, mu_prev_
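
# softplus_list (used above) is assumed to map each unconstrained parameter
# array back to its positive domain elementwise; a minimal sketch:
def softplus_list(params):
    return [np.log(1.0 + np.exp(p)) for p in params]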


plot_num = 0
mu_prev = None
print('optimising the hyperparameters ...')
t0 = time.time()
for j in range(0):  # note: zero iterations -- optimisation is skipped in this example
    opt_state, plot_num, mu_prev = gradient_step(j, opt_state, model, plot_num, mu_prev)
t1 = time.time()
print('optimisation time: %2.2f secs' % (t1-t0))

# calculate posterior predictive distribution via filtering and smoothing at train & test locations:
print('calculating the posterior predictive distribution ...')
t0 = time.time()
posterior_mean, posterior_var, _, nlpd = model.predict(return_full=True)
t1 = time.time()
print('prediction time: %2.2f secs' % (t1-t0))
print('test NLPD: %1.2f' % nlpd)

with open("output/" + str(method) + "_" + str(fold) + "_nlpd.txt", "wb") as fp:
    pickle.dump(nlpd, fp)
Example #12
# print('optimising the hyperparameters ...')
# t0 = time.time()
# for j in range(20):
#     opt_state = gradient_step(j, opt_state, sde_gp_model_1)
# t1 = time.time()
# print('optimisation time: %2.2f secs' % (t1-t0))

for i in range(5):
    model_1.run()
    model_2.run()

# calculate posterior predictive distribution via filtering and smoothing at train & test locations:
print('calculating the posterior predictive distribution ...')
t0 = time.time()
posterior_mean_1, posterior_var_1, _, nlpd1 = model_1.predict()
posterior_mean_2, posterior_var_2, _, nlpd2 = model_2.predict()
t1 = time.time()
print('prediction time: %2.2f secs' % (t1 - t0))
print(model_1.sites.site_params[0][100] - model_2.sites.site_params[0][100])
print(posterior_mean_1 - posterior_mean_2)

lb_1 = posterior_mean_1[:, 0] - 1.96 * posterior_var_1[:, 0]**0.5
ub_1 = posterior_mean_1[:, 0] + 1.96 * posterior_var_1[:, 0]**0.5
lb_2 = posterior_mean_2[:, 0] - 1.96 * posterior_var_2[:, 0]**0.5
ub_2 = posterior_mean_2[:, 0] + 1.96 * posterior_var_2[:, 0]**0.5
x_pred = model_1.t_all
test_id = model_1.test_id
t_test = model_1.t_all[test_id]
link_fn = model_1.likelihood.link_fn
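# Consistency-check sketch (assumption: the two models implement the same
# inference and should agree; np.allclose summarises the differences printed above):
print('posterior means match:', np.allclose(posterior_mean_1, posterior_mean_2, atol=1e-6))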
Example #13
plot_num = 0
mu_prev = None
print('optimising the hyperparameters ...')
t0 = time.time()
for j in range(50):
    opt_state, plot_num, mu_prev = gradient_step(j, opt_state, model, plot_num,
                                                 mu_prev)
t1 = time.time()
print('optimisation time: %2.2f secs' % (t1 - t0))

# calculate posterior predictive distribution via filtering and smoothing at train & test locations:
print('calculating the posterior predictive distribution ...')
t0 = time.time()
# posterior_mean, posterior_cov, _, nlpd = model.predict()
mu, var, _, _ = model.predict(compute_nlpd=False)
mu = np.squeeze(mu)
t1 = time.time()
print('prediction time: %2.2f secs' % (t1 - t0))
# print('test NLPD: %1.2f' % nlpd)

link_fn = model.likelihood.link_fn

# print('sampling from the posterior ...')
# t0 = time.time()
# posterior_samp = model.posterior_sample(20)
# t1 = time.time()
# print('sampling time: %2.2f secs' % (t1-t0))

print('plotting ...')
plt.figure(1, figsize=(10, 5))
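# Assumed continuation, mirroring the 2D plotting pattern of Example #9
# (the fragment ends at plt.figure):
im = plt.imshow(link_fn(mu).T, origin='lower')
plt.colorbar(im, fraction=0.0235, pad=0.04)
plt.show()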