# NOTE(review): this chunk is a whitespace-mangled script fragment; the line breaks
# below are reconstructed, tokens are unchanged. The leading `return` is the tail of
# a gradient-step function whose `def` line lies before this chunk — TODO confirm
# against the full file.
    return opt_update(i, gradients, state)


# --- hyperparameter optimisation: fixed 250 gradient steps ---
print('optimising the hyperparameters ...')
t0 = time.time()
for j in range(250):
    # gradient_step presumably computes gradients and applies opt_update
    # (defined above this chunk) — verify against the full file
    opt_state = gradient_step(j, opt_state, model)
t1 = time.time()
print('optimisation time: %2.2f secs' % (t1-t0))

# dense 1-D grid of 200 prediction inputs spanning the data range,
# padded by 0.2 on each side for plotting headroom
x_plot = np.linspace(np.min(Xall)-0.2, np.max(Xall)+0.2, 200)
# calculate posterior predictive distribution via filtering and smoothing at train & test locations:
print('calculating the posterior predictive distribution ...')
t0 = time.time()
# XT/YT: held-out inputs/targets — presumably the test fold; confirm at the caller
nlpd = model.negative_log_predictive_density(t=XT, y=YT)
posterior_mean, posterior_cov = model.predict(t=x_plot)
t1 = time.time()
print('prediction time: %2.2f secs' % (t1-t0))
print('NLPD: %1.2f' % nlpd)

if save_result:
    # persist the NLPD for this (method, fold) pair as a pickle
    # (file carries a .txt extension but the payload is binary pickle data)
    with open("output/" + str(method) + "_" + str(fold) + "_nlpd.txt", "wb") as fp:
        pickle.dump(nlpd, fp)
# with open("output/" + str(method) + "_" + str(fold) + "_nlpd.txt", "rb") as fp:
#     nlpd_show = pickle.load(fp)
# print(nlpd_show)

if plot_final:
    # link function of the likelihood; the plotting code that uses it
    # continues past the end of this chunk
    link = model.likelihood.link_fn
# NOTE(review): this chunk is a whitespace-mangled script fragment (a variant of the
# one above, evaluating on an explicit test split); the line breaks below are
# reconstructed, tokens are unchanged. The leading `return` is the tail of a
# gradient-step function whose `def` line lies before this chunk — TODO confirm
# against the full file.
    return opt_update(i, gradients, state)


# --- hyperparameter optimisation: num_iters gradient steps ---
print('optimising the hyperparameters ...')
t0 = time.time()
num_iters = 500
for j in range(num_iters):
    # gradient_step presumably computes gradients and applies opt_update
    # (defined above this chunk) — verify against the full file
    opt_state = gradient_step(j, opt_state, model)
t1 = time.time()
print('optimisation time: %2.2f secs' % (t1-t0))

# calculate posterior predictive distribution via filtering and smoothing at train & test locations:
print('calculating the posterior predictive distribution ...')
t0 = time.time()
# x_test/y_test: held-out test inputs/targets — confirm where the split is made
nlpd = model.negative_log_predictive_density(t=x_test, y=y_test)
t1 = time.time()
print('prediction time: %2.2f secs' % (t1-t0))
print('test NLPD: %1.2f' % nlpd)

if save_result:
    # persist the NLPD for this (method, fold) pair as a pickle
    # (file carries a .txt extension but the payload is binary pickle data)
    with open("output/" + str(method) + "_" + str(fold) + "_nlpd.txt", "wb") as fp:
        pickle.dump(nlpd, fp)
# with open("output/" + str(method) + "_" + str(fold) + "_nlpd.txt", "rb") as fp:
#     nlpd_show = pickle.load(fp)
# print(nlpd_show)
# lb = posterior_mean[:, 0] - 1.96 * posterior_var[:, 0]**0.5
# ub = posterior_mean[:, 0] + 1.96 * posterior_var[:, 0]**0.5
# x_pred = model.t_all[:, 0]