def crossval_test_nll(s_u):
    nlls = []
    for i in range(num_splits):
        # make design matrices for training data for this fold
        Phi_tr = gen_desmat_gaussian(X_tr_kf[i], params={'s': s_w, 'mus': mus_w})
        Psi_tr = gen_desmat_gaussian(X_tr_kf[i], params={'s': s_u, 'mus': mus_u})
        # train on this fold
        print('minimizing s_u = %s, fold %s' % (s_u, i))
        est.fit(Phi_tr, Psi_tr, t_tr_kf[i], method='nelder-mead', logging=False)
        # make design matrices for test data for this fold
        Phi_ts = gen_desmat_gaussian(X_ts_kf[i], params={'s': s_w, 'mus': mus_w})
        Psi_ts = gen_desmat_gaussian(X_ts_kf[i], params={'s': s_u, 'mus': mus_u})
        # predict at test points
        t_pred, loginvvar_pred = est.predict(Phi_ts, Psi_ts, noise_est=True)
        # inverse variance beta at test points
        beta_pred = np.exp(loginvvar_pred)
        # neg log-likelihood of test data for this fold
        nlls.append(neg_log_like(t_pred, t_ts_kf[i], beta_pred))
    # average test NLL over the folds
    nll_av = np.mean(nlls)
    print('nll_av = %s\n' % nll_av)
    return nll_av
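
#%% hedged sketch: neg_log_like is called above but not defined in this section.
# The helper below is an assumption about what it computes -- the average Gaussian
# negative log-likelihood of the test targets given predicted means t_pred and
# per-point precisions beta (name and signature taken from the call above;
# whether the real helper sums or averages is not shown here).
def neg_log_like(t_pred, t_true, beta):
    # NLL of each t_true under N(t_pred, 1/beta), averaged over test points
    nll = 0.5 * (beta * (t_true - t_pred) ** 2 - np.log(beta) + np.log(2 * np.pi))
    return np.mean(nll)

# One way to use crossval_test_nll is a plain grid search over candidate widths;
# the grid bounds below are illustrative, not from the original:
# s_u_grid = np.linspace(0.5, 4.0, 15)
# s_u_best = min(s_u_grid, key=crossval_test_nll)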
#%% setup Gaussian basis function params
# number of basis functions for y(x) and for the log inverse variance
M_w, M_u = 4, 4
# mus are centers of gbfs, s is spacing between gbfs
# 4 w gbfs, with edge gbfs at s_w/2 from the boundary of the data
mus_w = np.array([-2.25, -0.75, 0.75, 2.25])
s_w = 1.5
# 4 u gbfs, with edge gbfs at s_u/2 from the boundary of the data
mus_u = np.array([-2.25, -0.75, 0.75, 2.25])
s_u = 2.25

#%%
# generate y(x) gbf design matrix Phi from input data X
Phi = gen_desmat_gaussian(X, params={'s': s_w, 'mus': mus_w})
# generate log(beta) gbf design matrix Psi from input data X
Psi = gen_desmat_gaussian(X, params={'s': s_u, 'mus': mus_u})

#%% init estimator
est = BayesianHetRegression()

#%% use fit and predict methods
est.fit(Phi, Psi, t, method='sgd', logging=True)
u_weights, w_weights = est.u_fit, est.w_fit
t_gbfs = gen_desmat_gaussian(Xcont, params={'s': s_w, 'mus': mus_w})
loginvvar_gbfs = gen_desmat_gaussian(Xcont, params={'s': s_u, 'mus': mus_u})
t_gbfs_total, loginvvar_gbfs_total = est.predict(t_gbfs, loginvvar_gbfs,
                                                 noise_est=True)  # keyword assumed; mirrors the predict call in the cross-validation code
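
#%% hedged sketch: gen_desmat_gaussian is used throughout but defined elsewhere.
# The version below is an assumption about what it builds -- an (N, M+1) design
# matrix whose columns are unnormalized Gaussian basis functions of width s
# centred at mus, preceded by a bias column of ones (the bias column is assumed).
# Named *_sketch to avoid shadowing the real helper.
def gen_desmat_gaussian_sketch(X, params):
    s, mus = params['s'], params['mus']
    Xcol = np.asarray(X).reshape(-1, 1)                              # (N, 1)
    gbfs = np.exp(-(Xcol - np.asarray(mus).reshape(1, -1)) ** 2
                  / (2.0 * s ** 2))                                  # (N, M)
    return np.hstack([np.ones((len(Xcol), 1)), gbfs])                # prepend bias column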
def make_Phi_Psi(X, s, mus):
    # linear design matrix (bias column + x) for the mean model
    Phi_mat = np.vstack((np.ones(len(X)), X)).T
    # Gaussian-basis design matrix for the log inverse variance model
    Psi_mat = gen_desmat_gaussian(X, params={'s': s, 'mus': mus})
    return Phi_mat, Psi_mat
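
#%% hedged usage sketch for make_Phi_Psi, reusing the first CV fold from the
# cross-validation code above; that the fold arrays (X_tr_kf, t_tr_kf, X_ts_kf)
# and the heteroscedastic estimator est are in scope here is an assumption.
Phi_tr0, Psi_tr0 = make_Phi_Psi(X_tr_kf[0], s_u, mus_u)
Phi_ts0, Psi_ts0 = make_Phi_Psi(X_ts_kf[0], s_u, mus_u)
est.fit(Phi_tr0, Psi_tr0, t_tr_kf[0], method='nelder-mead', logging=False)
t_pred0, loginvvar_pred0 = est.predict(Phi_ts0, Psi_ts0, noise_est=True)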
def plot_prediction_fixed_hparams(s, mus, alpha, beta):
    # generating design matrix
    Phi = gen_desmat_gaussian(X, params={'s': s, 'mus': mus})
    Phi_test = gen_desmat_gaussian(Xcont, params={'s': s, 'mus': mus})
    est = BayesianRidgeRegression(alpha=alpha, beta=beta)
    est.fit(Phi, t, optimize_hyperparams=False)
    pred_mean, pred_std = est.predict(Phi_test, return_std=True)
    print(est.m)
    plot_result(pred_mean, pred_std)

plot_prediction_fixed_hparams(s, mus, alpha=1.0, beta=1.0)

#%%
Phi = gen_desmat_gaussian(X, params={'s': s, 'mus': mus})
Phi_test = gen_desmat_gaussian(Xcont, params={'s': s, 'mus': mus})
est = BayesianRidgeRegression(alpha=1.0, beta=1.0)
est.fit(Phi, t, optimize_hyperparams=False)
pred_mean, pred_std = est.predict(Phi_test, return_std=True)
w_means = est.m

#%%
# NEW
t_gbfs = gen_desmat_gaussian(Xcont, params={'s': s, 'mus': mus})
# weighted sum of gbfs; should equal the predictive mean
t_gbfs_total = t_gbfs @ w_means

# plot the gbfs
def plot_result_with_gbf(pred_mean, pred_std):