import csv
from time import time

import numpy as np
import scipy.io as sio

# Project-local modules, imported under the names the cases below use:
# gp wraps the GP learning/prediction routines, uc holds the circular-
# statistics utilities, and plot is the project's plotting helper.
import gp
import uc
import plot


def run_case():
    # Load every array from the dataset once
    mat = sio.loadmat('../datasets/compact_uber.mat')

    print('--> Load training set')
    x_t = mat['x_t']            # training inputs
    psi_t = mat['y_t'] - np.pi  # training outputs, shifted to [-pi, pi)
    n_data = len(psi_t)

    print('--> Load validation set')
    x_v = mat['x_v']            # validation inputs
    psi_v = mat['y_v'] - np.pi  # validation outputs, shifted to [-pi, pi)

    print('--> Load prediction set')
    x_p = mat['x_p']            # prediction inputs
    n_pred = len(x_p)
    n_totp = n_data + n_pred

    print('--> Learn GP')
    # Embed the angles on the unit circle and set kernel parameters
    psi_t = psi_t.reshape(n_data, 1)
    y_t = np.hstack((np.cos(psi_t), np.sin(psi_t)))
    x_t = x_t.reshape(n_data, 1)
    x_p = x_p.reshape(n_pred, 1)
    config = {
        'xi': x_t,
        'y': y_t,
    }
    ell2 = 0.15 ** 2
    s2 = 400.
    noise = 1.E-6
    hyp = np.zeros([3, 1])
    hyp[0] = ell2
    hyp[1] = s2
    hyp[2] = noise
    hyp = np.log(hyp.flatten())
    ans = gp.learning_se_iso(hyp, config)
    print(ans.message)
    hyp = np.exp(ans.x)
    ell2 = hyp[0]
    s2 = hyp[1]
    noise = hyp[2]
    params = {
        'ell2': ell2,
        's2': s2
    }

    print('--> Initialising model variables')
    t0 = time()
    yp, var_yp = gp.predict_se_iso(y_t, x_t, x_p, params, noise)
    tf = time()
    print('Total elapsed time in prediction: ' + str(tf - t0) + ' s')

    # Per-point predictive variances
    s2_p = np.diag(var_yp)
    holl_score = 0.
    for ii in range(0, n_pred):
        holl_score += uc.loglik_gp2circle(psi_v[ii, 0], yp[ii], s2_p[ii])

    print('HOLL score: ' + str(holl_score))
    print('Finished running case!')
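
# The HOLL (held-out log-likelihood) loop above depends on uc.loglik_gp2circle
# from the project's circular-statistics utilities. As a rough guide to what
# such a score computes, here is a minimal sketch -- an assumption, not the
# repo's actual implementation -- that evaluates the observed angle's
# unit-circle embedding under an isotropic bivariate Gaussian predictive
# over (cos psi, sin psi).
def _loglik_gp2circle_sketch(psi_obs, y_mean, s2):
    """Log-density of (cos(psi_obs), sin(psi_obs)) under N(y_mean, s2 * I)."""
    target = np.array([np.cos(psi_obs), np.sin(psi_obs)])
    return np.sum(-0.5 * (target - y_mean) ** 2 / s2
                  - 0.5 * np.log(2. * np.pi * s2))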

def run_case():
    x = np.linspace(0, 1, 100)
    y = toy_fun(x)

    print('--> Create training set')
    x_t = np.array([+0.05, +0.05, +0.17, +0.17, +0.22, +0.30, +0.35,
                    +0.37, +0.52, +0.53, +0.69, +0.70, +0.82, +0.90])
    psi_t = np.array([+0.56, +0.65, +0.90, +1.18, +2.39, +3.40, +2.89,
                      +2.64, -2.69, -3.20, -3.40, -2.77, +0.41, +0.35])
    x_v = x
    psi_v = y
    n_data = len(x_t)

    print('--> Create prediction set')
    grid_pts = 100
    x_p = np.linspace(0, 1, grid_pts)
    n_pred = len(x_p)
    n_totp = n_data + n_pred

    print('--> Learn GP')
    # Embed the angles on the unit circle and set kernel parameters
    psi_t = psi_t.reshape(n_data, 1)
    y_t = np.hstack((np.cos(psi_t), np.sin(psi_t)))
    x_t = x_t.reshape(n_data, 1)
    x_p = x_p.reshape(n_pred, 1)
    config = {
        'xi': x_t,
        'y': y_t,
    }
    ell2 = 1.15 ** 2
    s2 = 700.
    noise = 2.E-3
    hyp = np.zeros([3, 1])
    hyp[0] = ell2
    hyp[1] = s2
    hyp[2] = noise
    hyp = np.log(hyp.flatten())
    ans = gp.learning_se_iso(hyp, config)
    print(ans.message)
    hyp = np.exp(ans.x)
    ell2 = hyp[0]
    s2 = hyp[1]
    noise = hyp[2]
    params = {'ell2': ell2, 's2': s2}

    print('--> Initialising model variables')
    t0 = time()
    yp, var_yp = gp.predict_se_iso(y_t, x_t, x_p, params, noise)
    tf = time()
    print('Total elapsed time in prediction: ' + str(tf - t0) + ' s')

    # Map the (cos, sin) predictions back to angles in [-pi, pi)
    z_p = yp[:, 0] + 1.j * yp[:, 1]
    s2_p = np.diag(var_yp)
    new_psi_p = np.angle(z_p)
    n_grid = 1000
    p, th = gp.get_predictive_for_wrapped(new_psi_p, var_yp, res=n_grid)  # predictive density on the circle

    print('--> Saving and displaying results')
    # First the heatmap in the background
    fig, scaling_x, scaling_y, offset_y = plot.circular_error_bars(th, p, True)
    # Then scale the predicted and training sets to match the dimensions of the heatmap
    scaled_x_t = x_t * scaling_x
    scaled_y_t = uc.cfix(psi_t) * scaling_y + offset_y
    scaled_x_p = x_p * scaling_x
    scaled_y_p = uc.cfix(new_psi_p) * scaling_y + offset_y
    scaled_x_v = x_v * scaling_x
    scaled_y_v = uc.cfix(psi_v) * scaling_y + offset_y

    # Now plot the optimised psi's and data points
    plot.plot(scaled_x_p, scaled_y_p, 'c.')                    # optimised prediction
    plot.plot(scaled_x_t, scaled_y_t, 'xk', mew=2.0)           # training set
    plot.plot(scaled_x_v, scaled_y_v, 'ob', fillstyle='none')  # held-out set
    plot.ylabel(r'Regressed variable $(\psi)$')
    plot.xlabel(r'Input variable $(x)$')
    plot.tight_layout()
    plot.grid(True)

    holl_score = 0.
    for ii in range(0, n_pred):
        holl_score += uc.loglik_gp2circle(psi_v[ii], yp[ii], s2_p[ii])

    print('HOLL score: ' + str(holl_score))
    print('Finished running case!')
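
# gp.get_predictive_for_wrapped returns the predictive density of the angle
# evaluated on a grid. A Monte Carlo stand-in (an assumption about the
# quantity being computed, not the project's analytic routine) pushes an
# isotropic Gaussian over (cos, sin) through np.arctan2 and histograms the
# resulting angles.
def _wrapped_predictive_mc_sketch(mean_cs, s2, res=1000, n_samples=20000, seed=0):
    """Estimate the angle density induced by an isotropic 2D Gaussian with
    mean mean_cs = (mean_cos, mean_sin) and per-component variance s2."""
    rng = np.random.RandomState(seed)
    samples = mean_cs + np.sqrt(s2) * rng.randn(n_samples, 2)
    angles = np.arctan2(samples[:, 1], samples[:, 0])
    p, edges = np.histogram(angles, bins=res, range=(-np.pi, np.pi), density=True)
    th = 0.5 * (edges[:-1] + edges[1:])  # bin centres
    return p, th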

def run_case():
    print('--> Load data set')
    data = list()
    with open('../datasets/Spellman-alpha.csv', 'r') as csvfile:
        reader = csv.reader(csvfile)
        for row in reader:
            data.append(row)
    data = np.array(data[1:]).astype(float)  # drop the header row

    # Even rows for training, odd rows for validation
    train_idx = np.arange(0, 18, 2)
    valid_idx = np.arange(1, 18, 2)

    print('--> Preparing training set')
    x_t = data[train_idx, 1:]   # training inputs
    psi_t = data[train_idx, 0]  # training outputs
    n_data = len(psi_t)
    d_input = x_t.shape[1]

    print('--> Preparing validation set')
    x_v = data[valid_idx, 1:]                              # validation inputs
    psi_v = data[valid_idx, 0].reshape(len(valid_idx), 1)  # validation outputs

    print('--> Load prediction set')
    x_p = x_v  # predict at the validation inputs
    n_pred = len(x_p)
    n_totp = n_data + n_pred

    print('--> Learn GP')
    # Embed the angles on the unit circle and set kernel parameters
    psi_t = psi_t.reshape(n_data, 1)
    y_t = np.hstack((np.cos(psi_t), np.sin(psi_t)))
    x_t = x_t.reshape(n_data, d_input)
    x_p = x_p.reshape(n_pred, d_input)
    config = {
        'xi': x_t,
        'y': y_t,
    }
    ell2 = 0.15 ** 2
    s2 = 400.
    noise = 1.E-6
    hyp = np.zeros([3, 1])
    hyp[0] = ell2
    hyp[1] = s2
    hyp[2] = noise
    hyp = np.log(hyp.flatten())
    ans = gp.learning_se_iso(hyp, config)
    print(ans.message)
    hyp = np.exp(ans.x)
    ell2 = hyp[0]
    s2 = hyp[1]
    noise = hyp[2]
    params = {
        'ell2': ell2,
        's2': s2
    }

    print('--> Initialising model variables')
    t0 = time()
    yp, var_yp = gp.predict_se_iso(y_t, x_t, x_p, params, noise)
    tf = time()
    print('Total elapsed time in prediction: ' + str(tf - t0) + ' s')

    # Gaussian held-out log-likelihood over the prediction set
    s2_p = np.diag(var_yp)
    holl_score = 0.
    for ii in range(0, n_pred):
        holl_score += np.sum(-0.5 * (yp[ii] - psi_v[ii]) ** 2 / s2_p[ii]
                             - 0.5 * np.log(s2_p[ii] * 2. * np.pi))

    print('HOLL score: ' + str(holl_score))
    print('Finished running case!')
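
# The manual csv.reader loop above can be collapsed into a single call.
# Assuming one header row and purely numeric columns, this is equivalent:
#
#     data = np.genfromtxt('../datasets/Spellman-alpha.csv',
#                          delimiter=',', skip_header=1)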

def run_case():
    # Load every array from the dataset once
    mat = sio.loadmat('../datasets/simpletide.mat')

    print('--> Load training set')
    x_t = mat['x_t']          # training inputs
    y_t = mat['y_t'] - np.pi  # training outputs, shifted to [-pi, pi)
    pid_t = mat['pid_t']      # port ids for training set
    n_data = len(y_t)

    print('--> Load validation set')
    x_v = mat['x_v']          # validation inputs
    y_v = mat['y_v'] - np.pi  # validation outputs, shifted to [-pi, pi)
    pid_v = mat['pid_v']      # port ids for validation set

    print('--> Load prediction set')
    x_p = mat['x_p']          # prediction inputs
    pid_p = mat['pid_p']      # port ids for prediction set
    n_pred = len(x_p)
    n_totp = n_data + n_pred

    print('--> Learn GP')
    # Regress directly on the angles and set kernel parameters
    y_t = y_t.reshape(n_data, 1)
    x_t = x_t.reshape(n_data, 2)
    x_p = x_p.reshape(n_pred, 2)
    config = {
        'xi': x_t,
        'y': y_t,
    }
    ell2 = 0.15 ** 2
    s2 = 400.
    noise = 1.E-6
    hyp = np.zeros([3, 1])
    hyp[0] = ell2
    hyp[1] = s2
    hyp[2] = noise
    hyp = np.log(hyp.flatten())
    ans = gp.learning_se_iso(hyp, config)
    print(ans.message)
    hyp = np.exp(ans.x)
    ell2 = hyp[0]
    s2 = hyp[1]
    noise = hyp[2]
    params = {
        'ell2': ell2,
        's2': s2
    }

    print('--> Initialising model variables')
    t0 = time()
    y_p, var_y_p = gp.predict_se_iso(y_t, x_t, x_p, params, noise)
    tf = time()
    s2_p = np.diag(var_y_p)
    print('Total elapsed time in prediction: ' + str(tf - t0) + ' s')

    # Keep all values between -pi and pi
    new_psi_p = uc.cfix(y_p)

    print('--> Calculating scores')
    holl_score = 0.
    for ii in range(0, n_pred):
        holl_score += np.sum(-0.5 * (new_psi_p[ii] - y_v[ii]) ** 2 / s2_p[ii]
                             - 0.5 * np.log(s2_p[ii] * 2. * np.pi))

    print('HOLL score: ' + str(holl_score))
    print('Finished running case!')
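
# uc.cfix wraps angles back onto the principal interval. The project ships
# its own version; an equivalent one-liner (an assumption about its
# behaviour, not the repo's actual code) is:
def _cfix_sketch(theta):
    """Wrap angles (radians) into [-pi, pi)."""
    return np.mod(theta + np.pi, 2. * np.pi) - np.pi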

def run_case():
    print('--> Create training set')
    x = np.linspace(0, 1, 100)
    y = toy_fun(x)
    x_t = np.array([+0.05, +0.05, +0.17, +0.17, +0.22, +0.30, +0.35,
                    +0.37, +0.52, +0.53, +0.69, +0.70, +0.82, +0.90])
    y_t = np.array([+0.56, +0.65, +0.90, +1.18, +2.39, +3.40, +2.89,
                    +2.64, -2.69, -3.20, -3.40, -2.77, +0.41, +0.35])
    x_v = x
    y_v = y
    # Wrap all angles into [-pi, pi)
    y_t = uc.cfix(y_t)
    y_v = uc.cfix(y_v)
    n_data = len(x_t)

    print('--> Create prediction set')
    grid_pts = 100
    x_p = np.linspace(0, 1, grid_pts)
    n_pred = len(x_p)
    n_totp = n_data + n_pred

    print('--> Learn GP')
    # Regress directly on the angles and set kernel parameters
    y_t = y_t.reshape(n_data, 1)
    x_t = x_t.reshape(n_data, 1)
    x_p = x_p.reshape(n_pred, 1)
    config = {
        'xi': x_t,
        'y': y_t,
    }
    ell2 = 0.5 ** 2
    s2 = 200.
    noise = 1.E-4
    hyp = np.zeros([3, 1])
    hyp[0] = ell2
    hyp[1] = s2
    hyp[2] = noise
    hyp = np.log(hyp.flatten())
    ans = gp.learning_se_iso(hyp, config)
    print(ans.message)
    hyp = np.exp(ans.x)
    ell2 = hyp[0]
    s2 = hyp[1]
    noise = hyp[2]
    params = {'ell2': ell2, 's2': s2}

    print('--> Initialising model variables')
    t0 = time()
    yp, var_yp = gp.predict_se_iso(y_t, x_t, x_p, params, noise)
    tf = time()
    print('Total elapsed time in prediction: ' + str(tf - t0) + ' s')

    new_psi_p = yp
    s2_p = np.diag(var_yp)
    p, th = gp.get_predictive_for_plots_1d(yp, var_yp, res=1000)  # predictive density over the grid

    print('--> Saving and displaying results')
    # First the heatmap in the background
    fig, scaling_x, scaling_y, offset_y = plot.circular_error_bars(th, p, True)
    # Then scale the predicted and training sets to match the dimensions of the heatmap
    scaled_x_t = x_t * scaling_x
    scaled_y_t = uc.cfix(y_t) * scaling_y + offset_y
    scaled_x_v = x_v * scaling_x
    scaled_y_v = uc.cfix(y_v) * scaling_y + offset_y
    scaled_x_p = x_p * scaling_x
    scaled_y_p = new_psi_p * scaling_y + offset_y

    # Now plot the optimised psi's and data points
    plot.plot(scaled_x_p, scaled_y_p, 'c.')                    # optimised prediction
    plot.plot(scaled_x_t, scaled_y_t, 'xk', mew=2.0)           # training set
    plot.plot(scaled_x_v, scaled_y_v, 'ob', fillstyle='none')  # held-out set
    plot.ylabel(r'Regressed variable $(\psi)$')
    plot.xlabel(r'Input variable $(x)$')
    plot.tight_layout()

    holl_score = 0.
    for ii in range(0, y_v.shape[0]):
        holl_score += np.sum(-0.5 * (yp[ii] - y_v[ii]) ** 2 / s2_p[ii]
                             - 0.5 * np.log(s2_p[ii] * 2. * np.pi))

    print('HOLL score: ' + str(holl_score))
    print('Finished running case!')
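
# Both toy cases call toy_fun, which is defined elsewhere in the project.
# The stand-in below is purely hypothetical -- it only mimics the interface
# (inputs in [0, 1] mapped to angles) so the cases above are readable in
# isolation; it is NOT the repo's actual test function.
def _toy_fun_sketch(x):
    """Hypothetical smooth angle-valued function, wrapped into [-pi, pi)."""
    raw = 4. * np.pi * np.asarray(x) ** 2                # smooth and monotone on [0, 1]
    return np.mod(raw + np.pi, 2. * np.pi) - np.pi       # wrap into [-pi, pi)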