import matplotlib.pyplot as plt
import numpy as np
import wbml.plot
from stheno import GP, EQ, model, Obs

# Define points to predict at.
x = np.linspace(0, 10, 100)

# Construct a prior.
f1 = GP(EQ(), 3)
f2 = GP(EQ(), 3)

# Compute the approximate product.
f_prod = f1 * f2

# Sample two functions.
s1, s2 = model.sample(f1(x), f2(x))

# Predict.
mean, lower, upper = (f_prod | ((f1(x), s1), (f2(x), s2)))(x).marginals()

# Plot result.
plt.plot(x, s1, label='Sample 1', c='tab:red')
plt.plot(x, s2, label='Sample 2', c='tab:blue')
plt.plot(x, s1 * s2, label='True product', c='tab:orange')
plt.plot(x, mean, label='Approximate posterior', c='tab:green')
plt.plot(x, lower, ls='--', c='tab:green')
plt.plot(x, upper, ls='--', c='tab:green')
wbml.plot.tweak()

plt.savefig('readme_example9_product.png')
plt.show()
import matplotlib.pyplot as plt
import numpy as np
import wbml.plot
from stheno import GP, EQ, Delta, model

# Define points to predict at.
x = np.linspace(0, 10, 100)
x_obs = np.linspace(0, 7, 20)

# Construct a prior.
f = GP(EQ().periodic(5.))  # Latent function.
e = GP(Delta())            # Noise.
y = f + .5 * e

# Sample a true, underlying function and observations.
f_true, y_obs = model.sample(f(x), y(x_obs))

# Now condition on the observations to make predictions.
mean, lower, upper = (f | (y(x_obs), y_obs))(x).marginals()

# Plot result.
plt.plot(x, f_true, label='True', c='tab:blue')
plt.scatter(x_obs, y_obs, label='Observations', c='tab:red')
plt.plot(x, mean, label='Prediction', c='tab:green')
plt.plot(x, lower, ls='--', c='tab:green')
plt.plot(x, upper, ls='--', c='tab:green')
wbml.plot.tweak()

plt.savefig('readme_example1_simple_regression.png')
plt.show()
model = GPARRegressor(scale=[2., 0.5],      # Initial input length scales.
                      scale_tie=True,       # Tie the length scales across outputs.
                      linear=True,          # Linear dependence on previous outputs...
                      linear_scale=10.,     # ...with this scale.
                      input_linear=False,   # No linear dependence on the inputs.
                      nonlinear=False,      # No nonlinear dependence on previous outputs.
                      markov=1,             # Condition on the preceding output only.
                      replace=True,
                      noise=args.noise)
n = 3
y = model.sample(transform_x(x), p=n, latent=args.latent)  # Sample n outputs from the prior.

plt.figure(figsize=(20, 10))
cs = ['tab:red', 'tab:blue', 'tab:green', 'tab:pink', 'tab:cyan']
for i in range(y.shape[1]):
    plt.subplot(2, 5, i + 1)
    plt.title('Output {}'.format(i + 1))
    for j, c in enumerate(cs):
        inds = x[:, 0] == j + 1
        plt.semilogx(x[inds, 1], y[inds, i],
                     label='{} layers'.format(j + 1), c=c)
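# A hedged follow-up sketch: after sampling from the prior, such a regressor
# would typically be fitted to observed outputs and then used for prediction.
# The `fit` and `predict` calls below, and the `x_train`/`y_train`/`x_test`
# arrays, are assumptions for illustration, not taken from the example above.
model.fit(transform_x(x_train), y_train)    # Learn hyperparameters from data.
means = model.predict(transform_x(x_test))  # Assumed to return predictive means per output.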
# Let the observation noise consist of a bit of exponential noise.
e_indep = GP(Delta())
e_exp = GP(Exp())
e = e_indep + .3 * e_exp

# Sum the latent function and observation noise to get a model for the
# observations.
y = f + .5 * e

# Sample a true, underlying function and observations.
f_true_smooth, f_true_wiggly, f_true_periodic, f_true_linear, f_true, y_obs = \
    model.sample(f_smooth(x),
                 f_wiggly(x),
                 f_periodic(x),
                 f_linear(x),
                 f(x),
                 y(x_obs))

# Now condition on the observations and make predictions for the latent
# function and its various components.
f_smooth, f_wiggly, f_periodic, f_linear, f = \
    (f_smooth, f_wiggly, f_periodic, f_linear, f) | Obs(y(x_obs), y_obs)

pred_smooth = f_smooth(x).marginals()
pred_wiggly = f_wiggly(x).marginals()
pred_periodic = f_periodic(x).marginals()
pred_linear = f_linear(x).marginals()
pred_f = f(x).marginals()
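# A minimal plotting sketch for one of the decomposed predictions above,
# following the pattern of the other examples. It assumes matplotlib is
# imported as `plt` and `wbml.plot` is available in the surrounding script;
# each `marginals()` call returns a mean and lower and upper bounds.
mean, lower, upper = pred_smooth
plt.plot(x, f_true_smooth, label='True (smooth)', c='tab:blue')
plt.plot(x, mean, label='Prediction (smooth)', c='tab:green')
plt.plot(x, lower, ls='--', c='tab:green')
plt.plot(x, upper, ls='--', c='tab:green')
wbml.plot.tweak()
plt.show()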
def sample(self, x):
    return model.sample(*(p(x) for p in self.ps))
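# The `sample` method above appears without its enclosing class. Below is a
# minimal, self-contained sketch of what such a wrapper might look like;
# `ProcessCollection` and the usage at the bottom are hypothetical.
import numpy as np
from stheno import GP, EQ, model

class ProcessCollection:
    """Hypothetical wrapper holding several Stheno processes in `self.ps`."""

    def __init__(self, *ps):
        self.ps = ps

    def sample(self, x):
        # Draw one joint sample of all processes at the inputs `x`.
        return model.sample(*(p(x) for p in self.ps))

# Example usage: jointly sample two GPs with EQ kernels at 50 inputs.
processes = ProcessCollection(GP(EQ()), GP(EQ().stretch(2)))
s1, s2 = processes.sample(np.linspace(0, 10, 50))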
import matplotlib.pyplot as plt
import numpy as np
from stheno import GP, Delta, model, Obs

# Define points to predict at.
x = np.linspace(0, 10, 200)
x_obs = np.linspace(0, 10, 10)

# Construct the model.
slope = GP(1)
intercept = GP(5)
f = slope * (lambda x: x) + intercept

e = 0.2 * GP(Delta())  # Noise model.
y = f + e              # Observation model.

# Sample a slope, intercept, underlying function, and observations.
true_slope, true_intercept, f_true, y_obs = \
    model.sample(slope(0), intercept(0), f(x), y(x_obs))

# Condition on the observations to make predictions.
slope, intercept, f = (slope, intercept, f) | Obs(y(x_obs), y_obs)
mean, lower, upper = f(x).marginals()

print('true slope', true_slope)
print('predicted slope', slope(0).mean)
print('true intercept', true_intercept)
print('predicted intercept', intercept(0).mean)

# Plot result.
plt.plot(x, f_true, label='True', c='tab:blue')
plt.scatter(x_obs, y_obs, label='Observations', c='tab:red')
plt.plot(x, mean, label='Prediction', c='tab:green')
plt.plot(x, lower, ls='--', c='tab:green')