# Example #1
# Extract the y-coordinates from the (x, y) data pairs.
# NOTE(review): `X`, `data`, and `ax` are used here but defined earlier
# in the file (outside this chunk) -- verify they are in scope.
Y = np.array([p[1] for p in data])

# Normalize the Y dimension.
mean_Y = np.mean(Y)
std_Y = np.std(Y)
Y = (Y - mean_Y) / std_Y

# Fit a Gaussian Process to the data points.
lengthscale = 40
signal_variance = 3.
noise_variance = 0.1
# 50 evenly spaced test locations over the x-range [0, 960].
X_star = np.linspace(0, 960, 50)
kernel = SquaredExponentialKernel(lengthscale=lengthscale,
                                  signal_variance=signal_variance)
gp = GP(kernel, noise_variance=noise_variance)
post_m, post_var, weights = gp.posterior(X, Y, X_star)

# Plot results.
color = 'yellow'
# Un-normalize the posterior mean and the observations back to the
# original Y scale before plotting.
ax.plot(X_star, post_m * std_Y + mean_Y, color=color)
ax.scatter(X, Y * std_Y + mean_Y, s=30, color=color)

# Keep only the marginal (per-point) variances from the full
# posterior covariance matrix.
post_var = np.diagonal(post_var)

# Shade the 95% confidence band (mean +/- 1.96 * std), un-normalized.
plt.fill_between(X_star, (post_m - 1.96 * np.sqrt(post_var)) * std_Y + mean_Y,
                 (post_m + 1.96 * np.sqrt(post_var)) * std_Y + mean_Y,
                 color=color,
                 alpha=0.2)

plt.xlim(0, 960)
# NOTE(review): y-limits are deliberately reversed (500 -> 100), which
# flips the y-axis -- presumably image/pixel coordinates; confirm.
plt.ylim(500, 100)
# Second GP fit on the training data.
# NOTE(review): `training_data`, `training_start`, and `training_end`
# are assumed to be defined earlier in the file -- verify.
X, Y = training_data

# GP hyperparameters for this fit.
lengthscale = 2.1
signal_variance = 1.
noise_variance = 0.01

# Setup testing locations.
# You can change the testing locations here.
# Dense grid extrapolating 4*pi past the end of the training range.
X_star = np.linspace(training_start, training_end + 4 * np.pi, 200)

# Sparser grid (50 points) covering only the training range itself.
X_star_few = np.linspace(training_start, training_end + 0 * np.pi, 50)
# Compute posterior mean and variance.
kernel = SquaredExponentialKernel(lengthscale=lengthscale,
                                  signal_variance=signal_variance)
gp = GP(kernel=kernel, noise_variance=noise_variance)
post_m, post_variance, weights = gp.posterior(X, Y, X_star)

post_m_few, post_variance_few, weights_few = gp.posterior(X, Y, X_star_few)

# Plot posterior mean and variance.
# Reduce the covariance matrix to its diagonal (per-point variances).
post_variance = np.diagonal(post_variance)
plt.plot(X_star, post_m, color='red')
plt.scatter(X_star_few,
            post_m_few,
            marker='.',
            # FIX: sizes must match the points being scattered
            # (X_star_few, 50 points), not X_star (200 points) --
            # matplotlib raises a size-mismatch ValueError otherwise.
            s=[60] * len(X_star_few),
            color='red',
            alpha=0.7)
plt.fill_between(X_star,
                 post_m - 1.96 * np.sqrt(post_variance),
                 post_m + 1.96 * np.sqrt(post_variance),