class KernelExpFiniteGaussianSurrogate(StaticSurrogate):
    """Static surrogate density backed by a finite-feature Gaussian kernel
    exponential-family estimator.

    Wraps ``KernelExpFiniteGaussian`` so it can be used wherever a
    ``StaticSurrogate`` is expected: fit once on samples, then query the
    gradient of the estimated log-density.
    """

    def __init__(self, ndim, sigma, lmbda, m):
        # The wrapped estimator works in D = ndim dimensions; sigma is the
        # kernel bandwidth, lmbda the regularizer, m the number of features.
        self.surrogate = KernelExpFiniteGaussian(sigma, lmbda, m, D=ndim)

    def train(self, samples):
        """Fit the wrapped estimator on the given sample array."""
        self.surrogate.fit(samples)

    def log_pdf_gradient(self, x):
        """Return the gradient of the estimated log-density at ``x``."""
        return self.surrogate.grad(x)
# Fit a finite-feature Gaussian kernel exponential-family model to the
# benchmark samples, then visualise it on every 2D coordinate slice and
# plot the bandwidth-selection objective with a 2-sigma band.
surrogate = KernelExpFiniteGaussian(sigma=sigma, lmbda=lmbda, m=m,
                                    D=benchmark_samples.shape[1])
surrogate.fit(benchmark_samples)

fake = empty_class()


def replace_2(x_2d, a, i, j):
    """Return a copy of `a` with components i and j replaced by x_2d."""
    a = a.copy()
    a[i] = x_2d[0]
    a[j] = x_2d[1]
    return a


# For each ordered pair of distinct dimensions (i, j), fix all other
# coordinates at true_mean and visualise the fitted model on that slice.
for i in range(benchmark_samples.shape[1]):
    for j in range(benchmark_samples.shape[1]):
        if i == j:
            continue

        # Bind i, j as lambda defaults: avoids the classic late-binding
        # closure pitfall (behavior-preserving here since the lambdas are
        # consumed immediately, but safe against future refactors).
        fake.log_pdf = lambda x_2d, i=i, j=j: surrogate.log_pdf(
            replace_2(x_2d, true_mean, i, j))
        fake.grad = lambda x_2d, i=i, j=j: surrogate.grad(
            replace_2(x_2d, true_mean, i, j))

        visualise_fit_2d(
            fake,
            benchmark_samples[:, [i, j]],
            Xs=np.linspace(benchmark_samples[:, i].min(),
                           benchmark_samples[:, i].max(), 30),
            Ys=np.linspace(benchmark_samples[:, j].min(),
                           benchmark_samples[:, j].max(), 30),
        )
        plt.show()

# Objective mean over bandwidths with an elementwise 2-sigma band.
# BUG FIX: the original plotted np.sqrt(Js_var[i]) using the stale loop
# index i from the visualisation loop above, yielding a constant band;
# the variance must vary elementwise with log2_sigmas.
plt.plot(log2_sigmas, Js_mean, 'b-')
plt.plot(log2_sigmas, Js_mean - 2 * np.sqrt(Js_var), 'b--')
plt.plot(log2_sigmas, Js_mean + 2 * np.sqrt(Js_var), 'b--')
plt.show()
# Fit the (previously constructed) surrogate to the benchmark samples,
# visualise it on every 2D coordinate slice, then plot the
# bandwidth-selection objective with a 2-sigma band.
surrogate.fit(benchmark_samples)

fake = empty_class()


def replace_2(x_2d, a, i, j):
    """Return a copy of `a` with components i and j replaced by x_2d."""
    a = a.copy()
    a[i] = x_2d[0]
    a[j] = x_2d[1]
    return a


# For each ordered pair of distinct dimensions (i, j), fix all other
# coordinates at true_mean and visualise the fitted model on that slice.
for i in range(benchmark_samples.shape[1]):
    for j in range(benchmark_samples.shape[1]):
        if i == j:
            continue

        # Bind i, j as lambda defaults: avoids the classic late-binding
        # closure pitfall (behavior-preserving here since the lambdas are
        # consumed immediately, but safe against future refactors).
        fake.log_pdf = lambda x_2d, i=i, j=j: surrogate.log_pdf(
            replace_2(x_2d, true_mean, i, j))
        fake.grad = lambda x_2d, i=i, j=j: surrogate.grad(
            replace_2(x_2d, true_mean, i, j))

        visualise_fit_2d(
            fake,
            benchmark_samples[:, [i, j]],
            Xs=np.linspace(benchmark_samples[:, i].min(),
                           benchmark_samples[:, i].max(), 30),
            Ys=np.linspace(benchmark_samples[:, j].min(),
                           benchmark_samples[:, j].max(), 30),
        )
        plt.show()

# Objective mean over bandwidths with an elementwise 2-sigma band.
# BUG FIX: the original plotted np.sqrt(Js_var[i]) using the stale loop
# index i from the visualisation loop above, yielding a constant band;
# the variance must vary elementwise with log2_sigmas.
plt.plot(log2_sigmas, Js_mean, 'b-')
plt.plot(log2_sigmas, Js_mean - 2 * np.sqrt(Js_var), 'b--')
plt.plot(log2_sigmas, Js_mean + 2 * np.sqrt(Js_var), 'b--')
plt.show()