# Example #1
	# ax2.axvline(next_point, color=color, ls='--', alpha=0.5)


if __name__ == '__main__':
	from HyperSphere.GP.kernels.modules.squared_exponential import SquaredExponentialKernel
	from HyperSphere.GP.models.gp_regression import GPRegression
	from HyperSphere.GP.inference.inference import Inference
	import matplotlib.pyplot as plt

	# Demo: draw a small training set from a GP with known hyperparameters,
	# then recover those hyperparameters with (1) gradient-based learning
	# and (2) posterior sampling, starting both from the true values.
	ndata = 6
	ndim = 1
	model_for_generating = GPRegression(kernel=SquaredExponentialKernel(ndim))
	# Random 1-D inputs in [-2, 2]; `Variable` is the legacy (pre-1.0) PyTorch autograd wrapper.
	train_x = Variable(torch.FloatTensor(ndata, ndim).uniform_(-2, 2))
	# Lower Cholesky factor of K(X, X) + diag(noise); `potrf` is the legacy
	# factorization API (superseded by torch.cholesky / torch.linalg.cholesky).
	chol_L = torch.potrf(
		(model_for_generating.kernel(train_x) + torch.diag(model_for_generating.likelihood(train_x))).data, upper=False)
	# Sample targets y ~ N(mean(X), K + noise) via y = mean + L @ eps.
	train_y = model_for_generating.mean(train_x) + Variable(torch.mm(chol_L, torch.randn(ndata, 1)))
	# train_y = torch.sin(2 * math.pi * torch.sum(train_x, 1, keepdim=True)) + Variable(torch.FloatTensor(train_x.size(0), 1).normal_())
	train_data = (train_x, train_y)
	# Ground-truth hyperparameter vector, reused below to initialize the learner.
	param_original = model_for_generating.param_to_vec()
	# Smallest observed target value; not used in the visible lines —
	# presumably consumed further down in the (truncated) plotting code. TODO confirm.
	reference = torch.min(train_y.data)

	# Refit a fresh model on the generated data: multi-restart optimization,
	# then MCMC-style sampling, each started from the true hyperparameters.
	model_for_learning = GPRegression(kernel=SquaredExponentialKernel(ndim))
	inference = Inference(train_data, model_for_learning)
	model_for_learning.vec_to_param(param_original)
	param_samples_learning = inference.learning(n_restarts=10)
	model_for_learning.vec_to_param(param_original)
	param_samples_sampling = inference.sampling(n_sample=5, n_burnin=200, n_thin=10)

	# Plotting (1-D inputs only); this section continues beyond the visible excerpt.
	if ndim == 1:
		ax11 = plt.subplot(221)
		ax11.plot(train_x.data.numpy().flatten(), train_y.data.numpy().flatten(), 'k*')
# Example #2
    ax.set_title(title_str + '\n%.4E' % nll)


if __name__ == '__main__':
    from HyperSphere.GP.kernels.modules.squared_exponential import SquaredExponentialKernel
    from HyperSphere.GP.models.gp_regression import GPRegression
    # FIX: `Inference` is referenced below (generated_nll and the learning/
    # sampling loop) but was missing from this example's import list.
    from HyperSphere.GP.inference.inference import Inference
    import matplotlib.pyplot as plt

    # Demo: sample a training set from a GP with known hyperparameters,
    # then refit them by optimization and by sampling.
    ndata = 20
    ndim = 1
    model_for_generating = GPRegression(kernel=SquaredExponentialKernel(ndim))
    # Random 1-D inputs in [-2, 2]; `Variable` is the legacy (pre-1.0) PyTorch wrapper.
    train_x = Variable(torch.FloatTensor(ndata, ndim).uniform_(-2, 2))
    # Lower Cholesky factor of K(X, X) + diag(noise); `potrf` is the legacy
    # factorization API (superseded by torch.cholesky / torch.linalg.cholesky).
    chol_L = torch.potrf(
        (model_for_generating.kernel(train_x) +
         torch.diag(model_for_generating.likelihood(train_x))).data,
        upper=False)
    # Sample targets y ~ N(mean(X), K + noise) via y = mean + L @ eps.
    train_y = model_for_generating.mean(train_x) + Variable(
        torch.mm(chol_L, torch.randn(ndata, 1)))
    train_data = (train_x, train_y)
    # Ground-truth hyperparameter vector, reused below to initialize the learner.
    param_original = model_for_generating.param_to_vec()
    # Negative log likelihood under the generating (true) hyperparameters,
    # as a baseline for the learned/sampled models.
    generated_nll = Inference(
        train_data, model_for_generating).negative_log_likelihood().data[0, 0]

    # Refit a fresh model: multi-restart optimization, then sampling with
    # default settings, each started from the true hyperparameters.
    model_for_learning = GPRegression(kernel=SquaredExponentialKernel(ndim))
    inference = Inference(train_data, model_for_learning)
    model_for_learning.vec_to_param(param_original)
    param_samples_learning = inference.learning(n_restarts=10)
    model_for_learning.vec_to_param(param_original)
    param_samples_sampling = inference.sampling()

    # Plotting (1-D inputs only); this section continues beyond the visible excerpt.
    if ndim == 1:
        pred_x = torch.linspace(-2.5, 2.5, 100).view(-1, 1)