def _build(self):
    self.params = {}
    self.latent_values = None

    # Build the transformer
    beta_warp = BetaWarp(self.num_dims)
    beta_alpha, beta_beta = beta_warp.hypers
    self.params['beta_alpha'] = beta_alpha
    self.params['beta_beta']  = beta_beta

    transformer = Transformer(self.num_dims)
    transformer.add_layer(beta_warp)

    # Build the component kernels
    input_kernel = Matern52(self.num_dims)
    ls = input_kernel.hypers
    self.params['ls'] = ls

    # Now apply the transformation.
    transform_kernel = TransformKernel(input_kernel, transformer)

    # Add some perturbation for stability
    stability_noise = Noise(self.num_dims)

    # Finally make a noisy version if necessary.
    # In a classifier GP the notion of "noise" is really just the scale.
    if self.noiseless:
        self._kernel = SumKernel(transform_kernel, stability_noise)
    else:
        scaled_kernel = Scale(transform_kernel)
        self._kernel  = SumKernel(scaled_kernel, stability_noise)
        amp2 = scaled_kernel.hypers
        self.params['amp2'] = amp2

    # Build the mean function (just a constant mean for now)
    self.mean = Hyperparameter(
        initial_value = 0.0,
        prior         = priors.Gaussian(0.0, 1.0),
        name          = 'mean'
    )
    self.params['mean'] = self.mean

    # Build the latent values. Empty for now until the GP gets data.
    self.latent_values = Hyperparameter(
        initial_value = np.array([]),
        name          = 'latent values'
    )

    # Build the samplers
    to_sample = [self.mean] if self.noiseless else [self.mean, amp2]
    self._samplers.append(SliceSampler(*to_sample, compwise=False, thinning=self.thinning))
    self._samplers.append(WhitenedPriorSliceSampler(ls, beta_alpha, beta_beta, compwise=True, thinning=self.thinning))
    self.latent_values_sampler = EllipticalSliceSampler(self.latent_values, thinning=self.ess_thinning)
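# Hedged sketch (illustrative, not this module's implementation): one transition of
# elliptical slice sampling (Murray, Adams & MacKay, 2010), the kind of update the
# EllipticalSliceSampler above performs on the latent values. It resamples a latent
# vector f under a zero-mean Gaussian prior N(0, Sigma) given a log-likelihood.
import numpy as np

def elliptical_slice_step(f, log_lik, chol_sigma):
    """One ESS transition. `f` is the current latent vector, `log_lik(f)` the
    log-likelihood, `chol_sigma` a Cholesky factor of the prior covariance."""
    nu = chol_sigma.dot(np.random.randn(f.shape[0]))  # auxiliary draw from the prior
    log_y = log_lik(f) + np.log(np.random.rand())     # slice height under the likelihood
    theta = np.random.rand() * 2*np.pi                # initial angle on the ellipse
    theta_min, theta_max = theta - 2*np.pi, theta
    while True:
        f_new = f*np.cos(theta) + nu*np.sin(theta)    # point on the ellipse through f and nu
        if log_lik(f_new) > log_y:
            return f_new                              # accepted: inside the slice
        # Shrink the angle bracket towards theta = 0 (the current state) and retry.
        if theta < 0:
            theta_min = theta
        else:
            theta_max = theta
        theta = theta_min + np.random.rand()*(theta_max - theta_min)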
def _build(self):
    # Build the transformer
    beta_warp = BetaWarp(self.num_dims)
    transformer = Transformer(self.num_dims)
    transformer.add_layer(beta_warp)

    # Build the component kernels
    input_kernel = Matern52(self.num_dims)
    stability_noise_kernel = Noise(self.num_dims)  # Even if noiseless we use some noise for stability
    scaled_input_kernel = Scale(input_kernel)
    sum_kernel = SumKernel(scaled_input_kernel, stability_noise_kernel)
    noise_kernel = Noise(self.num_dims)

    # The final kernel applies the transformation.
    self._kernel = TransformKernel(sum_kernel, transformer)

    # Finally make a noisy version if necessary
    if not self.noiseless:
        self._kernel_with_noise = SumKernel(self._kernel, noise_kernel)

    # Build the mean function (just a constant mean for now)
    self.mean = Hyperparameter(
        initial_value = 0.0,
        prior         = priors.Gaussian(0.0, 1.0),
        name          = 'mean'
    )

    # Get the hyperparameters to sample
    ls                    = input_kernel.hypers
    amp2                  = scaled_input_kernel.hypers
    beta_alpha, beta_beta = beta_warp.hypers

    self.params = {
        'mean'       : self.mean,
        'amp2'       : amp2,
        'ls'         : ls,
        'beta_alpha' : beta_alpha,
        'beta_beta'  : beta_beta
    }

    # Build the samplers
    if self.noiseless:
        self._samplers.append(SliceSampler(self.mean, amp2, compwise=False, thinning=self.thinning))
    else:
        noise = noise_kernel.hypers
        self.params.update({'noise' : noise})
        self._samplers.append(SliceSampler(self.mean, amp2, noise, compwise=False, thinning=self.thinning))

    self._samplers.append(SliceSampler(ls, beta_alpha, beta_beta, compwise=True, thinning=self.thinning))
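# Hedged sketch (an illustration of what the composition above evaluates, not the
# library's code): inputs are first warped through a Beta CDF (BetaWarp), then fed
# to an ARD Matern-5/2 kernel scaled by amp2, plus a small diagonal jitter standing
# in for the stability Noise() term. The jitter value 1e-6 is an assumption.
import numpy as np
from scipy.stats import beta as beta_dist
from scipy.spatial.distance import cdist

def composed_cov(X, Z, ls, amp2, beta_alpha, beta_beta, stability=1e-6):
    # BetaWarp: map each dimension of the unit hypercube through a Beta CDF
    Xw = beta_dist.cdf(X, beta_alpha, beta_beta)
    Zw = beta_dist.cdf(Z, beta_alpha, beta_beta)
    # ARD Matern-5/2 on the warped inputs, with per-dimension lengthscales ls
    r = cdist(Xw / ls, Zw / ls)
    matern = (1.0 + np.sqrt(5)*r + 5.0*r**2/3.0) * np.exp(-np.sqrt(5)*r)
    K = amp2 * matern
    # Stability jitter on the diagonal when evaluating the kernel against itself
    if X.shape == Z.shape and np.allclose(X, Z):
        K = K + stability*np.eye(X.shape[0])
    return K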
def __init__(self, num_dims, weights=None, num_factors=2, name="Linear"):
    self.name        = name
    self.num_dims    = num_dims
    # Use `is None` checks: truthiness of a multi-element array raises a ValueError.
    self.num_factors = num_factors if weights is None else int(weights.shape[0] / num_dims)

    if weights is not None:
        assert self.num_factors*self.num_dims == weights.shape[0]
    else:
        default_weights = Hyperparameter(
            initial_value = 0.1*np.random.randn(num_dims*num_factors),
            prior         = priors.Gaussian(0, 1),
            name          = 'weights'
        )

    self.weights = weights if weights is not None else default_weights
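# Hedged sketch (an assumption about the intent, not this kernel's actual evaluation
# code): with a flat weight vector holding num_factors*num_dims entries, a linear
# "factor" kernel of this kind is typically the inner product of the inputs after
# projection onto a num_factors-dimensional subspace, k(x, z) = (W x) . (W z).
import numpy as np

def linear_factor_cov(X, Z, weights, num_factors):
    num_dims = X.shape[1]
    W = weights.reshape(num_factors, num_dims)   # flat weight vector -> projection matrix
    return X.dot(W.T).dot(W.dot(Z.T))            # (n_X, n_Z) Gram matrix of projected inputs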
if __name__ == '__main__':
    sys.path.append('..')
    from utils import priors
    import matplotlib.pyplot as plt

    n = 10000

    # Test on a 1D Gaussian
    x_samples = np.zeros(n)
    x = np.zeros(1)
    gsn = priors.Gaussian(mu=-1, sigma=4)
    for i in xrange(n):
        if i % 1000 == 0:
            print('Sample %d/%d' % (i, n))
        x, cur_ll = slice_sample(x, gsn.logprob)
        x_samples[i] = x.copy()

    print('1D Gaussian actual mean: %f, mean of samples: %f' % (-1, np.mean(x_samples)))
    print('1D Gaussian actual sigma: %f, std of samples: %f' % (4, np.std(x_samples)))

    plt.figure(1)
    plt.clf()
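# Hedged sketch (not the slice_sample being tested above): a minimal univariate slice
# sampler with step-out and shrinkage (Neal, 2003), the standard update this test is
# exercising. `logprob` is an unnormalized log-density; returns a new sample and its
# log-probability, mirroring the (x, cur_ll) return convention used in the test.
import numpy as np

def slice_sample_1d(x0, logprob, width=1.0, max_steps_out=1000):
    x0 = float(x0)
    log_y = logprob(x0) + np.log(np.random.rand())   # slice height under the density
    # Step out an interval [left, right] that contains the slice.
    left  = x0 - width*np.random.rand()
    right = left + width
    steps = 0
    while logprob(left) > log_y and steps < max_steps_out:
        left -= width
        steps += 1
    steps = 0
    while logprob(right) > log_y and steps < max_steps_out:
        right += width
        steps += 1
    # Shrink the interval until a point inside the slice is found.
    while True:
        x1 = left + np.random.rand()*(right - left)
        ll = logprob(x1)
        if ll > log_y:
            return x1, ll
        if x1 < x0:
            left = x1
        else:
            right = x1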