def sample_fun(self, model, **sampler_options):
    params_array = hyperparameter_utils.params_to_array(self.params)

    if model.has_data:
        # Whiten the latent values: nu = L^{-1} (y - mean), where L is the
        # Cholesky factor of the noiseless kernel at the observed inputs.
        K_XX      = model.noiseless_kernel.cov(model.inputs)
        current_L = spla.cholesky(K_XX, lower=True)
        nu        = spla.solve_triangular(current_L, model.latent_values.value - model.mean.value, lower=True)
    else:
        nu = None  # no data, so there are no latent values to whiten

    # Slice-sample the hyperparameters while holding the whitened variables fixed
    new_params, current_ll = slice_sample(params_array, self.logprob, model, nu, **sampler_options)

    # Reconstruct the latent values implied by the newly sampled hyperparameters
    new_latent_values = self._compute_implied_y(model, nu)

    return new_params, new_latent_values, current_ll
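
The reconstruction step above only makes sense if _compute_implied_y inverts the whitening transform under the newly sampled hyperparameters. A minimal sketch of such a helper, assuming the model exposes the same noiseless_kernel, inputs, and mean attributes used above (the body below is an illustration, not the library's actual implementation):

import numpy as np
import scipy.linalg as spla

def _compute_implied_y(self, model, nu):
    """Map the whitened variables nu back to latent values under the
    current hyperparameters: y = mean + L_new @ nu."""
    if nu is None:
        return None  # no data, nothing to reconstruct
    K_XX  = model.noiseless_kernel.cov(model.inputs)
    new_L = spla.cholesky(K_XX, lower=True)
    return model.mean.value + np.dot(new_L, nu)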
Example #2
def sample(self, model):
    """Generate a new sample of parameters for the model.

    Notes
    -----
    The parameters are stored as self.params, which is a list of Params
    objects. The values of the parameters are updated on each call.
    Presumably the value of the parameter affects the model (this is not
    required, but it would be a bit pointless otherwise).
    """
    # turn self.params into a 1d numpy array
    params_array = hyperparameter_utils.params_to_array(self.params)
    for i in range(self.thinning + 1):
        # get a new value for the parameter array via slice sampling
        params_array, current_ll = slice_sample(params_array, self.logprob, model, **self.sampler_options)
        # TODO: could this call be moved outside the loop safely?
        hyperparameter_utils.set_params_from_array(self.params, params_array)
    self.current_ll = current_ll  # for diagnostics
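
All of these examples delegate to a slice_sample helper that takes the current parameter vector and a log-probability function, and returns an updated vector together with its log probability. As a rough illustration of what such a routine does, here is a generic coordinate-wise slice sampler with step-out and shrinkage in the style of Neal (2003); this is a sketch of the technique, not the actual helper used in the snippets above:

import numpy as np
import numpy.random as npr

def slice_sample_sketch(x0, logprob, *logprob_args, width=1.0, max_steps_out=100):
    """Coordinate-wise slice sampling: returns (new_x, logprob(new_x))."""
    x = np.atleast_1d(np.array(x0, dtype=float))
    for d in npr.permutation(x.size):
        llh0 = logprob(x, *logprob_args) + np.log(npr.rand())  # log-height of the slice

        # step out to find an interval [lower, upper] that brackets the slice
        r = npr.rand()
        lower = x[d] - r * width
        upper = x[d] + (1.0 - r) * width
        x_try = x.copy()
        for _ in range(max_steps_out):
            x_try[d] = lower
            if logprob(x_try, *logprob_args) <= llh0:
                break
            lower -= width
        for _ in range(max_steps_out):
            x_try[d] = upper
            if logprob(x_try, *logprob_args) <= llh0:
                break
            upper += width

        # shrink the interval until a point inside the slice is drawn
        while True:
            x_try[d] = lower + npr.rand() * (upper - lower)
            if logprob(x_try, *logprob_args) > llh0:
                x[d] = x_try[d]
                break
            elif x_try[d] < x[d]:
                lower = x_try[d]
            else:
                upper = x_try[d]
    return x, logprob(x, *logprob_args)

In the snippets above, the extra positional arguments (such as model and nu) would presumably be forwarded to the log-probability function, and the sampler options would control things like the bracket width and whether step-out is performed.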
Example #3
    def sample(self, gp_likelihood):
        """generate a new sample of parameters for the model

        Notes
        -----
        The parameters are stored as self.params which is a list of Params objects.
        The values of the parameters are updated on each call.  Pesumably the value of
        the parameter affects the model (this is not required, but it would be a bit
        pointless othewise)

        """
        # turn self.params into a 1d numpy array
        for i in xrange(self.thinning + 1):
            # get a new value for the parameter array via slice sampling
            self.params, current_ll = slice_sample(self.params,
                                                   self.logprob,
                                                   gp_likelihood,
                                                   step_out=False)
        self.current_ll = current_ll  # for diagnostics
        return self.params.copy()
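
Compared with Example #2, this variant keeps the parameter vector in self.params directly, disables the step-out phase (step_out=False, so the sampler presumably only shrinks the initial bracket), and returns a copy of the final vector to the caller. A hypothetical call site, purely for illustration (sampler, gp_likelihood, and num_mcmc_iters are made-up names):

samples = []
for _ in range(num_mcmc_iters):
    # each call advances the chain by self.thinning + 1 slice-sampling steps
    samples.append(sampler.sample(gp_likelihood))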
Example #4
    # priors and slice_sample come from the surrounding module (not shown here)
    import numpy as np
    import numpy.random as npr
    import matplotlib.pyplot as plt

    n = 10000

    # Test on 1D Gaussian
    x_samples = np.zeros(n)
    x = np.zeros(1)

    gsn = priors.Gaussian(mu=-1, sigma=4)

    for i in range(n):
        if i % 1000 == 0:
            print('Sample %d/%d' % (i, n))

        x, cur_ll = slice_sample(x, gsn.logprob)
        x_samples[i] = x[0]

    print('1D Gaussian actual mean: %f, mean of samples: %f' % (-1, np.mean(x_samples)))
    print('1D Gaussian actual sigma: %f, std of samples: %f' % (4, np.std(x_samples)))

    plt.figure(1)
    plt.clf()
    plt.hist(x_samples, 40)
    plt.savefig('slice_sampler_test.pdf')

    # Test on 2D Gaussian
    mu = np.array([-2, 5])
    a = npr.rand(2, 2)
    cov = np.dot(a, a.T)
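
The snippet is cut off right after the 2D covariance is constructed. A sketch of how the 2D check could continue, written against scipy.stats as an assumption rather than what the original file actually does:

    from scipy.stats import multivariate_normal

    def gauss_2d_logprob(x):
        # log-density of the 2D Gaussian defined by mu and cov above
        return multivariate_normal.logpdf(x, mean=mu, cov=cov)

    x = np.zeros(2)
    xy_samples = np.zeros((n, 2))
    for i in range(n):
        x, cur_ll = slice_sample(x, gauss_2d_logprob)
        xy_samples[i] = x

    print('2D Gaussian actual mean: %s, sample mean: %s' % (mu, np.mean(xy_samples, axis=0)))
    print('2D Gaussian actual cov:\n%s\nsample cov:\n%s' % (cov, np.cov(xy_samples.T)))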
Example #5
#r.par(mfrow=[2,2])
# Exhaustive computation to compare to slice sampler

log_concs = np.arange(-50, 20, 0.1)
lls = np.array([loglik(log_conc) for log_conc in log_concs])
mle = log_concs[lls.argmax()]
print "MLE", np.exp(mle)
lps = np.array([logpost(log_conc) for log_conc in log_concs])
map_est = log_concs[lps.argmax()]  # MAP estimate of the log concentration
print "MAP", np.exp(map_est)
#r.plot(log_concs, lls, xlab='x',ylab='ll')
#plot(log_concs, lls, main='loglik')
#plot(log_concs, lps, main='unnorm logpost')


# For comparisons

real_s = r.sample(log_concs, len(histories) * 2, prob=np.exp(lps), replace=True)


# Slice sampler

h = mcmc.slice_sample(logpost, 0.0, 1.0, niter=1000)
h = np.array(h)

# Replications of single slice-sample
#for i in range(500):
#  init = histories[i][-1]
#  h = mcmc.slice_sample(logpost, init, 1.0, niter=200)
#  histories[i] += h
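
As a follow-up, one way to sanity-check the chain h against the exhaustive grid computed above; the burn-in length here is an arbitrary assumption, not something taken from the original script:

burn_in = 200  # assumed burn-in length
post = h[burn_in:]

# posterior mean of the concentration on the original scale, next to the grid MAP
print("posterior mean conc: %f" % np.exp(post).mean())
print("grid MAP conc: %f" % np.exp(map_est))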