# Example 1
    def test_loglike_against_gaussian(self):
        """A regression with only a constant (all-ones) covariate column
        should reproduce a plain Gaussian's log-likelihoods exactly."""
        mean = np.random.randn(3)
        design = mean[:, None]  # column vector: regression coefficient matrix

        # Random symmetric positive-semidefinite covariance.
        scale = np.random.randn(3, 3)
        covariance = np.dot(scale, scale.T)

        samples = np.random.randn(25, mean.shape[0])

        gaussian = distributions.Gaussian(mu=mean, sigma=covariance)
        gaussian_likes = gaussian.log_likelihood(samples)

        regression = self.distribution_class(A=design, sigma=covariance)
        augmented = np.hstack((np.ones((samples.shape[0], 1)), samples))
        regression_likes = regression.log_likelihood(augmented)

        assert np.allclose(gaussian_likes, regression_likes)
plt.interactive(True)

from pybasicbayes import models, distributions

GENERATE_DATA = True

###########################
#  generate or load data  #
###########################

# Dirichlet concentration for the mixture and NIW hyperparameters for the
# 2-D Gaussian components.
alpha_0 = 5.0
obs_hypparams = dict(mu_0=np.zeros(2), sigma_0=np.eye(2), kappa_0=0.05, nu_0=5)

# Sample 100 synthetic points from a 30-component prior model, then discard
# the generating model so only the data remains.
priormodel = models.Mixture(
    alpha_0=alpha_0,
    components=[distributions.Gaussian(**obs_hypparams) for itr in range(30)])
data, _ = priormodel.generate(100)
del priormodel

plt.figure()
plt.plot(data[:, 0], data[:, 1], 'kx')
plt.title('data')

# raw_input() was renamed to input() in Python 3; fall back so the script
# runs on both interpreters instead of raising NameError on Python 3.
try:
    _pause = raw_input
except NameError:
    _pause = input
_pause()  # pause for effect
###############
#  inference  #
###############

posteriormodel = models.Mixture(
    alpha_0=alpha_0,
# Example 3
 def test_empirical_bayes(self):
     """Smoke test: fitting hyperparameters via empirical Bayes on random
     data should run without error and expose ``hypparams``."""
     samples = np.random.randn(50, 2)
     fitted = distributions.Gaussian().empirical_bayes(samples)
     fitted.hypparams