Example 1
from __future__ import division
import numpy as np
from matplotlib import pyplot as plt
plt.interactive(True)

from pybasicbayes import models, distributions

GENERATE_DATA = True

###########################
#  generate or load data  #
###########################

alpha_0 = 5.0
obs_hypparams = dict(mu_0=np.zeros(2), sigma_0=np.eye(2), kappa_0=0.05, nu_0=5)

# sample a mixture model from the prior and use it to generate synthetic data
priormodel = models.Mixture(
    alpha_0=alpha_0,
    components=[distributions.Gaussian(**obs_hypparams) for itr in range(30)])
data, _ = priormodel.generate(100)
del priormodel

plt.figure()
plt.plot(data[:, 0], data[:, 1], 'kx')
plt.title('data')

raw_input()  # pause for effect

###############
#  inference  #
###############

# the snippet is cut off mid-call below; the arguments are assumed to mirror
# the prior model above
posteriormodel = models.Mixture(
    alpha_0=alpha_0,
    components=[distributions.Gaussian(**obs_hypparams) for itr in range(30)])
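# (assumed continuation, not in the original snippet) attach the data and run
# Gibbs sampling, using the standard pybasicbayes Mixture methods
# add_data / resample_model / plot
posteriormodel.add_data(data)

for itr in range(100):
    posteriormodel.resample_model()

# plot a posterior sample of the clustering: data colored by label plus the
# fitted component Gaussians
plt.figure()
posteriormodel.plot()
plt.title('posterior sample')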
Example 2
from __future__ import division
import numpy as np
from pybasicbayes import models, distributions

data = np.loadtxt('data.txt')

alpha_0 = 3.
obs_hypparams = dict(mu_0=np.zeros(2), sigma_0=np.eye(2), kappa_0=0.05, nu_0=5)

# finite mixture of 2D Gaussians with conjugate NIW priors on the components
# and a symmetric Dirichlet prior (concentration alpha_0) on the weights
model = models.Mixture(
    alpha_0=alpha_0,
    components=[distributions.Gaussian(**obs_hypparams) for _ in range(10)])

model.add_data(data)

# 100 sweeps of Gibbs sampling over the labels, mixture weights, and
# component parameters
for _ in xrange(100):
    model.resample_model()

# alternative: run indefinitely and stream posterior samples as JSON, e.g. for
# live monitoring in another process
# import itertools, sys, json
# for i in itertools.count():
#     model.resample_model()
#     if i % 3 == 0:
#         print json.dumps(model.to_json_dict())
#         sys.stdout.flush()
Example 3
from __future__ import division
import numpy as np
np.seterr(invalid='raise')
from matplotlib import pyplot as plt
import copy

from pybasicbayes import models, distributions
from pybasicbayes.util.text import progprint_xrange

# EM is really terrible! Here's a demo of how to do it on really easy data

### generate and plot the data

alpha_0 = 100.
obs_hypparams = dict(mu_0=np.zeros(2), sigma_0=np.eye(2), kappa_0=0.05, nu_0=5)

priormodel = models.Mixture(
    alpha_0=alpha_0,
    components=[distributions.Gaussian(**obs_hypparams) for itr in range(6)])

data = priormodel.rvs(200)

del priormodel

plt.figure()
plt.plot(data[:, 0], data[:, 1], 'kx')
plt.title('data')

min_num_components, max_num_components = (1, 12)
num_tries_each = 5

### search over models using BIC as a model selection criterion
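# The snippet ends at the header above. A minimal sketch of the announced
# search follows; it assumes Mixture exposes EM_fit() and BIC() as in
# pybasicbayes and is not part of the original example.
BICs = []
candidates = []
for num in progprint_xrange(min_num_components, max_num_components + 1):
    fits_this_size = []
    for _ in range(num_tries_each):
        fitmodel = models.Mixture(
            alpha_0=alpha_0,
            components=[distributions.Gaussian(**obs_hypparams)
                        for itr in range(num)])
        fitmodel.add_data(data)
        fitmodel.EM_fit()  # EM only finds a local optimum, hence several tries
        fits_this_size.append(copy.deepcopy(fitmodel))
    candidates.append(fits_this_size)
    BICs.append([m.BIC() for m in fits_this_size])

# for each model size keep the best (lowest-BIC) restart
best_BICs = [min(scores) for scores in BICs]

plt.figure()
plt.plot(range(min_num_components, max_num_components + 1), best_BICs, 'bo-')
plt.xlabel('number of mixture components')
plt.ylabel('BIC')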
Example 4
from __future__ import division
import numpy as np
np.seterr(invalid='raise')
from matplotlib import pyplot as plt
import copy

from pybasicbayes import models, distributions
from pybasicbayes.util.text import progprint_xrange

# toy data: each row is a length-2 vector of counts over K=2 categories
# (four distinct count patterns, five copies of each)
data = np.array([[12, 20], [12, 20], [12, 20], [12, 20], [12, 20], [22, 5],
                 [22, 5], [22, 5], [22, 5], [22, 5], [8, 0], [8, 0], [8, 0],
                 [8, 0], [8, 0], [1, 12], [1, 12], [1, 12], [1, 12], [1, 12]])

# mixture of Multinomial observation distributions (symmetric Dirichlet priors
# on the category probabilities); start with 10 components, i.e. at most 10
# clusters
posteriormodel = models.Mixture(
    alpha_0=2.,
    components=[distributions.Multinomial(K=2, alpha_0=2.) for itr in range(10)])

posteriormodel.add_data(data)

# 50 sweeps of Gibbs sampling over labels, weights, and component parameters
for itr in progprint_xrange(50):
    posteriormodel.resample_model()
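# (not part of the original snippet) one way to inspect the fit, assuming the
# usual pybasicbayes attributes: labels_list holds one label object per added
# dataset, and its .z array gives the sampled cluster index of each row
print(posteriormodel.labels_list[0].z)   # cluster assignment per data point
print(posteriormodel.weights.weights)    # sampled mixture weights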