Example no. 1
0
# Fit a Gaussian mixture model (GMM) to synthetic 2D data and plot the result.
# Fix: this fragment used np.* without importing numpy; import it here.
import numpy as np

try:
    import matplotlib.pyplot as plt
except ImportError:
    raise RuntimeError("This script needs the matplotlib library")

import nipy.algorithms.clustering.gmm as gmm

dim = 2  # work in 2D so the fitted mixture can be visualised

# 1. generate a 3-components mixture
x1 = np.random.randn(100, dim)                      # standard-normal cluster at the origin
x2 = 3 + 2 * np.random.randn(50, dim)               # shifted, wider cluster
x3 = np.repeat(np.array([- 2, 2], ndmin=2), 30, 0) \
     + 0.5 * np.random.randn(30, dim)               # tight cluster centred at (-2, 2)
x = np.concatenate((x1, x2, x3))

# 2. fit the mixture with a bunch of possible models (k = 1..4 components)
# and keep the best-fitting one
krange = range(1, 5)
lgmm = gmm.best_fitting_GMM(x,
                            krange,
                            prec_type='diag',
                            niter=100,
                            delta=1.e-4,
                            ninit=1,
                            verbose=0)

# 3, plot the result: hard labels from the MAP assignment
z = lgmm.map_label(x)
gmm.plot2D(x, lgmm, z, verbose=0)
plt.show()
Example no. 2
0
Requires matplotlib

Author : Bertrand Thirion, 2008-2009
"""
# Print the module docstring, then run the GMM fitting demo.
# Fix: use the parenthesized print form, which is valid in both Python 2
# (parentheses around a single argument are a no-op) and Python 3; the
# original bare "print __doc__" is a SyntaxError on Python 3.
print(__doc__)

import numpy as np

try:
    import matplotlib.pyplot as plt
except ImportError:
    raise RuntimeError("This script needs the matplotlib library")

import nipy.algorithms.clustering.gmm as gmm

dim = 2  # 2D data so the fit can be visualised

# 1. generate a 3-components mixture
x1 = np.random.randn(100, dim)
x2 = 3 + 2 * np.random.randn(50, dim)
x3 = np.repeat(np.array([-2, 2], ndmin=2), 30, 0) + 0.5 * np.random.randn(30, dim)
x = np.concatenate((x1, x2, x3))

# 2. fit the mixture with a bunch of possible models (k = 1..4 components)
krange = range(1, 5)
lgmm = gmm.best_fitting_GMM(x, krange, prec_type="diag", niter=100, delta=1.0e-4, ninit=1, verbose=0)

# 3, plot the result
z = lgmm.map_label(x)
gmm.plot2D(x, lgmm, z, verbose=0)
plt.show()
Example no. 3
0
# Model selection by variational Bayes, then by Gibbs sampling.
# NOTE(review): this fragment relies on names defined earlier in the full
# script (np, plt, x, dim, krange, bgmm, plot2D) that are not visible here.
be = -np.inf
for k in krange:
    b = bgmm.VBGMM(k, dim)       # variational-Bayes GMM with k classes
    b.guess_priors(x)            # set priors from the data
    b.initialize(x)
    b.estimate(x)
    ek = float(b.evidence(x))    # model evidence (free energy) for k classes
    if ek > be:
        be = ek
        bestb = b                # keep the model with the highest evidence
    print k, 'classes, free energy:', ek

###############################################################################
# 3. plot the result
z = bestb.map_label(x)           # hard labels from the best-evidence model
plot2D(x, bestb, z, verbose=0)
plt.title('Variational Bayes')

###############################################################################
# 4. the same, with the Gibbs GMM algo
niter = 1000
krange = range(1, 6)
bbf = -np.inf
for k in krange:
    b = bgmm.BGMM(k, dim)        # Gibbs-sampled Bayesian GMM with k classes
    b.guess_priors(x)
    b.initialize(x)
    b.sample(x, 100)             # presumably a burn-in pass -- TODO confirm
    # keep the sampled weights/centers/precisions/memberships (mem=1)
    w, cent, prec, pz = b.sample(x, niter=niter, mem=1)
    # plug the sampled parameters into a fresh model
    bplugin = bgmm.BGMM(k, dim, cent, prec, w)
    bplugin.guess_priors(x)
    # NOTE(review): the loop body continues beyond this excerpt.
Example no. 4
0
# Model selection by variational Bayes, then by Gibbs sampling
# (near-duplicate of the previous example with minor formatting differences).
# NOTE(review): this fragment relies on names defined earlier in the full
# script (np, plt, x, dim, krange, bgmm, plot2D) that are not visible here.
be = - np.inf
for  k in krange:
    b = bgmm.VBGMM(k, dim)       # variational-Bayes GMM with k classes
    b.guess_priors(x)            # set priors from the data
    b.initialize(x)
    b.estimate(x)
    ek = float(b.evidence(x))    # model evidence (free energy) for k classes
    if ek > be:
        be = ek
        bestb = b                # keep the model with the highest evidence
    print k, 'classes, free energy:', ek

###############################################################################
# 3. plot the result
z = bestb.map_label(x)           # hard labels from the best-evidence model
plot2D(x, bestb, z, verbose=0)
plt.title('Variational Bayes')

###############################################################################
# 4. the same, with the Gibbs GMM algo
niter = 1000
krange = range(1, 6)
bbf = - np.inf
for k in krange:
    b = bgmm.BGMM(k, dim)        # Gibbs-sampled Bayesian GMM with k classes
    b.guess_priors(x)
    b.initialize(x)
    b.sample(x, 100)             # presumably a burn-in pass -- TODO confirm
    # keep the sampled weights/centers/precisions/memberships (mem=1)
    w, cent, prec, pz = b.sample(x, niter=niter, mem=1)
    # plug the sampled parameters into a fresh model
    bplugin = bgmm.BGMM(k, dim, cent, prec, w)
    bplugin.guess_priors(x)
    # NOTE(review): the loop body continues beyond this excerpt.