# Example 1
import copy

import numpy as N

from scipy.sandbox.pyem import GM, GMM

def bench1(mode='diag'):
    #===========================================
    # GMM of 30 components, 15 dimensions, 1e5 frames
    #===========================================
    d       = 15
    k       = 30
    nframes = int(1e5)
    niter   = 10

    print "============================================================="
    print "(%d dim, %d components) GMM with %d iterations, for %d frames" \
            % (d, k, niter, nframes)

    #+++++++++++++++++++++++++++++++++++++++++++
    # Create an artificial GMM model and sample it
    #+++++++++++++++++++++++++++++++++++++++++++
    print "Generating the mixture"
    # Generate a model with k components, d dimensions
    w, mu, va   = GM.gen_param(d, k, mode, spread = 3)
    # gm          = GM(d, k, mode)
    # gm.set_param(w, mu, va)
    gm          = GM.fromvalues(w, mu, va)

    # Sample nframes frames from the model
    data    = gm.sample(nframes)

    #++++++++++++++++++++++++
    # Learn the model with EM
    #++++++++++++++++++++++++

    # Init the model
    print "Init a model for learning, with kmean for initialization"
    lgm = GM(d, k, mode)
    gmm = GMM(lgm, 'kmean')
    
    gmm.init(data)
    # Keep the initialized model for drawing
    gm0 = copy.copy(lgm)

    # The actual EM, with likelihood computation
    like    = N.zeros(niter)

    print "computing..."
    for i in range(niter):
        print "iteration %d" % i
        # E step: g holds the posterior probabilities (responsibilities),
        # tgd the weighted density of each component at each frame
        g, tgd  = gmm.sufficient_statistics(data)
        # Summing tgd over components gives each frame's likelihood
        like[i] = N.sum(N.log(N.sum(tgd, 1)))
        # M step: update the parameters from the responsibilities
        gmm.update_em(data, g)
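
# A minimal driver for the benchmark above (a hypothetical harness, not
# part of the original snippet): run once and time it with the stdlib
# time module.
if __name__ == '__main__':
    import time
    t0 = time.time()
    bench1('diag')
    print "bench1 ran in %.2f s" % (time.time() - t0)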

# Example 3
import copy

import numpy as N
from numpy.random import seed

from scipy.sandbox.pyem import GM, GMM, EM

seed(2)

k       = 4
d       = 2
mode    = 'diag'
nframes = int(1e3)

#+++++++++++++++++++++++++++++++++++++++++++
# Create an artificial GMM model and sample it
#+++++++++++++++++++++++++++++++++++++++++++
w, mu, va   = GM.gen_param(d, k, mode, spread = 1.0)
gm          = GM.fromvalues(w, mu, va)

# Sample nframes frames from the model
data    = gm.sample(nframes)

#++++++++++++++++++++++++
# Learn the model with EM
#++++++++++++++++++++++++

# List of learned mixtures: lgm[i] is a mixture with i+1 components
lgm     = []
kmax    = 6
bics    = N.zeros(kmax)
em      = EM()
for i in range(kmax):
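    # (The snippet is cut off at the loop header; below is a plausible
    # body following the pattern of pyem's model-selection examples.
    # EM.train and GMM.bic are assumptions about the API, and cgm is a
    # name introduced here for illustration.)
    cgm = GM(d, i + 1, mode)
    gmm = GMM(cgm, 'kmean')
    gmm.init(data)
    em.train(data, gmm, maxiter=30, thresh=1e-10)
    # BIC trades off fit against the number of free parameters
    bics[i] = gmm.bic(data)
    lgm.append(cgm)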

# Example 4
import copy

from scipy.sandbox.pyem import GM, GMM

#+++++++++++++++++++++++++++++
# Meta parameters of the model
#   - k: Number of components
#   - d: dimension of each Gaussian
#   - mode: Mode of covariance matrix: full or diag (string)
#   - nframes: number of frames (frame = one data point = one
#   row of d elements)
k = 2
d = 2
mode = 'diag'
nframes = int(1e3)

#+++++++++++++++++++++++++++++++++++++++++++
# Create an artificial GM model and sample it
#+++++++++++++++++++++++++++++++++++++++++++
w, mu, va = GM.gen_param(d, k, mode, spread=1.5)
gm = GM.fromvalues(w, mu, va)

# Sample nframes frames from the model
data = gm.sample(nframes)

#++++++++++++++++++++++++
# Learn the model with EM
#++++++++++++++++++++++++

# Init the model
lgm = GM(d, k, mode)
gmm = GMM(lgm, 'kmean')
gmm.init(data)

# Keep a copy for drawing later
gm0 = copy.copy(lgm)
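
# The snippet stops here in the original listing; training would
# plausibly continue as in the bench1 example above (niter is an
# assumed value, shown only for illustration):
niter = 10
for i in range(niter):
    # E step then M step, as in bench1
    g, tgd = gmm.sufficient_statistics(data)
    gmm.update_em(data, g)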

# Example 5
import numpy as N

from scipy.sandbox.pyem import GM, GMM, EM

#+++++++++++++++++++++++++++++
# Meta parameters of the model
#   - k: Number of components
#   - d: dimension of each Gaussian
#   - mode: Mode of covariance matrix: full or diag (string)
#   - nframes: number of frames (frame = one data point = one
#   row of d elements)
k       = 4
d       = 2
mode    = 'diag'
nframes = int(1e3)

#+++++++++++++++++++++++++++++++++++++++++++
# Create an artificial GMM model and sample it
#+++++++++++++++++++++++++++++++++++++++++++
w, mu, va   = GM.gen_param(d, k, mode, spread = 1.0)
gm          = GM.fromvalues(w, mu, va)

# Sample nframes frames from the model
data    = gm.sample(nframes)

#++++++++++++++++++++++++
# Learn the model with EM
#++++++++++++++++++++++++

lgm     = []
kmax    = 6
bics    = N.zeros(kmax)
for i in range(kmax):
    # Init the model with an empty Gaussian Mixture, and create a Gaussian 
    # Mixture Model from it
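    # (Cut off in the original; a body like the one sketched in Example 3
    # would fit here: build a GM with i + 1 components, wrap it in a GMM,
    # init on the data, train with EM, and store the model's BIC in bics[i].)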

# Example 7
import numpy as N

from scipy.sandbox.pyem import GM

#-------------------------------------------------------
# Values for weights, mean and (diagonal) variances
#   - the weights are an array of rank 1
#   - mean is expected to be rank 2, with one row per component
#   - variances are also expected to be rank 2. For diagonal covariances,
#   one row is one diagonal; for full covariances, the first d rows are
#   the first covariance matrix, etc. In the full case, the variance
#   matrix should have k*d rows and d columns
w   = N.array([0.2, 0.45, 0.35])
mu  = N.array([[4.1, 3], [1, 5], [-2, -3]])
va  = N.array([[1, 1.5], [3, 4], [2, 3.5]])
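
# For illustration (values made up for this sketch): with the same k = 3
# components and d = 2 dimensions in 'full' mode, va would stack one 2x2
# covariance matrix per component row-wise, giving a (k*d, d) = (6, 2) array:
va_full = N.array([[1.0, 0.2],
                   [0.2, 1.5],
                   [3.0, 0.0],
                   [0.0, 4.0],
                   [2.0, 0.5],
                   [0.5, 3.5]])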

# Hyperparameters implied by the values above: k = 3 components in
# d = 2 dimensions
d = 2
k = 3

#-----------------------------------------
# First method: build the GM directly from the parameters:
gm      = GM.fromvalues(w, mu, va)

#-------------------------------------
# Second method to build a GM instance:
gm      = GM(d, k, mode = 'diag')
# set_param checks that w, mu and va are consistent with k, d and mode
gm.set_param(w, mu, va)

# Once set_param has been called, both methods are equivalent. The
# second method is useful when using a GM object for learning (the
# learner class will set the parameters itself), whereas the first one
# is handy when you need to quickly build a model from existing values
# without spelling out the hyperparameters.

# Sample 1000 items from the mixture built above (one row = one
# 2-dimensional sample)
data = gm.sample(1000)