# Benchmark: EM on a large GMM. Imports added for a self-contained run
# (the exact import path depends on where pyem is installed):
import copy

import numpy as N

from scipy.sandbox.pyem import GM, GMM

def bench1(mode='diag'):
    #===========================================
    # GMM of 30 components, 15 dimensions, 1e5 frames
    #===========================================
    d = 15
    k = 30
    nframes = int(1e5)
    niter = 10
    print "============================================================="
    print "(%d dim, %d components) GMM with %d iterations, for %d frames" \
            % (d, k, niter, nframes)

    #+++++++++++++++++++++++++++++++++++++++++++
    # Create an artificial GMM model, sample it
    #+++++++++++++++++++++++++++++++++++++++++++
    print "Generating the mixture"
    # Generate a model with k components, d dimensions
    w, mu, va = GM.gen_param(d, k, mode, spread=3)
    # gm = GM(d, k, mode)
    # gm.set_param(w, mu, va)
    gm = GM.fromvalues(w, mu, va)

    # Sample nframes frames from the model
    data = gm.sample(nframes)

    #++++++++++++++++++++++++
    # Learn the model with EM
    #++++++++++++++++++++++++
    # Init the model
    print "Init a model for learning, with kmean for initialization"
    lgm = GM(d, k, mode)
    gmm = GMM(lgm, 'kmean')
    gmm.init(data)

    # Keep the initialized model for drawing
    gm0 = copy.copy(lgm)

    # The actual EM, with likelihood computation
    like = N.zeros(niter)
    print "computing..."
    for i in range(niter):
        print "iteration %d" % i
        g, tgd = gmm.sufficient_statistics(data)
        like[i] = N.sum(N.log(N.sum(tgd, 1)))
        gmm.update_em(data, g)
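# A minimal harness to time bench1 (an assumed addition, not part of the
# original benchmark; uses only the standard library):
import time

if __name__ == '__main__':
    t0 = time.clock()
    bench1(mode='diag')
    print "bench1 ran in %.2f s of CPU time" % (time.clock() - t0)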
# Second example: the same model/sample/learn flow, but training is
# driven by the EM class, which iterates until the (linearly
# approximated) derivative of the log-likelihood drops below a
# threshold. This example starts after its setup code; the imports and
# parameter values below are assumed for a self-contained run:
import copy

import numpy as N

from scipy.sandbox.pyem import GM, GMM, EM

d = 2                # assumed dimension
k = 3                # assumed number of components
mode = 'diag'        # assumed covariance mode
nframes = int(1e3)   # assumed number of frames

#+++++++++++++++++++++++++++++++++++++++++++
# Create an artificial GM model, sample it
#+++++++++++++++++++++++++++++++++++++++++++
w, mu, va = GM.gen_param(d, k, mode, spread=1.5)
gm = GM.fromvalues(w, mu, va)

# Sample nframes frames from the model
data = gm.sample(nframes)

#++++++++++++++++++++++++
# Learn the model with EM
#++++++++++++++++++++++++
# Init the model
lgm = GM(d, k, mode)
gmm = GMM(lgm, 'kmean')
gmm.init(data)

# Keep a copy for drawing later
gm0 = copy.copy(lgm)

# The actual EM, with likelihood computation. The threshold
# is compared to the (linearly approximated) derivative of the likelihood
em = EM()
like = em.train(data, gmm, maxiter=30, thresh=1e-8)

#+++++++++++++++
# Draw the model
#+++++++++++++++
import pylab as P
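# The drawing code is truncated after the pylab import; a sketch of one
# way it could continue (assumed, not the original plotting code),
# showing EM convergence from the likelihoods returned by em.train:
P.plot(like, 'o-')
P.xlabel('EM iteration')
P.ylabel('log-likelihood')
P.show()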
# Third example: model selection with the Bayesian Information
# Criterion (BIC). EM is trained for 1..kmax components, and BIC picks
# the model order. This example also starts after its setup code; the
# imports and parameter values below are assumed for a self-contained run:
import numpy as N

from scipy.sandbox.pyem import GM, GMM, EM

d = 2                # assumed dimension
k = 3                # assumed true number of components
mode = 'diag'        # assumed covariance mode
nframes = int(1e3)   # assumed number of frames
w, mu, va = GM.gen_param(d, k, mode, spread=1.5)

gm = GM.fromvalues(w, mu, va)

# Sample nframes frames from the model
data = gm.sample(nframes)

#++++++++++++++++++++++++
# Learn the model with EM
#++++++++++++++++++++++++
lgm = []
kmax = 6
bics = N.zeros(kmax)
for i in range(kmax):
    # Init the model with an empty Gaussian Mixture, and create a Gaussian
    # Mixture Model from it
    lgm.append(GM(d, i+1, mode))
    gmm = GMM(lgm[i], 'kmean')

    # The actual EM, with likelihood computation. The threshold
    # is compared to the (linearly approximated) derivative of the likelihood
    em = EM()
    em.train(data, gmm, maxiter=30, thresh=1e-10)
    bics[i] = gmm.bic(data)

print "Original model has %d clusters, BIC says %d" % (k, N.argmax(bics)+1)

#+++++++++++++++
# Draw the model
#+++++++++++++++
import pylab as P
P.subplot(3, 2, 1)
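# The plotting code is truncated after the first subplot call; a sketch
# of one way it could continue (assumed, not the original code),
# plotting BIC against the candidate number of components:
P.plot(N.arange(kmax) + 1, bics, 'o-')
P.xlabel('number of components')
P.ylabel('BIC')
P.show()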