#++++++++++++++++++++++++
# Learn the model with EM
#++++++++++++++++++++++++

# List of learned mixtures: lgm[i] is a mixture with i+1 components
lgm = []
kmax = 6
bics = N.zeros(kmax)
em = EM()
for i in range(kmax):
    # Init the model with an empty Gaussian Mixture, and create a Gaussian
    # Mixture Model from it
    lgm.append(GM(d, i+1, mode))
    gmm = GMM(lgm[i], 'kmean')

    # The actual EM, with likelihood computation. The threshold is compared
    # to the (linearly approximated) derivative of the likelihood
    em.train(data, gmm, maxiter = 30, thresh = 1e-10)
    bics[i] = gmm.bic(data)

print "Original model has %d clusters, bics says %d" % (k, N.argmax(bics)+1)
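# For intuition, here is a minimal sketch of the kind of criterion bic()
# computes. The "larger is better" convention is an assumption inferred from
# the N.argmax above; bic_sketch and its parameter-count formula for a
# diagonal mixture are hypothetical illustrations, not the em package's
# actual implementation.
import numpy as N

def bic_sketch(loglike, n_components, dim, n_samples):
    # Free parameters of a dim-dimensional diagonal mixture with
    # n_components components: (n_components - 1) independent weights,
    # n_components * dim means and n_components * dim variances
    n_params = (n_components - 1) + 2 * n_components * dim
    # Penalized log-likelihood: the penalty grows with model size, so
    # taking the argmax trades fit quality against model complexity
    return loglike - 0.5 * n_params * N.log(n_samples)

# Usage: compare, e.g., bic_sketch(ll3, 3, d, len(data)) against
# bic_sketch(ll4, 4, d, len(data)) and keep the candidate with the
# larger value.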
#+++++++++++++++
# Draw the model
#+++++++++++++++
import pylab as P

for k in range(kmax):
    P.subplot(3, 2, k+1)
    # Level is the confidence level for confidence ellipsoids: 1.0 means
    # that all points will be (almost surely) inside the ellipsoid
    level = 0.9
    P.plot(data[:, 0], data[:, 1], '.', label = '_nolegend_')

    # h keeps the handles of the plot, so that you can modify its
    # parameters like label or color
    h = lgm[k].plot(level = level)
    [i.set_color('r') for i in h]
    h[0].set_label('EM confidence ellipsoids')

P.legend(loc = 0)
P.show()
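# For intuition about what plot(level = ...) draws: a sketch of a confidence
# ellipse for a 2-d diagonal Gaussian. conf_ellipse is a hypothetical helper,
# not part of the em package; it relies on the squared Mahalanobis distance
# of a Gaussian being chi-square distributed, so in 2-d the radius satisfies
# r**2 = -2 * log(1 - level).
import numpy as N
import pylab as P

def conf_ellipse(mu, va, level = 0.9, npoints = 100):
    # Radius from the chi-square quantile with 2 degrees of freedom
    r = N.sqrt(-2. * N.log(1. - level))
    theta = N.linspace(0., 2 * N.pi, npoints)
    # Scale a unit circle by the per-axis standard deviations and
    # translate it to the mean
    x = mu[0] + r * N.sqrt(va[0]) * N.cos(theta)
    y = mu[1] + r * N.sqrt(va[1]) * N.sin(theta)
    return x, y

P.figure()
x, y = conf_ellipse(N.array([0., 0.]), N.array([1., 4.]), level = 0.9)
P.plot(x, y, 'r', label = '90% confidence ellipse')
P.legend(loc = 0)
P.show()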