def good_graph(X):
    """Fit a 2-component EM mixture to X and scatter-plot the resulting clustering.

    Parameters
    ----------
    X : array-like of shape (n_samples, 2)
        Two-dimensional data points (only the first two columns are plotted).

    Side effects: shows a matplotlib figure; returns None.
    """
    # Default initialization; presumably random starts inside EM_algorithm — TODO confirm.
    a = EM.EM_algorithm(components=2, tol=1e-6, max_iter=60)
    a.fit(X)
    # Hard-assign each point to its most responsible component.
    y = a.res['gamma'].argmax(axis=1)
    plt.scatter(X[:, 0], X[:, 1], c=y, s=90)
    # Fixed typo in title: "clastering" -> "clustering".
    plt.title('Good clustering', fontsize=30)
    plt.show()
def inc_log(X):
    """Fit a 10-component EM mixture to X and plot the log-likelihood trace.

    Parameters
    ----------
    X : array-like
        Training data passed straight to ``EM_algorithm.fit``.

    Side effects: shows a matplotlib figure; returns None.
    """
    model = EM.EM_algorithm(components=10, tol=1e-10, max_iter=60)
    model.fit(X)
    # Decorate the axes first, then draw the per-iteration likelihood curve.
    plt.title('Increase of log-likelihood', fontsize=30)
    plt.xlabel('number of iteration', fontsize=15)
    plt.ylabel('log-likelihood', fontsize=15)
    trace = model.res['likelihood']
    plt.plot(trace)
    plt.show()
def bad_graph(X):
    """Fit a 2-component EM mixture from a deliberately poor initialization
    and scatter-plot the resulting (bad) clustering.

    Parameters
    ----------
    X : array-like of shape (n_samples, 2)
        Two-dimensional data points (only the first two columns are plotted).

    Side effects: shows a matplotlib figure; returns None.
    """
    # Hand-picked means chosen to demonstrate a bad local optimum.
    mu_s = np.array([[-5, 0], [0, 5]])
    a = EM.EM_algorithm(components=2, mu_s=mu_s, tol=1e-6, max_iter=60)
    a.fit(X)
    # Hard-assign each point to its most responsible component.
    y = a.res['gamma'].argmax(axis=1)
    plt.scatter(X[:, 0], X[:, 1], c=y, s=90)
    # Fixed typo in title: "clastering" -> "clustering".
    plt.title('Bad clustering', fontsize=30)
    plt.show()
def EM_s(X, count, max_iter=100, mu_s=None, sigma_s=None, pi_s=None,
         components=10, tol=1e-3, cov_type='full'):
    """Run the EM algorithm ``count`` times and keep the best restart.

    Bug fix: the original loop overwrote ``like_hood`` and the fitted
    parameters unconditionally on every iteration (so only the last run
    survived, defeating the multi-restart purpose) and then discarded
    everything, returning None.  Now the run with the highest final
    log-likelihood is tracked and returned.

    Parameters
    ----------
    X : array-like
        Training data forwarded to ``EM_algorithm.fit``.
    count : int
        Number of independent restarts.
    max_iter, mu_s, sigma_s, pi_s, components, tol, cov_type
        Forwarded verbatim to ``EM.EM_algorithm`` — see that class for
        semantics (project-defined; not documented here).

    Returns
    -------
    dict or None
        ``{'likelihood', 'mu', 'sigma', 'pi'}`` of the best run, or None
        when ``count <= 0``.
    """
    best_likelihood = -np.inf
    best = None
    for _ in range(count):
        a = EM.EM_algorithm(sigma_s=sigma_s, mu_s=mu_s, pi_s=pi_s,
                            cov_type=cov_type, components=components,
                            tol=tol, max_iter=max_iter)
        a.fit(X)
        # Final entry of the trace is the converged log-likelihood.
        current = a.res['likelihood'][-1]
        if current > best_likelihood:
            best_likelihood = current
            best = {
                'likelihood': current,
                'mu': a.res['mu'],
                'sigma': a.res['sigma'],
                'pi': a.res['pi'],
            }
    return best