def test_check_means():
    """Check validation and acceptance of the `means_init` parameter.

    A means matrix with the wrong shape must make ``fit`` raise a
    ``ValueError``; a correctly shaped matrix must be stored unchanged.
    """
    seed = np.random.RandomState(0)
    data = RandomData(seed)
    n_comp = data.n_components
    n_feat = data.n_features
    samples = data.X['full']

    gmm = GaussianMixture(n_components=n_comp)

    # A means matrix with one row too many must be rejected at fit time.
    wrong_shape_means = seed.rand(n_comp + 1, n_feat)
    gmm.means_init = wrong_shape_means
    assert_raise_message(
        ValueError,
        "The parameter 'means' should have the shape of ",
        gmm.fit,
        samples,
    )

    # A correctly shaped means matrix is accepted and kept as-is.
    good_means = data.means
    gmm.means_init = good_means
    gmm.fit(samples)
    assert_array_equal(good_means, gmm.means_init)
# Toy 2-D data set: two clusters separated along the x-axis.
X_train = np.array([[-4, 2], [-2, 1], [-2, 3], [0, 2],
                    [2, -1], [3, -1], [2, -2], [3, -2]])
estimator = GaussianMixture(n_components=2, max_iter=1000, random_state=0,
                            init_params='random')

# Plot the raw data points before any fitting.
plt.figure()
plt.plot(X_train[:, 0], X_train[:, 1], 'k.', markersize=25)
plt.savefig('Figure_6-datapoints.png')
plt.close()

colors = ['r', 'b']

# initial means
estimator.means_init = np.array([[0, 0.5], [0.5, 0]])
# Train the other parameters using the EM algorithm.
estimator.fit(X_train)
classes = estimator.predict(X_train)

plt.figure()
for i in range(2):
    plt.plot(X_train[classes == i, 0], X_train[classes == i, 1],
             colors[i] + '.', markersize=25, label='class' + str(i + 1))
    plt.plot(estimator.means_init[i, 0], estimator.means_init[i, 1],
             colors[i] + '*', markersize=20, label='mean_init')
    # FIX: this marker is the empirical class mean of the points assigned
    # after EM, not the initial mean — the original reused the 'mean_init'
    # label here (copy-paste error), making the legend misleading.
    plt.plot(np.mean(X_train[classes == i, 0]),
             np.mean(X_train[classes == i, 1]),
             colors[i] + 'P', markersize=15, label='mean_final')
plt.title('mean_init=' + str(estimator.means_init))
plt.legend()
plt.savefig('Figure_6-EM1.png')
plt.close()
# initial means