def test_gmm_to_mvn_vs_mvn():
    """Collapsing a 2-component GMM to one MVN must match a directly fitted MVN."""
    rng = check_random_state(0)
    gmm = GMM(n_components=2, random_state=rng)
    # NOTE(review): X is not defined in this function — presumably a
    # module-level data fixture defined elsewhere in the file; verify.
    gmm.from_samples(X)
    collapsed = gmm.to_mvn()
    reference = MVN(random_state=rng)
    reference.from_samples(X)
    assert_array_almost_equal(collapsed.mean, reference.mean)
    assert_array_almost_equal(
        collapsed.covariance, reference.covariance, decimal=3)
def test_probability_density_without_noise():
    """Test probability density of MVN with not invertible covariance."""
    rng = check_random_state(0)
    n = 10
    inputs = np.linspace(0, 1, n)[:, np.newaxis]
    # Second feature is constant, so its variance is exactly zero and the
    # covariance matrix is singular (not invertible).
    outputs = np.ones((n, 1))
    data = np.hstack((inputs, outputs))
    mvn = MVN(random_state=rng)
    mvn.from_samples(data)
    assert_array_almost_equal(mvn.mean, np.array([0.5, 1.0]), decimal=2)
    assert_equal(mvn.covariance[1, 1], 0.0)
    # Density at the training points must exceed density at shifted points.
    density_at_train = mvn.to_probability_density(data)
    density_off_train = mvn.to_probability_density(data + 1)
    assert_true(np.all(density_at_train > density_off_train))
def test_regression_without_noise():
    """Test regression without noise with MVN."""
    rng = check_random_state(0)
    n = 10
    x = np.linspace(0, 1, n)[:, np.newaxis]
    y = 3 * x + 1  # exact linear relation, no noise added
    mvn = MVN(random_state=rng)
    mvn.from_samples(np.hstack((x, y)))
    # mean(x) = 0.5, mean(y) = 3 * 0.5 + 1 = 2.5
    assert_array_almost_equal(mvn.mean, np.array([0.5, 2.5]), decimal=2)
    # Condition on feature 0 (x) to predict y.
    pred, cov = mvn.predict(np.array([0]), x)
    # Noise-free data: both the MSE and the conditional variance vanish.
    assert_less(np.mean((y - pred) ** 2), 1e-10)
    assert_less(cov[0, 0], 1e-10)
def test_regression_without_noise():
    # NOTE(review): this is a duplicate of an identically named
    # test_regression_without_noise defined earlier; at import time the
    # later definition shadows the earlier one, so only one runs — confirm
    # which copy should be kept.
    """Test regression without noise with MVN."""
    random_state = check_random_state(0)
    n_samples = 10
    x = np.linspace(0, 1, n_samples)[:, np.newaxis]
    y = 3 * x + 1
    samples = np.hstack((x, y))
    mvn = MVN(random_state=random_state)
    mvn.from_samples(samples)
    # mean(x) = 0.5, mean(y) = 3 * 0.5 + 1 = 2.5
    assert_array_almost_equal(mvn.mean, np.array([0.5, 2.5]), decimal=2)
    # Condition on feature 0 (x) to predict y.
    pred, cov = mvn.predict(np.array([0]), x)
    mse = np.sum((y - pred)**2) / n_samples
    # Noise-free data: prediction error and conditional variance are ~0.
    assert_less(mse, 1e-10)
    assert_less(cov[0, 0], 1e-10)
def test_estimate_moments():
    """Test moments estimated from samples and sampling from MVN."""
    rng = check_random_state(0)
    true_mean = np.array([0.0, 1.0])
    true_cov = np.array([[0.5, -1.0], [-1.0, 5.0]])
    data = rng.multivariate_normal(true_mean, true_cov, size=(100000,))
    # Moments estimated from a large sample should be close to the truth.
    fitted = MVN(random_state=rng)
    fitted.from_samples(data)
    assert_less(np.linalg.norm(fitted.mean - true_mean), 0.02)
    assert_less(np.linalg.norm(fitted.covariance - true_cov), 0.02)
    # Sampling from the fitted MVN and refitting should also recover them.
    resampled = fitted.sample(n_samples=100000)
    refitted = MVN(random_state=rng)
    refitted.from_samples(resampled)
    assert_less(np.linalg.norm(refitted.mean - true_mean), 0.03)
    assert_less(np.linalg.norm(refitted.covariance - true_cov), 0.03)
def test_regression_with_2d_input():
    """Test regression with MVN and two-dimensional input."""
    rng = check_random_state(0)
    n = 100
    x = np.linspace(0, 1, n)[:, np.newaxis]
    # Linear relation plus small Gaussian noise (single randn call, so the
    # RNG stream matches the original formulation exactly).
    y = 3 * x + 1 + rng.randn(n, 1) * 0.01
    # Two input features: x and its reversal.
    data = np.hstack((x, x[::-1], y))
    mvn = MVN(random_state=rng)
    mvn.from_samples(data)
    assert_array_almost_equal(mvn.mean, np.array([0.5, 0.5, 2.5]), decimal=2)
    # Condition on both input features (indices 0 and 1) to predict y.
    pred, cov = mvn.predict(np.array([0, 1]), np.hstack((x, x[::-1])))
    assert_less(np.mean((y - pred) ** 2), 1e-3)
    assert_less(cov[0, 0], 0.01)
def test_regression_with_2d_input():
    # NOTE(review): this is a duplicate of an identically named
    # test_regression_with_2d_input defined earlier; at import time the
    # later definition shadows the earlier one, so only one runs — confirm
    # which copy should be kept.
    """Test regression with MVN and two-dimensional input."""
    random_state = check_random_state(0)
    n_samples = 100
    x = np.linspace(0, 1, n_samples)[:, np.newaxis]
    y = 3 * x + 1
    noise = random_state.randn(n_samples, 1) * 0.01
    y += noise
    # Two input features: x and its reversal, output y in the last column.
    samples = np.hstack((x, x[::-1], y))
    mvn = MVN(random_state=random_state)
    mvn.from_samples(samples)
    assert_array_almost_equal(mvn.mean, np.array([0.5, 0.5, 2.5]), decimal=2)
    x_test = np.hstack((x, x[::-1]))
    # Condition on both input features (indices 0 and 1) to predict y.
    pred, cov = mvn.predict(np.array([0, 1]), x_test)
    mse = np.sum((y - pred) ** 2) / n_samples
    # Small noise: low prediction error and low conditional variance.
    assert_less(mse, 1e-3)
    assert_less(cov[0, 0], 0.01)
distributions. """
# NOTE(review): this chunk begins mid-module-docstring (the opening triple
# quote is outside this view) and is truncated after plt.xticks(()).
print(__doc__)


import numpy as np
import matplotlib.pyplot as plt
from sklearn.utils import check_random_state
from gmr import MVN, plot_error_ellipse


if __name__ == "__main__":
    # Fit a bivariate Gaussian to samples drawn from a known distribution,
    # then draw new samples from the fitted model.
    random_state = check_random_state(0)
    mvn = MVN(random_state=random_state)
    X = random_state.multivariate_normal(
        [0.0, 1.0], [[0.5, -2.0], [-2.0, 5.0]], size=(100,))
    mvn.from_samples(X)
    X_sampled = mvn.sample(n_samples=100)

    # Left panel: training data, model samples, and the error ellipse of
    # the fitted Gaussian.
    plt.figure(figsize=(15, 5))
    plt.subplot(1, 3, 1)
    plt.xlim((-10, 10))
    plt.ylim((-10, 10))
    plot_error_ellipse(plt.gca(), mvn)
    plt.scatter(X[:, 0], X[:, 1], c="g", label="Training data")
    plt.scatter(X_sampled[:, 0], X_sampled[:, 1], c="r", label="Samples")
    plt.title("Bivariate Gaussian")
    plt.legend(loc="best")

    # Middle panel set-up (continues beyond this chunk).
    x = np.linspace(-10, 10, 100)
    plt.subplot(1, 3, 2)
    plt.xticks(())
# Example script fragment: fit an MVN to noisy linear data and plot the
# conditional distribution p(Y | X). (Chunk is truncated after the final
# n_samples = 100 line; the script continues beyond this view.)
import numpy as np
import matplotlib.pyplot as plt
from gmr.utils import check_random_state
from gmr import MVN, GMM, plot_error_ellipses


random_state = check_random_state(0)

# Noisy linear data: y = 1 - 3x + standard-normal noise.
n_samples = 10
X = np.ndarray((n_samples, 2))
X[:, 0] = np.linspace(0, 2 * np.pi, n_samples)
X[:, 1] = 1 - 3 * X[:, 0] + random_state.randn(n_samples)

# Fit a single Gaussian and condition on feature 0 to regress y given x.
mvn = MVN(random_state=0)
mvn.from_samples(X)
X_test = np.linspace(0, 2 * np.pi, 100)
mean, covariance = mvn.predict(np.array([0]), X_test[:, np.newaxis])

plt.figure(figsize=(10, 5))
plt.subplot(1, 2, 1)
# Raw string: the LaTeX commands (\mathcal, \mu, \Sigma) are not valid
# Python escape sequences and trigger SyntaxWarning in a plain string;
# the raw string has an identical value at runtime.
plt.title(r"Linear: $p(Y | X) = \mathcal{N}(\mu_{Y|X}, \Sigma_{Y|X})$")
plt.scatter(X[:, 0], X[:, 1])
y = mean.ravel()
s = covariance.ravel()  # conditional variance used as a +/- band around the mean
plt.fill_between(X_test, y - s, y + s, alpha=0.2)
plt.plot(X_test, y, lw=2)

n_samples = 100