Example #1
def test_ellipses():
    """Test equiprobable ellipses."""
    random_state = check_random_state(0)

    means = np.array([[0.0, 1.0], [2.0, -1.0]])
    covariances = np.array([[[0.5, 0.0], [0.0, 5.0]],
                            [[5.0, 0.0], [0.0, 0.5]]])

    gmm = GMM(n_components=2,
              priors=np.array([0.5, 0.5]),
              means=means,
              covariances=covariances,
              random_state=random_state)
    ellipses = gmm.to_ellipses()

    mean, (angle, width, height) = ellipses[0]
    assert_array_almost_equal(means[0], mean)
    assert_equal(angle, 0.5 * np.pi)
    assert_equal(width, np.sqrt(5.0))
    assert_equal(height, np.sqrt(0.5))

    mean, (angle, width, height) = ellipses[1]
    assert_array_almost_equal(means[1], mean)
    assert_equal(angle, -np.pi)
    assert_equal(width, np.sqrt(5.0))
    assert_equal(height, np.sqrt(0.5))
Example #2
def test_sklearn_regression():
    """Test regression with GaussianMixtureRegressor."""
    try:
        from gmr.sklearn import GaussianMixtureRegressor
    except ImportError:
        raise SkipTest("sklearn is not available")

    random_state = check_random_state(0)

    n_samples = 200
    x = np.linspace(0, 2, n_samples)[:, np.newaxis]
    y1 = 3 * x[:n_samples // 2] + 1
    y2 = -3 * x[n_samples // 2:] + 7
    noise = random_state.randn(n_samples, 1) * 0.01
    y = np.vstack((y1, y2)) + noise

    gmr = GaussianMixtureRegressor(n_components=2, random_state=random_state)
    gmr.fit(x, y)
    assert_array_almost_equal(gmr.gmm_.priors, 0.5 * np.ones(2), decimal=2)
    assert_array_almost_equal(gmr.gmm_.means[0],
                              np.array([0.5, 2.5]),
                              decimal=2)
    assert_array_almost_equal(gmr.gmm_.means[1],
                              np.array([1.5, 2.5]),
                              decimal=1)

    pred = gmr.predict(x)
    mse = np.sum((y - pred)**2) / n_samples
    assert_less(mse, 0.01)
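
Outside the test harness, the estimator above follows the usual scikit-learn fit/predict pattern. The following is a minimal standalone sketch (my own illustration, not one of the collected examples), restricted to the constructor arguments and methods exercised by the test:

import numpy as np
from gmr.utils import check_random_state
from gmr.sklearn import GaussianMixtureRegressor

random_state = check_random_state(0)

# Piecewise-linear toy data with a little noise, as in the test above.
x = np.linspace(0, 2, 200)[:, np.newaxis]
y = np.where(x < 1.0, 3 * x + 1, -3 * x + 7)
y = y + random_state.randn(*y.shape) * 0.01

reg = GaussianMixtureRegressor(n_components=2, random_state=random_state)
reg.fit(x, y)
y_pred = reg.predict(x)
print(np.mean((y - y_pred) ** 2))  # mean squared error; expected to be small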
Example #3
def test_kmeanspp_initialization():
    random_state = check_random_state(1)

    n_samples = 300
    n_features = 2
    X = np.ndarray((n_samples, n_features))
    X[:n_samples // 3, :] = random_state.multivariate_normal(
        [0.0, 1.0], [[0.5, -1.0], [-1.0, 5.0]], size=(n_samples // 3, ))
    X[n_samples // 3:-n_samples // 3, :] = random_state.multivariate_normal(
        [-2.0, -2.0], [[3.0, 1.0], [1.0, 1.0]], size=(n_samples // 3, ))
    X[-n_samples // 3:, :] = random_state.multivariate_normal(
        [3.0, 1.0], [[3.0, -1.0], [-1.0, 1.0]], size=(n_samples // 3, ))

    # Artificial scaling makes the standard implementation fail: either the
    # initial covariances have to be adjusted or the dataset has to be
    # normalized.
    X[:, 1] *= 10000.0

    gmm = GMM(n_components=3, random_state=random_state)
    gmm.from_samples(X, init_params="random")
    ellipses = gmm.to_ellipses()
    widths = np.array([ellipsis_params[1]
                       for _, ellipsis_params in ellipses])[:, np.newaxis]
    average_widths_random = np.mean(pdist(widths))

    gmm = GMM(n_components=3, random_state=random_state)
    gmm.from_samples(X, init_params="kmeans++")
    ellipses = gmm.to_ellipses()
    widths = np.array([ellipsis_params[1]
                       for _, ellipsis_params in ellipses])[:, np.newaxis]
    average_widths_kmeanspp = np.mean(pdist(widths))

    # random initialization produces uneven covariance scaling
    assert_less(average_widths_kmeanspp, average_widths_random)
Example #4
def test_numerically_robust_responsibilities():
    random_state = check_random_state(0)

    n_samples = 300
    n_features = 2
    X = np.ndarray((n_samples, n_features))
    mean0 = np.array([0.0, 1.0])
    X[:n_samples // 3, :] = random_state.multivariate_normal(
        mean0, [[0.5, -1.0], [-1.0, 5.0]], size=(n_samples // 3, ))
    mean1 = np.array([-2.0, -2.0])
    X[n_samples // 3:-n_samples // 3, :] = random_state.multivariate_normal(
        mean1, [[3.0, 1.0], [1.0, 1.0]], size=(n_samples // 3, ))
    mean2 = np.array([3.0, 1.0])
    X[-n_samples // 3:, :] = random_state.multivariate_normal(
        mean2, [[3.0, -1.0], [-1.0, 1.0]], size=(n_samples // 3, ))

    # Artificial scaling makes the naive implementation fail.
    X[:, 1] *= 10000.0

    gmm = GMM(n_components=3, random_state=random_state)
    gmm.from_samples(X, init_params="random")
    mean_dists = pdist(gmm.means)
    assert_true(all(mean_dists > 1))
    assert_true(all(1e7 < gmm.covariances[:, 1, 1]))
    assert_true(all(gmm.covariances[:, 1, 1] < 1e9))
Example #5
def test_plot():
    """Test plot of MVN."""
    random_state = check_random_state(0)
    mvn = MVN(mean=mean, covariance=covariance, random_state=random_state)

    ax = AxisStub()
    plot_error_ellipse(ax, mvn)
    assert_equal(ax.count, 8)
Example #7
def test_sample_confidence_region():
    """Test sampling of confidence region."""
    random_state = check_random_state(42)
    mvn = MVN(mean=np.array([1.0, 2.0]),
              covariance=np.array([[1.0, 0.0], [0.0, 4.0]]),
              random_state=random_state)
    samples = mvn.sample_confidence_region(100, 0.9)
    for sample in samples:
        assert_true(mvn.is_in_confidence_region(sample, 0.9))
Example #8
def test_probability_density():
    """Test PDF of MVN."""
    random_state = check_random_state(0)
    mvn = MVN(mean, covariance, random_state=random_state)

    x = np.linspace(-100, 100, 201)
    X = np.vstack(list(map(np.ravel, np.meshgrid(x, x)))).T
    p = mvn.to_probability_density(X)
    approx_int = np.sum(p) * ((x[-1] - x[0]) / 201) ** 2
    assert_less(np.abs(1.0 - approx_int), 0.01)
Example #9
def test_probability_density():
    """Test PDF of MVN."""
    random_state = check_random_state(0)
    mvn = MVN(mean, covariance, random_state=random_state)

    x = np.linspace(-100, 100, 201)
    X = np.vstack(list(map(np.ravel, np.meshgrid(x, x)))).T
    p = mvn.to_probability_density(X)
    approx_int = np.sum(p) * ((x[-1] - x[0]) / 201)**2
    assert_less(np.abs(1.0 - approx_int), 0.01)
Example #10
def test_gmm_to_mvn_vs_mvn():
    random_state = check_random_state(0)
    gmm = GMM(n_components=2, random_state=random_state)
    gmm.from_samples(X)
    mvn_from_gmm = gmm.to_mvn()
    mvn = MVN(random_state=random_state)
    mvn.from_samples(X)
    assert_array_almost_equal(mvn_from_gmm.mean, mvn.mean)
    assert_array_almost_equal(mvn_from_gmm.covariance,
                              mvn.covariance,
                              decimal=3)
Example #11
def test_marginal_distribution():
    """Test moments from marginal MVN."""
    random_state = check_random_state(0)
    mvn = MVN(mean=mean, covariance=covariance, random_state=random_state)

    marginalized = mvn.marginalize(np.array([0]))
    assert_equal(marginalized.mean, np.array([0.0]))
    assert_equal(marginalized.covariance, np.array([0.5]))
    marginalized = mvn.marginalize(np.array([1]))
    assert_equal(marginalized.mean, np.array([1.0]))
    assert_equal(marginalized.covariance, np.array([5.0]))
Example #13
def test_ellipse():
    """Test equiprobable ellipse."""
    random_state = check_random_state(0)

    mean = np.array([0.0, 1.0])
    covariance = np.array([[0.5, 0.0], [0.0, 5.0]])
    mvn = MVN(mean=mean, covariance=covariance, random_state=random_state)

    angle, width, height = mvn.to_ellipse()
    assert_equal(angle, 0.5 * np.pi)
    assert_equal(width, np.sqrt(5.0))
    assert_equal(height, np.sqrt(0.5))
Example #15
def test_plot():
    """Test plot of MVN."""
    random_state = check_random_state(0)
    mvn = MVN(mean=mean, covariance=covariance, random_state=random_state)

    ax = AxisStub()
    try:
        plot_error_ellipse(ax, mvn)
    except ImportError:
        raise SkipTest("matplotlib is required for this test")
    assert_equal(ax.count, 8)
    plot_error_ellipse(ax, mvn, color="r")
    assert_equal(ax.count, 16)
Example #16
def test_conditional_distribution():
    """Test moments from conditional GMM."""
    random_state = check_random_state(0)

    gmm = GMM(n_components=2, priors=np.array([0.5, 0.5]), means=means,
              covariances=covariances, random_state=random_state)

    conditional = gmm.condition(np.array([1]), np.array([1.0]))
    assert_array_almost_equal(conditional.means[0], np.array([0.0]))
    assert_array_almost_equal(conditional.covariances[0], np.array([[0.3]]))
    conditional = gmm.condition(np.array([0]), np.array([2.0]))
    assert_array_almost_equal(conditional.means[1], np.array([-1.0]))
    assert_array_almost_equal(conditional.covariances[1], np.array([[0.3]]))
Example #17
def test_conditional_distribution():
    """Test moments from conditional MVN."""
    random_state = check_random_state(0)

    mean = np.array([0.0, 1.0])
    covariance = np.array([[0.5, 0.0], [0.0, 5.0]])
    mvn = MVN(mean=mean, covariance=covariance, random_state=random_state)

    conditional = mvn.condition(np.array([1]), np.array([5.0]))
    assert_equal(conditional.mean, np.array([0.0]))
    assert_equal(conditional.covariance, np.array([0.5]))
    conditional = mvn.condition(np.array([0]), np.array([0.5]))
    assert_equal(conditional.mean, np.array([1.0]))
    assert_equal(conditional.covariance, np.array([5.0]))
Example #19
def test_conditional_distribution():
    """Test moments from conditional GMM."""
    random_state = check_random_state(0)

    gmm = GMM(n_components=2,
              priors=np.array([0.5, 0.5]),
              means=means,
              covariances=covariances,
              random_state=random_state)

    conditional = gmm.condition(np.array([1]), np.array([1.0]))
    assert_array_almost_equal(conditional.means[0], np.array([0.0]))
    assert_array_almost_equal(conditional.covariances[0], np.array([[0.3]]))
    conditional = gmm.condition(np.array([0]), np.array([2.0]))
    assert_array_almost_equal(conditional.means[1], np.array([-1.0]))
    assert_array_almost_equal(conditional.covariances[1], np.array([[0.3]]))
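
Conditioning like this is also the core operation behind regression with a GMM: condition on the observed input dimensions, then collapse the conditional mixture into a single Gaussian for a point estimate. Below is a minimal standalone sketch (my own illustration, not one of the collected examples) that combines gmm.condition with the to_mvn() call used in test_gmm_to_mvn_vs_mvn; the mixture parameters are the module-level ones from Example #44:

import numpy as np
from gmr import GMM
from gmr.utils import check_random_state

means = np.array([[0.0, 1.0],
                  [2.0, -1.0]])
covariances = np.array([[[0.5, -1.0], [-1.0, 5.0]],
                        [[5.0, 1.0], [1.0, 0.5]]])
gmm = GMM(n_components=2, priors=np.array([0.5, 0.5]), means=means,
          covariances=covariances, random_state=check_random_state(0))

# p(x_0 | x_1 = 1.0): condition on the second dimension.
conditional = gmm.condition(np.array([1]), np.array([1.0]))
# Collapse the conditional mixture into a single Gaussian point estimate.
print(conditional.to_mvn().mean)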
Example #20
def test_verbose_from_samples():
    """Test verbose output."""
    global X
    random_state = check_random_state(0)

    old_stdout = sys.stdout
    sys.stdout = StringIO()
    try:
        gmm = GMM(n_components=2, verbose=True, random_state=random_state)
        gmm.from_samples(X)
    finally:
        out = sys.stdout.getvalue()
        sys.stdout.close()
        sys.stdout = old_stdout

    assert "converged" in out
Example #21
def test_estimation_from_previous_initialization():
    global X
    global random_state
    global means
    global covariances

    gmm = GMM(n_components=2,
              priors=0.5 * np.ones(2),
              means=np.copy(means),
              covariances=np.copy(covariances),
              random_state=check_random_state(2))
    gmm.from_samples(X, n_iter=2)
    assert_less(np.linalg.norm(gmm.means[0] - means[0]), 0.01)
    assert_less(np.linalg.norm(gmm.covariances[0] - covariances[0]), 0.03)
    assert_less(np.linalg.norm(gmm.means[1] - means[1]), 0.01)
    assert_less(np.linalg.norm(gmm.covariances[1] - covariances[1]), 0.04)
Example #22
def test_verbose_from_samples():
    """Test verbose output."""
    global X
    random_state = check_random_state(0)

    old_stdout = sys.stdout
    sys.stdout = StringIO()
    try:
        gmm = GMM(n_components=2, verbose=True, random_state=random_state)
        gmm.from_samples(X)
    finally:
        out = sys.stdout.getvalue()
        sys.stdout.close()
        sys.stdout = old_stdout

    assert "converged" in out
Example #23
def test_probability_density_without_noise():
    """Test probability density of MVN with a non-invertible covariance."""
    random_state = check_random_state(0)

    n_samples = 10
    x = np.linspace(0, 1, n_samples)[:, np.newaxis]
    y = np.ones((n_samples, 1))
    samples = np.hstack((x, y))

    mvn = MVN(random_state=random_state)
    mvn.from_samples(samples)
    assert_array_almost_equal(mvn.mean, np.array([0.5, 1.0]), decimal=2)
    assert_equal(mvn.covariance[1, 1], 0.0)
    p_training = mvn.to_probability_density(samples)
    p_test = mvn.to_probability_density(samples + 1)
    assert_true(np.all(p_training > p_test))
Example #25
def test_sample_confidence_region():
    """Test sampling from confidence region."""
    random_state = check_random_state(0)

    means = np.array([[0.0, 1.0], [2.0, -1.0]])
    covariances = np.array([[[0.5, 0.0], [0.0, 5.0]],
                            [[5.0, 0.0], [0.0, 0.5]]])

    gmm = GMM(n_components=2,
              priors=np.array([0.5, 0.5]),
              means=means,
              covariances=covariances,
              random_state=random_state)
    samples = gmm.sample_confidence_region(100, 0.7)
    for sample in samples:
        assert_true(gmm.is_in_confidence_region(sample, 0.7))
Example #26
def test_regression_with_2d_input():
    """Test regression with GMM and two-dimensional input."""
    random_state = check_random_state(0)

    n_samples = 200
    x = np.linspace(0, 2, n_samples)[:, np.newaxis]
    y1 = 3 * x[:n_samples // 2] + 1
    y2 = -3 * x[n_samples // 2:] + 7
    noise = random_state.randn(n_samples, 1) * 0.01
    y = np.vstack((y1, y2)) + noise
    samples = np.hstack((x, x[::-1], y))

    gmm = GMM(n_components=2, random_state=random_state)
    gmm.from_samples(samples)

    pred = gmm.predict(np.array([0, 1]), np.hstack((x, x[::-1])))
    mse = np.sum((y - pred)**2) / n_samples
Example #27
def test_regression_without_noise():
    """Test regression without noise with MVN."""
    random_state = check_random_state(0)

    n_samples = 10
    x = np.linspace(0, 1, n_samples)[:, np.newaxis]
    y = 3 * x + 1
    samples = np.hstack((x, y))

    mvn = MVN(random_state=random_state)
    mvn.from_samples(samples)
    assert_array_almost_equal(mvn.mean, np.array([0.5, 2.5]), decimal=2)

    pred, cov = mvn.predict(np.array([0]), x)
    mse = np.sum((y - pred) ** 2) / n_samples
    assert_less(mse, 1e-10)
    assert_less(cov[0, 0], 1e-10)
Example #28
def test_uninitialized():
    """Test behavior of uninitialized MVN."""
    random_state = check_random_state(0)
    mvn = MVN(random_state=random_state)
    assert_raises(ValueError, mvn.sample, 10)
    assert_raises(ValueError, mvn.to_probability_density, np.ones((1, 1)))
    assert_raises(ValueError, mvn.marginalize, np.zeros(0))
    assert_raises(ValueError, mvn.condition, np.zeros(0), np.zeros(0))
    assert_raises(ValueError, mvn.predict, np.zeros(0), np.zeros(0))
    assert_raises(ValueError, mvn.to_ellipse)
    mvn = MVN(mean=np.ones(2), random_state=random_state)
    assert_raises(ValueError, mvn.sample, 10)
    assert_raises(ValueError, mvn.to_probability_density, np.ones((1, 1)))
    assert_raises(ValueError, mvn.marginalize, np.zeros(0))
    assert_raises(ValueError, mvn.condition, np.zeros(0), np.zeros(0))
    assert_raises(ValueError, mvn.predict, np.zeros(0), np.zeros(0))
    assert_raises(ValueError, mvn.to_ellipse)
Example #29
def test_regression_with_2d_input():
    """Test regression with GMM and two-dimensional input."""
    random_state = check_random_state(0)

    n_samples = 200
    x = np.linspace(0, 2, n_samples)[:, np.newaxis]
    y1 = 3 * x[:n_samples // 2] + 1
    y2 = -3 * x[n_samples // 2:] + 7
    noise = random_state.randn(n_samples, 1) * 0.01
    y = np.vstack((y1, y2)) + noise
    samples = np.hstack((x, x[::-1], y))

    gmm = GMM(n_components=2, random_state=random_state)
    gmm.from_samples(samples)

    pred = gmm.predict(np.array([0, 1]), np.hstack((x, x[::-1])))
    mse = np.sum((y - pred) ** 2) / n_samples
Example #30
def test_regression_without_noise():
    """Test regression without noise with MVN."""
    random_state = check_random_state(0)

    n_samples = 10
    x = np.linspace(0, 1, n_samples)[:, np.newaxis]
    y = 3 * x + 1
    samples = np.hstack((x, y))

    mvn = MVN(random_state=random_state)
    mvn.from_samples(samples)
    assert_array_almost_equal(mvn.mean, np.array([0.5, 2.5]), decimal=2)

    pred, cov = mvn.predict(np.array([0]), x)
    mse = np.sum((y - pred)**2) / n_samples
    assert_less(mse, 1e-10)
    assert_less(cov[0, 0], 1e-10)
Example #32
def test_estimate_moments():
    """Test moments estimated from samples and sampling from MVN."""
    random_state = check_random_state(0)
    actual_mean = np.array([0.0, 1.0])
    actual_covariance = np.array([[0.5, -1.0], [-1.0, 5.0]])
    X = random_state.multivariate_normal(actual_mean, actual_covariance,
                                         size=(100000,))
    mvn = MVN(random_state=random_state)
    mvn.from_samples(X)
    assert_less(np.linalg.norm(mvn.mean - actual_mean), 0.02)
    assert_less(np.linalg.norm(mvn.covariance - actual_covariance), 0.02)

    X2 = mvn.sample(n_samples=100000)

    mvn2 = MVN(random_state=random_state)
    mvn2.from_samples(X2)
    assert_less(np.linalg.norm(mvn2.mean - actual_mean), 0.03)
    assert_less(np.linalg.norm(mvn2.covariance - actual_covariance), 0.03)
Example #34
def test_regression_without_noise():
    """Test regression without noise."""
    random_state = check_random_state(0)

    n_samples = 200
    x = np.linspace(0, 2, n_samples)[:, np.newaxis]
    y1 = 3 * x[:n_samples // 2] + 1
    y2 = -3 * x[n_samples // 2:] + 7
    y = np.vstack((y1, y2))
    samples = np.hstack((x, y))

    gmm = GMM(n_components=2, random_state=random_state)
    gmm.from_samples(samples)
    assert_array_almost_equal(gmm.priors, 0.5 * np.ones(2), decimal=2)
    assert_array_almost_equal(gmm.means[0], np.array([1.5, 2.5]), decimal=2)
    assert_array_almost_equal(gmm.means[1], np.array([0.5, 2.5]), decimal=1)

    pred = gmm.predict(np.array([0]), x)
    mse = np.sum((y - pred) ** 2) / n_samples
    assert_less(mse, 0.01)
Example #35
def test_regression_with_2d_input():
    """Test regression with MVN and two-dimensional input."""
    random_state = check_random_state(0)

    n_samples = 100
    x = np.linspace(0, 1, n_samples)[:, np.newaxis]
    y = 3 * x + 1
    noise = random_state.randn(n_samples, 1) * 0.01
    y += noise
    samples = np.hstack((x, x[::-1], y))

    mvn = MVN(random_state=random_state)
    mvn.from_samples(samples)
    assert_array_almost_equal(mvn.mean, np.array([0.5, 0.5, 2.5]), decimal=2)

    x_test = np.hstack((x, x[::-1]))
    pred, cov = mvn.predict(np.array([0, 1]), x_test)
    mse = np.sum((y - pred)**2) / n_samples
    assert_less(mse, 1e-3)
    assert_less(cov[0, 0], 0.01)
Example #36
def test_regression_without_noise():
    """Test regression without noise."""
    random_state = check_random_state(0)

    n_samples = 200
    x = np.linspace(0, 2, n_samples)[:, np.newaxis]
    y1 = 3 * x[:n_samples // 2] + 1
    y2 = -3 * x[n_samples // 2:] + 7
    y = np.vstack((y1, y2))
    samples = np.hstack((x, y))

    gmm = GMM(n_components=2, random_state=random_state)
    gmm.from_samples(samples)
    assert_array_almost_equal(gmm.priors, 0.5 * np.ones(2), decimal=2)
    assert_array_almost_equal(gmm.means[0], np.array([1.5, 2.5]), decimal=2)
    assert_array_almost_equal(gmm.means[1], np.array([0.5, 2.5]), decimal=1)

    pred = gmm.predict(np.array([0]), x)
    mse = np.sum((y - pred)**2) / n_samples
    assert_less(mse, 0.01)
Example #37
def test_sklearn_regression_with_1d_output():
    """Test regression with GaussianMixtureRegressor and one-dimensional output."""
    try:
        from gmr.sklearn import GaussianMixtureRegressor
    except ImportError:
        raise SkipTest("sklearn is not available")

    random_state = check_random_state(0)

    n_samples = 200
    x = np.linspace(0, 2, n_samples)[:, np.newaxis]
    y = 3 * x + 1
    y = y.flatten()

    gmr = GaussianMixtureRegressor(n_components=1, random_state=random_state)
    gmr.fit(x, y)

    pred = gmr.predict(x)
    mse = np.sum((y - pred)**2) / n_samples
    assert_greater(mse, 0.01)
Example #38
def test_regression_with_2d_input():
    """Test regression with MVN and two-dimensional input."""
    random_state = check_random_state(0)

    n_samples = 100
    x = np.linspace(0, 1, n_samples)[:, np.newaxis]
    y = 3 * x + 1
    noise = random_state.randn(n_samples, 1) * 0.01
    y += noise
    samples = np.hstack((x, x[::-1], y))

    mvn = MVN(random_state=random_state)
    mvn.from_samples(samples)
    assert_array_almost_equal(mvn.mean, np.array([0.5, 0.5, 2.5]), decimal=2)

    x_test = np.hstack((x, x[::-1]))
    pred, cov = mvn.predict(np.array([0, 1]), x_test)
    mse = np.sum((y - pred) ** 2) / n_samples
    assert_less(mse, 1e-3)
    assert_less(cov[0, 0], 0.01)
Example #39
def test_uninitialized():
    """Test behavior of uninitialized GMM."""
    random_state = check_random_state(0)
    gmm = GMM(n_components=2, random_state=random_state)
    assert_raises(ValueError, gmm.sample, 10)
    assert_raises(ValueError, gmm.to_probability_density, np.ones((1, 1)))
    assert_raises(ValueError, gmm.condition, np.zeros(0), np.zeros(0))
    assert_raises(ValueError, gmm.predict, np.zeros(0), np.zeros(0))
    assert_raises(ValueError, gmm.to_ellipses)
    gmm = GMM(n_components=2, priors=np.ones(2), random_state=random_state)
    assert_raises(ValueError, gmm.sample, 10)
    assert_raises(ValueError, gmm.to_probability_density, np.ones((1, 1)))
    assert_raises(ValueError, gmm.condition, np.zeros(0), np.zeros(0))
    assert_raises(ValueError, gmm.predict, np.zeros(0), np.zeros(0))
    assert_raises(ValueError, gmm.to_ellipses)
    gmm = GMM(n_components=2, priors=np.ones(2), means=np.zeros((2, 2)),
              random_state=random_state)
    assert_raises(ValueError, gmm.sample, 10)
    assert_raises(ValueError, gmm.to_probability_density, np.ones((1, 1)))
    assert_raises(ValueError, gmm.condition, np.zeros(0), np.zeros(0))
    assert_raises(ValueError, gmm.predict, np.zeros(0), np.zeros(0))
    assert_raises(ValueError, gmm.to_ellipses)
Example #40
def test_sklearn_regression_with_2d_input():
    """Test regression with GaussianMixtureRegressor and two-dimensional input."""
    try:
        from gmr.sklearn import GaussianMixtureRegressor
    except ImportError:
        raise SkipTest("sklearn is not available")

    random_state = check_random_state(0)

    n_samples = 200
    x = np.linspace(0, 2, n_samples)[:, np.newaxis]
    y1 = 3 * x[:n_samples // 2] + 1
    y2 = -3 * x[n_samples // 2:] + 7
    noise = random_state.randn(n_samples, 1) * 0.01
    y = np.vstack((y1, y2)) + noise

    gmr = GaussianMixtureRegressor(n_components=2, random_state=random_state)
    gmr.fit(x, y)

    pred = gmr.predict(x)
    mse = np.sum((y - pred)**2) / n_samples
    assert_less(mse, 0.01)
Example #41
def test_kmeanspp_initialization():
    random_state = check_random_state(0)

    n_samples = 300
    n_features = 2
    X = np.ndarray((n_samples, n_features))
    mean0 = np.array([0.0, 1.0])
    X[:n_samples // 3, :] = random_state.multivariate_normal(
        mean0, [[0.5, -1.0], [-1.0, 5.0]], size=(n_samples // 3, ))
    mean1 = np.array([-2.0, -2.0])
    X[n_samples // 3:-n_samples // 3, :] = random_state.multivariate_normal(
        mean1, [[3.0, 1.0], [1.0, 1.0]], size=(n_samples // 3, ))
    mean2 = np.array([3.0, 1.0])
    X[-n_samples // 3:, :] = random_state.multivariate_normal(
        mean2, [[3.0, -1.0], [-1.0, 1.0]], size=(n_samples // 3, ))

    # Artificial scaling makes the standard implementation fail: either the
    # initial covariances have to be adjusted or the dataset has to be
    # normalized.
    X[:, 1] *= 10000.0

    gmm = GMM(n_components=3, random_state=random_state)
    gmm.from_samples(X, init_params="random")
    # random initialization fails
    assert_less(gmm.covariances[0, 0, 0], np.finfo(float).eps)
    assert_less(gmm.covariances[1, 0, 0], np.finfo(float).eps)
    assert_less(gmm.covariances[2, 0, 0], np.finfo(float).eps)
    assert_less(gmm.covariances[0, 1, 1], np.finfo(float).eps)
    assert_less(gmm.covariances[1, 1, 1], np.finfo(float).eps)
    assert_less(gmm.covariances[2, 1, 1], np.finfo(float).eps)

    gmm = GMM(n_components=3, random_state=random_state)
    gmm.from_samples(X, init_params="kmeans++")
    mean_dists = pdist(gmm.means)
    assert_true(all(mean_dists > 1))
    assert_true(all(1e7 < gmm.covariances[:, 1, 1]))
    assert_true(all(gmm.covariances[:, 1, 1] < 1e9))
Example #42
def test_ellipses():
    """Test equiprobable ellipses."""
    random_state = check_random_state(0)

    means = np.array([[0.0, 1.0],
                      [2.0, -1.0]])
    covariances = np.array([[[0.5, 0.0], [0.0, 5.0]],
                            [[5.0, 0.0], [0.0, 0.5]]])

    gmm = GMM(n_components=2, priors=np.array([0.5, 0.5]), means=means,
              covariances=covariances, random_state=random_state)
    ellipses = gmm.to_ellipses()

    mean, (angle, width, height) = ellipses[0]
    assert_array_almost_equal(means[0], mean)
    assert_equal(angle, 0.5 * np.pi)
    assert_equal(width, np.sqrt(5.0))
    assert_equal(height, np.sqrt(0.5))

    mean, (angle, width, height) = ellipses[1]
    assert_array_almost_equal(means[1], mean)
    assert_equal(angle, -np.pi)
    assert_equal(width, np.sqrt(5.0))
    assert_equal(height, np.sqrt(0.5))
Example #43
def test_uninitialized():
    """Test behavior of uninitialized GMM."""
    random_state = check_random_state(0)
    gmm = GMM(n_components=2, random_state=random_state)
    assert_raises(ValueError, gmm.sample, 10)
    assert_raises(ValueError, gmm.to_probability_density, np.ones((1, 1)))
    assert_raises(ValueError, gmm.condition, np.zeros(0), np.zeros(0))
    assert_raises(ValueError, gmm.predict, np.zeros(0), np.zeros(0))
    assert_raises(ValueError, gmm.to_ellipses)
    gmm = GMM(n_components=2, priors=np.ones(2), random_state=random_state)
    assert_raises(ValueError, gmm.sample, 10)
    assert_raises(ValueError, gmm.to_probability_density, np.ones((1, 1)))
    assert_raises(ValueError, gmm.condition, np.zeros(0), np.zeros(0))
    assert_raises(ValueError, gmm.predict, np.zeros(0), np.zeros(0))
    assert_raises(ValueError, gmm.to_ellipses)
    gmm = GMM(n_components=2,
              priors=np.ones(2),
              means=np.zeros((2, 2)),
              random_state=random_state)
    assert_raises(ValueError, gmm.sample, 10)
    assert_raises(ValueError, gmm.to_probability_density, np.ones((1, 1)))
    assert_raises(ValueError, gmm.condition, np.zeros(0), np.zeros(0))
    assert_raises(ValueError, gmm.predict, np.zeros(0), np.zeros(0))
    assert_raises(ValueError, gmm.to_ellipses)
Example #44
import sys
import numpy as np
from gmr.utils import check_random_state
from nose.tools import assert_equal, assert_less, assert_raises
from numpy.testing import assert_array_almost_equal
from io import StringIO
from gmr import GMM, plot_error_ellipses
from test_mvn import AxisStub


random_state = check_random_state(0)

means = np.array([[0.0, 1.0],
                  [2.0, -1.0]])
covariances = np.array([[[0.5, -1.0], [-1.0, 5.0]],
                        [[5.0, 1.0], [1.0, 0.5]]])
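# Draw 50,000 samples from each component and stack them into the training
# set X that is shared by the tests in this module.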
X1 = random_state.multivariate_normal(means[0], covariances[0], size=(50000,))
X2 = random_state.multivariate_normal(means[1], covariances[1], size=(50000,))
X = np.vstack((X1, X2))


def test_estimate_moments():
    """Test moments estimated from samples and sampling from GMM."""
    global X
    global random_state

    gmm = GMM(n_components=2, random_state=random_state)
    gmm.from_samples(X)
    assert_less(np.linalg.norm(gmm.means[0] - means[0]), 0.005)
    assert_less(np.linalg.norm(gmm.covariances[0] - covariances[0]), 0.01)
    assert_less(np.linalg.norm(gmm.means[1] - means[1]), 0.01)
Example #45
"""
Linear Gaussian Models for Regression
=====================================

In this example, we use a MVN to approximate a linear function and a mixture
of MVNs to approximate a nonlinear function. We estimate p(x, y) first and
then we compute the conditional distribution p(y | x).
"""
print(__doc__)

import numpy as np
import matplotlib.pyplot as plt
from gmr.utils import check_random_state
from gmr import MVN, GMM, plot_error_ellipses


random_state = check_random_state(0)

n_samples = 10
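# Noisy samples of the linear function y = 1 - 3 * x.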
X = np.ndarray((n_samples, 2))
X[:, 0] = np.linspace(0, 2 * np.pi, n_samples)
X[:, 1] = 1 - 3 * X[:, 0] + random_state.randn(n_samples)

mvn = MVN(random_state=0)
mvn.from_samples(X)

X_test = np.linspace(0, 2 * np.pi, 100)
mean, covariance = mvn.predict(np.array([0]), X_test[:, np.newaxis])
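# mvn.predict conditions the joint distribution on dimension 0, so the returned
# mean and covariance describe the conditional p(y | x) for each test input.
# The same conditional is available explicitly via mvn.condition, e.g.
# mvn.condition(np.array([0]), np.array([2.0])) yields an MVN whose .mean and
# .covariance are the conditional moments at x = 2.0.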

plt.figure(figsize=(10, 5))

plt.subplot(1, 2, 1)