Example #1
def test_shuffle_with_random_state():
    gm_1 = GaussianMixture(
        20,
        centers,
        covariances,
        class_probs,
        random_state=42,
        shuffle=True,
        shuffle_random_state=42,
    )
    gm_1.sample_views("poly")
    Xs_1, y_1 = gm_1.get_Xy()
    gm_2 = GaussianMixture(
        20,
        centers,
        covariances,
        class_probs,
        random_state=42,
        shuffle=True,
        shuffle_random_state=42,
    )
    gm_2.sample_views("poly")
    Xs_2, y_2 = gm_2.get_Xy()
    for view1, view2 in zip(Xs_1, Xs_2):
        assert np.allclose(view1, view2)
    assert np.allclose(y_1, y_2)
Example #2
def test_random_state():
    gm_1 = GaussianMixture(mu, sigma, 10, class_probs, random_state=42)
    gm_1.sample_views('poly')
    Xs_1, y_1 = gm_1.get_Xy()
    gm_2 = GaussianMixture(mu, sigma, 10, class_probs, random_state=42)
    gm_2.sample_views('poly')
    Xs_2, y_2 = gm_2.get_Xy()
    for view1, view2 in zip(Xs_1, Xs_2):
        assert np.allclose(view1, view2)
    assert np.allclose(y_1, y_2)
Example #3
def test_noise_dims_not_same_but_reproducible():
    gm_1 = GaussianMixture(mu, sigma, 20, class_probs, random_state=42)
    gm_1.sample_views('poly', n_noise=2)
    Xs_1, _ = gm_1.get_Xy()
    view1_noise, view2_noise = Xs_1[0][:, -2:], Xs_1[1][:, -2:]
    assert not np.allclose(view1_noise, view2_noise)
    gm_2 = GaussianMixture(mu, sigma, 20, class_probs, random_state=42)
    gm_2.sample_views('poly', n_noise=2)
    Xs_2, _ = gm_2.get_Xy()
    view1_noise2, view2_noise2 = Xs_2[0][:, -2:], Xs_2[1][:, -2:]
    assert np.allclose(view1_noise, view1_noise2)
    assert np.allclose(view2_noise, view2_noise2)
Example #4
def test_bad_shapes():
    ## Wrong Length
    with pytest.raises(ValueError):
        GaussianMixture([1], sigma, n)
    ## Inconsistent dimension
    with pytest.raises(ValueError):
        GaussianMixture(mu, [np.eye(2), np.eye(3)], n, class_probs)
    ## Wrong uni dimensions
    with pytest.raises(ValueError):
        GaussianMixture([1, 0], [1, 0], n)
    ## Wrong multi sizes
    with pytest.raises(ValueError):
        GaussianMixture(mu, sigma, n, class_probs=[0.3, 0.1, 0.6])
Example #5
def test_bad_shapes():
    ## Wrong Length
    with pytest.raises(ValueError):
        GaussianMixture(n_samples, [1], covariances)
    ## Inconsistent dimension
    with pytest.raises(ValueError):
        GaussianMixture(n_samples, centers, [np.eye(2), np.eye(3)],
                        class_probs)
    ## Wrong uni dimensions
    with pytest.raises(ValueError):
        GaussianMixture(n_samples, [1, 0], [1, 0])
    ## Wrong multi sizes
    with pytest.raises(ValueError):
        GaussianMixture(n_samples,
                        centers,
                        covariances,
                        class_probs=[0.3, 0.1, 0.6])
Example #6
def test_noise_dims_not_same_but_reproducible():
    gm_1 = GaussianMixture(20,
                           centers,
                           covariances,
                           class_probs,
                           random_state=42)
    gm_1.sample_views("poly", n_noise=2)
    Xs_1, _ = gm_1.get_Xy()
    view1_noise, view2_noise = Xs_1[0][:, -2:], Xs_1[1][:, -2:]
    assert not np.allclose(view1_noise, view2_noise)
    gm_2 = GaussianMixture(20,
                           centers,
                           covariances,
                           class_probs,
                           random_state=42)
    gm_2.sample_views("poly", n_noise=2)
    Xs_2, _ = gm_2.get_Xy()
    view1_noise2, view2_noise2 = Xs_2[0][:, -2:], Xs_2[1][:, -2:]
    assert np.allclose(view1_noise, view1_noise2)
    assert np.allclose(view2_noise, view2_noise2)
Example #7
def test_shuffle():
    np.random.seed(42)
    gm_1 = GaussianMixture(mu,
                           sigma,
                           20,
                           class_probs,
                           random_state=42,
                           shuffle=True,
                           shuffle_random_state=42)
    gm_1.sample_views('poly')
    Xs_1, y_1 = gm_1.get_Xy()
    np.random.seed(30)
    gm_2 = GaussianMixture(mu,
                           sigma,
                           20,
                           class_probs,
                           random_state=42,
                           shuffle=True,
                           shuffle_random_state=10)
    gm_2.sample_views('poly')
    Xs_2, y_2 = gm_2.get_Xy()
    for view1, view2 in zip(Xs_1, Xs_2):
        assert not np.allclose(view1, view2)
    assert not np.allclose(y_1, y_2)
Example #8
def test_no_sample():
    gaussm = GaussianMixture(mu, sigma, n, class_probs)
    with pytest.raises(NameError):
        gaussm.get_Xy()
Example #9
def test_bad_class_probs():
    with pytest.raises(ValueError):
        GaussianMixture(mu, sigma, n, class_probs=[0.3, 0.4])
Example #10
import pytest
from mvlearn.datasets import GaussianMixture
from numpy.testing import assert_equal
import numpy as np

n = 100
gm_uni = GaussianMixture([0, 1], np.eye(2), n)
mu = [[-1, 0], [1, 0]]
sigma = [[[1, 0], [0, 1]], [[1, 0], [1, 2]]]
class_probs = [0.3, 0.7]
gm_multi = GaussianMixture(mu, sigma, n, class_probs)


def test_multivariate():
    latents, _ = gm_multi.get_Xy(latents=True)
    assert_equal(n, len(latents))
    assert_equal(len(mu[0]), latents.shape[1])


def test_class_probs():
    _, y = gm_multi.get_Xy(latents=True)
    for i, p in enumerate(class_probs):
        assert_equal(int(p * n), list(y).count(i))


def test_transforms():
    transforms = ['linear', 'poly', 'sin', lambda x: 2 * x + 1]
    for transform in transforms:
        gm_uni.sample_views(transform, n_noise=2)
        assert_equal(len(gm_uni.get_Xy()[0]), 2)
        assert_equal(gm_uni.get_Xy()[0][0].shape, (n, 4))
Example #11
"""

from mvlearn.datasets import GaussianMixture
from mvlearn.plotting import crossviews_plot
import numpy as np

# Latent variables are sampled from two multivariate Gaussians with equal
# prior probability. Then a polynomial transformation is applied and noise is
# added independently to both the transformed and untransformed latents.

n_samples = 100
centers = [[0, 1], [0, -1]]
covariances = [np.eye(2), np.eye(2)]
gm = GaussianMixture(n_samples,
                     centers,
                     covariances,
                     random_state=42,
                     shuffle=True,
                     shuffle_random_state=42)
gm = gm.sample_views(transform='poly', n_noise=2)

latent, y = gm.get_Xy(latents=True)
Xs, _ = gm.get_Xy(latents=False)

# The latent data is plotted against itself to reveal the underlying
# distribution.

crossviews_plot([latent, latent],
                labels=y,
                title='Latent Variable',
                equal_axes=True)
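
# The transformed views can be inspected the same way; a minimal sketch
# reusing only the objects defined above:
crossviews_plot(Xs,
                labels=y,
                title='Transformed Views',
                equal_axes=True)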
Example #12
def test_no_sample():
    gaussm = GaussianMixture(n_samples, centers, covariances, class_probs)
    with pytest.raises(NameError):
        gaussm.get_Xy()
Example #13
def test_bad_class_probs():
    with pytest.raises(ValueError):
        GaussianMixture(n_samples,
                        centers,
                        covariances,
                        class_probs=[0.3, 0.4])
Example #14
import pytest
from mvlearn.datasets import GaussianMixture
from numpy.testing import assert_equal
import numpy as np

n_samples = 100
gm_uni = GaussianMixture(n_samples, [0, 1], np.eye(2))
centers = [[-1, 0], [1, 0]]
covariances = [[[1, 0], [0, 1]], [[1, 0], [1, 2]]]
class_probs = [0.3, 0.7]
gm_multi = GaussianMixture(n_samples, centers, covariances, class_probs)


def test_multivariate():
    latents, _ = gm_multi.get_Xy(latents=True)
    assert_equal(n_samples, len(latents))
    assert_equal(len(centers[0]), latents.shape[1])


def test_class_probs():
    _, y = gm_multi.get_Xy(latents=True)
    for i, p in enumerate(class_probs):
        assert_equal(int(p * n_samples), list(y).count(i))


def test_transforms():
    transforms = ["linear", "poly", "sin", lambda x: 2 * x + 1]
    for transform in transforms:
        gm_uni.sample_views(transform, n_noise=2)
        assert_equal(len(gm_uni.get_Xy()[0]), 2)
        assert_equal(gm_uni.get_Xy()[0][0].shape, (n_samples, 4))
Example #15

from mvlearn.embed import KCCA, DCCA
from mvlearn.datasets import GaussianMixture
import numpy as np
import matplotlib.pyplot as plt
import matplotlib
import seaborn as sns

# Make Latents
n_samples = 200
centers = [[0, 1], [0, -1]]
covariances = 2 * np.array([np.eye(2), np.eye(2)])
gm_train = GaussianMixture(n_samples, centers, covariances)

# Test
gm_test = GaussianMixture(n_samples, centers, covariances)

# Make 2 views
n_noise = 2
transforms = ['linear', 'poly', 'sin']

Xs_train = []
Xs_test = []
for transform in transforms:
    gm_train.sample_views(transform=transform, n_noise=n_noise)
    gm_test.sample_views(transform=transform, n_noise=n_noise)

    Xs_train.append(gm_train.get_Xy()[0])
    Xs_test.append(gm_test.get_Xy()[0])
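
# A minimal sketch of a continuation, assuming KCCA follows the
# scikit-learn-style fit/transform convention and accepts an
# `n_components` parameter (an assumption, not shown above):
Xs_embedded = []
for Xs_tr, Xs_te in zip(Xs_train, Xs_test):
    kcca = KCCA(n_components=2)
    kcca.fit(Xs_tr)
    Xs_embedded.append(kcca.transform(Xs_te))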
Example #16
"""
============================================
Plotting multiview data with crossviews_plot
============================================

In many cases with multi-view data, especially after use of an embedding
algorithm, one is interested in visualizing two views across dimensions.
One use is assessing correlation between corresponding dimensions of views.
Here, we use this function to display the relationship between two views
simulated from transformations of multivariate Gaussians.

"""

from mvlearn.datasets import GaussianMixture
from mvlearn.plotting import crossviews_plot
import numpy as np


n_samples = 100
centers = [[0, 1], [0, -1]]
covariances = [np.eye(2), np.eye(2)]
GM = GaussianMixture(n_samples, centers, covariances, shuffle=True)
GM = GM.sample_views(transform='poly', n_noise=2)

# Below, we see that the first two dimensions are related by a degree 2
# polynomial while the latter two dimensions are uncorrelated.


crossviews_plot(GM.Xs_,
                labels=GM.y_,
                title='View 1 vs. View 2 (Polynomial Transform + noise)',
                equal_axes=True)
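
# A minimal follow-up sketch, reusing the API shown in the examples above:
# retrieve the underlying latents and cross-plot them for comparison with
# the transformed views.
latent, y = GM.get_Xy(latents=True)
crossviews_plot([latent, latent],
                labels=y,
                title='Latent Variable',
                equal_axes=True)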