Example #1
def test_shuffle_with_random_state():
    gm_1 = GaussianMixture(
        20,
        centers,
        covariances,
        class_probs,
        random_state=42,
        shuffle=True,
        shuffle_random_state=42,
    )
    gm_1.sample_views("poly")
    Xs_1, y_1 = gm_1.get_Xy()
    gm_2 = GaussianMixture(
        20,
        centers,
        covariances,
        class_probs,
        random_state=42,
        shuffle=True,
        shuffle_random_state=42,
    )
    gm_2.sample_views("poly")
    Xs_2, y_2 = gm_2.get_Xy()
    for view1, view2 in zip(Xs_1, Xs_2):
        assert np.allclose(view1, view2)
    assert np.allclose(y_1, y_2)
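The tests in this listing reference fixtures (centers, covariances, class_probs, mu, sigma, n) defined elsewhere in the test module. A minimal sketch of plausible definitions, assuming GaussianMixture comes from mvlearn and using hypothetical values; note that the examples mix two positional orders, (n_samples, centers, covariances, ...) and (mu, sigma, n_samples, ...), which appear to come from different versions of the API:

import numpy as np
import pytest
from mvlearn.datasets import GaussianMixture

n = 100                                       # hypothetical sample count
centers = mu = [[-1, 0], [1, 0]]              # two component means
covariances = sigma = [np.eye(2), np.eye(2)]  # identity covariances
class_probs = [0.5, 0.5]                      # equal mixture weights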
Example #2
def test_random_state():
    gm_1 = GaussianMixture(mu, sigma, 10, class_probs, random_state=42)
    gm_1.sample_views('poly')
    Xs_1, y_1 = gm_1.get_Xy()
    gm_2 = GaussianMixture(mu, sigma, 10, class_probs, random_state=42)
    gm_2.sample_views('poly')
    Xs_2, y_2 = gm_2.get_Xy()
    for view1, view2 in zip(Xs_1, Xs_2):
        assert np.allclose(view1, view2)
    assert np.allclose(y_1, y_2)
Example #3
def test_noise_dims_not_same_but_reproducible():
    gm_1 = GaussianMixture(mu, sigma, 20, class_probs, random_state=42)
    gm_1.sample_views('poly', n_noise=2)
    Xs_1, y_1 = gm_1.get_Xy()
    view1_noise, view2_noise = Xs_1[0][:, -2:], Xs_1[1][:, -2:]
    assert not np.allclose(view1_noise, view2_noise)
    gm_2 = GaussianMixture(mu, sigma, 20, class_probs, random_state=42)
    gm_2.sample_views('poly', n_noise=2)
    Xs_2, y_2 = gm_2.get_Xy()
    view1_noise2, view2_noise2 = Xs_2[0][:, -2:], Xs_2[1][:, -2:]
    assert np.allclose(view1_noise, view1_noise2)
    assert np.allclose(view2_noise, view2_noise2)
Example #4
def test_noise_dims_not_same_but_reproducible():
    gm_1 = GaussianMixture(20,
                           centers,
                           covariances,
                           class_probs,
                           random_state=42)
    gm_1.sample_views("poly", n_noise=2)
    Xs_1, _ = gm_1.get_Xy()
    view1_noise, view2_noise = Xs_1[0][:, -2:], Xs_1[1][:, -2:]
    assert not np.allclose(view1_noise, view2_noise)
    gm_2 = GaussianMixture(20,
                           centers,
                           covariances,
                           class_probs,
                           random_state=42)
    gm_2.sample_views("poly", n_noise=2)
    Xs_2, _ = gm_2.get_Xy()
    view1_noise2, view2_noise2 = Xs_2[0][:, -2:], Xs_2[1][:, -2:]
    assert np.allclose(view1_noise, view1_noise2)
    assert np.allclose(view2_noise, view2_noise2)
Example #5
def test_shuffle():
    np.random.seed(42)
    gm_1 = GaussianMixture(mu,
                           sigma,
                           20,
                           class_probs,
                           random_state=42,
                           shuffle=True,
                           shuffle_random_state=42)
    gm_1.sample_views('poly')
    Xs_1, y_1 = gm_1.get_Xy()
    np.random.seed(30)
    gm_2 = GaussianMixture(mu,
                           sigma,
                           20,
                           class_probs,
                           random_state=42,
                           shuffle=True,
                           shuffle_random_state=10)
    gm_2.sample_views('poly')
    Xs_2, y_2 = gm_2.get_Xy()
    for view1, view2 in zip(Xs_1, Xs_2):
        assert not np.allclose(view1, view2)
    assert not np.allclose(y_1, y_2)
Example #6
def test_no_sample():
    gaussm = GaussianMixture(mu, sigma, n, class_probs)
    with pytest.raises(NameError):
        gaussm.get_Xy()
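The test above encodes the contract that get_Xy raises before any views have been sampled. For contrast, a minimal sketch of the working call order (same hypothetical fixtures as above):

gaussm = GaussianMixture(mu, sigma, n, class_probs)
gaussm.sample_views('poly')  # views must be sampled first
Xs, y = gaussm.get_Xy()      # now returns the sampled views and labels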
Example #7
# Latent variables are sampled from two multivariate Gaussians with equal
# prior probability. Then a polynomial transformation is applied and noise is
# added independently to both the transformed and untransformed latents.

n_samples = 100
centers = [[0, 1], [0, -1]]
covariances = [np.eye(2), np.eye(2)]
gm = GaussianMixture(n_samples,
                     centers,
                     covariances,
                     random_state=42,
                     shuffle=True,
                     shuffle_random_state=42)
gm = gm.sample_views(transform='poly', n_noise=2)

latent, y = gm.get_Xy(latents=True)
Xs, _ = gm.get_Xy(latents=False)

# The latent data is plotted against itself to reveal the underlying
# distribution.

crossviews_plot([latent, latent],
                labels=y,
                title='Latent Variable',
                equal_axes=True)

# The noisy latent variable (view 1) is plotted against the transformed latent
# variable (view 2), an example of a dataset with two views.

crossviews_plot(Xs,
                labels=y,
                equal_axes=True)
Example #8
def test_no_sample():
    gaussm = GaussianMixture(n_samples, centers, covariances, class_probs)
    with pytest.raises(NameError):
        gaussm.get_Xy()
Example #9
# Train
gm_train = GaussianMixture(n_samples, centers, covariances)

# Test
gm_test = GaussianMixture(n_samples, centers, covariances)

# Make 2 views
n_noise = 2
transforms = ['linear', 'poly', 'sin']

Xs_train = []
Xs_test = []
for transform in transforms:
    gm_train.sample_views(transform=transform, n_noise=n_noise)
    gm_test.sample_views(transform=transform, n_noise=n_noise)

    Xs_train.append(gm_train.get_Xy()[0])
    Xs_test.append(gm_test.get_Xy()[0])

# Plotting parameters
labels = gm_test.latent_[:, 0]
cmap = 'coolwarm'

method_labels = \
    ['Raw Views', 'Linear KCCA', 'Polynomial KCCA', 'Gaussian KCCA', 'DCCA']
transform_labels = \
    ['Linear Transform', 'Polynomial Transform', 'Sinusoidal Transform']

input_size1, input_size2 = Xs_train[0][0].shape[1], Xs_train[0][1].shape[1]
outdim_size = min(Xs_train[0][0].shape[1], 2)
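The sizes computed above feed a deep CCA model (DCCA appears in method_labels). A minimal sketch of that step, assuming mvlearn's DCCA embedder; the layer widths are hypothetical:

from mvlearn.embed import DCCA

layer_sizes1 = [1024, outdim_size]  # hypothetical hidden-layer widths, view 1
layer_sizes2 = [1024, outdim_size]  # hypothetical hidden-layer widths, view 2
dcca = DCCA(input_size1, input_size2, n_components=outdim_size,
            layer_sizes1=layer_sizes1, layer_sizes2=layer_sizes2)
dcca.fit(Xs_train[0])                        # train on the first transform's view pair
dcca_embedding = dcca.transform(Xs_test[0])  # embed the matching test views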
Example #10
# Latent variables are sampled from two multivariate Gaussians with equal
# prior probability. Then a polynomial transformation is applied and noise is
# added independently to both the transformed and untransformed latents.

n_samples = 2000
means = [[0, 1], [0, -1]]
covariances = [np.eye(2), np.eye(2)]
gm = GaussianMixture(n_samples,
                     means,
                     covariances,
                     random_state=42,
                     shuffle=True,
                     shuffle_random_state=42)
latent, y = gm.get_Xy(latents=True)

# Plot the latent data against itself to reveal the underlying distribution.
crossviews_plot([latent, latent],
                labels=y,
                title='Latent Variable',
                equal_axes=True)

# Split data into train and test sets
Xs, y = gm.sample_views(transform='poly', n_noise=2).get_Xy()
X1_train, X1_test, X2_train, X2_test, y_train, y_test = train_test_split(
    *Xs, y, test_size=0.3, random_state=42)
Xs_train = [X1_train, X2_train]
Xs_test = [X1_test, X2_test]

# Plot the testing data after polynomial transformation
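A plausible completion of this final step, mirroring the earlier crossviews_plot calls (the title is hypothetical):

crossviews_plot(Xs_test,
                labels=y_test,
                title='Testing Data, Polynomial Transform',
                equal_axes=True)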