Example #1
0
 def test_lda_toy(self):
     """Collinear 2-D points: every generated LDA basis is the line direction.

     All four samples lie on the line y = x, so `_generate_bases_LDA` can
     only produce that direction (normalized), possibly with flipped sign.
     """
     requested_n_basis = 7
     scml = SCML_Supervised(n_basis=requested_n_basis)
     points = np.array([[0, 0], [1, 1], [2, 2], [3, 3]])
     labels = np.array([0, 0, 1, 1])
     basis, n_basis = scml._generate_bases_LDA(points, labels)
     # The only attainable unit vector is (1, 1)/sqrt(2), up to orientation,
     # so compare absolute values against a stack of that vector.
     unit_diagonal = np.full((requested_n_basis, 2), 1 / np.sqrt(2))
     assert n_basis == requested_n_basis
     np.testing.assert_allclose(np.abs(basis), unit_diagonal)
Example #2
0
  def test_lda(self, n_samples, n_features, n_classes):
    """Check the number of LDA bases generated by `_generate_bases_LDA`
    for various dataset sizes, class counts and feature counts."""
    X, y = make_classification(n_samples=n_samples, n_classes=n_classes,
                               n_features=n_features, n_informative=n_features,
                               n_redundant=0, n_repeated=0)
    X = StandardScaler().fit_transform(X)

    scml = SCML_Supervised()
    basis, n_basis = scml._generate_bases_LDA(X, y)

    # The basis count is capped at 20 * n_features and at
    # 2 * n_samples * num_eig - 1, whichever is smaller.
    eig_count = min(n_classes - 1, n_features)
    expected = min(20 * n_features, 2 * n_samples * eig_count - 1)
    assert n_basis == expected
    assert basis.shape == (expected, n_features)
Example #3
0
  def test_lda(self, n_samples, n_features, n_classes):
    """
    With n_basis=None, SCML_Supervised should warn that a default was
    selected and produce the expected basis count and shape, for
    different values of n_samples, n_features and n_classes.
    """
    X, y = make_classification(n_samples=n_samples, n_classes=n_classes,
                               n_features=n_features, n_informative=n_features,
                               n_redundant=0, n_repeated=0)
    X = StandardScaler().fit_transform(X)

    expected_msg = "As no value for `n_basis` was selected, "
    with pytest.warns(UserWarning) as warning_records:
      scml = SCML_Supervised(n_basis=None)  # exercise the default-selection path
      basis, n_basis = scml._generate_bases_LDA(X, y)
    assert expected_msg in str(warning_records[0].message)

    # The default basis count is capped at 20 * n_features and at
    # 2 * n_samples * num_eig - 1, whichever is smaller.
    eig_count = min(n_classes - 1, n_features)
    expected_n_basis = min(20 * n_features, 2 * n_samples * eig_count - 1)
    assert n_basis == expected_n_basis
    assert basis.shape == (expected_n_basis, n_features)