import numpy as np

# make_nn_regression ships with sklearn-contrib-lightning; this import path is
# an assumption and may differ between versions.
from lightning.impl.datasets.samples_generator import make_nn_regression


def test_make_nn_regression():
    X, y, w = make_nn_regression(n_samples=10, n_features=50, n_informative=5)
    assert X.shape[0] == 10
    assert X.shape[1] == 50
    assert y.shape[0] == 10
    assert w.shape[0] == 50
    # Only the n_informative features contribute stored (non-zero) entries.
    assert np.sum(X.data != 0) == 10 * 5

    X, y, w = make_nn_regression(n_samples=10, n_features=50, n_informative=50)
    assert np.sum(X.data != 0) == 10 * 50
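The X.data access above implies that make_nn_regression returns a SciPy sparse matrix in which only the informative features have stored entries. A minimal inspection sketch, assuming a CSR return type and the import path used above:

import numpy as np

from lightning.impl.datasets.samples_generator import make_nn_regression

X, y, w = make_nn_regression(n_samples=10, n_features=50, n_informative=5,
                             random_state=0)
X = X.tocsr()                       # assumption: the returned matrix is sparse
print(X.shape, y.shape, w.shape)    # (10, 50) (10,) (50,)
print(np.diff(X.indptr))            # stored entries per row
print((X.data >= 0).all())          # the "nn" data is expected to be non-negative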
Example #2
import numpy as np
# NumPy's assert_equal stands in for the snippet's original test helper.
from numpy.testing import assert_equal

from lightning.impl.datasets.samples_generator import make_nn_regression


def test_make_nn_regression():
    X, y, w = make_nn_regression(n_samples=10, n_features=50, n_informative=5)
    assert_equal(X.shape[0], 10)
    assert_equal(X.shape[1], 50)
    assert_equal(y.shape[0], 10)
    assert_equal(w.shape[0], 50)
    assert_equal(np.sum(X.data != 0), 10 * 5)

    X, y, w = make_nn_regression(n_samples=10, n_features=50, n_informative=50)
    assert_equal(np.sum(X.data != 0), 10 * 50)
Example #3
import numpy as np
from numpy.testing import assert_almost_equal
# lightning import paths assumed as in Example #1.
from lightning.impl.datasets.samples_generator import make_nn_regression
from lightning.regression import SGDRegressor


def test_regression_squared_loss_nn_l2():
    X, y, _ = make_nn_regression(n_samples=100, n_features=10, n_informative=8,
                                 random_state=0)

    # penalty="nnl2": L2 regularization plus a non-negativity constraint on coef_.
    reg = SGDRegressor(loss="squared", penalty="nnl2", learning_rate="constant",
                       eta0=1e-1, alpha=1e-4, random_state=0)

    reg.fit(X, y)
    pred = reg.predict(X)
    assert_almost_equal(np.mean((pred - y) ** 2), 0.033, 3)
    assert_almost_equal(reg.coef_.sum(), 2.131, 3)
    assert not (reg.coef_ < 0).any()  # stands in for the deprecated assert_false
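make_nn_regression also returns the generating weight vector, which the test above throws away as _. A hypothetical follow-up sketch (not part of the original suite) comparing it with the non-negative coefficients the regressor recovers, reusing the exact settings of the test:

import numpy as np

from lightning.impl.datasets.samples_generator import make_nn_regression
from lightning.regression import SGDRegressor

X, y, w = make_nn_regression(n_samples=100, n_features=10, n_informative=8,
                             random_state=0)
reg = SGDRegressor(loss="squared", penalty="nnl2", learning_rate="constant",
                   eta0=1e-1, alpha=1e-4, random_state=0)
reg.fit(X, y)

coef = reg.coef_.ravel()
print((coef >= 0).all())              # the constraint keeps every coefficient >= 0
# Rough support-recovery check; the 1e-3 threshold is an arbitrary illustrative choice.
print(np.sum((coef > 1e-3) & (w > 0)), np.sum(w > 0))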
Example #4
import numpy as np

# lightning import paths assumed as in the examples above.
from lightning.impl.datasets.samples_generator import make_nn_regression
from lightning.regression import SGDRegressor


def test_regression_squared_loss_nn_l1():
    X, y, _ = make_nn_regression(n_samples=100,
                                 n_features=10,
                                 n_informative=8,
                                 random_state=0)

    # alpha=0 and a vanishingly small alpha should give the same fit,
    # so the same error value is checked for both.
    for alpha in (0, 1e-6):
        reg = SGDRegressor(loss="squared",
                           penalty="nn",
                           learning_rate="constant",
                           eta0=1e-1,
                           alpha=alpha,
                           random_state=0)

        reg.fit(X, y)
        pred = reg.predict(X)
        np.testing.assert_almost_equal(np.mean((pred - y) ** 2), 0.016, 3)
        assert (reg.coef_ >= 0).all()
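With squared loss, penalty="nn" and a (near-)zero alpha, the problem this test solves is essentially non-negative least squares, so SciPy's exact NNLS solver makes a handy cross-check. A minimal sketch; the SGD solution only approximates the exact optimum:

import numpy as np
from scipy.optimize import nnls

from lightning.impl.datasets.samples_generator import make_nn_regression
from lightning.regression import SGDRegressor

X, y, _ = make_nn_regression(n_samples=100, n_features=10, n_informative=8,
                             random_state=0)
reg = SGDRegressor(loss="squared", penalty="nn", learning_rate="constant",
                   eta0=1e-1, alpha=0, random_state=0)
reg.fit(X, y)

w_exact, _ = nnls(X.toarray(), y)            # exact non-negative least-squares weights
print(np.mean((X @ w_exact - y) ** 2))       # MSE of the exact NNLS solution
print(np.mean((reg.predict(X) - y) ** 2))    # ~0.016 for the SGD fit, per the test above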


def reg_nn_train_data():
    """Return the same non-negative regression data the tests above generate."""
    X, y, _ = make_nn_regression(n_samples=100,
                                 n_features=10,
                                 n_informative=8,
                                 random_state=0)
    return X, y
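reg_nn_train_data looks like a shared data helper (any pytest fixture wiring is not shown here). A hypothetical extra test built on top of it, assuming the same imports and estimator settings as the examples above:

def test_nn_coef_non_negative():
    # Hypothetical test, not from the original suite: the non-negative penalty
    # must also hold for the shared training data.
    X, y = reg_nn_train_data()
    reg = SGDRegressor(loss="squared", penalty="nnl2", learning_rate="constant",
                       eta0=1e-1, alpha=1e-4, random_state=0)
    reg.fit(X, y)
    assert (reg.coef_ >= 0).all()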