Example no. 1
0
def test_regression_squared_loss():
    """Squared-loss SGD with a constant learning rate reaches the reference MSE."""
    X, y = make_regression(n_samples=100, n_features=10, n_informative=8,
                           random_state=0)
    estimator = SGDRegressor(loss="squared", penalty="l2",
                             learning_rate="constant", eta0=1e-2,
                             random_state=0)
    estimator.fit(X, y)
    residuals = estimator.predict(X) - y
    # Training MSE pinned to the reference value (3 decimal places).
    assert_almost_equal(np.mean(residuals ** 2), 4.913, 3)
Example no. 2
0
def test_regression_squared_loss_multiple_output():
    """Multi-output fit: two identical target columns reach the reference MSE."""
    X, y = make_regression(n_samples=100, n_features=10, n_informative=8,
                           random_state=0)
    estimator = SGDRegressor(loss="squared", penalty="l2",
                             learning_rate="constant", eta0=1e-2,
                             random_state=0, max_iter=10)
    # Duplicate the single target into two identical output columns to
    # exercise the multi-output code path.
    targets = np.column_stack((y, y))
    estimator.fit(X, targets)
    residuals = estimator.predict(X) - targets
    assert_almost_equal(np.mean(residuals ** 2), 4.541, 3)
Example no. 3
0
def test_regression_squared_loss_nn_l2():
    """Non-negative L2 penalty: check MSE, coefficient sum, and sign constraint."""
    X, y, _ = make_nn_regression(n_samples=100, n_features=10,
                                 n_informative=8, random_state=0)
    estimator = SGDRegressor(loss="squared", penalty="nnl2",
                             learning_rate="constant", eta0=1e-1,
                             alpha=1e-4, random_state=0)
    estimator.fit(X, y)
    residuals = estimator.predict(X) - y
    assert_almost_equal(np.mean(residuals ** 2), 0.033, 3)
    assert_almost_equal(estimator.coef_.sum(), 2.131, 3)
    # The non-negativity constraint must leave no coefficient below zero.
    assert_false((estimator.coef_ < 0).any())
Example no. 4
0
def test_regression_squared_loss():
    """Constant-rate squared-loss SGD should hit the reference training MSE."""
    X, y = make_regression(
        n_samples=100, n_features=10, n_informative=8, random_state=0)
    model = SGDRegressor(loss="squared", penalty="l2",
                         learning_rate="constant", eta0=1e-2, random_state=0)
    model.fit(X, y)
    error = model.predict(X) - y
    # Reference MSE checked to 3 decimal places.
    assert_almost_equal(np.mean(error ** 2), 4.913, 3)
Example no. 5
0
def test_regression_squared_loss_multiple_output():
    """Fitting two copies of the target column reaches the reference MSE."""
    X, y = make_regression(
        n_samples=100, n_features=10, n_informative=8, random_state=0)
    model = SGDRegressor(loss="squared", penalty="l2",
                         learning_rate="constant", eta0=1e-2,
                         random_state=0, max_iter=10)
    # Stack the target twice so the regressor sees a (n_samples, 2) output.
    stacked = np.repeat(y[:, np.newaxis], 2, axis=1)
    model.fit(X, stacked)
    error = model.predict(X) - stacked
    assert_almost_equal(np.mean(error ** 2), 4.541, 3)
Example no. 6
0
def test_regression_squared_loss_nn_l2():
    """The "nnl2" penalty must keep every fitted coefficient non-negative."""
    X, y, _ = make_nn_regression(
        n_samples=100, n_features=10, n_informative=8, random_state=0)
    model = SGDRegressor(loss="squared", penalty="nnl2",
                         learning_rate="constant", eta0=1e-1,
                         alpha=1e-4, random_state=0)
    model.fit(X, y)
    error = model.predict(X) - y
    assert_almost_equal(np.mean(error ** 2), 0.033, 3)
    assert_almost_equal(model.coef_.sum(), 2.131, 3)
    # No coefficient may go negative under the non-negativity constraint.
    assert_false((model.coef_ < 0).any())