Example #1
def test_regression_squared_loss():
    X, y = make_regression(n_samples=100, n_features=10, n_informative=8, random_state=0)
    reg = SGDRegressor(loss="squared", penalty="l2", learning_rate="constant", eta0=1e-2, random_state=0)

    reg.fit(X, y)
    pred = reg.predict(X)
    assert_almost_equal(np.mean((pred - y) ** 2), 4.913, 3)
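These snippets look like successive revisions of the SGD regression tests from the lightning (scikit-learn-contrib) project. None of them shows its imports; a minimal preamble that would make them runnable might look like the sketch below, where the lightning module paths are assumptions based on that project's layout:

import numpy as np
from numpy.testing import assert_almost_equal
from sklearn.datasets import make_regression

# Assumed lightning (scikit-learn-contrib) import paths:
from lightning.regression import SGDRegressor
from lightning.impl.datasets.samples_generator import make_nn_regression

# assert_false came from the long-removed sklearn.utils.testing module;
# under pytest, `assert not (reg.coef_ < 0).any()` is the idiomatic form.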
Example #2
def test_regression_squared_loss():
    X, y = make_regression(n_samples=100, n_features=10, n_informative=8,
                           random_state=0)
    reg = SGDRegressor(loss="squared", penalty="l2", learning_rate="constant",
                       eta0=1e-2, random_state=0)

    reg.fit(X, y)
    pred = reg.predict(X)
    assert_almost_equal(np.mean((pred - y) ** 2), 4.913, 3)
Example #3
def test_regression_squared_loss_multiple_output():
    X, y = make_regression(n_samples=100, n_features=10, n_informative=8, random_state=0)
    reg = SGDRegressor(loss="squared", penalty="l2", learning_rate="constant", eta0=1e-2, random_state=0, max_iter=10)
    Y = np.zeros((len(y), 2))
    Y[:, 0] = y
    Y[:, 1] = y
    reg.fit(X, Y)
    pred = reg.predict(X)
    assert_almost_equal(np.mean((pred - Y) ** 2), 4.541, 3)
Example #4
def test_regression_squared_loss_nn_l2():
    X, y, _ = make_nn_regression(n_samples=100, n_features=10, n_informative=8, random_state=0)

    reg = SGDRegressor(loss="squared", penalty="nnl2", learning_rate="constant", eta0=1e-1, alpha=1e-4, random_state=0)

    reg.fit(X, y)
    pred = reg.predict(X)
    assert_almost_equal(np.mean((pred - y) ** 2), 0.033, 3)
    assert_almost_equal(reg.coef_.sum(), 2.131, 3)
    assert_false((reg.coef_ < 0).any())
Example #5
def test_regression_squared_loss(reg_train_data):
    X, y = reg_train_data
    reg = SGDRegressor(loss="squared",
                       penalty="l2",
                       learning_rate="constant",
                       eta0=1e-2,
                       random_state=0)

    reg.fit(X, y)
    pred = reg.predict(X)
    np.testing.assert_almost_equal(np.mean((pred - y)**2), 4.749, 3)
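This variant relies on a reg_train_data pytest fixture that is not shown here. A plausible sketch, assuming it simply wraps the make_regression call used inline in Examples #1 through #3 (the differing expected MSE, 4.749 vs. 4.913, suggests other defaults also changed between library versions, so treat this only as an illustration):

import pytest
from sklearn.datasets import make_regression

@pytest.fixture
def reg_train_data():
    # Hypothetical fixture: mirrors the inline data generation in Examples #1-#3.
    X, y = make_regression(n_samples=100, n_features=10, n_informative=8,
                           random_state=0)
    return X, y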
Example #6
def test_regression_squared_loss_multiple_output():
    X, y = make_regression(n_samples=100, n_features=10, n_informative=8,
                           random_state=0)
    reg = SGDRegressor(loss="squared", penalty="l2", learning_rate="constant",
                       eta0=1e-2, random_state=0, max_iter=10)
    Y = np.zeros((len(y), 2))
    Y[:, 0] = y
    Y[:, 1] = y
    reg.fit(X, Y)
    pred = reg.predict(X)
    assert_almost_equal(np.mean((pred - Y) ** 2), 4.541, 3)
Example #7
def test_regression_squared_loss_nn_l2():
    X, y, _ = make_nn_regression(n_samples=100, n_features=10, n_informative=8,
                                 random_state=0)

    reg = SGDRegressor(loss="squared", penalty="nnl2", learning_rate="constant",
                       eta0=1e-1, alpha=1e-4, random_state=0)

    reg.fit(X, y)
    pred = reg.predict(X)
    assert_almost_equal(np.mean((pred - y) ** 2), 0.033, 3)
    assert_almost_equal(reg.coef_.sum(), 2.131, 3)
    assert_false((reg.coef_ < 0).any())
Example #8
def test_regression_squared_loss_nn_l1(reg_nn_train_data, alpha):
    X, y = reg_nn_train_data
    reg = SGDRegressor(loss="squared",
                       penalty="nn",
                       learning_rate="constant",
                       eta0=1e-1,
                       alpha=alpha,
                       random_state=0)

    reg.fit(X, y)
    pred = reg.predict(X)
    np.testing.assert_almost_equal(np.mean((pred - y)**2), 0.016, 3)
    assert (reg.coef_ >= 0).all()
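Here alpha arrives as a test parameter and reg_nn_train_data as a fixture, and neither is shown. A plausible scaffold, assuming pytest parametrization that mirrors the explicit loop in Example #10 below:

import pytest
from lightning.impl.datasets.samples_generator import make_nn_regression  # assumed path

@pytest.fixture
def reg_nn_train_data():
    # Hypothetical fixture mirroring the make_nn_regression call in Example #10.
    X, y, _ = make_nn_regression(n_samples=100, n_features=10, n_informative=8,
                                 random_state=0)
    return X, y

# The alpha values would then come from a decorator on the test itself:
# @pytest.mark.parametrize("alpha", [0, 1e-6])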
Example #9
def test_regression_squared_loss_multiple_output(reg_train_data):
    X, y = reg_train_data
    reg = SGDRegressor(loss="squared",
                       penalty="l2",
                       learning_rate="constant",
                       eta0=1e-2,
                       random_state=0,
                       max_iter=10)
    Y = np.zeros((len(y), 2))
    Y[:, 0] = y
    Y[:, 1] = y
    reg.fit(X, Y)
    pred = reg.predict(X)
    np.testing.assert_almost_equal(np.mean((pred - Y)**2), 4.397, 3)
Example #10
def test_regression_squared_loss_nn_l1():
    X, y, _ = make_nn_regression(n_samples=100,
                                 n_features=10,
                                 n_informative=8,
                                 random_state=0)

    for alpha in (0, 1e-6):
        reg = SGDRegressor(loss="squared",
                           penalty="nn",
                           learning_rate="constant",
                           eta0=1e-1,
                           alpha=alpha,
                           random_state=0)

        reg.fit(X, y)
        pred = reg.predict(X)
        np.testing.assert_almost_equal(np.mean((pred - y)**2), 0.016, 3)
        assert (reg.coef_ >= 0).all()
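Assuming the fixture-based variants are collected into a single test module (say test_sgd.py, a hypothetical filename) together with the fixtures sketched above, they can be run in the usual way:

python -m pytest test_sgd.py -v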