Example #1
def test_multiclass_hinge_sgd():
    for data in (mult_dense, mult_csr):
        for fit_intercept in (True, False):
            clf = SGDClassifier(loss="hinge", multiclass=True,
                                fit_intercept=fit_intercept, random_state=0)
            clf.fit(data, mult_target)
            assert_greater(clf.score(data, mult_target), 0.78)
Example #2
def test_multiclass_hinge_sgd_l1l2():
    for data in (mult_dense, mult_csr):
        clf = SGDClassifier(loss="hinge",
                            penalty="l1/l2",
                            multiclass=True,
                            random_state=0)
        clf.fit(data, mult_target)
        assert_greater(clf.score(data, mult_target), 0.75)
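Example #3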
def test_multiclass_hinge_sgd_l1l2(data, request):
    X, y = request.getfixturevalue(data)
    clf = SGDClassifier(loss="hinge",
                        penalty="l1/l2",
                        multiclass=True,
                        random_state=0)
    clf.fit(X, y)
    assert clf.score(X, y) > 0.75
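Example #4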
def test_multiclass_hinge_sgd(data, fit_intercept, request):
    X, y = request.getfixturevalue(data)
    clf = SGDClassifier(loss="hinge",
                        multiclass=True,
                        fit_intercept=fit_intercept,
                        random_state=0)
    clf.fit(X, y)
    assert clf.score(X, y) > 0.78
Example #5
def test_multiclass_squared_hinge_sgd():
    for data in (mult_dense, mult_csr):
        for fit_intercept in (True, False):
            clf = SGDClassifier(loss="squared_hinge", multiclass=True,
                                learning_rate="constant", eta0=1e-3,
                                fit_intercept=fit_intercept, random_state=0)
            clf.fit(data, mult_target)
            assert_greater(clf.score(data, mult_target), 0.78)
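Example #6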
def test_multiclass_squared_hinge_sgd(data, fit_intercept, request):
    X, y = request.getfixturevalue(data)
    clf = SGDClassifier(loss="squared_hinge",
                        multiclass=True,
                        learning_rate="constant",
                        eta0=1e-3,
                        fit_intercept=fit_intercept,
                        random_state=0)
    clf.fit(X, y)
    assert clf.score(X, y) > 0.78
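Examples #3, #4, and #6 above are pytest-parametrized rewrites of the loop-based tests, but the decorators and fixtures they depend on are not shown in this excerpt. Below is a minimal sketch of what that wiring presumably looks like, following the pattern of Example #15; the fixture names (mult_dense_train_data, mult_sparse_train_data), the dataset parameters, and the lightning.classification import are assumptions, not taken from the source.

import pytest
import scipy.sparse as sp
from sklearn.datasets import make_classification

# Assumed import path: the multiclass=True / penalty="l1/l2" /
# learning_rate="pegasos" options match lightning's SGDClassifier API.
from lightning.classification import SGDClassifier


@pytest.fixture(scope="module")
def mult_dense_train_data():
    # Hypothetical fixture: a small three-class problem standing in for the
    # mult_dense / mult_target globals used by the loop-based examples.
    X, y = make_classification(n_samples=300, n_features=10, n_informative=5,
                               n_classes=3, random_state=0)
    return X, y


@pytest.fixture(scope="module")
def mult_sparse_train_data(mult_dense_train_data):
    # Hypothetical fixture: CSR view of the dense training data.
    X, y = mult_dense_train_data
    return sp.csr_matrix(X), y


@pytest.mark.parametrize("data",
                         ["mult_dense_train_data", "mult_sparse_train_data"])
@pytest.mark.parametrize("fit_intercept", [True, False])
def test_multiclass_hinge_sgd(data, fit_intercept, request):
    X, y = request.getfixturevalue(data)
    clf = SGDClassifier(loss="hinge",
                        multiclass=True,
                        fit_intercept=fit_intercept,
                        random_state=0)
    clf.fit(X, y)
    assert clf.score(X, y) > 0.78

The l1/l2 and squared-hinge variants would take the same pair of decorators, with the fit_intercept parametrization dropped for the l1/l2 test, which only accepts data and request.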
Example #7
def test_binary_linear_sgd():
    for data in (bin_dense, bin_csr):
        for clf in (
                SGDClassifier(random_state=0,
                              loss="hinge",
                              fit_intercept=True,
                              learning_rate="pegasos"),
                SGDClassifier(random_state=0,
                              loss="hinge",
                              fit_intercept=False,
                              learning_rate="pegasos"),
                SGDClassifier(random_state=0,
                              loss="hinge",
                              fit_intercept=True,
                              learning_rate="invscaling"),
                SGDClassifier(random_state=0,
                              loss="hinge",
                              fit_intercept=True,
                              learning_rate="constant"),
                SGDClassifier(random_state=0,
                              loss="squared_hinge",
                              eta0=1e-2,
                              fit_intercept=True,
                              learning_rate="constant"),
                SGDClassifier(random_state=0,
                              loss="log",
                              fit_intercept=True,
                              learning_rate="constant"),
                SGDClassifier(random_state=0,
                              loss="modified_huber",
                              fit_intercept=True,
                              learning_rate="constant"),
        ):
            clf.fit(data, bin_target)
            assert_greater(clf.score(data, bin_target), 0.934)
            assert_equal(list(clf.classes_), [0, 1])
            if clf.loss in ('log', 'modified_huber'):
                check_predict_proba(clf, data)
            else:
                assert not hasattr(clf, 'predict_proba')
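Example #7 calls a check_predict_proba helper that is defined elsewhere in the test module and not shown here. The following is a hypothetical sketch of the kind of sanity checks such a helper typically performs, not the source's actual implementation.

import numpy as np


def check_predict_proba(clf, X):
    # Hypothetical stand-in for the helper used above; the real one lives
    # elsewhere in the test module.
    probas = clf.predict_proba(X)
    n_samples = X.shape[0]
    # One row per sample, one column per class.
    assert probas.shape == (n_samples, len(clf.classes_))
    # Values are valid probabilities and each row sums to one.
    assert np.all(probas >= 0) and np.all(probas <= 1)
    np.testing.assert_allclose(probas.sum(axis=1), np.ones(n_samples))
    # The most probable class agrees with predict().
    y_pred = clf.classes_[np.argmax(probas, axis=1)]
    np.testing.assert_array_equal(y_pred, clf.predict(X))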
Example #8
def test_binary_linear_sgd():
    for data in (bin_dense, bin_csr):
        for clf in (
                SGDClassifier(random_state=0,
                              loss="hinge",
                              fit_intercept=True,
                              learning_rate="pegasos"),
                SGDClassifier(random_state=0,
                              loss="hinge",
                              fit_intercept=False,
                              learning_rate="pegasos"),
                SGDClassifier(random_state=0,
                              loss="hinge",
                              fit_intercept=True,
                              learning_rate="invscaling"),
                SGDClassifier(random_state=0,
                              loss="hinge",
                              fit_intercept=True,
                              learning_rate="constant"),
                SGDClassifier(random_state=0,
                              loss="squared_hinge",
                              eta0=1e-2,
                              fit_intercept=True,
                              learning_rate="constant"),
                SGDClassifier(random_state=0,
                              loss="log",
                              fit_intercept=True,
                              learning_rate="constant"),
                SGDClassifier(random_state=0,
                              loss="modified_huber",
                              fit_intercept=True,
                              learning_rate="constant"),
        ):

            clf.fit(data, bin_target)
            assert_greater(clf.score(data, bin_target), 0.934)
Example #9
def test_multiclass_sgd():
    clf = SGDClassifier(random_state=0)
    clf.fit(mult_dense, mult_target)
    assert_greater(clf.score(mult_dense, mult_target), 0.80)
    assert_equal(list(clf.classes_), [0, 1, 2])
Example #10
def test_multiclass_hinge_sgd_l1l2():
    for data in (mult_dense, mult_csr):
        clf = SGDClassifier(loss="hinge", penalty="l1/l2", multiclass=True, random_state=0)
        clf.fit(data, mult_target)
        assert_greater(clf.score(data, mult_target), 0.75)
Example #11
def test_multiclass_sgd():
    clf = SGDClassifier(random_state=0)
    clf.fit(mult_dense, mult_target)
    assert_greater(clf.score(mult_dense, mult_target), 0.80)
Example #13
def test_multiclass_sgd():
    clf = SGDClassifier(random_state=0)
    clf.fit(mult_dense, mult_target)
    assert clf.score(mult_dense, mult_target) > 0.80
    assert list(clf.classes_) == [0, 1, 2]
Example #15

@pytest.fixture(scope="module")
def reg_nn_train_data():
    X, y, _ = make_nn_regression(n_samples=100,
                                 n_features=10,
                                 n_informative=8,
                                 random_state=0)
    return X, y
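
The bin_dense_train_data and bin_sparse_train_data fixtures referenced by the parametrization below are not shown in this excerpt. Here is a sketch of how they are presumably defined, mirroring the reg_nn_train_data fixture above; the dataset parameters are assumptions.

import pytest
import scipy.sparse as sp
from sklearn.datasets import make_classification


@pytest.fixture(scope="module")
def bin_dense_train_data():
    # Assumed definition: a two-class problem comparable to the bin_dense /
    # bin_target globals used by the loop-based examples.
    X, y = make_classification(n_samples=200, n_features=100, n_informative=5,
                               n_classes=2, random_state=0)
    return X, y


@pytest.fixture(scope="module")
def bin_sparse_train_data(bin_dense_train_data):
    # Assumed definition: CSR view of the dense training data.
    X, y = bin_dense_train_data
    return sp.csr_matrix(X), y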


@pytest.mark.parametrize("data",
                         ["bin_dense_train_data", "bin_sparse_train_data"])
@pytest.mark.parametrize("clf", [
    SGDClassifier(random_state=0,
                  loss="hinge",
                  fit_intercept=True,
                  learning_rate="pegasos"),
    SGDClassifier(random_state=0,
                  loss="hinge",
                  fit_intercept=False,
                  learning_rate="pegasos"),
    SGDClassifier(random_state=0,
                  loss="hinge",
                  fit_intercept=True,
                  learning_rate="invscaling"),
    SGDClassifier(random_state=0,
                  loss="hinge",
                  fit_intercept=True,
                  learning_rate="constant"),
    SGDClassifier(random_state=0,
                  loss="squared_hinge",