def test_fit_linear_binary_l1r_log_loss(bin_dense_train_data):
    """L1-penalized log-loss CDClassifier hits the expected training accuracy
    and exposes a valid predict_proba."""
    features, labels = bin_dense_train_data
    model = CDClassifier(C=1.0, random_state=0, penalty="l1", loss="log")
    model.fit(features, labels)
    # log loss supports probability estimates; validate them on the train set.
    check_predict_proba(model, features)
    np.testing.assert_almost_equal(model.score(features, labels), 0.995)
def test_fit_linear_binary_l2r_modified_huber():
    """L2-penalized modified-huber CDClassifier separates the training set
    perfectly and exposes a valid predict_proba."""
    clf = CDClassifier(C=1.0, random_state=0, penalty="l2",
                       loss="modified_huber")
    clf.fit(bin_dense, bin_target)
    # modified_huber supports probability estimates; validate them.
    check_predict_proba(clf, bin_dense)
    acc = clf.score(bin_dense, bin_target)
    # Use np.testing directly (as the l1r/log test does) instead of the
    # deprecated bare assert_almost_equal helper.
    np.testing.assert_almost_equal(acc, 1.0)
def test_sag_proba():
    """SAGClassifier trained with log loss yields valid probability estimates."""
    X, y = make_classification(10, random_state=0)
    model = SAGClassifier(eta=1e-3, alpha=0.0, beta=0.0, max_iter=10,
                          loss='log', random_state=0)
    model.fit(X, y)
    check_predict_proba(model, X)
def test_adagrad_elastic_log():
    """Elastic-net AdaGrad with log loss fits the binary data perfectly and
    exposes a valid predict_proba."""
    model = AdaGradClassifier(alpha=0.1, l1_ratio=0.85, loss="log",
                              n_iter=10, random_state=0)
    model.fit(X_bin, y_bin)
    # log loss supports probability estimates; validate them first.
    check_predict_proba(model, X_bin)
    assert model.score(X_bin, y_bin) == 1.0
def test_binary_linear_sgd(data, clf, request):
    """Parametrized SGD binary-classification check: accuracy threshold,
    class labels, and predict_proba availability per loss."""
    features, target = request.getfixturevalue(data)
    clf.fit(features, target)
    assert clf.score(features, target) > 0.934
    assert list(clf.classes_) == [0, 1]
    # Only probabilistic losses expose predict_proba.
    supports_proba = clf.loss in ('log', 'modified_huber')
    if supports_proba:
        check_predict_proba(clf, features)
    else:
        assert not hasattr(clf, 'predict_proba')
def test_binary_linear_sgd():
    """SGDClassifier binary-classification check over dense and sparse data
    and several loss/learning-rate configurations."""
    for data in (bin_dense, bin_csr):
        for clf in (
            SGDClassifier(random_state=0, loss="hinge",
                          fit_intercept=True, learning_rate="pegasos"),
            SGDClassifier(random_state=0, loss="hinge",
                          fit_intercept=False, learning_rate="pegasos"),
            SGDClassifier(random_state=0, loss="hinge",
                          fit_intercept=True, learning_rate="invscaling"),
            SGDClassifier(random_state=0, loss="hinge",
                          fit_intercept=True, learning_rate="constant"),
            SGDClassifier(random_state=0, loss="squared_hinge", eta0=1e-2,
                          fit_intercept=True, learning_rate="constant"),
            SGDClassifier(random_state=0, loss="log",
                          fit_intercept=True, learning_rate="constant"),
            SGDClassifier(random_state=0, loss="modified_huber",
                          fit_intercept=True, learning_rate="constant"),
        ):
            clf.fit(data, bin_target)
            # Plain asserts instead of the nose-style assert_greater /
            # assert_equal helpers, which were removed from scikit-learn.
            assert clf.score(data, bin_target) > 0.934
            assert list(clf.classes_) == [0, 1]
            if clf.loss in ("log", "modified_huber"):
                # Only probabilistic losses expose predict_proba.
                check_predict_proba(clf, data)
            else:
                assert not hasattr(clf, "predict_proba")
def test_binary_linear_sgd():
    """SGDClassifier binary-classification check over dense and sparse data
    and several loss/learning-rate configurations."""
    for data in (bin_dense, bin_csr):
        for clf in (
            SGDClassifier(random_state=0, loss="hinge",
                          fit_intercept=True, learning_rate="pegasos"),
            SGDClassifier(random_state=0, loss="hinge",
                          fit_intercept=False, learning_rate="pegasos"),
            SGDClassifier(random_state=0, loss="hinge",
                          fit_intercept=True, learning_rate="invscaling"),
            SGDClassifier(random_state=0, loss="hinge",
                          fit_intercept=True, learning_rate="constant"),
            SGDClassifier(random_state=0, loss="squared_hinge", eta0=1e-2,
                          fit_intercept=True, learning_rate="constant"),
            SGDClassifier(random_state=0, loss="log",
                          fit_intercept=True, learning_rate="constant"),
            SGDClassifier(random_state=0, loss="modified_huber",
                          fit_intercept=True, learning_rate="constant"),
        ):
            clf.fit(data, bin_target)
            # Plain asserts instead of the nose-style assert_greater /
            # assert_equal helpers, which were removed from scikit-learn.
            assert clf.score(data, bin_target) > 0.934
            assert list(clf.classes_) == [0, 1]
            if clf.loss in ('log', 'modified_huber'):
                # Only probabilistic losses expose predict_proba.
                check_predict_proba(clf, data)
            else:
                assert not hasattr(clf, 'predict_proba')
def test_adagrad_elastic_log():
    """Elastic-net AdaGrad with log loss fits the binary data perfectly and
    exposes a valid predict_proba."""
    clf = AdaGradClassifier(alpha=0.1, l1_ratio=0.85, loss="log",
                            n_iter=10, random_state=0)
    clf.fit(X_bin, y_bin)
    # Plain assert instead of the removed nose-style assert_equal helper,
    # matching the other AdaGrad test that asserts score == 1.0 directly.
    assert clf.score(X_bin, y_bin) == 1.0
    check_predict_proba(clf, X_bin)
def test_fit_linear_binary_l1r_log_loss():
    """L1-penalized log-loss CDClassifier hits the expected training accuracy
    and exposes a valid predict_proba."""
    clf = CDClassifier(C=1.0, random_state=0, penalty="l1", loss="log")
    clf.fit(bin_dense, bin_target)
    # log loss supports probability estimates; validate them.
    check_predict_proba(clf, bin_dense)
    acc = clf.score(bin_dense, bin_target)
    # Use np.testing directly (as the fixture-based variant of this test
    # does) instead of the deprecated bare assert_almost_equal helper.
    np.testing.assert_almost_equal(acc, 0.995)