def test_probability():
    """Check that predicted probabilities are well-formed.

    Verifies that predict_proba rows sum to 1 and that
    predict_log_proba is consistent (exp(log_proba) == proba),
    both in the normal case and in a degenerate case where
    tiny bootstrap samples leave some classes unseen.
    """
    rng = check_random_state(0)
    X_train, X_test, y_train, y_test = train_test_split(
        iris.data, iris.target, random_state=rng
    )

    # Degenerate sub-samples can produce 0-probability classes, so
    # log(0) and 0/0 warnings are expected and deliberately silenced.
    with np.errstate(divide="ignore", invalid="ignore"):
        # Normal case
        ensemble = LazyBaggingClassifier(random_state=rng).fit(X_train, y_train)

        assert_array_almost_equal(
            np.sum(ensemble.predict_proba(X_test), axis=1),
            np.ones(len(X_test)),
        )
        assert_array_almost_equal(
            ensemble.predict_proba(X_test),
            np.exp(ensemble.predict_log_proba(X_test)),
        )

        # Degenerate case, where some classes are missing
        ensemble = LazyBaggingClassifier(
            base_estimator=DecisionTreeClassifier(),
            random_state=rng,
            max_samples=5,
        ).fit(X_train, y_train)

        assert_array_almost_equal(
            np.sum(ensemble.predict_proba(X_test), axis=1),
            np.ones(len(X_test)),
        )
        assert_array_almost_equal(
            ensemble.predict_proba(X_test),
            np.exp(ensemble.predict_log_proba(X_test)),
        )
def test_multioutput():
    """Check multi-label/multi-output support for both estimators.

    Fits LazyBaggingClassifier and LazyBaggingRegressor on a
    multi-label problem without bootstrapping and checks a perfect
    training score, plus proba/log-proba consistency per output.
    """
    features, labels = make_multilabel_classification(
        n_samples=100,
        n_labels=1,
        n_classes=5,
        random_state=0,
        return_indicator=True,
    )
    X_train, X_test, y_train, y_test = train_test_split(
        features, labels, random_state=0
    )

    # Classifier: without bootstrap every tree sees all samples,
    # so the training set should be fit perfectly.
    clf = LazyBaggingClassifier(random_state=0, n_estimators=10, bootstrap=False)
    clf.fit(X_train, y_train)
    assert_almost_equal(clf.score(X_train, y_train), 1.)

    # One (proba, log-proba) pair per output; they must agree.
    probas = clf.predict_proba(X_test)
    log_probas = clf.predict_log_proba(X_test)
    for proba, log_proba in zip(probas, log_probas):
        assert_array_almost_equal(proba, np.exp(log_proba))

    # Regressor: same perfect-training-score expectation.
    reg = LazyBaggingRegressor(random_state=0, n_estimators=10, bootstrap=False)
    reg.fit(X_train, y_train)
    assert_almost_equal(reg.score(X_train, y_train), 1.)