Example #1
import pytest
from numpy.testing import assert_allclose

from sklearn.datasets import make_classification, make_regression
try:  # only needed on scikit-learn < 1.0, where these estimators are experimental
    from sklearn.experimental import enable_hist_gradient_boosting  # noqa: F401
except ImportError:
    pass
from sklearn.ensemble import HistGradientBoostingClassifier
from sklearn.ensemble import HistGradientBoostingRegressor
# Internal module; the exact import path varies across scikit-learn versions:
from sklearn.ensemble._hist_gradient_boosting.loss import BinaryCrossEntropy
from sklearn.ensemble._hist_gradient_boosting.loss import LeastSquares

# Data fixtures used by the parametrized tests below (as in the upstream
# test module):
X_classification, y_classification = make_classification(random_state=0)
X_regression, y_regression = make_regression(random_state=0)
X_multi_classification, y_multi_classification = make_classification(
    n_classes=3, n_informative=3, random_state=0)


@pytest.mark.parametrize('Est', (HistGradientBoostingClassifier,
                                 HistGradientBoostingRegressor))
def test_single_node_trees(Est):
    # With a constant target no split can improve the loss, so each predictor
    # ends up as a single root node whose value is set to 0.
    X, y = make_classification(random_state=0)
    y[:] = 1  # constant target will lead to a single root node

    est = Est(max_iter=20)
    est.fit(X, y)

    assert all(len(predictor[0].nodes) == 1 for predictor in est._predictors)
    assert all(predictor[0].nodes[0]['value'] == 0
               for predictor in est._predictors)
    # Still gives correct predictions thanks to the baseline prediction
    assert_allclose(est.predict(X), y)
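
The test above relies on internals (_predictors); as a standalone sketch of
the behavior it checks, the following hypothetical script (assuming
scikit-learn >= 1.0, where no experimental import is needed) shows that with
a constant target the prediction is carried entirely by the baseline:

    import numpy as np
    from sklearn.ensemble import HistGradientBoostingRegressor

    X = np.random.RandomState(0).rand(100, 4)
    y = np.ones(100)  # constant target: the trees contribute nothing
    est = HistGradientBoostingRegressor(max_iter=20).fit(X, y)
    print(est.predict(X[:3]))  # -> [1. 1. 1.], i.e. the baseline prediction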


@pytest.mark.parametrize(
    'Est, loss, X, y',
    [(HistGradientBoostingClassifier, BinaryCrossEntropy(sample_weight=None),
      X_classification, y_classification),
     (HistGradientBoostingRegressor, LeastSquares(sample_weight=None),
      X_regression, y_regression)])
def test_custom_loss(Est, loss, X, y):
    est = Est(loss=loss, max_iter=20)
    est.fit(X, y)
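
Outside the test suite, the loss is normally selected by name rather than by
passing an internal loss object as above. A minimal usage sketch (spelling per
scikit-learn >= 1.0; older releases named this loss 'least_squares'):

    from sklearn.datasets import make_regression
    from sklearn.ensemble import HistGradientBoostingRegressor

    X, y = make_regression(random_state=0)
    reg = HistGradientBoostingRegressor(loss='squared_error', max_iter=20)
    reg.fit(X, y)
    print(reg.score(X, y))  # R^2 on the training data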


@pytest.mark.parametrize('HistGradientBoosting, X, y', [
    (HistGradientBoostingClassifier, X_classification, y_classification),
    (HistGradientBoostingRegressor, X_regression, y_regression),
    (HistGradientBoostingClassifier, X_multi_classification,
     y_multi_classification),
])
def test_staged_predict(HistGradientBoosting, X, y):
    # NOTE: the original body is truncated in this listing; this is a minimal
    # reconstruction of its intent: the last prediction yielded by
    # staged_predict() must match predict() on the fully fitted model.
    est = HistGradientBoosting(max_iter=10)
    est.fit(X, y)
    for staged_pred in est.staged_predict(X):
        pass
    assert_allclose(staged_pred, est.predict(X))
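
staged_predict yields the model's predictions after each boosting iteration,
which lets you monitor performance per iteration without refitting. A minimal
usage sketch (assuming scikit-learn >= 0.24, where staged_predict was
introduced for these estimators):

    from sklearn.datasets import make_classification
    from sklearn.ensemble import HistGradientBoostingClassifier

    X, y = make_classification(random_state=0)
    clf = HistGradientBoostingClassifier(max_iter=5).fit(X, y)
    for i, y_pred in enumerate(clf.staged_predict(X), start=1):
        print(i, (y_pred == y).mean())  # training accuracy per iteration
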
Example #2
# Imports and data fixtures as in Example #1.
@pytest.mark.parametrize(
    "Est", (HistGradientBoostingClassifier, HistGradientBoostingRegressor)
)
def test_single_node_trees(Est):
    X, y = make_classification(random_state=0)
    y[:] = 1  # constant target will lead to a single root node

    est = Est(max_iter=20)
    est.fit(X, y)

    assert all(len(predictor[0].nodes) == 1 for predictor in est._predictors)
    assert all(predictor[0].nodes[0]["value"] == 0
               for predictor in est._predictors)
    # Still gives correct predictions thanks to the baseline prediction
    assert_allclose(est.predict(X), y)


@pytest.mark.parametrize(
    "Est, loss, X, y",
    [
        (
            HistGradientBoostingClassifier,
            BinaryCrossEntropy(sample_weight=None),
            X_classification,
            y_classification,
        ),
        (
            HistGradientBoostingRegressor,
            LeastSquares(sample_weight=None),
            X_regression,
            y_regression,
        ),
    ],
)
def test_custom_loss(Est, loss, X, y):
    est = Est(loss=loss, max_iter=20)
    est.fit(X, y)