# Imports assumed for these tests (hep_ml / sklearn; exact module paths may differ by version).
import numpy
from sklearn.metrics import mean_squared_error
from hep_ml import losses
from hep_ml.losses import MSELossFunction
from hep_ml.gradientboosting import UGradientBoostingRegressor
from hep_ml.commonutils import generate_sample

def test_gb_regression(n_samples=1000):
    X, _ = generate_sample(n_samples, 10, distance=0.6)
    y = numpy.tanh(X.sum(axis=1))
    clf = UGradientBoostingRegressor(loss=MSELossFunction())
    clf.fit(X, y)
    y_pred = clf.predict(X)
    zeromse = 0.5 * mean_squared_error(y, y * 0.)
    assert mean_squared_error(y, y_pred) < zeromse, 'something wrong with regression quality'
def test_constant_fitting(n_samples=1000, n_features=5):
    """
    Test that the initial constant is fitted properly.
    """
    X, y = generate_sample(n_samples=n_samples, n_features=n_features)
    # Shift targets far from zero so a badly fitted initial constant shows up in the MSE.
    y = y.astype(float) + 1000.
    for loss in [MSELossFunction(), losses.MAELossFunction()]:
        gb = UGradientBoostingRegressor(loss=loss, n_estimators=10)
        gb.fit(X, y)
        p = gb.predict(X)
        assert mean_squared_error(p, y) < 0.5
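
Both tests are plain functions, so they can be collected by pytest or called directly. A minimal sketch of a direct run, assuming the imports shown above are available; the __main__ guard is an illustrative addition, not part of the original tests:

if __name__ == '__main__':
    # Run both checks with their default sample sizes.
    test_gb_regression()
    test_constant_fitting()
    print('gradient boosting regression tests passed')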