import numpy as np
from scipy.optimize import check_grad

# _logistic, _logistic_loss_grad, _logistic_loss_lipschitz_constant and
# _check_lipschitz_continuous are assumed to come from the module under test
# (import path omitted here).


def test_logistic_loss_derivative(n_samples=4, n_features=10, decimal=5):
    # Random design matrix, targets, and weight vector (last entry = intercept).
    rng = np.random.RandomState(42)
    X = rng.randn(n_samples, n_features)
    y = rng.randn(n_samples)
    n_features = X.shape[1]
    w = rng.randn(n_features + 1)

    # The analytical gradient must agree with the finite-difference gradient.
    np.testing.assert_almost_equal(check_grad(
        lambda w: _logistic(X, y, w),
        lambda w: _logistic_loss_grad(X, y, w), w), 0., decimal=decimal)
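For reference, a minimal sketch of what `_logistic` and `_logistic_loss_grad` are assumed to compute here: the logistic loss sum_i log(1 + exp(-y_i * (x_i . v + c))) with the intercept c stored as the last entry of w, and its gradient. The `_sketch` names are hypothetical; the actual implementations live in the module under test and may differ in details.

def _logistic_sketch(X, y, w):
    """Logistic loss; w = [v, c] with c the intercept (assumed convention)."""
    z = X.dot(w[:-1]) + w[-1]
    # logaddexp(0, -y*z) = log(1 + exp(-y*z)), computed stably
    return np.sum(np.logaddexp(0.0, -y * z))


def _logistic_loss_grad_sketch(X, y, w):
    """Gradient of the loss above w.r.t. w (weights first, intercept last)."""
    z = X.dot(w[:-1]) + w[-1]
    # d/dz log(1 + exp(-y*z)) = -y / (1 + exp(y*z))
    dz = -y / (1.0 + np.exp(y * z))
    return np.append(X.T.dot(dz), dz.sum())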
def test_logistic_lipschitz(n_samples=4, n_features=2, random_state=42):
    rng = np.random.RandomState(random_state)

    # Check the Lipschitz bound across several orders of magnitude of data scale.
    for scaling in np.logspace(-3, 3, num=7):
        X = rng.randn(n_samples, n_features) * scaling
        y = rng.randn(n_samples)
        n_features = X.shape[1]

        L = _logistic_loss_lipschitz_constant(X)
        _check_lipschitz_continuous(lambda w: _logistic(X, y, w),
                                    n_features + 1, L)
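A minimal sketch of what a `_check_lipschitz_continuous`-style helper typically does, assuming it asserts |f(a) - f(b)| <= L * ||a - b|| over random point pairs of the given dimension; the `_sketch` name and exact behaviour are assumptions, and the real helper may instead check the gradient.

def _check_lipschitz_continuous_sketch(f, ndim, lipschitz_constant,
                                        n_trials=10, random_state=42):
    """Assert the Lipschitz inequality on random point pairs (assumed form)."""
    rng = np.random.RandomState(random_state)
    for _ in range(n_trials):
        a = rng.randn(ndim)
        b = rng.randn(ndim)
        assert np.abs(f(a) - f(b)) <= lipschitz_constant * np.linalg.norm(a - b)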