Example no. 1
0
def test_lipschitz_constant_loss_logreg():
    # With l1_ratio == 1 the smooth (graph-net) penalty weight vanishes,
    # so the Lipschitz constant of the loss derivative must coincide with
    # the plain logistic-loss Lipschitz constant.
    rng = check_random_state(42)
    X, _, w, mask = _make_data(rng=rng, masked=True)
    l1_ratio = 1.
    alpha = .1
    grad_weight = alpha * X.shape[0] * (1. - l1_ratio)
    derivative_constant = _logistic_derivative_lipschitz_constant(
        X, mask, grad_weight)
    loss_constant = _logistic_loss_lipschitz_constant(X)
    assert derivative_constant == loss_constant
Example no. 2
0
def test_lipschitz_constant_loss_logreg():
    # With l1_ratio == 1 the smooth (graph-net) penalty weight is zero,
    # so the derivative's Lipschitz constant must equal the plain
    # logistic-loss Lipschitz constant.
    rng = check_random_state(42)
    X, _, w, mask = _make_data(rng=rng, masked=True)
    l1_ratio = 1.
    alpha = .1
    grad_weight = alpha * X.shape[0] * (1. - l1_ratio)
    a = _logistic_derivative_lipschitz_constant(X, mask, grad_weight)
    b = _logistic_loss_lipschitz_constant(X)
    # Bare assert replaces the deprecated nose-style assert_equal
    # (removed from sklearn.utils.testing); pytest rewrites it with
    # an informative failure message.
    assert a == b
Example no. 3
0
def test_logistic_derivative_lipschitz_constant():
    # Tests Lipschitz-continuity of the derivative of the logistic loss:
    # for random pairs of points, the gradient difference must be bounded
    # by the claimed constant times the point difference.
    # NOTE(review): relies on module-level fixtures X, y, w, mask.
    rng = check_random_state(42)
    grad_weight = 2.08e-1
    claimed_constant = _logistic_derivative_lipschitz_constant(
        X, mask, grad_weight)
    for _ in range(20):
        point_a = rng.rand(w.shape[0] + 1) * rng.randint(1000)
        point_b = rng.rand(w.shape[0] + 1) * rng.randint(1000)
        grad_a = _logistic_data_loss_and_spatial_grad_derivative(
            X, y, point_a, mask, grad_weight)
        grad_b = _logistic_data_loss_and_spatial_grad_derivative(
            X, y, point_b, mask, grad_weight)
        gradient_difference = linalg.norm(grad_a - grad_b)
        point_difference = linalg.norm(point_a - point_b)
        assert gradient_difference <= claimed_constant * point_difference
Example no. 4
0
def test_logistic_derivative_lipschitz_constant():
    # Tests Lipschitz-continuity of the derivative of logistic loss:
    # the norm of the gradient difference between two random points must
    # be bounded by the claimed constant times their distance.
    # NOTE(review): relies on module-level fixtures X, y, w, mask.
    # NOTE(review): extmath.norm was removed from modern scikit-learn;
    # confirm the import source, or switch to numpy.linalg.norm.
    rng = check_random_state(42)
    grad_weight = 2.08e-1
    lipschitz_constant = _logistic_derivative_lipschitz_constant(
        X, mask, grad_weight)
    for _ in range(20):
        x_1 = rng.rand((w.shape[0] + 1)) * rng.randint(1000)
        x_2 = rng.rand((w.shape[0] + 1)) * rng.randint(1000)
        gradient_difference = extmath.norm(
            _logistic_data_loss_and_spatial_grad_derivative(
                X, y, x_1, mask, grad_weight)
            - _logistic_data_loss_and_spatial_grad_derivative(
                X, y, x_2, mask, grad_weight))
        point_difference = extmath.norm(x_1 - x_2)
        # Bare assert replaces the deprecated nose-style assert_true
        # (removed from sklearn.utils.testing).
        assert gradient_difference <= lipschitz_constant * point_difference