import numpy as np
import scipy as sp
import scipy.optimize  # makes sp.optimize available explicitly
from scipy import linalg
from numpy.testing import assert_almost_equal
from sklearn.utils import check_random_state

# Private helpers under test; the module path below matches nilearn's
# decoding package. create_graph_net_simulation_data is assumed to be
# defined earlier in this test module.
from nilearn.decoding.space_net_solvers import (
    _logistic_derivative_lipschitz_constant,
    _logistic_data_loss_and_spatial_grad,
    _logistic_data_loss_and_spatial_grad_derivative)


def test_logistic_derivative_lipschitz_constant():
    # Tests Lipschitz continuity of the derivative of the logistic loss
    X, y, w, mask = create_graph_net_simulation_data(n_samples=10, size=4)
    rng = check_random_state(42)
    grad_weight = 2.08e-1
    lipschitz_constant = _logistic_derivative_lipschitz_constant(
        X, mask, grad_weight)
    for _ in range(20):
        x_1 = rng.rand(w.shape[0] + 1) * rng.randint(1000)
        x_2 = rng.rand(w.shape[0] + 1) * rng.randint(1000)
        gradient_difference = linalg.norm(
            _logistic_data_loss_and_spatial_grad_derivative(
                X, y, x_1, mask, grad_weight)
            - _logistic_data_loss_and_spatial_grad_derivative(
                X, y, x_2, mask, grad_weight))
        point_difference = linalg.norm(x_1 - x_2)
        assert gradient_difference <= lipschitz_constant * point_difference
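# Background for the test above: a function g is Lipschitz continuous with
# constant L when ||g(x_1) - g(x_2)|| <= L * ||x_1 - x_2|| for all inputs;
# for the loss derivative, such a bound is what allows proximal-gradient
# solvers to take a safe step size of 1 / L. Below is a minimal,
# self-contained sketch of the same style of check on a toy function with a
# known constant (the helper name is illustrative, not part of the module
# under test):
def _demo_lipschitz_check_on_sine():
    # f(x) = sum(sin(x)) has gradient cos(x), which is 1-Lipschitz because
    # |cos(a) - cos(b)| <= |a - b| holds coordinate-wise.
    rng = check_random_state(0)
    lipschitz_constant = 1.0
    for _ in range(20):
        x_1 = rng.rand(5) * rng.randint(1000)
        x_2 = rng.rand(5) * rng.randint(1000)
        gradient_difference = linalg.norm(np.cos(x_1) - np.cos(x_2))
        point_difference = linalg.norm(x_1 - x_2)
        assert gradient_difference <= lipschitz_constant * point_difference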
def test_logistic_gradient_at_simple_points():
    # Tests the gradient of the logistic data loss at points near zero.
    # This is not a very demanding test; it only catches gross errors.
    X, y, w, mask = create_graph_net_simulation_data(n_samples=10, size=4)
    grad_weight = 1
    # Add the intercept
    w = np.append(w, 0)
    func = lambda w: _logistic_data_loss_and_spatial_grad(
        X, y, w, mask, grad_weight)
    func_grad = lambda w: _logistic_data_loss_and_spatial_grad_derivative(
        X, y, w, mask, grad_weight)
    for i in range(0, w.size, 7):
        point = np.zeros(w.shape)
        point[i] = 1
        assert_almost_equal(sp.optimize.check_grad(func, func_grad, point),
                            0, decimal=3)
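# For reference, sp.optimize.check_grad returns the 2-norm of the difference
# between the analytical gradient and a finite-difference approximation at
# the given point, so a result near zero means the hand-written derivative
# is consistent with the loss. A minimal sketch on a function with a known
# gradient (the helper name is illustrative, independent of the module
# under test):
def _demo_check_grad_on_known_gradient():
    func = lambda v: np.sum(v ** 2)  # loss with known gradient 2 * v
    func_grad = lambda v: 2 * v
    point = np.arange(3.0)
    # Analytical and numerical gradients agree up to finite-difference error.
    assert_almost_equal(sp.optimize.check_grad(func, func_grad, point),
                        0, decimal=5)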