Example #1
import numpy as np
from sklearn.linear_model import LogisticRegression
from sklego.linear_model import EqualOpportunityClassifier


def _test_same(dataset):
    X, y = dataset
    if X.shape[1] == 1:
        # If X has only one column (the sensitive one itself), there is
        # nothing left to fit on once it is dropped.
        return True

    sensitive_cols = [0]
    X_without_sens = np.delete(X, sensitive_cols, axis=1)
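    # Plain logistic regression on the data without the sensitive column.
    # Every argument below is a scikit-learn default except penalty="none"
    # (spelled penalty=None from scikit-learn 1.2 onwards) and
    # multi_class="ovr".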
    lr = LogisticRegression(
        penalty="none",
        solver="lbfgs",
        multi_class="ovr",
        dual=False,
        tol=1e-4,
        C=1.0,
        fit_intercept=True,
        intercept_scaling=1,
        class_weight=None,
        random_state=None,
        max_iter=100,
        verbose=0,
        warm_start=False,
        n_jobs=None,
        l1_ratio=None,
    )
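    # covariance_threshold=None disables the fairness constraint, so the
    # fair model should coincide with the plain logistic regression fitted
    # without the sensitive column.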
    fair = EqualOpportunityClassifier(
        covariance_threshold=None,
        sensitive_cols=sensitive_cols,
        penalty="none",
        positive_target=True,
    )

    fair.fit(X, y)
    lr.fit(X_without_sens, y)

    normal_pred = lr.predict_proba(X_without_sens)
    fair_pred = fair.predict_proba(X)
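    # Probabilities should agree to ~2 decimal places, and hard predictions
    # should differ on fewer than 1% of the rows.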
    np.testing.assert_almost_equal(normal_pred, fair_pred, decimal=2)
    assert np.sum(lr.predict(X_without_sens) != fair.predict(X)) / len(X) < 0.01
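
A note on driving this test: the `dataset` fixture is not shown on this page. A minimal sketch of how it could look, assuming a pytest fixture built on scikit-learn's make_classification (the fixture parameters, sizes, and the wrapper test name are assumptions, not part of the original):

import pytest
from sklearn.datasets import make_classification


@pytest.fixture(params=[1, 2, 5])
def dataset(request):
    # Binary-classification data; column 0 plays the role of the sensitive
    # column. params=[1] exercises the early-return branch above.
    X, y = make_classification(
        n_samples=500,
        n_features=request.param,
        n_informative=request.param,
        n_redundant=0,
        n_clusters_per_class=1,
        random_state=42,
    )
    return X, y


def test_equal_opportunity_matches_lr(dataset):
    # Hypothetical wrapper: the underscore-prefixed helper is not collected
    # by pytest on its own.
    _test_same(dataset)
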
Example #2
import numpy as np
from sklearn.linear_model import LogisticRegression
from sklego.linear_model import EqualOpportunityClassifier


def _test_same(dataset):
    X, y = dataset
    if X.shape[1] == 1:
        # If X has only one column (the sensitive one itself), there is
        # nothing left to fit on once it is dropped.
        return True

    sensitive_cols = [0]
    X_without_sens = np.delete(X, sensitive_cols, axis=1)
    lr = LogisticRegression(penalty="none", solver="lbfgs")
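    # As in Example #1, covariance_threshold=None disables the constraint,
    # so the fair classifier should reduce to the plain model above.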
    fair = EqualOpportunityClassifier(
        covariance_threshold=None,
        sensitive_cols=sensitive_cols,
        penalty="none",
        positive_target=True,
    )

    fair.fit(X, y)
    lr.fit(X_without_sens, y)

    normal_pred = lr.predict_proba(X_without_sens)
    fair_pred = fair.predict_proba(X)
    np.testing.assert_almost_equal(normal_pred, fair_pred, decimal=2)
    assert np.sum(lr.predict(X_without_sens) != fair.predict(X)) / len(X) < 0.01
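
Both examples verify the same property: with covariance_threshold=None the equal-opportunity constraint is inactive, so the model must collapse to an unpenalized logistic regression on the non-sensitive columns. To actually enforce the constraint you pass a finite threshold. A minimal sketch, reusing only the parameters the tests above already use, with an arbitrary demo threshold (scikit-lego's fairness classifiers need cvxpy installed):

from sklearn.datasets import make_classification
from sklego.linear_model import EqualOpportunityClassifier

X, y = make_classification(n_samples=500, n_features=5, random_state=42)

# A finite covariance_threshold bounds the covariance between the sensitive
# column and the distance to the decision boundary, computed over the rows
# whose label equals positive_target. Smaller thresholds enforce a stricter
# notion of equal opportunity.
fair = EqualOpportunityClassifier(
    covariance_threshold=0.5,  # assumption: arbitrary demo value
    sensitive_cols=[0],
    positive_target=True,
    penalty="none",
)
fair.fit(X, y)
print(fair.predict_proba(X)[:5])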