def thresholdOptimizer(X_train, Y_train, A_train, model, constraint):
    """Fit a fairness post-processing ThresholdOptimizer on a class-balanced
    subsample of the training data and return its test-set predictions.

    Parameters:
    X_train: training features (pandas DataFrame)
    Y_train: training labels (0/1)
    A_train: sensitive features aligned with X_train
    model: the estimator to post-process
    constraint: either "demographic_parity" or "equalized_odds"

    Returns the predictions of the optimized model on X_test / A_test.
    NOTE(review): X_test and A_test are read from module scope -- confirm
    they are defined wherever this function is called.
    """
    postprocess_est = ThresholdOptimizer(estimator=model,
                                         constraints=constraint)

    # Balanced data set is obtained by sampling the same number of points from
    # the majority class (Y=0) as there are points in the minority class (Y=1).
    Y_train = pd.Series(Y_train)
    # BUG FIX: the original `X_train[[Y_train == 1]]` wrapped the boolean mask
    # in a list, which is not valid pandas boolean indexing; index with the
    # mask directly to obtain the minority-class row index.
    balanced_idx1 = X_train[Y_train == 1].index
    pp_train_idx = balanced_idx1.union(Y_train[Y_train == 0].sample(
        n=balanced_idx1.size, random_state=1234).index)

    X_train_balanced = X_train.loc[pp_train_idx, :]
    Y_train_balanced = Y_train.loc[pp_train_idx]
    A_train_balanced = A_train.loc[pp_train_idx]

    postprocess_est.fit(X_train_balanced,
                        Y_train_balanced,
                        sensitive_features=A_train_balanced)

    postprocess_preds = postprocess_est.predict(X_test,
                                                sensitive_features=A_test)

    return postprocess_preds
 def fit(self, _X, _Y, _classifier_name="logistic", _predictor="hard"):
     """Fit an ERM base classifier on the stored training data, then wrap it
     in a prefit ThresholdOptimizer under a demographic_parity constraint.

     Note: _X and _Y are ignored; training uses self.train_X / self.train_Y.
     """
     my_erm_classifier = erm_classifier(self.train_X, self.train_Y)
     my_erm_classifier.fit(self.train_X, self.train_Y, classifier_name=_classifier_name)
     self.model = ThresholdOptimizer(estimator=my_erm_classifier, \
             constraints="demographic_parity", prefit=True)
     # NOTE(review): `_predictor` is forwarded to ThresholdOptimizer.fit, which
     # is not a documented fairlearn parameter -- confirm it is accepted (the
     # sibling fit method later in this file omits it).
     self.model.fit(self.train_X, self.train_Y, \
             sensitive_features=self.sensitive_train, _predictor=_predictor) 
# Beispiel #3 (score: 0)
def test_threshold_optimization_equalized_odds_e2e(
        sensitive_features, sensitive_feature_names, expected_positive_p0,
        expected_positive_p1, expected_negative_p0, expected_negative_p1,
        X_transform, y_transform, sensitive_features_transform):
    """End-to-end check that the fitted predictions satisfy equalized odds."""
    X = X_transform(_format_as_list_of_lists(sensitive_features))
    y = y_transform(labels_ex)
    transformed_features = sensitive_features_transform(sensitive_features)

    optimizer = ThresholdOptimizer(
        unconstrained_predictor=ExamplePredictor(), constraints=EQUALIZED_ODDS)
    optimizer.fit(X, y, sensitive_features=transformed_features)
    probabilities = optimizer._pmf_predict(
        X, sensitive_features=transformed_features)

    # hoist the array conversions out of the per-group loop
    features_arr = np.array(sensitive_features)
    labels_arr = np.array(labels_ex)

    for feature_value in sensitive_feature_names:
        in_group = features_arr == feature_value
        # mean predicted distribution over the positive-/negative-labeled
        # members of this sensitive-feature group
        mean_pos = np.average(probabilities[in_group * (labels_arr == 1)],
                              axis=0)
        mean_neg = np.average(probabilities[in_group * (labels_arr == 0)],
                              axis=0)
        assert np.isclose(mean_pos[0], expected_positive_p0)
        assert np.isclose(mean_pos[1], expected_positive_p1)
        assert np.isclose(mean_neg[0], expected_negative_p0)
        assert np.isclose(mean_neg[1], expected_negative_p1)
# Beispiel #4 (score: 0)
def test_threshold_optimization_equalized_odds_e2e(data_X_y_sf):
    """End-to-end check that the fitted predictions satisfy equalized odds."""
    optimizer = ThresholdOptimizer(
        estimator=ExamplePredictor(scores_ex), constraints=EQUALIZED_ODDS)
    optimizer.fit(data_X_y_sf.X, data_X_y_sf.y,
                  sensitive_features=data_X_y_sf.sensitive_features)
    probabilities = optimizer._pmf_predict(
        data_X_y_sf.X, sensitive_features=data_X_y_sf.sensitive_features)

    expected_ps = _expected_ps_equalized_odds[data_X_y_sf.example_name]
    merged_features = _map_into_single_column(data_X_y_sf.sensitive_features)

    for feature_value in data_X_y_sf.feature_names:
        in_group = merged_features == feature_value
        # mean predicted distribution over positive-/negative-labeled members
        mean_pos = np.average(probabilities[in_group * (labels_ex == 1)],
                              axis=0)
        mean_neg = np.average(probabilities[in_group * (labels_ex == 0)],
                              axis=0)
        assert np.isclose(mean_pos[0], expected_ps[_POS_P0])
        assert np.isclose(mean_pos[1], expected_ps[_POS_P1])
        assert np.isclose(mean_neg[0], expected_ps[_NEG_P0])
        assert np.isclose(mean_neg[1], expected_ps[_NEG_P1])
 def fit(self):
     """Fit the stored ERM classifier, then fit a prefit ThresholdOptimizer
     on the training data under the constraint named by self.metric.
     """
     self.erm_classifier.fit(self.train_X, self.train_Y)
     self.model = ThresholdOptimizer(estimator=self.erm_classifier,
                                     constraints=self.metric,
                                     prefit=True)
     self.model.fit(self.train_X,
                    self.train_Y,
                    sensitive_features=self.sensitive_train)
def run_thresholdoptimizer_classification(estimator):
    """Smoke-test ThresholdOptimizer classification on the adult dataset."""
    features, labels, sensitive = fetch_adult()

    optimizer = ThresholdOptimizer(estimator=estimator, prefit=False)
    optimizer.fit(features, labels, sensitive_features=sensitive)

    predictions = optimizer.predict(features, sensitive_features=sensitive)
    assert predictions is not None
# Beispiel #7 (score: 0)
def _fit_and_plot(constraints, plotting_data):
    """Fit a ThresholdOptimizer and render its threshold plot; return the figure."""
    predictor = ThresholdOptimizer(
        estimator=ExamplePredictor(scores_ex), constraints=constraints)
    predictor.fit(plotting_data.X, plotting_data.y,
                  sensitive_features=plotting_data.sensitive_features)
    figure, axes = plt.subplots(1, 1)
    plot_threshold_optimizer(predictor, ax=axes, show_plot=False)
    return figure
def test_threshold_optimization_degenerate_labels(data_X_sf, y_transform, constraints):
    """fit must reject labels that are degenerate within some sensitive group."""
    degenerate_y = y_transform(degenerate_labels_ex)

    optimizer = ThresholdOptimizer(estimator=ExamplePredictor(scores_ex),
                                   constraints=constraints,
                                   predict_method='predict')

    offending_feature = _degenerate_labels_feature_name[data_X_sf.example_name]
    expected_message = DEGENERATE_LABELS_ERROR_MESSAGE.format(offending_feature)
    with pytest.raises(ValueError, match=expected_message):
        optimizer.fit(data_X_sf.X, degenerate_y,
                      sensitive_features=data_X_sf.sensitive_features)
def test_predict_output_0_or_1(data_X_y_sf, constraints):
    """Every prediction from the fitted optimizer is a hard 0/1 label."""
    optimizer = ThresholdOptimizer(estimator=ExamplePredictor(scores_ex),
                                   constraints=constraints,
                                   predict_method='predict')
    optimizer.fit(data_X_y_sf.X, data_X_y_sf.y,
                  sensitive_features=data_X_y_sf.sensitive_features)

    predictions = optimizer.predict(
        data_X_y_sf.X, sensitive_features=data_X_y_sf.sensitive_features)
    assert all(label in [0, 1] for label in predictions)
def test_threshold_optimization_non_binary_labels(data_X_y_sf, constraints):
    """fit must reject label vectors containing values other than 0 and 1."""
    corrupted_y = deepcopy(data_X_y_sf.y)
    corrupted_y[0] = 2

    optimizer = ThresholdOptimizer(estimator=ExamplePredictor(scores_ex),
                                   constraints=constraints,
                                   predict_method='predict')

    with pytest.raises(ValueError, match=_LABELS_NOT_0_1_ERROR_MESSAGE):
        optimizer.fit(data_X_y_sf.X, corrupted_y,
                      sensitive_features=data_X_y_sf.sensitive_features)
# Beispiel #11 (score: 0)
def test_threshold_optimization_degenerate_labels(X_transform, y_transform,
                                                  sensitive_features_transform,
                                                  constraints):
    """fit must reject degenerate labels (legacy unconstrained_predictor API)."""
    X = X_transform(_format_as_list_of_lists(sensitive_features_ex1))
    degenerate_y = y_transform(degenerate_labels_ex)
    transformed_features = sensitive_features_transform(sensitive_features_ex1)

    optimizer = ThresholdOptimizer(
        unconstrained_predictor=ExamplePredictor(), constraints=constraints)

    expected_message = DEGENERATE_LABELS_ERROR_MESSAGE.format('A')
    with pytest.raises(ValueError, match=expected_message):
        optimizer.fit(X, degenerate_y, sensitive_features=transformed_features)
# Beispiel #12 (score: 0)
def test_inconsistent_input_data_types(X, y, sensitive_features, constraints):
    """fit must raise TypeError (with the format error message) whenever any of
    X, y, or sensitive_features is None.
    """
    adjusted_predictor = ThresholdOptimizer(
        unconstrained_predictor=ExamplePredictor(), constraints=constraints)

    error_message = INPUT_DATA_FORMAT_ERROR_MESSAGE.format(
        type(X).__name__,
        type(y).__name__,
        type(sensitive_features).__name__)

    # BUG FIX: the original condition `X is None or y is None and
    # sensitive_features is None` parsed as `X is None or (y is None and
    # sensitive_features is None)` because `and` binds tighter than `or`, so
    # parametrizations where only y or only sensitive_features was None were
    # silently skipped and the test passed vacuously.
    if X is None or y is None or sensitive_features is None:
        with pytest.raises(TypeError) as exception:
            adjusted_predictor.fit(X, y, sensitive_features=sensitive_features)
        assert str(exception.value) == error_message
# Beispiel #13 (score: 0)
def test_predict_output_0_or_1(sensitive_features, sensitive_feature_names,
                               X_transform, y_transform,
                               sensitive_features_transform, constraints):
    """Every prediction is a hard 0/1 label (legacy unconstrained_predictor API)."""
    X = X_transform(_format_as_list_of_lists(sensitive_features))
    y = y_transform(labels_ex)
    transformed_features = sensitive_features_transform(sensitive_features)

    optimizer = ThresholdOptimizer(
        unconstrained_predictor=ExamplePredictor(), constraints=constraints)
    optimizer.fit(X, y, sensitive_features=transformed_features)

    predictions = optimizer.predict(X, sensitive_features=transformed_features)
    assert all(label in [0, 1] for label in predictions)
def test_predict_different_argument_lengths(data_X_y_sf, constraints):
    """predict must reject X and sensitive_features of mismatched lengths."""
    optimizer = ThresholdOptimizer(
        estimator=ExamplePredictor(scores_ex),
        constraints=constraints,
        predict_method="predict",
    )
    optimizer.fit(data_X_y_sf.X, data_X_y_sf.y,
                  sensitive_features=data_X_y_sf.sensitive_features)

    mismatch_message = "Found input variables with inconsistent numbers of samples"

    # sensitive_features one element short
    with pytest.raises(ValueError, match=mismatch_message):
        optimizer.predict(
            data_X_y_sf.X,
            sensitive_features=data_X_y_sf.sensitive_features[:-1])

    # X one element short
    with pytest.raises(ValueError, match=mismatch_message):
        optimizer.predict(
            data_X_y_sf.X[:-1],
            sensitive_features=data_X_y_sf.sensitive_features)
def test_threshold_optimization_different_input_lengths(data_X_y_sf, constraints):
    """fit must reject mismatched-length inputs and empty inputs.

    First loop drops one element from X or y in turn; second loop empties
    X or y entirely.
    """
    n = len(X_ex)
    expected_exception_messages = {
        "inconsistent": 'Found input variables with inconsistent numbers of samples',
        "empty": 'Found array with 0 sample'
    }

    # drop a single element from X or from y in turn
    for permutation in [(0, 1), (1, 0)]:
        # FIX: the original called .format("X, sensitive_features, and y") on a
        # match string containing no replacement fields (a no-op), and built
        # the predictor inside pytest.raises, so a constructor error would have
        # satisfied the raises check spuriously. Construct first, match plain.
        adjusted_predictor = ThresholdOptimizer(
            estimator=ExamplePredictor(scores_ex),
            constraints=constraints,
            predict_method='predict')
        with pytest.raises(ValueError,
                           match=expected_exception_messages['inconsistent']):
            adjusted_predictor.fit(data_X_y_sf.X[:n - permutation[0]],
                                   data_X_y_sf.y[:n - permutation[1]],
                                   sensitive_features=data_X_y_sf.sensitive_features)

    # try providing empty lists in all combinations
    for permutation in [(0, n, 'inconsistent'), (n, 0, 'empty')]:
        adjusted_predictor = ThresholdOptimizer(
            estimator=ExamplePredictor(scores_ex),
            constraints=constraints,
            predict_method='predict')
        with pytest.raises(ValueError, match=expected_exception_messages[permutation[2]]):
            adjusted_predictor.fit(data_X_y_sf.X[:n - permutation[0]],
                                   data_X_y_sf.y[:n - permutation[1]],
                                   sensitive_features=data_X_y_sf.sensitive_features)
# Beispiel #16 (score: 0)
def test_predict_different_argument_lengths(sensitive_features,
                                            sensitive_feature_names,
                                            X_transform, y_transform,
                                            sensitive_features_transform,
                                            constraints):
    """predict must reject mismatched lengths (legacy unconstrained_predictor API)."""
    X = X_transform(_format_as_list_of_lists(sensitive_features))
    y = y_transform(labels_ex)
    transformed_features = sensitive_features_transform(sensitive_features)

    optimizer = ThresholdOptimizer(
        unconstrained_predictor=ExamplePredictor(), constraints=constraints)
    optimizer.fit(X, y, sensitive_features=transformed_features)

    mismatch_message = DIFFERENT_INPUT_LENGTH_ERROR_MESSAGE.format(
        "X and sensitive_features")

    # sensitive_features one element short
    with pytest.raises(ValueError, match=mismatch_message):
        optimizer.predict(
            X,
            sensitive_features=sensitive_features_transform(
                sensitive_features[:-1]))

    # X one element short
    with pytest.raises(ValueError, match=mismatch_message):
        optimizer.predict(
            X_transform(_format_as_list_of_lists(sensitive_features))[:-1],
            sensitive_features=transformed_features)
# Beispiel #17 (score: 0)
def test_threshold_optimization_different_input_lengths(
        X_transform, y_transform, sensitive_features_transform, constraints):
    """fit must reject mismatched-length and empty inputs (legacy API)."""
    n = len(sensitive_features_ex1)

    def build_inputs(x_cut, y_cut):
        # Assemble X, y, sensitive_features with the requested truncations.
        X = X_transform(
            _format_as_list_of_lists(sensitive_features_ex1)[:n - x_cut])
        y = y_transform(labels_ex[:n - y_cut])
        sensitive_features = sensitive_features_transform(
            sensitive_features_ex1)
        return X, y, sensitive_features

    # drop a single element from X or from y in turn
    for x_cut, y_cut in [(0, 1), (1, 0)]:
        with pytest.raises(ValueError,
                           match=DIFFERENT_INPUT_LENGTH_ERROR_MESSAGE.format(
                               "X, sensitive_features, and y")):
            X, y, sensitive_features = build_inputs(x_cut, y_cut)
            optimizer = ThresholdOptimizer(
                unconstrained_predictor=ExamplePredictor(),
                constraints=constraints)
            optimizer.fit(X, y, sensitive_features=sensitive_features)

    # try providing empty lists in all combinations
    for x_cut, y_cut in [(0, n), (n, 0)]:
        X, y, sensitive_features = build_inputs(x_cut, y_cut)
        optimizer = ThresholdOptimizer(
            unconstrained_predictor=ExamplePredictor(),
            constraints=constraints)
        with pytest.raises(ValueError, match=EMPTY_INPUT_ERROR_MESSAGE):
            optimizer.fit(X, y, sensitive_features=sensitive_features)
# Beispiel #18 (score: 0)
def test_no_matplotlib(constraints):
    """fit with plot=True must raise RuntimeError when matplotlib is unavailable."""
    n_samples = 50
    n_features = 50
    n_sensitive_feature_values = 2
    n_classes = 2

    threshold_optimizer = ThresholdOptimizer(unconstrained_predictor=FakePredictor(),
                                             constraints=constraints,
                                             plot=True)
    with pytest.raises(RuntimeError) as exc:
        threshold_optimizer.fit(X=np.random.random((n_samples, n_features)),
                                y=np.random.randint(n_classes, size=n_samples),
                                sensitive_features=np.random.randint(n_sensitive_feature_values,
                                                                     size=n_samples))
    # BUG FIX: this assertion was inside the `with pytest.raises` block, after
    # the statement that raises, so it was unreachable and never checked the
    # message. It must run after the context manager captures the exception.
    assert str(exc.value) == _MATPLOTLIB_IMPORT_ERROR_MESSAGE
# Beispiel #19 (score: 0)
def test_threshold_optimization_non_binary_labels(X_transform, y_transform,
                                                  sensitive_features_transform,
                                                  constraints):
    """fit must reject non-binary labels (legacy unconstrained_predictor API)."""
    corrupted_labels = copy.deepcopy(labels_ex)
    corrupted_labels[0] = 2

    X = X_transform(_format_as_list_of_lists(sensitive_features_ex1))
    y = y_transform(corrupted_labels)
    transformed_features = sensitive_features_transform(sensitive_features_ex1)

    optimizer = ThresholdOptimizer(
        unconstrained_predictor=ExamplePredictor(), constraints=constraints)

    with pytest.raises(ValueError, match=NON_BINARY_LABELS_ERROR_MESSAGE):
        optimizer.fit(X, y, sensitive_features=transformed_features)
def test_constraints_not_supported():
    """An unrecognized constraints string must be rejected with ValueError."""
    with pytest.raises(ValueError, match=NOT_SUPPORTED_CONSTRAINTS_ERROR_MESSAGE):
        optimizer = ThresholdOptimizer(estimator=ExamplePredictor(scores_ex),
                                       constraints="UnsupportedConstraints",
                                       predict_method='predict')
        optimizer.fit(X_ex, labels_ex,
                      sensitive_features=sensitive_features_ex1)
def test_random_state_threshold_optimizer():
    """Verify that predict's random_state yields reproducible predictions.

    This test case reproduces the problem reported in issue 588 if the
    random_state does not work as intended within ThresholdOptimizer.
    https://github.com/fairlearn/fairlearn/issues/588
    """
    X_train, X_test, y_train, y_test, race_train, race_test = _get_test_data()

    # Fit a simple logistic regression as the base estimator.
    base_model = LogisticRegression(max_iter=1000, random_state=0)
    base_model.fit(X_train, y_train)

    # Post-process it with an equalized-odds threshold optimizer.
    optimizer = ThresholdOptimizer(estimator=base_model,
                                   constraints='equalized_odds',
                                   grid_size=1000)
    optimizer.fit(X_train, y_train, sensitive_features=race_train)

    reference = optimizer.predict(X_test,
                                  sensitive_features=race_test,
                                  random_state=0)
    # The same seed must reproduce identical predictions every time...
    for _ in range(100):
        repeat = optimizer.predict(X_test,
                                   sensitive_features=race_test,
                                   random_state=0)
        assert (reference == repeat).all()
    # ...while a different seed must change at least one prediction.
    different = optimizer.predict(X_test,
                                  sensitive_features=race_test,
                                  random_state=1)
    assert (reference != different).any()
# Beispiel #22 (score: 0)
def test_threshold_optimization_demographic_parity_e2e(data_X_y_sf):
    """End-to-end check that the fitted predictions satisfy demographic parity."""
    optimizer = ThresholdOptimizer(
        estimator=ExamplePredictor(scores_ex), constraints=DEMOGRAPHIC_PARITY)
    optimizer.fit(data_X_y_sf.X, data_X_y_sf.y,
                  sensitive_features=data_X_y_sf.sensitive_features)
    probabilities = optimizer._pmf_predict(
        data_X_y_sf.X, sensitive_features=data_X_y_sf.sensitive_features)

    expected_ps = _expected_ps_demographic_parity[data_X_y_sf.example_name]
    merged_features = _map_into_single_column(data_X_y_sf.sensitive_features)

    # the mean predicted distribution must match per sensitive-feature value
    for feature_value in data_X_y_sf.feature_names:
        group_mean = np.average(
            probabilities[merged_features == feature_value], axis=0)
        assert np.isclose(group_mean[0], expected_ps[_P0])
        assert np.isclose(group_mean[1], expected_ps[_P1])
def test_both_predictor_and_estimator_error(constraints):
    """Supplying both unconstrained_predictor and estimator must raise."""
    with pytest.raises(ValueError,
                       match=EITHER_PREDICTOR_OR_ESTIMATOR_ERROR_MESSAGE):
        optimizer = ThresholdOptimizer(
            unconstrained_predictor=ExamplePredictor(scores_ex),
            estimator=ExampleEstimator(),
            constraints=constraints)
        optimizer.fit(X_ex, labels_ex,
                      sensitive_features=sensitive_features_ex1)
# Beispiel #24 (score: 0)
def test_predict_before_fit_error(X_transform, sensitive_features_transform,
                                  predict_method_name, constraints):
    """Calling any predict method before fit must raise ValueError."""
    X = X_transform(_format_as_list_of_lists(sensitive_features_ex1))
    transformed_features = sensitive_features_transform(sensitive_features_ex1)
    unfitted = ThresholdOptimizer(
        unconstrained_predictor=ExamplePredictor(), constraints=constraints)

    with pytest.raises(ValueError, match=PREDICT_BEFORE_FIT_ERROR_MESSAGE):
        getattr(unfitted, predict_method_name)(
            X, sensitive_features=transformed_features)
class demographic_parity_classifier(base_binary_classifier):
    # Binary classifier whose predictions are post-processed by fairlearn's
    # ThresholdOptimizer under a demographic-parity constraint.

    def fit(self, _X, _Y, _classifier_name="logistic", _predictor="hard"):
        """Fit an ERM base classifier on the stored training data, then wrap it
        in a prefit ThresholdOptimizer with a demographic_parity constraint.

        Note: _X and _Y are ignored; training uses self.train_X / self.train_Y.
        """
        my_erm_classifier = erm_classifier(self.train_X, self.train_Y)
        my_erm_classifier.fit(self.train_X, self.train_Y, classifier_name=_classifier_name)
        self.model = ThresholdOptimizer(estimator=my_erm_classifier, \
                constraints="demographic_parity", prefit=True)
        # NOTE(review): `_predictor` is forwarded to ThresholdOptimizer.fit,
        # which is not a documented fairlearn parameter -- confirm it is
        # accepted (the fit method at L106-113 of this file omits it).
        self.model.fit(self.train_X, self.train_Y, \
                sensitive_features=self.sensitive_train, _predictor=_predictor) 
    
    def predict(self, x_samples, sensitive_features):
        """Return hard 0/1 predictions from the fitted ThresholdOptimizer."""
        y_samples = self.model.predict(x_samples, sensitive_features=sensitive_features)
        return y_samples
    
    def get_accuracy(self, X, y_true, sensitive_features):
        """Return accuracy; for 0/1 labels, 1 - mean squared error equals accuracy."""
        y_pred = self.predict(X, sensitive_features)
        return 1 - np.sum(np.power(y_pred - y_true, 2))/len(y_true) 

    def predict_proba(self, x_samples, sensitive_features):
        """Return per-class probability estimates via the optimizer's _pmf_predict."""
        y_samples = self.model._pmf_predict(x_samples, sensitive_features=sensitive_features)
        return y_samples
def test_predict_method(predict_method):
    """ThresholdOptimizer must invoke the estimator method named by predict_method.

    'auto' is expected to resolve to predict_proba.
    """
    class Dummy(BaseEstimator, ClassifierMixin):
        # Each prediction hook raises with its own name so the test can
        # observe which one ThresholdOptimizer actually called.
        def fit(self, X, y):
            return self

        def predict(self, X):
            raise Exception("predict")

        def predict_proba(self, X):
            raise Exception("predict_proba")

        def decision_function(self, X):
            raise Exception("decision_function")

    X, y = make_classification()
    sensitive_feature = np.random.randint(0, 2, len(y))
    optimizer = ThresholdOptimizer(estimator=Dummy(), predict_method=predict_method)
    expected = "predict_proba" if predict_method == "auto" else predict_method
    with pytest.raises(Exception, match=expected):
        optimizer.fit(X, y, sensitive_features=sensitive_feature)
def test_predict_before_fit_error(X_transform, sensitive_features_transform,
                                  predict_method_name, constraints):
    """Calling any predict method before fit must raise a not-fitted error."""
    X = X_transform(sensitive_features_ex1)
    transformed_features = sensitive_features_transform(sensitive_features_ex1)
    unfitted = ThresholdOptimizer(
        unconstrained_predictor=ExamplePredictor(scores_ex),
        constraints=constraints)

    with pytest.raises(ValueError, match='instance is not fitted yet'):
        getattr(unfitted, predict_method_name)(
            X, sensitive_features=transformed_features)
def run_thresholdoptimizer_classification(estimator):
    """Run classification test with ThresholdOptimizer.

    Verifies mitigation does not worsen the demographic parity difference.
    """
    X_train, Y_train, A_train, X_test, Y_test, A_test = fetch_adult()

    # Baseline: the same estimator trained without any mitigation.
    baseline = copy.deepcopy(estimator)
    baseline.fit(X_train, Y_train)
    baseline_predictions = baseline.predict(X_test)

    mitigator = ThresholdOptimizer(estimator=estimator,
                                   prefit=False,
                                   predict_method='predict')
    mitigator.fit(X_train, Y_train, sensitive_features=A_train)
    mitigated_predictions = mitigator.predict(X_test, sensitive_features=A_test)

    unmitigated_gap = demographic_parity_difference(
        Y_test, baseline_predictions, sensitive_features=A_test)
    mitigated_gap = demographic_parity_difference(
        Y_test, mitigated_predictions, sensitive_features=A_test)
    assert mitigated_gap <= unmitigated_gap
# Beispiel #29 (score: 0)
def test_threshold_optimization_demographic_parity_e2e(
        sensitive_features, sensitive_feature_names, expected_p0, expected_p1,
        X_transform, y_transform, sensitive_features_transform):
    """End-to-end demographic parity check (legacy unconstrained_predictor API)."""
    X = X_transform(_format_as_list_of_lists(sensitive_features))
    y = y_transform(labels_ex)
    transformed_features = sensitive_features_transform(sensitive_features)

    optimizer = ThresholdOptimizer(
        unconstrained_predictor=ExamplePredictor(),
        constraints=DEMOGRAPHIC_PARITY)
    optimizer.fit(X, y, sensitive_features=transformed_features)
    probabilities = optimizer._pmf_predict(
        X, sensitive_features=transformed_features)

    # the mean predicted distribution must match per sensitive-feature value
    features_arr = np.array(sensitive_features)
    for feature_value in sensitive_feature_names:
        group_mean = np.average(
            probabilities[features_arr == feature_value], axis=0)
        assert np.isclose(group_mean[0], expected_p0)
        assert np.isclose(group_mean[1], expected_p1)
def test_constraints_objective_pairs(constraints, objective):
    """Check the fitted interpolation parameters for each constraints/objective
    pair against the `results` expectation table, or the raised error message
    for unsupported pairings.
    """
    # Tiny hand-crafted dataset: scores as the single feature, one binary
    # sensitive feature, and 0/1 labels.
    X = pd.Series(
        [0, 1, 2, 3, 4, 0, 1, 2, 3]).to_frame()
    sf = pd.Series(
        [0, 0, 0, 0, 0, 1, 1, 1, 1])
    y = pd.Series(
        [1, 0, 1, 1, 1, 0, 1, 1, 1])
    thr_optimizer = ThresholdOptimizer(
        estimator=PassThroughPredictor(),
        constraints=constraints,
        objective=objective,
        grid_size=20,
        predict_method='predict')
    expected = results[constraints+", "+objective]
    # A string expectation marks an unsupported pairing: fit must raise a
    # ValueError whose message starts with that string.
    if type(expected) is str:
        with pytest.raises(ValueError) as error_info:
            thr_optimizer.fit(X, y, sensitive_features=sf)
        assert str(error_info.value).startswith(expected)
    else:
        thr_optimizer.fit(X, y, sensitive_features=sf)
        # Compare interpolation parameters per sensitive-feature value (0/1).
        res = thr_optimizer.interpolated_thresholder_.interpolation_dict
        for key in [0, 1]:
            assert res[key]['p0'] == pytest.approx(expected[key]['p0'], PREC)
            assert res[key]['operation0']._operator == expected[key]['op0']
            assert res[key]['operation0']._threshold == pytest.approx(expected[key]['thr0'], PREC)
            assert res[key]['p1'] == pytest.approx(expected[key]['p1'], PREC)
            assert res[key]['operation1']._operator == expected[key]['op1']
            assert res[key]['operation1']._threshold == pytest.approx(expected[key]['thr1'], PREC)
            # p_ignore / prediction_constant are present only for some pairings.
            if 'p_ignore' in expected[key]:
                assert res[key]['p_ignore'] == pytest.approx(expected[key]['p_ignore'], PREC)
                assert res[key]['prediction_constant'] == \
                    pytest.approx(expected[key]['prediction_constant'], PREC)
            else:
                assert 'p_ignore' not in res[key]