def _fit_and_plot(constraints, plotting_data):
    adjusted_predictor = ThresholdOptimizer(
        estimator=ExamplePredictor(scores_ex),
        constraints=constraints)
    adjusted_predictor.fit(plotting_data.X, plotting_data.y,
                           sensitive_features=plotting_data.sensitive_features)
    fig, ax = plt.subplots(1, 1)
    plot_threshold_optimizer(adjusted_predictor, ax=ax, show_plot=False)
    return fig
def test_no_matplotlib(constraints):
    n_samples = 50
    n_features = 50
    n_sensitive_feature_values = 2
    n_classes = 2
    threshold_optimizer = ThresholdOptimizer(estimator=FakePredictor(),
                                             constraints=constraints)
    threshold_optimizer.fit(X=np.random.random((n_samples, n_features)),
                            y=np.random.randint(n_classes, size=n_samples),
                            sensitive_features=np.random.randint(
                                n_sensitive_feature_values, size=n_samples))
    with pytest.raises(RuntimeError) as exc:
        plot_threshold_optimizer(threshold_optimizer)
    assert str(exc.value) == _MATPLOTLIB_IMPORT_ERROR_MESSAGE
threshold_optimizer = ThresholdOptimizer(
    estimator=pipeline,
    constraints="demographic_parity",
    predict_method="predict_proba",
    prefit=False,
)
threshold_optimizer.fit(X_train, y_train, sensitive_features=A_train)
print(threshold_optimizer.predict(X_test, sensitive_features=A_test))
print(
    json.dumps(
        threshold_optimizer.interpolated_thresholder_.interpolation_dict,
        default=str,
        indent=4,
    )
)
plot_threshold_optimizer(threshold_optimizer)

# %%
# Similarly, :class:`fairlearn.reductions.ExponentiatedGradient` works with
# pipelines. Since it internally needs to pass :code:`sample_weight` to the
# underlying estimator, we have to tell it how to route :code:`sample_weight`
# to just the :code:`"classifier"` step: the step name followed by two
# underscores and :code:`sample_weight`.

exponentiated_gradient = ExponentiatedGradient(
    estimator=pipeline,
    constraints=DemographicParity(),
    sample_weight_name="classifier__sample_weight",
)
exponentiated_gradient.fit(X_train, y_train, sensitive_features=A_train)
print(exponentiated_gradient.predict(X_test))
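
# %%
# A quick sanity check on the :code:`"classifier__sample_weight"` prefix
# (a minimal sketch, assuming :code:`pipeline` is a scikit-learn
# :class:`~sklearn.pipeline.Pipeline` whose final step is named
# :code:`"classifier"`): listing the pipeline's step names shows which
# prefix is expected when routing fit parameters such as
# :code:`sample_weight` to a particular step.

print(list(pipeline.named_steps.keys()))  # the final entry is expected to be 'classifier'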