def test_save_AnchorImage(ai_explainer, mnist_predictor):
    X = np.random.rand(28, 28, 1)
    exp0 = ai_explainer.explain(X)

    with tempfile.TemporaryDirectory() as temp_dir:
        ai_explainer.save(temp_dir)
        ai_explainer1 = load_explainer(temp_dir, predictor=mnist_predictor)

        assert isinstance(ai_explainer1, AnchorImage)
        assert ai_explainer.meta == ai_explainer1.meta

        exp1 = ai_explainer1.explain(X)
        assert exp0.meta == exp1.meta
def test_anchor_tabular():
    skmodel = SKLearnServer(IRIS_MODEL_URI)
    skmodel.load()

    with tempfile.TemporaryDirectory() as alibi_model_dir:
        make_anchor_tabular(alibi_model_dir)
        alibi_model = load_explainer(predictor=skmodel.predict, path=alibi_model_dir)
        anchor_tabular = AnchorTabular(alibi_model)

        test_data = np.array([[5.964, 4.006, 2.081, 1.031]])
        explanation = anchor_tabular.explain(test_data)
        explanation_json = json.loads(explanation.to_json())
        assert explanation_json["meta"]["name"] == "AnchorTabular"
def load(cls, path: Union[str, os.PathLike], predictor: Any) -> "Explainer":
    """
    Load an explainer from disk.

    Parameters
    ----------
    path
        Path to a directory containing the saved explainer.
    predictor
        Model or prediction function used to originally initialize the explainer.

    Returns
    -------
    An explainer instance.
    """
    return load_explainer(path, predictor)
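# A minimal, self-contained sketch of the save/load round trip exercised by the
# tests in this file. The iris dataset and logistic-regression model below are
# illustrative assumptions, not taken from the snippets above; only the
# save(), load_explainer() and meta-comparison pattern mirrors the tests.
import tempfile

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

from alibi.explainers import AnchorTabular
from alibi.saving import load_explainer


def _save_load_round_trip_sketch():
    data = load_iris()
    clf = LogisticRegression(max_iter=1000).fit(data.data, data.target)

    explainer = AnchorTabular(clf.predict, feature_names=data.feature_names)
    explainer.fit(data.data)

    with tempfile.TemporaryDirectory() as temp_dir:
        explainer.save(temp_dir)
        # The predictor is not serialized with the explainer, so it must be
        # supplied again when loading.
        restored = load_explainer(temp_dir, predictor=clf.predict)
        assert restored.meta == explainer.meta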
def test_save_TreeShap(tree_explainer, rf_classifier, iris_data):
    X = iris_data['X_test']
    exp0 = tree_explainer.explain(X)

    with tempfile.TemporaryDirectory() as temp_dir:
        tree_explainer.save(temp_dir)
        tree_explainer1 = load_explainer(temp_dir, predictor=rf_classifier)

        assert isinstance(tree_explainer1, TreeShap)
        assert tree_explainer.meta == tree_explainer1.meta

        exp1 = tree_explainer1.explain(X)
        assert exp0.meta == exp1.meta

        # TreeShap is deterministic
        assert_allclose(exp0.shap_values[0], exp1.shap_values[0])
def test_save_KernelShap(kshap_explainer, lr_classifier, adult_data):
    predictor = predict_fcn(predict_type='proba',
                            clf=lr_classifier,
                            preproc=adult_data['preprocessor'])
    X = adult_data['X_test'][:2]
    exp0 = kshap_explainer.explain(X)

    with tempfile.TemporaryDirectory() as temp_dir:
        kshap_explainer.save(temp_dir)
        kshap_explainer1 = load_explainer(temp_dir, predictor=predictor)

        assert isinstance(kshap_explainer1, KernelShap)
        assert kshap_explainer.meta == kshap_explainer1.meta

        # explain with the loaded explainer, not the original one
        exp1 = kshap_explainer1.explain(X)
        assert exp0.meta == exp1.meta
def test_save_AnchorTabular(atab_explainer, lr_classifier, adult_data):
    predictor = predict_fcn(predict_type='class',
                            clf=lr_classifier,
                            preproc=adult_data['preprocessor'])
    X = adult_data['X_test'][0]
    exp0 = atab_explainer.explain(X)

    with tempfile.TemporaryDirectory() as temp_dir:
        atab_explainer.save(temp_dir)
        atab_explainer1 = load_explainer(temp_dir, predictor=predictor)

        assert isinstance(atab_explainer1, AnchorTabular)
        assert atab_explainer.meta == atab_explainer1.meta

        exp1 = atab_explainer1.explain(X)
        assert exp0.meta == exp1.meta
def test_save_IG(ig_explainer, ffn_classifier, iris_data):
    X = iris_data['X_test']
    target = iris_data['y_test']
    exp0 = ig_explainer.explain(X, target=target)

    with tempfile.TemporaryDirectory() as temp_dir:
        ig_explainer.save(temp_dir)
        ig_explainer1 = load_explainer(temp_dir, predictor=ffn_classifier)

        assert isinstance(ig_explainer1, IntegratedGradients)
        assert ig_explainer.meta == ig_explainer1.meta

        # explain with the loaded explainer, not the original one
        exp1 = ig_explainer1.explain(X, target=target)
        assert exp0.meta == exp1.meta

        # IG is deterministic
        assert np.all(exp0.attributions[0] == exp1.attributions[0])
def test_save_ALE(ale_explainer, lr_classifier, iris_data):
    X = iris_data['X_test']
    exp0 = ale_explainer.explain(X)

    with tempfile.TemporaryDirectory() as temp_dir:
        ale_explainer.save(temp_dir)
        ale_explainer1 = load_explainer(temp_dir, predictor=lr_classifier.predict_proba)

        assert isinstance(ale_explainer1, ALE)
        # TODO: cannot assert ale_explainer.meta == ale_explainer1.meta
        # as meta is updated after calling explain

        exp1 = ale_explainer1.explain(X)
        assert exp0.meta == exp1.meta

        # ALE explanations are deterministic
        # assert exp0.data == exp1.data  # cannot compare directly as data holds
        # many different types. TODO: define equality for explanations, or
        # compare pydantic schemas?
        assert np.all(exp0.ale_values[0] == exp1.ale_values[0])
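# One possible workaround for the TODO above, sketched here rather than
# implemented in the tests: instead of defining equality on Explanation
# objects, serialize both explanations to JSON (as test_anchor_tabular already
# does with to_json) and compare the resulting dictionaries. This assumes the
# explanations were produced deterministically from the same input.
def explanations_equal(exp0, exp1) -> bool:
    """Compare two alibi Explanation objects via their JSON representation."""
    return json.loads(exp0.to_json()) == json.loads(exp1.to_json())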
def get_persisted_explainer(dirname, predict_fn: Callable) -> Explainer:
    logging.info(f"Loading Alibi model from {dirname}")
    return load_explainer(predictor=predict_fn, path=dirname)