Example #1
0
def test_tree_shap():
    """Smoke-test the TreeShap wrapper: explain one Adult row and check metadata."""
    np.random.seed(0)  # keep any sampling inside the explainer deterministic

    wrapper = TreeShap(make_tree_shap())
    dataset = fetch_adult()
    holdout = dataset.data[30001:, :]
    first_row = holdout[0:1].tolist()
    parsed = json.loads(wrapper.explain(first_row).to_json())
    assert parsed["meta"]["name"] == "TreeShap"
Example #2
0
def test_tree_shap():
    """Load a pickled TreeShap explainer from storage and smoke-test explain().

    Downloads the explainer artifact, wraps it in TreeShap, explains one row
    of the Adult holdout and verifies the explanation metadata.
    """
    os.environ.clear()  # start from a clean env so Storage.download sees no stale config
    alibi_model = os.path.join(kfserving.Storage.download(ADULT_EXPLAINER_URI),
                               EXPLAINER_FILENAME)
    with open(alibi_model, "rb") as f:
        # NOTE(review): dill.load executes arbitrary code from the artifact —
        # only safe because ADULT_EXPLAINER_URI is a trusted fixture.
        alibi_model = dill.load(f)
        tree_shap = TreeShap(alibi_model)
        adult = fetch_adult()
        X_test = adult.data[30001:, :]
        np.random.seed(0)  # deterministic sampling inside the explainer
        explanation = tree_shap.explain(X_test[0:1].tolist())
        exp_json = json.loads(explanation.to_json())
        print(exp_json)
        # Bug fix: the test previously made no assertion at all; pin the
        # explanation metadata the same way the sibling TreeShap test does.
        assert exp_json["meta"]["name"] == "TreeShap"
Example #3
0
    def __init__(
        self,
        name: str,
        predict_fn: Callable,
        method: ExplainerMethod,
        config: Mapping,
        explainer: object = None,
        protocol: Protocol = Protocol.seldon_grpc,
        keras_model: keras.Model = None,
    ) -> None:
        """Select and construct the wrapper for the requested explanation method."""
        super().__init__(name)
        self.method = method
        self.protocol = protocol
        logging.info("Protocol is %s", str(self.protocol))

        # Lazy factories: only the wrapper for the selected method is built.
        builders = {
            ExplainerMethod.anchor_tabular:
                lambda: AnchorTabular(explainer, **config),
            ExplainerMethod.anchor_images:
                lambda: AnchorImages(explainer, **config),
            ExplainerMethod.anchor_text:
                lambda: AnchorText(predict_fn, explainer, **config),
            ExplainerMethod.kernel_shap:
                lambda: KernelShap(explainer, **config),
            ExplainerMethod.integrated_gradients:
                lambda: IntegratedGradients(keras_model, **config),
            ExplainerMethod.tree_shap:
                lambda: TreeShap(explainer, **config),
            ExplainerMethod.ale:
                lambda: ALE(explainer, **config),
        }
        if self.method not in builders:
            raise NotImplementedError
        # Annotate the first assignment so mypy infers the common wrapper type.
        self.wrapper: ExplainerWrapper = builders[self.method]()
Example #4
0
    def __init__(self,
                 name: str,
                 predictor_host: str,
                 method: ExplainerMethod,
                 config: Mapping,
                 explainer: object = None,
                 protocol: Protocol = Protocol.seldon_grpc,
                 tf_data_type: str = None,
                 keras_model: keras.Model = None):
        """Configure the explainer wrapper that proxies predictions to a remote host."""
        super().__init__(name)
        self.predictor_host = predictor_host
        logging.info("Predict URL set to %s", self.predictor_host)
        self.method = method
        self.protocol = protocol
        self.tf_data_type = tf_data_type
        logging.info("Protocol is %s", str(self.protocol))
        # V2 protocol metadata; populated later by callers, not here.
        self.v2_name = None
        self.v2_type = None
        self.v2_model_name = None

        # Lazy factories: only the wrapper for the selected method is built.
        builders = {
            ExplainerMethod.anchor_tabular:
                lambda: AnchorTabular(self._predict_fn, explainer, **config),
            ExplainerMethod.anchor_images:
                lambda: AnchorImages(self._predict_fn, explainer, **config),
            ExplainerMethod.anchor_text:
                lambda: AnchorText(self._predict_fn, explainer, **config),
            ExplainerMethod.kernel_shap:
                lambda: KernelShap(self._predict_fn, explainer, **config),
            ExplainerMethod.integrated_gradients:
                lambda: IntegratedGradients(keras_model, **config),
            ExplainerMethod.tree_shap:
                lambda: TreeShap(explainer, **config),
        }
        if self.method not in builders:
            raise NotImplementedError
        # Annotate the first assignment so mypy infers the common wrapper type.
        self.wrapper: ExplainerWrapper = builders[self.method]()
Example #5
0
class AlibiExplainer(ExplainerModel):
    """Explainer model that wraps an Alibi explanation method.

    Predictions the explainer needs are fetched from a remote predictor over
    one of the supported protocols (Seldon gRPC, Seldon HTTP, TensorFlow HTTP).
    """

    def __init__(self,
                 name: str,
                 predictor_host: str,
                 method: ExplainerMethod,
                 config: Mapping,
                 explainer: object = None,
                 protocol: Protocol = Protocol.seldon_grpc,
                 tf_data_type: str = None,
                 keras_model: keras.Model = None):
        """
        Parameters
        ----------
        name: model name handed to the ExplainerModel base class.
        predictor_host: host:port of the predictor serving this explainer.
        method: which Alibi explanation method to wrap.
        config: keyword arguments forwarded to the wrapper constructor.
        explainer: pre-loaded Alibi explainer object, if any.
        protocol: wire protocol used to reach the predictor.
        tf_data_type: optional TF dtype name used when building gRPC tensors.
        keras_model: model used only by the integrated_gradients method.
        """
        super().__init__(name)
        self.predictor_host = predictor_host
        logging.info("Predict URL set to %s", self.predictor_host)
        self.method = method
        self.protocol = protocol
        self.tf_data_type = tf_data_type
        logging.info("Protocol is %s", str(self.protocol))

        # Add type for first value to help pass mypy type checks
        if self.method is ExplainerMethod.anchor_tabular:
            self.wrapper: ExplainerWrapper = AnchorTabular(
                self._predict_fn, explainer, **config)
        elif self.method is ExplainerMethod.anchor_images:
            self.wrapper = AnchorImages(self._predict_fn, explainer, **config)
        elif self.method is ExplainerMethod.anchor_text:
            self.wrapper = AnchorText(self._predict_fn, explainer, **config)
        elif self.method is ExplainerMethod.kernel_shap:
            self.wrapper = KernelShap(self._predict_fn, explainer, **config)
        elif self.method is ExplainerMethod.integrated_gradients:
            self.wrapper = IntegratedGradients(keras_model, **config)
        elif self.method is ExplainerMethod.tree_shap:
            self.wrapper = TreeShap(explainer, **config)
        else:
            raise NotImplementedError

    def load(self):
        # The explainer is fully constructed in __init__; nothing to load here.
        pass

    def _predict_fn(self, arr: Union[np.ndarray, List]) -> np.ndarray:
        """Fetch predictions for ``arr`` from the remote predictor.

        Raises
        ------
        Exception: when the remote predictor returns a non-200 status.
        NotImplementedError: for an unsupported protocol.
        """
        # Bug fix: debug print(arr) replaced with a lazy debug log.
        logging.debug("Predict input: %s", arr)
        # Bug fix: isinstance instead of type(...) == list.
        if isinstance(arr, list):
            arr = np.array(arr)
        if self.protocol == Protocol.seldon_grpc:
            return self._grpc(arr)
        elif self.protocol == Protocol.seldon_http:
            payload = seldon.create_request(arr, seldon.SeldonPayload.NDARRAY)
            response_raw = requests.post(SELDON_PREDICTOR_URL_FORMAT.format(
                self.predictor_host),
                                         json=payload)
            if response_raw.status_code == 200:
                rh = seldon.SeldonRequestHandler(response_raw.json())
                response_list = rh.extract_request()
                return np.array(response_list)
            else:
                raise Exception(
                    "Failed to get response from model return_code:%d" %
                    response_raw.status_code)
        elif self.protocol == Protocol.tensorflow_http:
            instances = []
            for req_data in arr:
                if isinstance(req_data, np.ndarray):
                    instances.append(req_data.tolist())
                else:
                    instances.append(req_data)
            request = {"instances": instances}
            response = requests.post(
                TENSORFLOW_PREDICTOR_URL_FORMAT.format(self.predictor_host),
                json.dumps(request))
            if response.status_code != 200:
                raise Exception(
                    "Failed to get response from model return_code:%d" %
                    response.status_code)
            return np.array(response.json()["predictions"])
        else:
            # Bug fix: previously fell through and implicitly returned None,
            # violating the declared np.ndarray return type.
            raise NotImplementedError

    def explain(self, request: Dict) -> Any:
        """Run the wrapped explainer on the payload and return the explanation as a dict."""
        # Enum membership uses value identity, so `in` is equivalent to the
        # original chain of `is` comparisons.
        supported = (ExplainerMethod.anchor_tabular, ExplainerMethod.anchor_images,
                     ExplainerMethod.anchor_text, ExplainerMethod.kernel_shap,
                     ExplainerMethod.integrated_gradients, ExplainerMethod.tree_shap)
        if self.method in supported:
            if self.protocol == Protocol.tensorflow_http:
                explanation: Explanation = self.wrapper.explain(
                    request["instances"])
            else:
                rh = seldon.SeldonRequestHandler(request)
                response_list = rh.extract_request()
                explanation = self.wrapper.explain(response_list)
            explanationAsJsonStr = explanation.to_json()
            logging.info("Explanation: %s", explanationAsJsonStr)
            return json.loads(explanationAsJsonStr)
        else:
            raise NotImplementedError

    def _grpc(self, arr: np.ndarray) -> np.ndarray:
        """Call the predictor over Seldon gRPC and return predictions as an ndarray."""
        options = [('grpc.max_send_message_length', GRPC_MAX_MSG_LEN),
                   ('grpc.max_receive_message_length', GRPC_MAX_MSG_LEN)]
        channel = grpc.insecure_channel(self.predictor_host, options)
        stub = prediction_pb2_grpc.SeldonStub(channel)
        # Use the explicitly configured TF dtype when provided; otherwise let
        # make_tensor_proto infer it from the array.
        if self.tf_data_type is not None:
            datadef = prediction_pb2.DefaultData(
                tftensor=tf.make_tensor_proto(arr, self.tf_data_type))
        else:
            datadef = prediction_pb2.DefaultData(
                tftensor=tf.make_tensor_proto(arr))
        request = prediction_pb2.SeldonMessage(data=datadef)
        response = stub.Predict(request=request)
        arr_resp = tf.make_ndarray(response.data.tftensor)
        return arr_resp