Example #1
0
    def __init__(self,
                 name: str,
                 predictor_host: str,
                 method: ExplainerMethod,
                 config: Mapping,
                 explainer: object = None,
                 protocol: Protocol = Protocol.seldon_grpc,
                 tf_data_type: str = None,
                 keras_model: keras.Model = None ):
        """Configure the explainer wrapper for the requested method.

        :param name: model name passed to the base class.
        :param predictor_host: host of the predictor used for black-box calls.
        :param method: which Alibi explanation method to instantiate.
        :param config: keyword arguments forwarded to the wrapper constructor.
        :param explainer: optional pre-built Alibi explainer instance.
        :param protocol: wire protocol used when calling the predictor.
        :param tf_data_type: optional TF dtype name for tensor encoding.
        :param keras_model: model required by the IntegratedGradients method.
        :raises NotImplementedError: for an unsupported ``method``.
        """
        super().__init__(name)
        self.predictor_host = predictor_host
        logging.info("Predict URL set to %s", self.predictor_host)
        self.method = method
        self.protocol = protocol
        self.tf_data_type = tf_data_type
        logging.info("Protocol is %s", str(self.protocol))

        # These four wrappers all share the (predict_fn, explainer, **config)
        # constructor shape, so dispatch them through one table.
        black_box_wrappers = {
            ExplainerMethod.anchor_tabular: AnchorTabular,
            ExplainerMethod.anchor_images: AnchorImages,
            ExplainerMethod.anchor_text: AnchorText,
            ExplainerMethod.kernel_shap: KernelShap,
        }
        if self.method in black_box_wrappers:
            # Annotate the first assignment so mypy infers the union type.
            self.wrapper: ExplainerWrapper = black_box_wrappers[self.method](
                self._predict_fn, explainer, **config)
        elif self.method is ExplainerMethod.integrated_gradients:
            self.wrapper = IntegratedGradients(keras_model, **config)
        else:
            raise NotImplementedError
Example #2
0
    def __init__(
        self,
        name: str,
        predict_fn: Callable,
        method: ExplainerMethod,
        config: Mapping,
        explainer: object = None,
        protocol: Protocol = Protocol.seldon_grpc,
        keras_model: keras.Model = None,
    ) -> None:
        """Configure the explainer wrapper for the requested method.

        :param name: model name passed to the base class.
        :param predict_fn: prediction callable (only AnchorText needs it here).
        :param method: which Alibi explanation method to instantiate.
        :param config: keyword arguments forwarded to the wrapper constructor.
        :param explainer: optional pre-built Alibi explainer instance.
        :param protocol: wire protocol used when calling the predictor.
        :param keras_model: model required by the IntegratedGradients method.
        :raises NotImplementedError: for an unsupported ``method``.
        """
        super().__init__(name)
        self.method = method
        self.protocol = protocol
        logging.info("Protocol is %s", str(self.protocol))

        # Most wrappers are constructed from the saved explainer alone, so
        # dispatch those through a single table; the two methods with
        # different constructor shapes are handled explicitly.
        explainer_only = {
            ExplainerMethod.anchor_tabular: AnchorTabular,
            ExplainerMethod.anchor_images: AnchorImages,
            ExplainerMethod.kernel_shap: KernelShap,
            ExplainerMethod.tree_shap: TreeShap,
            ExplainerMethod.ale: ALE,
        }
        if self.method in explainer_only:
            # Annotate the first assignment so mypy infers the union type.
            self.wrapper: ExplainerWrapper = explainer_only[self.method](explainer, **config)
        elif self.method is ExplainerMethod.anchor_text:
            self.wrapper = AnchorText(predict_fn, explainer, **config)
        elif self.method is ExplainerMethod.integrated_gradients:
            self.wrapper = IntegratedGradients(keras_model, **config)
        else:
            raise NotImplementedError
def test_integrated_gradients():
    """Smoke test: IntegratedGradients yields attributions for one IMDB review.

    Downloads the pre-trained Keras IMDB model, explains a single padded test
    sequence, and asserts that a non-empty attribution list comes back.
    """
    with tempfile.TemporaryDirectory() as model_dir:
        download_from_gs(IMDB_KERAS_MODEL_URI, model_dir)
        keras_model_path = os.path.join(model_dir, KERAS_MODEL_FILENAME)
        # load_model materialises the model in memory, so the temporary
        # directory can be cleaned up before the model is used below.
        keras_model = keras.models.load_model(keras_model_path)
    integrated_gradients = IntegratedGradients(keras_model, layer=1)
    max_features = 10000  # vocabulary size — presumably matches the model's training config
    maxlen = 100          # padded sequence length — presumably what the model expects
    # Only the test split is needed; the original version also padded the
    # entire unused training split, which was pure wasted work in a test.
    _, (x_test, _) = imdb.load_data(num_words=max_features)
    x_test = sequence.pad_sequences(x_test, maxlen=maxlen)

    explanation = integrated_gradients.explain(x_test[0:1].tolist())
    attrs = explanation["attributions"]
    assert len(attrs) > 0
Example #4
0
def test_integrated_gradients():
    """Smoke test: IntegratedGradients yields attributions for one IMDB review.

    Downloads the pre-trained Keras IMDB model via kfserving storage, explains
    a single padded test sequence, and asserts that a non-empty attribution
    list comes back.
    """
    keras_model_path = os.path.join(
        kfserving.Storage.download(IMDB_KERAS_MODEL_URI), KERAS_MODEL_FILENAME)
    keras_model = keras.models.load_model(keras_model_path)
    integrated_gradients = IntegratedGradients(keras_model, layer=1)
    max_features = 10000  # vocabulary size — presumably matches the model's training config
    maxlen = 100          # padded sequence length — presumably what the model expects
    # Only the test split is needed; the original version also loaded, padded
    # and printed diagnostics for the unused training split — dead work and
    # leftover debug output in a test.
    _, (x_test, _) = imdb.load_data(num_words=max_features)
    x_test = sequence.pad_sequences(x_test, maxlen=maxlen)

    explanation = integrated_gradients.explain(x_test[0:1].tolist())
    attrs = explanation["attributions"]
    assert len(attrs) > 0
Example #5
0
class AlibiExplainer(ExplainerModel):
    """Serves Alibi explanations backed by a remote predictor.

    Prediction requests are forwarded to ``predictor_host`` over the
    configured protocol (Seldon gRPC, Seldon HTTP, or Tensorflow HTTP) and the
    resulting predictions are fed to the selected Alibi explainer wrapper.
    """

    def __init__(self,
                 name: str,
                 predictor_host: str,
                 method: ExplainerMethod,
                 config: Mapping,
                 explainer: object = None,
                 protocol: Protocol = Protocol.seldon_grpc,
                 tf_data_type: str = None,
                 keras_model: keras.Model = None ):
        """Configure the explainer wrapper for the requested method.

        :param name: model name passed to the base class.
        :param predictor_host: host of the predictor used for black-box calls.
        :param method: which Alibi explanation method to instantiate.
        :param config: keyword arguments forwarded to the wrapper constructor.
        :param explainer: optional pre-built Alibi explainer instance.
        :param protocol: wire protocol used when calling the predictor.
        :param tf_data_type: optional TF dtype name for gRPC tensor encoding.
        :param keras_model: model required by the IntegratedGradients method.
        :raises NotImplementedError: for an unsupported ``method``.
        """
        super().__init__(name)
        self.predictor_host = predictor_host
        logging.info("Predict URL set to %s", self.predictor_host)
        self.method = method
        self.protocol = protocol
        self.tf_data_type = tf_data_type
        logging.info("Protocol is %s", str(self.protocol))

        # Add type for first value to help pass mypy type checks
        if self.method is ExplainerMethod.anchor_tabular:
            self.wrapper: ExplainerWrapper = AnchorTabular(self._predict_fn, explainer, **config)
        elif self.method is ExplainerMethod.anchor_images:
            self.wrapper = AnchorImages(self._predict_fn, explainer, **config)
        elif self.method is ExplainerMethod.anchor_text:
            self.wrapper = AnchorText(self._predict_fn, explainer, **config)
        elif self.method is ExplainerMethod.kernel_shap:
            self.wrapper = KernelShap(self._predict_fn, explainer, **config)
        elif self.method is ExplainerMethod.integrated_gradients:
            self.wrapper = IntegratedGradients(keras_model, **config)
        else:
            raise NotImplementedError

    def load(self):
        # Nothing to load: the wrapper is fully constructed in __init__.
        pass

    def _predict_fn(self, arr: Union[np.ndarray, List]) -> np.ndarray:
        """Call the remote predictor with ``arr`` and return its predictions.

        :param arr: batch of instances (ndarray or nested list).
        :return: predictions as an ndarray.
        :raises Exception: when an HTTP predictor answers with a non-200 code.
        :raises NotImplementedError: for an unrecognised protocol (the
            original fell through and silently returned ``None``).
        """
        # isinstance is the idiomatic form (was ``type(arr) == list``); a
        # stray debug ``print(arr)`` has also been removed here.
        if isinstance(arr, list):
            arr = np.array(arr)
        if self.protocol == Protocol.seldon_grpc:
            return self._grpc(arr)
        if self.protocol == Protocol.seldon_http:
            payload = seldon.create_request(arr, seldon.SeldonPayload.NDARRAY)
            response_raw = requests.post(
                SELDON_PREDICTOR_URL_FORMAT.format(self.predictor_host), json=payload)
            if response_raw.status_code != 200:
                raise Exception(
                    "Failed to get response from model return_code:%d" % response_raw.status_code)
            rh = seldon.SeldonRequestHandler(response_raw.json())
            response_list = rh.extract_request()
            return np.array(response_list)
        if self.protocol == Protocol.tensorflow_http:
            # Tensorflow serving expects plain (JSON-serialisable) lists in
            # the "instances" field, so ndarrays are converted first.
            instances = [
                req_data.tolist() if isinstance(req_data, np.ndarray) else req_data
                for req_data in arr
            ]
            request = {"instances": instances}
            response = requests.post(
                KFSERVING_PREDICTOR_URL_FORMAT.format(self.predictor_host, self.name),
                data=json.dumps(request)  # explicit keyword; was passed positionally
            )
            if response.status_code != 200:
                raise Exception(
                    "Failed to get response from model return_code:%d" % response.status_code)
            return np.array(response.json()["predictions"])
        raise NotImplementedError("Unknown protocol %s" % self.protocol)

    def explain(self, request: Dict) -> Any:
        """Produce an explanation for the instances carried in ``request``.

        :param request: inbound payload; a Tensorflow-style dict with an
            "instances" key, or a Seldon message for the Seldon protocols.
        :return: the explanation as a JSON-compatible dict.
        :raises NotImplementedError: if the configured method is unsupported.
        """
        supported = (ExplainerMethod.anchor_tabular,
                     ExplainerMethod.anchor_images,
                     ExplainerMethod.anchor_text,
                     ExplainerMethod.kernel_shap,
                     ExplainerMethod.integrated_gradients)
        if self.method not in supported:
            raise NotImplementedError
        if self.protocol == Protocol.tensorflow_http:
            explanation: Explanation = self.wrapper.explain(request["instances"])
        else:
            rh = seldon.SeldonRequestHandler(request)
            response_list = rh.extract_request()
            explanation = self.wrapper.explain(response_list)
        explanationAsJsonStr = explanation.to_json()
        logging.info("Explanation: %s", explanationAsJsonStr)
        return json.loads(explanationAsJsonStr)

    def _grpc(self, arr: np.array) -> np.array:
        """Predict ``arr`` via the Seldon gRPC protocol.

        :param arr: batch of instances.
        :return: predictions decoded from the response tensor.
        """
        options = [
            ('grpc.max_send_message_length', GRPC_MAX_MSG_LEN),
            ('grpc.max_receive_message_length', GRPC_MAX_MSG_LEN)]
        channel = grpc.insecure_channel(self.predictor_host, options)
        stub = prediction_pb2_grpc.SeldonStub(channel)
        # Honour an explicitly configured TF dtype; otherwise let
        # make_tensor_proto infer it from the array.
        if self.tf_data_type is not None:
            datadef = prediction_pb2.DefaultData(
                tftensor=tf.make_tensor_proto(arr, self.tf_data_type))
        else:
            datadef = prediction_pb2.DefaultData(
                tftensor=tf.make_tensor_proto(arr))
        request = prediction_pb2.SeldonMessage(data=datadef)
        response = stub.Predict(request=request)
        return tf.make_ndarray(response.data.tftensor)