Example #1
    def fit(
        self, X: Optional[np.ndarray] = None, batch_size: int = 32, **kwargs
    ) -> None:
        """
        Takes the same parameters as Keras's :func:`model.fit` function.
        """
        generator = iutils.BatchSequence(X, batch_size)
        return self._fit_generator(generator, **kwargs)  # type: ignore
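For orientation, a minimal usage sketch (not from the snippet above): in iNNvestigate, trainable analyzers such as PatternNet expose a fit method of this shape, which wraps the array X in a BatchSequence and hands it to a generator-based training path. The names model and x_train below are placeholders.

# Sketch under assumptions: "pattern.net" analyzer, a trained Keras model without
# a final softmax, and training data x_train are placeholder names, not from the source.
import innvestigate

analyzer = innvestigate.create_analyzer("pattern.net", model)
analyzer.fit(x_train, batch_size=256, verbose=1)  # X is wrapped in a BatchSequence internally
analysis = analyzer.analyze(x_train[:1])          # patterns are now available for analysis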
Example #2
    def compute(self, X, batch_size=32, verbose=0):
        """
        Compute and return the patterns for the model and the data `X`.

        :param X: Data to compute patterns.
        :param batch_size: Batch size to use.
        :param verbose: As for keras model.fit.
        """
        generator = iutils.BatchSequence(X, batch_size)
        return self.compute_generator(generator, verbose=verbose)
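A hedged usage sketch for this method: assuming it belongs to a pattern computer such as innvestigate.tools.PatternComputer (an assumption, the enclosing class is not shown in the snippet), patterns for a model could be computed roughly as follows; model and x_train are placeholders.

# Sketch under assumptions: PatternComputer as the enclosing class, "relu" as a
# valid pattern_type, and model / x_train as placeholder names.
import innvestigate.tools as itools

computer = itools.PatternComputer(model, pattern_type="relu")
patterns = computer.compute(x_train, batch_size=32, verbose=1)  # X is wrapped in a BatchSequence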
Example #3
import keras
import numpy as np

import innvestigate
import innvestigate.tools.perturbate
import innvestigate.utils as iutils


def test_fast__PerturbationAnalysis():
    # Some test data
    if keras.backend.image_data_format() == "channels_first":
        input_shape = (2, 1, 4, 4)
    else:
        input_shape = (2, 4, 4, 1)
    x = np.arange(2 * 4 * 4).reshape(input_shape)
    generator = iutils.BatchSequence([x, np.zeros(x.shape[0])],
                                     batch_size=x.shape[0])

    # Simple model
    model = keras.models.Sequential([
        keras.layers.Flatten(input_shape=x.shape[1:]),
        keras.layers.Dense(1, use_bias=False),
    ])

    weights = np.arange(4 * 4 * 1).reshape((4 * 4, 1))
    model.layers[-1].set_weights([weights])
    model.compile(loss="mean_squared_error", optimizer="sgd")

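    # With weights 0..15 and flattened inputs 0..15 and 16..31, the outputs are
    # sum(i * i) = 1240 and sum(i * (i + 16)) = 1240 + 16 * 120 = 3160.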
    expected_output = np.array([[1240.0], [3160.0]])
    assert np.all(np.isclose(model.predict(x), expected_output))

    # Analyzer
    analyzer = innvestigate.create_analyzer("gradient",
                                            model,
                                            postprocess="abs")

    # Run perturbation analysis
    perturbation = innvestigate.tools.perturbate.Perturbation(
        "zeros", region_shape=(2, 2), in_place=False
    )

    perturbation_analysis = innvestigate.tools.perturbate.PerturbationAnalysis(
        analyzer,
        model,
        generator,
        perturbation,
        recompute_analysis=False,
        steps=3,
        regions_per_step=1,
        verbose=False,
    )

    scores = perturbation_analysis.compute_perturbation_analysis()

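    # The first entry is the unperturbed MSE against the zero targets,
    # mean(1240**2, 3160**2) = 5,761,600; each following entry is the loss after
    # zeroing one more 2x2 region (most relevant first), so the loss shrinks.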
    expected_scores = np.array([5761600.0, 1654564.0, 182672.0, 21284.0])
    assert np.all(np.isclose(scores, expected_scores))

###############################################################################
###############################################################################
###############################################################################

if __name__ == "__main__":
    print("This script is deprecated and has been replaced by a Jupyter Notebook version (mnist_perturbation.ipynb).")
    num_classes = 10
    batch_size = 256
    data = fetch_data()
    data_preprocessed = (preprocess(data[0]), data[1],
                         preprocess(data[2]), data[3])
    x_test, y_test = data_preprocessed[2:]
    y_test = keras.utils.to_categorical(y_test, num_classes)
    generator = iutils.BatchSequence([x_test, y_test], batch_size=batch_size)

    # Build and train model
    model_without_softmax, model_with_softmax = create_model()
    train_model(model_with_softmax, data_preprocessed)
    model_without_softmax.set_weights(model_with_softmax.get_weights())

    # Setup analyzer
    method = ("lrp.z_baseline", {}, ivis.heatmap, "LRP-Z")
    analyzer = innvestigate.create_analyzer(method[0],
                                            model_without_softmax,
                                            **method[1])

    # Perturbation analysis
    perturbation_function = "mean"
    perturbation = Perturbation(perturbation_function, region_shape=(5, 5))