Example #1
    def _get_pattern_for_layer(self, layer, _state):
        # Patterns are stored in the order of the model's kernel-bearing
        # layers, so a layer's position in that filtered list is also its
        # pattern index.
        layers = [
            l for l in kgraph.get_model_layers(self._model)
            if kchecks.contains_kernel(l)
        ]
        return self._patterns[layers.index(layer)]
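The lookup relies on an ordering invariant: self._patterns must have been stored in the same order in which kernel-bearing layers appear in the model. A minimal sketch of that invariant, assuming a Keras 2-style Sequential model:

import keras

model = keras.models.Sequential([
    keras.layers.Dense(4, input_shape=(3,)),  # kernel-bearing -> pattern 0
    keras.layers.Activation("relu"),          # no kernel, skipped
    keras.layers.Dense(2),                    # kernel-bearing -> pattern 1
])
kernel_layers = [l for l in model.layers if hasattr(l, "kernel")]
assert kernel_layers.index(model.layers[2]) == 1  # second Dense -> index 1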
Example #2
    def _create_analysis(self, *args, **kwargs):
        self._add_conditional_reverse_mapping(
            # Apply to all layers that contain a kernel.
            kchecks.contains_kernel,
            SimpleLRPRule,
            name="z_rule",
        )

        return super()._create_analysis(*args, **kwargs)
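The condition is just a predicate over layers; SimpleLRPRule is then applied to every layer the predicate accepts. A hypothetical simplification of what kchecks.contains_kernel tests (a sketch, not the library's exact code) shows why it selects Dense and Conv layers but not plain activations:

def contains_kernel(layer):
    # Built Dense/Conv layers expose a kernel weight attribute;
    # activation-only layers do not.
    return any(
        hasattr(layer, attr)
        for attr in ("kernel", "depthwise_kernel", "pointwise_kernel")
    )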
Example #3
    def _create_analysis(self, *args, **kwargs):
        low, high = self._bounds_low, self._bounds_high

        # Bake the analyzer's bounds into the rule class via a closure, so
        # the framework can instantiate it without extra arguments.
        class BoundedProxyRule(lrp_rules.BoundedRule):
            def __init__(self, *args, **kwargs):
                super().__init__(*args, low=low, high=high, **kwargs)

        self._add_conditional_reverse_mapping(
            lambda l: kchecks.is_input_layer(l) and kchecks.contains_kernel(l),
            BoundedProxyRule,
            name="deep_taylor_first_layer_bounded",
            priority=10,  # do first
        )

        return super()._create_analysis(*args, **kwargs)
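The proxy class is a closure trick: the framework instantiates rule classes on its own and cannot pass extra constructor arguments, so the bounds are baked into a subclass instead. The same pattern in isolation, with a hypothetical stand-in for lrp_rules.BoundedRule:

class BoundedRule:
    # Hypothetical stand-in for lrp_rules.BoundedRule.
    def __init__(self, low=-1.0, high=1.0):
        self.low, self.high = low, high


def make_bounded_proxy(low, high):
    # The returned class takes no extra arguments; the bounds travel
    # inside the closure.
    class BoundedProxyRule(BoundedRule):
        def __init__(self, *args, **kwargs):
            super().__init__(*args, low=low, high=high, **kwargs)

    return BoundedProxyRule


rule_class = make_bounded_proxy(0.0, 255.0)
rule = rule_class()          # instantiated without arguments
print(rule.low, rule.high)   # 0.0 255.0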
Example #4
# Imports assumed by this snippet (module paths follow iNNvestigate's
# internal layout and may differ between versions); `Layer` and `Tensor`
# are the library's type aliases for Keras layers and backend tensors.
from typing import List, Optional, Tuple, Union

import keras
import keras.backend as K
import numpy as np

import innvestigate.layers as ilayers
import innvestigate.utils as iutils
import innvestigate.utils.keras.checks as kchecks


def get_layer_neuronwise_io(
    layer: Layer,
    node_index: int = 0,
    Xs: Optional[List[Tensor]] = None,
    Ys: Optional[List[Tensor]] = None,
    return_i: bool = True,
    return_o: bool = True,
) -> Union[Tuple[List[Tensor], List[Tensor]], List[Tensor]]:
    """Returns the input and output for each neuron in a layer

    Returns the symbolic input and output for each neuron in a layer.
    For a dense layer this is the input output itself.
    For convolutional layers this method extracts for each neuron
    the input output mapping.

    At the moment this function is designed
    to work with dense and conv2d layers.

    :param layer: The targeted layer.
    :param node_index: Index of the layer node to use.
    :param Xs: Ignore the layer's input but use Xs instead.
    :param Ys: Ignore the layer's output but use Ys instead.
    :param return_i: Return the inputs.
    :param return_o: Return the outputs.
    :return: Inputs and outputs, if specified, for each individual neuron.
    """
    if not kchecks.contains_kernel(layer):
        raise NotImplementedError("Only layers with a kernel are supported.")

    if Xs is None:
        Xs = iutils.to_list(layer.get_input_at(node_index))
    if Ys is None:
        Ys = iutils.to_list(layer.get_output_at(node_index))

    if isinstance(layer, keras.layers.Dense):
        # Xs and Ys are already in shape.
        ret_Xs = Xs
        ret_Ys = Ys
    elif isinstance(layer, keras.layers.Conv2D):
        kernel = get_kernel(layer)
        # Expect filter dimension to be last.
        n_channels = kernel.shape[-1]

        if return_i:
            extract_patches = ilayers.ExtractConv2DPatches(
                kernel.shape[:2],
                kernel.shape[2],
                layer.strides,
                layer.dilation_rate,
                layer.padding,
            )
            # shape [samples, out_row, out_col, weight_size]
            reshape = ilayers.Reshape((-1, np.prod(kernel.shape[:3])))
            ret_Xs = [reshape(extract_patches(x)) for x in Xs]

        if return_o:
            # Get Ys into shape (samples, channels)
            if K.image_data_format() == "channels_first":
                # Ys shape is [samples, channels, out_row, out_col]
                def _reshape(x):
                    x = ilayers.Transpose((0, 2, 3, 1))(x)
                    x = ilayers.Reshape((-1, n_channels))(x)
                    return x

            else:
                # Ys shape is [samples, out_row, out_col, channels]
                def _reshape(x):
                    x = ilayers.Reshape((-1, n_channels))(x)
                    return x

            ret_Ys = [_reshape(x) for x in Ys]

    else:
        raise NotImplementedError("Only Dense and Conv2D layers are supported.")

    # Each ret_Xs entry has shape (n, d); each ret_Ys entry has shape
    # (n, channels), where n enumerates the individual neurons.
    if return_i and return_o:
        return ret_Xs, ret_Ys
    elif return_i:
        return ret_Xs
    elif return_o:
        return ret_Ys
    else:
        raise ValueError("At least one of return_i and return_o must be True.")
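The reshaping is easier to see with concrete shapes. Below is a minimal numpy sketch (shapes only; the real function builds symbolic Keras operations) of what the Conv2D branch produces, assuming channels_last data, "valid" padding, and stride 1:

import numpy as np

samples, rows, cols = 8, 28, 28
kh, kw, c_in, c_out = 3, 3, 1, 32                  # kernel shape, channels_last
out_rows, out_cols = rows - kh + 1, cols - kw + 1  # "valid" padding, stride 1

# return_i: every output neuron sees one flattened kh*kw*c_in input patch.
patches = np.zeros((samples, out_rows, out_cols, kh * kw * c_in))
neuron_inputs = patches.reshape(-1, kh * kw * c_in)

# return_o: every output position holds c_out activations.
ys = np.zeros((samples, out_rows, out_cols, c_out))
neuron_outputs = ys.reshape(-1, c_out)

print(neuron_inputs.shape)   # (5408, 9)  == (8 * 26 * 26, 3 * 3 * 1)
print(neuron_outputs.shape)  # (5408, 32) == (8 * 26 * 26, 32)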
Example #5
    def has_pattern(self):
        # Patterns exist only for layers that carry a kernel.
        return kchecks.contains_kernel(self.layer)
Example #6
    def _create_analysis(self, *args: Any, **kwargs: Any):

        # Kernel layers.
        self._add_conditional_reverse_mapping(
            lambda l: (
                kchecks.contains_kernel(l) and kchecks.contains_activation(l)
            ),
            lrp_rules.Alpha1Beta0IgnoreBiasRule,
            name="deep_taylor_kernel_w_relu",
        )
        self._add_conditional_reverse_mapping(
            lambda l: (
                kchecks.contains_kernel(l)
                and not kchecks.contains_activation(l)
            ),
            lrp_rules.WSquareRule,
            name="deep_taylor_kernel_wo_relu",
        )

        # ReLU activation layers.
        self._add_conditional_reverse_mapping(
            lambda l: (
                not kchecks.contains_kernel(l)
                and kchecks.only_relu_activation(l)
            ),
            self._gradient_reverse_mapping,
            name="deep_taylor_relu",
        )

        # Batch normalization layers: a conv layer is assumed to come
        # before, so the inputs are treated as unbounded.
        bn_mapping = kgraph.apply_mapping_to_fused_bn_layer(
            lrp_rules.WSquareRule,
            fuse_mode="one_linear",
        )
        self._add_conditional_reverse_mapping(
            kchecks.is_batch_normalization_layer,
            bn_mapping,
            name="deep_taylor_batch_norm",
        )
        # Special layers.
        self._add_conditional_reverse_mapping(
            kchecks.is_max_pooling,
            self._gradient_reverse_mapping,
            name="deep_taylor_max_pooling",
        )
        self._add_conditional_reverse_mapping(
            kchecks.is_average_pooling,
            self._gradient_reverse_mapping,
            name="deep_taylor_average_pooling",
        )
        self._add_conditional_reverse_mapping(
            lambda l: isinstance(l, keras.layers.Add),
            # Ignore scaling with 0.5
            self._gradient_reverse_mapping,
            name="deep_taylor_add",
        )
        self._add_conditional_reverse_mapping(
            lambda l: isinstance(
                l,
                (
                    keras.layers.convolutional.UpSampling1D,
                    keras.layers.convolutional.UpSampling2D,
                    keras.layers.convolutional.UpSampling3D,
                    keras.layers.core.Dropout,
                    keras.layers.core.SpatialDropout1D,
                    keras.layers.core.SpatialDropout2D,
                    keras.layers.core.SpatialDropout3D,
                ),
            ),
            self._gradient_reverse_mapping,
            name="deep_taylor_special_layers",
        )

        # Layers w/o transformation
        self._add_conditional_reverse_mapping(
            lambda l: isinstance(
                l,
                (
                    keras.engine.topology.InputLayer,
                    keras.layers.convolutional.Cropping1D,
                    keras.layers.convolutional.Cropping2D,
                    keras.layers.convolutional.Cropping3D,
                    keras.layers.convolutional.ZeroPadding1D,
                    keras.layers.convolutional.ZeroPadding2D,
                    keras.layers.convolutional.ZeroPadding3D,
                    keras.layers.Concatenate,
                    keras.layers.core.Flatten,
                    keras.layers.core.Masking,
                    keras.layers.core.Permute,
                    keras.layers.core.RepeatVector,
                    keras.layers.core.Reshape,
                ),
            ),
            self._gradient_reverse_mapping,
            name="deep_taylor_no_transform",
        )

        return super()._create_analysis(*args, **kwargs)
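All of the registrations above feed a single dispatch mechanism: when the analyzer processes a layer, the registered conditions are tested (higher priority first, as with priority=10 in Example #3) and the first match supplies the reverse mapping. A hypothetical simplification of that lookup:

def select_reverse_mapping(layer, registered):
    # registered: list of (priority, condition, mapping) tuples.
    # Scan from highest priority down; the first matching condition wins.
    for _priority, condition, mapping in sorted(
        registered, key=lambda entry: -entry[0]
    ):
        if condition(layer):
            return mapping
    return None  # caller falls back to the analyzer's default mapping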