Example #1
    def _get_neuron_mask(self):
        """
        Select which neurons are considered for the pattern computation.
        """
        Ys = get_active_neuron_io(self.layer,
                                  self._active_node_indices,
                                  return_i=False,
                                  return_o=True)

        return ilayers.OnesLike()(Ys[0])

    def apply(self, Xs, Ys, Rs, reverse_state):
        grad = ilayers.GradientWRT(len(Xs))
        # Create dummy forward path to take the derivative below.
        Ys = kutils.apply(self._layer_wo_act_b, Xs)

        # Compute the sum of the weights.
        ones = ilayers.OnesLike()(Xs)
        Zs = iutils.to_list(self._layer_wo_act_b(ones))
        # Weight the incoming relevance.
        tmp = [ilayers.SafeDivide()([a, b]) for a, b in zip(Rs, Zs)]
        # Redistribute the relevances along the gradient.
        tmp = iutils.to_list(grad(Xs + Ys + tmp))
        return tmp
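
The `apply` method above implements a flat-weighting relevance rule: a forward pass of `self._layer_wo_act_b` on an all-ones input yields the per-output normalizer `Zs`, the incoming relevance `Rs` is safely divided by it, and the result is pushed back onto the inputs through the gradient of the dummy forward path. A minimal sketch of the same redistribution in plain TensorFlow 2 follows; the `Dense` layer, the shapes, and `eps` are illustrative assumptions, not part of the library:

import numpy as np
import tensorflow as tf

# Hypothetical standalone layer and data, only to illustrate the pattern.
layer = tf.keras.layers.Dense(4, use_bias=False)
X = tf.constant(np.random.rand(8, 6).astype("float32"))
R = tf.constant(np.random.rand(8, 4).astype("float32"))  # incoming relevance

with tf.GradientTape() as tape:
    tape.watch(X)
    Y = layer(X)  # dummy forward path to take the derivative below

# "Sum of the weights" per output unit: a forward pass on all ones.
Z = layer(tf.ones_like(X))
eps = 1e-12
Z_safe = tf.where(tf.abs(Z) < eps, eps * tf.ones_like(Z), Z)  # SafeDivide analogue
tmp = R / Z_safe  # weight the incoming relevance

# Redistribute along the gradient: J^T @ tmp, i.e. tmp @ W^T for a Dense layer.
R_in = tape.gradient(Y, X, output_gradients=tmp)
print(R_in.shape)  # (8, 6): relevance mapped back onto the layer input

Dividing by the weight sums and then multiplying by the transposed Jacobian is what `GradientWRT(len(Xs))` applied to `Xs + Ys + tmp` expresses symbolically in the snippet above.
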
Example #3
    def get_stats_from_batch(self):
        # Get the neuron-wise I/O for this layer.
        layer = kgraph.copy_layer_wo_activation(self.layer,
                                                keep_bias=False,
                                                reuse_symbolic_tensors=False)
        # Readjust the layer nodes.
        for i in range(kgraph.get_layer_inbound_count(self.layer)):
            layer(self.layer.get_input_at(i))
        Xs, Ys = get_active_neuron_io(layer, self._active_node_indices)
        if len(Ys) != 1:
            raise ValueError("Assume that kernel layer have only one output.")
        X, Y = Xs[0], Ys[0]

        # Create layers that keep a running mean for the desired stats.
        self.mean_x = ilayers.RunningMeans()
        self.mean_y = ilayers.RunningMeans()
        self.mean_xy = ilayers.RunningMeans()

        # Compute mask and active neuron counts.
        mask = ilayers.AsFloatX()(self._get_neuron_mask())
        Y_masked = keras.layers.multiply([Y, mask])
        count = ilayers.CountNonZero(axis=0)(mask)
        count_all = ilayers.Sum(axis=0)(ilayers.OnesLike()(mask))

        # Get means ...
        def norm(x, count):
            return ilayers.SafeDivide(factor=1)([x, count])

        # ... along active neurons.
        mean_x = norm(ilayers.Dot()([ilayers.Transpose()(X), mask]), count)
        mean_xy = norm(ilayers.Dot()([ilayers.Transpose()(X), Y_masked]),
                       count)

        _, a = self.mean_x([mean_x, count])
        _, b = self.mean_xy([mean_xy, count])

        # ... along all neurons.
        mean_y = norm(ilayers.Sum(axis=0)(Y), count_all)
        _, c = self.mean_y([mean_y, count_all])

        # Create a dummy output to have a connected graph.
        # Needs to have the shape (mb_size, 1)
        dummy = keras.layers.Average()([a, b, c])
        return ilayers.Sum(axis=None)(dummy)
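
`get_stats_from_batch` wires up symbolic layers that accumulate, per output neuron, the running means E[x], E[y], and E[xy] over the dataset, with x and y restricted to the neurons selected by `_get_neuron_mask`; these means are the ingredients of the covariance-style pattern statistics. The per-batch arithmetic reduces to the following NumPy sketch (the shapes and the positive-activation mask are illustrative assumptions; the `_get_neuron_mask` shown in Example #1 would yield an all-ones mask instead):

import numpy as np

rng = np.random.default_rng(0)
X = rng.normal(size=(32, 6))             # layer input,  (mb_size, n_in)
Y = X @ rng.normal(size=(6, 4))          # layer output, (mb_size, n_out)

mask = (Y > 0).astype(X.dtype)           # e.g. a ReLU-style "active neuron" mask
count = np.maximum(mask.sum(axis=0), 1)  # active samples per output neuron
count_all = float(Y.shape[0])            # all samples

# Means along active neurons: X^T @ mask, as Dot(Transpose(X), mask) above.
mean_x = (X.T @ mask) / count            # (n_in, n_out)
mean_xy = (X.T @ (Y * mask)) / count     # (n_in, n_out)

# Mean along all samples.
mean_y = Y.sum(axis=0) / count_all       # (n_out,)

The `RunningMeans` layers then aggregate these batch values across all batches; the `Average`/`Sum` output at the end is only there so that Keras sees one connected graph to execute, as the comment in the code notes.
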
    def _head_mapping(self, X):
        return ilayers.OnesLike()(X)
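
`_head_mapping` defines how the backward analysis is seeded at the model's output: returning a tensor of ones gives every output neuron the same initial relevance. A one-line analogue (a hypothetical helper, not the library API):

import tensorflow as tf

def head_mapping(model_output):
    # Seed the backward pass with ones at the output, as OnesLike above.
    return tf.ones_like(model_output)
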