Example #1
def copy_layer(layer: Layer,
               keep_bias: bool = True,
               name_template: Optional[str] = None,
               weights: Optional[Union[List[Tensor], List[np.ndarray]]] = None,
               reuse_symbolic_tensors: bool = True,
               **kwargs) -> Layer:
    """Copy a Keras layer.

    :param layer: The layer to copy.
    :param keep_bias: Whether to keep a potential bias term.
    :param name_template: Optional template for the new layer's name,
      applied as name_template % old_name; if None, the name is left
      for Keras to choose.
    :param weights: Weights to set in the new layer.
      Options: np tensors, symbolic tensors, or None,
      in which case the weights of the old layer are used.
    :param reuse_symbolic_tensors: If the old layer's weights are
      reused, whether to share its symbolic weight tensors or to copy
      the Numpy weights.
    :return: The new layer instance.
    """
    config = layer.get_config()
    if name_template is None:
        config["name"] = None
    else:
        config["name"] = name_template % config["name"]
    if hasattr(layer, "use_bias"):
        if keep_bias is False and config.get("use_bias", True):
            config["use_bias"] = False
            if weights is None:
                if reuse_symbolic_tensors:
                    weights = layer.weights[:-1]
                else:
                    weights = layer.get_weights()[:-1]
    return get_layer_from_config(layer, config, weights=weights, **kwargs)
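A minimal usage sketch (hypothetical names; assumes keras is imported and copy_layer is in scope):

# Build "fc1" by calling it on an input so it has weights and a node.
x = keras.layers.Input(shape=(5,))
dense = keras.layers.Dense(10, name="fc1")
dense(x)
# Copy without the bias; name_template yields "copy_fc1" and the
# new layer shares fc1's symbolic kernel tensor.
copy = copy_layer(dense, keep_bias=False, name_template="copy_%s")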
Example #2
def update_symbolic_weights(layer: Layer,
                            weight_mapping: Dict[str, Tensor]) -> None:
    """Updates the symbolic tensors of a layer

    Updates the symbolic tensors of a layer by replacing them.

    Note that this does not update the loss or anything similar!
    Use with caution!

    :param layer: Targeted layer.
    :param weight_mapping: Dict mapping weight attribute names to the
      replacement weight tensors.
    """

    trainable_weight_ids = [id(x) for x in layer._trainable_weights]
    non_trainable_weight_ids = [id(x) for x in layer._non_trainable_weights]

    for name, weight in weight_mapping.items():
        current_weight = getattr(layer, name)
        current_weight_id = id(current_weight)

        if current_weight_id in trainable_weight_ids:
            idx = trainable_weight_ids.index(current_weight_id)
            layer._trainable_weights[idx] = weight
        else:
            idx = non_trainable_weight_ids.index(current_weight_id)
            layer._non_trainable_weights[idx] = weight

        setattr(layer, name, weight)
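A hypothetical sketch (assumes dense is a built keras.layers.Dense whose kernel attribute is named "kernel"):

# Swap the kernel for a rescaled version of itself.
scaled = dense.kernel * 0.5
update_symbolic_weights(dense, {"kernel": scaled})
# dense.kernel and dense._trainable_weights now point at `scaled`;
# losses built on the old tensor are, as warned above, not updated.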
Example #3
    def meta_mapping(layer: Layer, reverse_state: Dict):
        # Get the BN parameters in order: [gamma], [beta], mean, variance.
        weights = layer.weights[:]  # copy the list
        if layer.scale:
            gamma = weights.pop(0)
        else:
            gamma = K.ones_like(weights[0])
        if layer.center:
            beta = weights.pop(0)
        else:
            beta = K.zeros_like(weights[0])
        mean, variance = weights

        if fuse_mode == "one_linear":
            tmp = K.sqrt(variance + layer.epsilon)
            tmp_k = gamma / tmp
            tmp_b = beta - mean * tmp_k

            inputs = layer.get_input_at(0)
            surrogate_layer = ScaleLayer(tmp_k, tmp_b)
            # init layer
            surrogate_layer(inputs)
            actual_mapping = mapping(surrogate_layer, reverse_state).apply
        else:
            tmp = K.sqrt(variance + layer.epsilon)
            tmp_k1 = 1 / tmp
            tmp_b1 = -mean / tmp
            tmp_k2 = gamma
            tmp_b2 = beta

            inputs = layer.get_input_at(0)
            surrogate_layer1 = ScaleLayer(tmp_k1, tmp_b1)
            surrogate_layer2 = ScaleLayer(tmp_k2, tmp_b2)
            # init layers
            surrogate_layer1(inputs)
            surrogate_layer2(inputs)
            # TODO (alber): update reverse state
            actual_mapping_1 = mapping(surrogate_layer1, reverse_state).apply
            actual_mapping_2 = mapping(surrogate_layer2, reverse_state).apply

            def actual_mapping(
                Xs: List[Tensor],
                Ys: List[Tensor],
                reversed_Ys: List[Tensor],
                reverse_state,
            ):

                # Recompute the intermediate tensors of the forward pass.
                X2s = kapply(surrogate_layer1, Xs)
                # Reverse the second mapping first, then the first one.
                # TODO (alber): update reverse state
                reversed_X2s = actual_mapping_2(X2s, Ys, reversed_Ys,
                                                reverse_state)
                return actual_mapping_1(Xs, X2s, reversed_X2s, reverse_state)

        return actual_mapping
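Both branches rest on the batch-norm identity y = gamma * (x - mean) / sqrt(var + eps) + beta, which is affine in x and can be folded into a single scale/offset pair ("one_linear") or split into a normalization step and a scaling step ("two_linear"). A NumPy sketch checking the one_linear algebra:

import numpy as np

x = np.random.randn(4, 3)
gamma, beta = np.full(3, 2.0), np.full(3, 0.5)
mean, var, eps = x.mean(axis=0), x.var(axis=0), 1e-3

bn = gamma * (x - mean) / np.sqrt(var + eps) + beta
k = gamma / np.sqrt(var + eps)  # tmp_k above
b = beta - mean * k             # tmp_b above
assert np.allclose(bn, k * x + b)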
Example #4
def get_layer_from_config(
    old_layer: Layer,
    new_config: Dict[str, Any],
    weights: Optional[Union[List[np.ndarray], List[Tensor]]] = None,
    reuse_symbolic_tensors: bool = True,
) -> Layer:
    """Creates a new layer from a config

    Creates a new layer given a modified config and, optionally, weights.

    :param old_layer: The layer to use as a base.
    :param new_config: The config from which to create the new layer.
    :param weights: Weights to set in the new layer.
      Options: np tensors, symbolic tensors, or None,
      in which case the weights of the old layer are used.
    :param reuse_symbolic_tensors: If the old layer's weights are
      reused, whether to share its symbolic weight tensors or to copy
      the Numpy weights.
    :return: The new layer instance.
    """
    new_layer = old_layer.__class__.from_config(new_config)

    if weights is None:
        if reuse_symbolic_tensors:
            weights = old_layer.weights
        else:
            weights = old_layer.get_weights()

    if len(weights) > 0:
        input_shapes = old_layer.get_input_shape_at(0)
        # todo: inspect and set initializers to something fast for speedup
        new_layer.build(input_shapes)

        is_np_weight = [isinstance(x, np.ndarray) for x in weights]
        if all(is_np_weight):
            new_layer.set_weights(weights)
        else:
            if any(is_np_weight):
                raise ValueError("Expect either all weights to be "
                                 "np tensors or symbolic tensors.")

            symbolic_names = get_symbolic_weight_names(old_layer)
            update = {
                name: weight
                for name, weight in zip(symbolic_names, weights)
            }
            update_symbolic_weights(new_layer, update)

    return new_layer
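A hypothetical sketch (assumes old_dense is a Dense layer that has already been called on an input):

# Rebuild the layer under a new name; with weights=None and
# reuse_symbolic_tensors=True the twin shares old_dense's symbolic
# kernel and bias tensors.
config = old_dense.get_config()
config["name"] = "shared_" + config["name"]
twin = get_layer_from_config(old_dense, config)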
Example #5
def get_input_layers(layer: Layer) -> Set[Layer]:
    """Returns all layers that created this layer's inputs."""
    ret = set()

    for node_index in range(len(layer._inbound_nodes)):
        Xs = iutils.to_list(layer.get_input_at(node_index))
        for X in Xs:
            ret.add(X._keras_history[0])

    return ret
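A short sketch (assumes model is a built keras Model):

# Which layers produce the inputs of the final layer?
last = model.layers[-1]
producers = get_input_layers(last)
print(sorted(l.name for l in producers))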
Example #6
def get_layer_neuronwise_io(
    layer: Layer,
    node_index: int = 0,
    Xs: Optional[List[Tensor]] = None,
    Ys: Optional[List[Tensor]] = None,
    return_i: bool = True,
    return_o: bool = True,
) -> Union[Tuple[List[Tensor], List[Tensor]], List[Tensor]]:
    """Returns the input and output for each neuron in a layer

    Returns the symbolic input and output for each neuron in a layer.
    For a dense layer these are the layer's input and output themselves.
    For convolutional layers this method extracts, for each neuron,
    the corresponding input patch and output activation.

    At the moment this function is designed
    to work with dense and conv2d layers.

    :param layer: The targeted layer.
    :param node_index: Index of the layer node to use.
    :param Xs: Ignore the layer's input but use Xs instead.
    :param Ys: Ignore the layer's output but use Ys instead.
    :param return_i: Return the inputs.
    :param return_o: Return the outputs.
    :return: Inputs and outputs, if specified, for each individual neuron.
    """
    if not kchecks.contains_kernel(layer):
        raise NotImplementedError()

    if Xs is None:
        Xs = iutils.to_list(layer.get_input_at(node_index))
    if Ys is None:
        Ys = iutils.to_list(layer.get_output_at(node_index))

    if isinstance(layer, keras.layers.Dense):
        # Xs and Ys are already in shape.
        ret_Xs = Xs
        ret_Ys = Ys
    elif isinstance(layer, keras.layers.Conv2D):
        kernel = get_kernel(layer)
        # Expect filter dimension to be last.
        n_channels = kernel.shape[-1]

        if return_i:
            extract_patches = ilayers.ExtractConv2DPatches(
                kernel.shape[:2],
                kernel.shape[2],
                layer.strides,
                layer.dilation_rate,
                layer.padding,
            )
            # shape [samples, out_row, out_col, weight_size]
            reshape = ilayers.Reshape((-1, np.prod(kernel.shape[:3])))
            ret_Xs = [reshape(extract_patches(x)) for x in Xs]

        if return_o:
            # Get Ys into shape (samples, channels)
            if K.image_data_format() == "channels_first":
                # Ys shape is [samples, channels, out_row, out_col]
                def _reshape(x):
                    x = ilayers.Transpose((0, 2, 3, 1))(x)
                    x = ilayers.Reshape((-1, n_channels))(x)
                    return x

            else:
                # Ys shape is [samples, out_row, out_col, channels]
                def _reshape(x):
                    x = ilayers.Reshape((-1, n_channels))(x)
                    return x

            ret_Ys = [_reshape(x) for x in Ys]

    else:
        raise NotImplementedError()

    # ret_Xs is (n, d) and ret_Ys is (n, channels)
    if return_i and return_o:
        return ret_Xs, ret_Ys
    elif return_i:
        return ret_Xs
    elif return_o:
        return ret_Ys
    else:
        raise ValueError("Either return_i or return_o must be True.")
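A hypothetical sketch for a Conv2D layer in channels_last format; each row of the first tensor is one receptive-field patch, each row of the second the matching activation:

# conv is a built keras.layers.Conv2D. Per returned tensor:
#   Xs_n: (batch * out_rows * out_cols, kh * kw * in_channels)
#   Ys_n: (batch * out_rows * out_cols, filters)
Xs_n, Ys_n = get_layer_neuronwise_io(conv)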
Example #7
def get_kernel(layer: Layer) -> np.ndarray:
    """Returns the kernel weights of a layer, i.e., without biases."""
    ret = [x for x in layer.get_weights() if len(x.shape) > 1]
    assert len(ret) == 1
    return ret[0]
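A usage sketch (assumes conv is a built keras.layers.Conv2D):

W = get_kernel(conv)
# For Conv2D, W has shape (kh, kw, in_channels, filters); the 1-D bias
# is excluded by the len(x.shape) > 1 filter.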