Example no. 1
0
def _clone_sequential_model(model, input_tensors=None):
    """Clone a `Sequential` model instance.

  Model cloning is similar to calling a model on new inputs,
  except that it creates new layers (and thus new weights) instead
  of sharing the weights of the existing layers.

  Arguments:
      model: Instance of `Sequential`.
      input_tensors: optional list of input tensors
          to build the model upon. If not provided,
          placeholders will be created.

  Returns:
      An instance of `Sequential` reproducing the behavior
      of the original model, on top of new inputs tensors,
      using newly instantiated weights.

  Raises:
      ValueError: in case of invalid `model` argument value.
  """
    if not isinstance(model, Sequential):
        raise ValueError(
            'Expected `model` argument '
            'to be a `Sequential` model instance, '
            'but got:', model)

    def clone(layer):
        return layer.__class__.from_config(layer.get_config())

    layers = [clone(layer) for layer in model.layers]
    if input_tensors is None:
        return Sequential(layers=layers, name=model.name)
    else:
        if len(generic_utils.to_list(input_tensors)) != 1:
            raise ValueError('To clone a `Sequential` model, we expect '
                             'exactly one tensor '
                             'as part of `input_tensors`.')
        x = generic_utils.to_list(input_tensors)[0]
        if K.is_keras_tensor(x):
            origin_layer = x._keras_history[0]
            if isinstance(origin_layer, InputLayer):
                return Sequential(layers=[origin_layer] + layers,
                                  name=model.name)
            else:
                raise ValueError('Cannot clone a `Sequential` model on top '
                                 'of a tensor that comes from a Keras layer '
                                 'other than an `InputLayer`. '
                                 'Use the functional API instead.')
        input_tensor = Input(tensor=x, name='input_wrapper_for_' + str(x.name))
        input_layer = input_tensor._keras_history[0]
        return Sequential(layers=[input_layer] + layers, name=model.name)
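A minimal usage sketch (not part of the original listing): assuming these private helpers sit behind the public `tf.keras.models.clone_model` wrapper, cloning a `Sequential` model reproduces the architecture with freshly initialized weights.

import numpy as np
import tensorflow as tf

original = tf.keras.Sequential([
    tf.keras.layers.Dense(4, activation='relu', input_shape=(8,)),
    tf.keras.layers.Dense(1),
])
# The clone gets new layers (and thus new weights) built from the layer configs.
clone = tf.keras.models.clone_model(original)

x = np.random.rand(2, 8).astype('float32')
# The two models almost certainly disagree, because the clone's weights
# were re-initialized rather than copied from the original.
print(np.allclose(original(x), clone(x)))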
Example no. 2
0
  def build(self, input_shape=None):
    if input_shape and not self.inputs:
      batch_shape = tuple(input_shape)
      dtype = K.floatx()
      x = Input(
          batch_shape=batch_shape, dtype=dtype, name=self.name + '_input')
      self.inputs = [x]
      for layer in self._layers:
        x = layer(x)
      self.outputs = [x]

    if self.inputs:
      self._init_graph_network(self.inputs, self.outputs, name=self.name)
      self.built = True
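A minimal sketch of the deferred-build path above (assuming `tf.keras`): when a `Sequential` model is created without an input shape, calling `build()` creates the `Input` placeholder and threads it through every layer, as in the method shown.

import tensorflow as tf

model = tf.keras.Sequential([tf.keras.layers.Dense(3)])  # no input shape given
print(model.built)                  # False: inputs are not known yet
model.build(input_shape=(None, 5))  # creates an Input placeholder and calls each layer on it
print(model.built)                  # True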
Example no. 3
0
def _clone_functional_model(model, input_tensors=None):
    """Clone a functional `Model` instance.

  Model cloning is similar to calling a model on new inputs,
  except that it creates new layers (and thus new weights) instead
  of sharing the weights of the existing layers.

  Arguments:
      model: Instance of `Model`.
      input_tensors: optional list of input tensors
          to build the model upon. If not provided,
          placeholders will be created.

  Returns:
      An instance of `Model` reproducing the behavior
      of the original model, on top of new inputs tensors,
      using newly instantiated weights.

  Raises:
      ValueError: in case of invalid `model` argument value.
  """
    if not isinstance(model, Model):
        raise ValueError(
            'Expected `model` argument '
            'to be a `Model` instance, got ', model)
    if isinstance(model, Sequential):
        raise ValueError(
            'Expected `model` argument '
            'to be a functional `Model` instance, '
            'got a `Sequential` instance instead:', model)

    layer_map = {}  # Cache for created layers.
    tensor_map = {}  # Map {reference_tensor: (corresponding_tensor, mask)}
    if input_tensors is None:
        # Create placeholders to build the model on top of.
        input_layers = []
        input_tensors = []
        for layer in model._input_layers:
            input_tensor = Input(batch_shape=layer._batch_input_shape,
                                 dtype=layer.dtype,
                                 sparse=layer.sparse,
                                 name=layer.name)
            input_tensors.append(input_tensor)
            # Cache newly created input layer.
            newly_created_input_layer = input_tensor._keras_history[0]
            layer_map[layer] = newly_created_input_layer
        for original_input_layer, cloned_input_layer in zip(
                model._input_layers, input_layers):
            layer_map[original_input_layer] = cloned_input_layer
    else:
        # Make sure that all input tensors come from a Keras layer.
        # If tensor comes from an input layer: cache the input layer.
        input_tensors = generic_utils.to_list(input_tensors)
        input_tensors_ = []
        for i, x in enumerate(input_tensors):
            if not K.is_keras_tensor(x):
                name = model._input_layers[i].name
                input_tensor = Input(tensor=x,
                                     name='input_wrapper_for_' + name)
                input_tensors_.append(input_tensor)
                # Cache newly created input layer.
                original_input_layer = x._keras_history[0]
                newly_created_input_layer = input_tensor._keras_history[0]
                layer_map[original_input_layer] = newly_created_input_layer
            else:
                input_tensors_.append(x)
        input_tensors = input_tensors_

    for x, y in zip(model.inputs, input_tensors):
        tensor_map[x] = (y, None)  # tensor, mask

    # Iterate over every node in the reference model, in depth order.
    depth_keys = list(model._nodes_by_depth.keys())
    depth_keys.sort(reverse=True)
    for depth in depth_keys:
        nodes = model._nodes_by_depth[depth]
        for node in nodes:
            # Recover the corresponding layer.
            layer = node.outbound_layer

            # Get or create layer.
            if layer not in layer_map:
                # Clone layer.
                new_layer = layer.__class__.from_config(layer.get_config())
                layer_map[layer] = new_layer
                layer = new_layer
            else:
                # Reuse previously cloned layer.
                layer = layer_map[layer]
                # Don't call InputLayer multiple times.
                if isinstance(layer, InputLayer):
                    continue

            # Gather inputs to call the new layer.
            reference_input_tensors = node.input_tensors
            reference_output_tensors = node.output_tensors

            # If all previous input tensors are available in tensor_map,
            # then call node.inbound_layer on them.
            computed_data = []  # List of tuples (input, mask).
            for x in reference_input_tensors:
                if x in tensor_map:
                    computed_data.append(tensor_map[x])

            if len(computed_data) == len(reference_input_tensors):
                # Call layer.
                if node.arguments:
                    kwargs = node.arguments
                else:
                    kwargs = {}
                if len(computed_data) == 1:
                    computed_tensor, computed_mask = computed_data[0]
                    if has_arg(layer.call, 'mask'):
                        if 'mask' not in kwargs:
                            kwargs['mask'] = computed_mask
                    output_tensors = generic_utils.to_list(
                        layer(computed_tensor, **kwargs))
                    output_masks = generic_utils.to_list(
                        layer.compute_mask(computed_tensor, computed_mask))
                    computed_tensors = [computed_tensor]
                    computed_masks = [computed_mask]
                else:
                    computed_tensors = [x[0] for x in computed_data]
                    computed_masks = [x[1] for x in computed_data]
                    if has_arg(layer.call, 'mask'):
                        if 'mask' not in kwargs:
                            kwargs['mask'] = computed_masks
                    output_tensors = generic_utils.to_list(
                        layer(computed_tensors, **kwargs))
                    output_masks = generic_utils.to_list(
                        layer.compute_mask(computed_tensors, computed_masks))
                # Update tensor_map.
                for x, y, mask in zip(reference_output_tensors, output_tensors,
                                      output_masks):
                    tensor_map[x] = (y, mask)

    # Check that we did compute the model outputs,
    # then instantiate a new model from inputs and outputs.
    output_tensors = []
    for x in model.outputs:
        assert x in tensor_map, 'Could not compute output ' + str(x)
        tensor, _ = tensor_map[x]
        output_tensors.append(tensor)
    return Model(input_tensors, output_tensors, name=model.name)
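A minimal usage sketch for the functional case (again assuming the public `tf.keras.models.clone_model` wrapper): the model can be cloned either on freshly created placeholders or on explicitly supplied `input_tensors`.

import tensorflow as tf

inputs = tf.keras.Input(shape=(16,))
hidden = tf.keras.layers.Dense(8, activation='relu')(inputs)
outputs = tf.keras.layers.Dense(2)(hidden)
original = tf.keras.Model(inputs, outputs)

# Clone on top of newly created placeholders.
clone_a = tf.keras.models.clone_model(original)

# Clone on top of an existing Keras tensor passed via `input_tensors`.
new_input = tf.keras.Input(shape=(16,))
clone_b = tf.keras.models.clone_model(original, input_tensors=new_input)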
Example no. 4
0
    def add(self, layer):
        """Adds a layer instance on top of the layer stack.

    Arguments:
        layer: layer instance.

    Raises:
        TypeError: If `layer` is not a layer instance.
        ValueError: In case the `layer` argument does not
            know its input shape.
        ValueError: In case the `layer` argument has
            multiple output tensors, or is already connected
            somewhere else (forbidden in `Sequential` models).
    """
        if not isinstance(layer, (base_layer.Layer, base_layer.TFBaseLayer)):
            raise TypeError('The added layer must be '
                            'an instance of class Layer. '
                            'Found: ' + str(layer))
        if not self.outputs:
            # First layer in model: check that it is an input layer.
            if not isinstance(layer, InputLayer):
                # Create an input layer.
                # First, we need to infer its expected input shape and dtype.
                if isinstance(layer, (Model, Sequential)):
                    # We were passed a model as first layer.
                    # This requires a specific way to figure out the
                    # input shape and dtype.
                    if not layer.layers:
                        raise ValueError('Cannot add an empty model '
                                         'to a `Sequential` model.')
                    # In case of nested models: recover the first layer
                    # of the deepest model to infer input shape and dtype.
                    first_layer = layer.layers[0]
                    while isinstance(first_layer, (Model, Sequential)):
                        first_layer = first_layer.layers[0]
                    batch_shape = first_layer._batch_input_shape
                    dtype = first_layer.dtype
                else:
                    # We were passed a regular layer, and it should
                    # know about its input shape. Otherwise, that's an error.
                    if not hasattr(layer, '_batch_input_shape'):
                        raise ValueError('The first layer in a '
                                         'Sequential model must '
                                         'get an `input_shape` argument.')
                    batch_shape = layer._batch_input_shape
                    dtype = layer.dtype
                # Instantiate the input layer.
                x = Input(batch_shape=batch_shape,
                          dtype=dtype,
                          name=layer.name + '_input')
                # This will build the current layer
                # and create the node connecting the current layer
                # to the input layer we just created.
                layer(x)

            if len(layer._inbound_nodes[-1].output_tensors) != 1:
                raise ValueError('All layers in a Sequential model '
                                 'should have a single output tensor. '
                                 'For multi-output layers, '
                                 'use the functional API.')

            self.outputs = [layer._inbound_nodes[-1].output_tensors[0]]
            self.inputs = network.get_source_inputs(self.outputs[0])

            # We create an input node, which we will keep updated
            # as we add more layers
            base_layer.Node(outbound_layer=self,
                            inbound_layers=[],
                            node_indices=[],
                            tensor_indices=[],
                            input_tensors=self.inputs,
                            output_tensors=self.outputs)
        else:
            output_tensor = layer(self.outputs[0])
            if isinstance(output_tensor, list):
                raise TypeError('All layers in a Sequential model '
                                'should have a single output tensor. '
                                'For multi-output layers, '
                                'use the functional API.')
            self.outputs = [output_tensor]
            # update self._inbound_nodes
            self._inbound_nodes[0].output_tensors = self.outputs
            self._inbound_nodes[0].output_shapes = [
                K.int_shape(self.outputs[0])
            ]

        self._layers.append(layer)
        self.built = False
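A minimal sketch of the incremental `add()` flow above (assuming `tf.keras`): the first layer must know its input shape so the implicit `InputLayer` can be created; each later layer is called on the previous layer's single output tensor.

import tensorflow as tf

model = tf.keras.Sequential()
model.add(tf.keras.layers.Dense(8, activation='relu', input_shape=(4,)))  # defines the input
model.add(tf.keras.layers.Dense(1))  # called on the previous layer's single output
model.summary()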
Example no. 5
0
    def add(self, layer):
        """Adds a layer instance on top of the layer stack.

    Arguments:
        layer: layer instance.

    Raises:
        TypeError: If `layer` is not a layer instance.
        ValueError: In case the `layer` argument does not
            know its input shape.
        ValueError: In case the `layer` argument has
            multiple output tensors, or is already connected
            somewhere else (forbidden in `Sequential` models).
    """
        if not isinstance(layer, base_layer.Layer):
            raise TypeError('The added layer must be '
                            'an instance of class Layer. '
                            'Found: ' + str(layer))
        self.built = False
        if not self._layers:
            set_inputs = False
            # First layer in model: check that it is an input layer.
            if not isinstance(layer, InputLayer):
                # Create an input tensor and call `layer` on the input tensor.
                # First, we need to infer the expected input shape and dtype.
                first_layer = layer
                if isinstance(layer, (Model, Sequential)):
                    # We were passed a model as first layer.
                    # This requires a specific way to figure out the
                    # input shape and dtype.
                    if not layer.layers:
                        raise ValueError('Cannot add an empty model '
                                         'to a `Sequential` model.')
                    # In case of nested models: recover the first layer
                    # of the deepest model to infer input shape and dtype.
                    first_layer = layer.layers[0]
                    while isinstance(first_layer, (Model, Sequential)):
                        first_layer = first_layer.layers[0]
                    batch_shape = first_layer._batch_input_shape
                    dtype = first_layer.dtype

                if hasattr(first_layer, '_batch_input_shape'):
                    batch_shape = first_layer._batch_input_shape
                    dtype = first_layer.dtype
                    # Instantiate the input layer.
                    x = Input(batch_shape=batch_shape,
                              dtype=dtype,
                              name=layer.name + '_input')
                    # This will build the current layer
                    # and create the node connecting the current layer
                    # to the input layer we just created.
                    layer(x)
                    set_inputs = True
                else:
                    # The layer doesn't know about its expected shape. We will have to
                    # build the model lazily on `fit`/etc.
                    batch_shape = None
            else:
                # Corner case where the user passes an InputLayer layer via `add`.
                assert len(layer._inbound_nodes[-1].output_tensors) == 1
                set_inputs = True

            if set_inputs:
                if len(layer._inbound_nodes[-1].output_tensors) != 1:
                    raise ValueError('All layers in a Sequential model '
                                     'should have a single output tensor. '
                                     'For multi-output layers, '
                                     'use the functional API.')

                self.outputs = [layer._inbound_nodes[-1].output_tensors[0]]
                self.inputs = network.get_source_inputs(self.outputs[0])
        elif self.outputs:
            output_tensor = layer(self.outputs[0])
            if isinstance(output_tensor, list):
                raise TypeError('All layers in a Sequential model '
                                'should have a single output tensor. '
                                'For multi-output layers, '
                                'use the functional API.')
            self.outputs = [output_tensor]
        if self.inputs:
            self.build()
        else:
            self._layers.append(layer)
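A minimal sketch of the deferred-input variant above (assuming `tf.keras`): unlike the earlier version, the first layer may omit `input_shape`, in which case the model is built lazily once an input with a known shape arrives.

import numpy as np
import tensorflow as tf

model = tf.keras.Sequential()
model.add(tf.keras.layers.Dense(8, activation='relu'))  # no input_shape: build is deferred
model.add(tf.keras.layers.Dense(1))
print(model.built)                                      # False until a shape is known

_ = model(np.zeros((2, 4), dtype='float32'))            # first call fixes the input shape
print(model.built)                                      # True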