Example #1
def _clone_sequential_model(model, input_tensors=None):
  """Clone a `Sequential` model instance.

  Model cloning is similar to calling a model on new inputs,
  except that it creates new layers (and thus new weights) instead
  of sharing the weights of the existing layers.

  Arguments:
      model: Instance of `Sequential`.
      input_tensors: optional list of input tensors
          to build the model upon. If not provided,
          placeholders will be created.

  Returns:
      An instance of `Sequential` reproducing the behavior
      of the original model, on top of new input tensors,
      using newly instantiated weights.

  Raises:
      ValueError: in case of invalid `model` argument value.
  """
  if not isinstance(model, Sequential):
    raise ValueError('Expected `model` argument '
                     'to be a `Sequential` model instance, '
                     'but got:', model)

  def clone(layer):
    return layer.__class__.from_config(layer.get_config())

  layers = [clone(layer) for layer in model.layers]
  if input_tensors is None:
    return Sequential(layers=layers, name=model.name)
  else:
    if len(generic_utils.to_list(input_tensors)) != 1:
      raise ValueError('To clone a `Sequential` model, we expect '
                       'exactly one tensor '
                       'as part of `input_tensors`.')
    x = generic_utils.to_list(input_tensors)[0]
    if K.is_keras_tensor(x):
      origin_layer = x._keras_history[0]
      if isinstance(origin_layer, InputLayer):
        return Sequential(layers=[origin_layer] + layers, name=model.name)
      else:
        raise ValueError('Cannot clone a `Sequential` model on top '
                         'of a tensor that comes from a Keras layer '
                         'other than an `InputLayer`. '
                         'Use the functional API instead.')
    input_tensor = Input(tensor=x, name='input_wrapper_for_' + str(x.name))
    input_layer = input_tensor._keras_history[0]
    return Sequential(layers=[input_layer] + layers, name=model.name)
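
For context, `tf.keras.models.clone_model` is the public entry point that dispatches to this helper for `Sequential` instances. A minimal usage sketch (the architecture below is made up for illustration):

import tensorflow as tf
from tensorflow.keras import layers, models

# A small Sequential model to clone (layer sizes are arbitrary).
original = models.Sequential([
    layers.Dense(8, activation='relu', input_shape=(4,)),
    layers.Dense(1),
])

# Same architecture, freshly initialized weights.
clone = tf.keras.models.clone_model(original)
clone.build((None, 4))

# The clone shares no variables with the original.
assert all(a is not b for a, b in zip(original.weights, clone.weights))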
Example #2
  def __call__(self, inputs, **kwargs):
    """Wrapper around self.call(), for handling internal references.

    If a Keras tensor is passed:
        - We call self._add_inbound_node().
        - If necessary, we `build` the layer to match
            the shape of the input(s).
        - We update the _keras_history of the output tensor(s)
            with the current layer.
            This is done as part of _add_inbound_node().

    Arguments:
        inputs: Can be a tensor or list/tuple of tensors.
        **kwargs: Additional keyword arguments to be passed to `call()`.

    Returns:
        Output of the layer's `call` method.

    Raises:
        ValueError: in case the layer is missing shape information
            for its `build` call.
    """
    # Actually call the layer (optionally building it).
    output = super(Layer, self).__call__(inputs, **kwargs)
    if context.executing_eagerly():
      return output

    if hasattr(self, '_symbolic_set_inputs') and not self.inputs:
      # Subclassed network: explicitly set metadata normally set by a call to
      # self._set_inputs().
      self._symbolic_set_inputs(inputs, output)

    # Update learning phase info.
    output_tensors = generic_utils.to_list(output)
    uses_lp = any(
        [getattr(x, '_uses_learning_phase', False)
         for x in generic_utils.to_list(inputs)])
    uses_lp = getattr(self, 'uses_learning_phase', False) or uses_lp
    for i in range(len(output_tensors)):
      output_tensors[i]._uses_learning_phase = getattr(
          output_tensors[i], '_uses_learning_phase', False) or uses_lp

    # Optionally load weight values that were specified at layer instantiation.
    if hasattr(self, '_initial_weights') and self._initial_weights is not None:
      self.set_weights(self._initial_weights)
      del self._initial_weights
    return output
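
The final block of `__call__` is what makes the `weights=` constructor argument work: values passed at instantiation are stashed on the layer as `_initial_weights` and only applied here, once the layer has been built against a concrete input shape. A minimal sketch of that path (the values are arbitrary):

import numpy as np
from tensorflow.keras import backend as K, layers

# Weights supplied at construction are deferred until the first call.
kernel = np.ones((4, 2), dtype='float32')
bias = np.zeros((2,), dtype='float32')
dense = layers.Dense(2, weights=[kernel, bias])

x = layers.Input(shape=(4,))
y = dense(x)  # build(), then set_weights(self._initial_weights)

assert np.allclose(K.get_value(dense.kernel), kernel)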
Example #3
  def __call__(self, inputs, *args, **kwargs):
    """Wrapper around self.call(), for handling internal references.

    If a Keras tensor is passed:
        - We call self._add_inbound_node().
        - If necessary, we `build` the layer to match
            the shape of the input(s).
        - We update the _keras_history of the output tensor(s)
            with the current layer.
            This is done as part of _add_inbound_node().

    Arguments:
        inputs: Can be a tensor or list/tuple of tensors.
        *args: Additional positional arguments to be passed to `call()`. Only
          allowed in subclassed Models with custom call() signatures. In other
          cases, `Layer` inputs must be passed using the `inputs` argument and
          non-inputs must be keyword arguments.
        **kwargs: Additional keyword arguments to be passed to `call()`.

    Returns:
        Output of the layer's `call` method.

    Raises:
        ValueError: in case the layer is missing shape information
            for its `build` call.
        TypeError: If positional arguments are passed and this `Layer` is not a
            subclassed `Model`.
    """
    # Actually call the layer (optionally building it).
    output = super(Layer, self).__call__(inputs, *args, **kwargs)

    if args and getattr(self, '_uses_inputs_arg', True):
      raise TypeError(
          'This Layer takes an `inputs` argument to call(), and only the '
          '`inputs` argument may be specified as a positional argument. Pass '
          'everything else as a keyword argument (those arguments will not be '
          'tracked as inputs to the Layer).')

    if context.executing_eagerly():
      return output

    inputs, kwargs = self._inputs_from_call_args(
        call_args=(inputs,) + args, call_kwargs=kwargs)

    if hasattr(self, '_symbolic_set_inputs') and not self.inputs:
      # Subclassed network: explicitly set metadata normally set by a call to
      # self._set_inputs().
      self._symbolic_set_inputs(inputs, output)

    # Update learning phase info.
    output_tensors = generic_utils.to_list(output)
    uses_lp = any(
        [getattr(x, '_uses_learning_phase', False)
         for x in generic_utils.to_list(inputs)])
    uses_lp = getattr(self, 'uses_learning_phase', False) or uses_lp
    for i in range(len(output_tensors)):
      output_tensors[i]._uses_learning_phase = getattr(
          output_tensors[i], '_uses_learning_phase', False) or uses_lp

    # Optionally load weight values that were specified at layer instantiation.
    if hasattr(self, '_initial_weights') and self._initial_weights is not None:
      self.set_weights(self._initial_weights)
      del self._initial_weights
    return output
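
The `*args` branch exists for subclassed Models whose `call()` takes more than one positional tensor; plain layers must pass non-input arguments as keywords or hit the `TypeError` above. A hedged sketch of the permitted case, run eagerly (the class name and shapes are made up for illustration):

import tensorflow as tf

class TwoInputModel(tf.keras.Model):
  """Subclassed Model whose call() takes two positional tensors."""

  def __init__(self):
    super(TwoInputModel, self).__init__()
    self.dense = tf.keras.layers.Dense(1)

  def call(self, a, b):
    return self.dense(a + b)

m = TwoInputModel()
# The second tensor travels through *args; no TypeError is raised
# because subclassed Models opt out of the `inputs`-only restriction.
out = m(tf.ones((2, 3)), tf.zeros((2, 3)))
print(out.shape)  # (2, 1)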
Example #4
def _clone_functional_model(model, input_tensors=None):
  """Clone a functional `Model` instance.

  Model cloning is similar to calling a model on new inputs,
  except that it creates new layers (and thus new weights) instead
  of sharing the weights of the existing layers.

  Arguments:
      model: Instance of `Model`.
      input_tensors: optional list of input tensors
          to build the model upon. If not provided,
          placeholders will be created.

  Returns:
      An instance of `Model` reproducing the behavior
      of the original model, on top of new input tensors,
      using newly instantiated weights.

  Raises:
      ValueError: in case of invalid `model` argument value.
  """
  if not isinstance(model, Model):
    raise ValueError('Expected `model` argument '
                     'to be a `Model` instance, got ', model)
  if isinstance(model, Sequential):
    raise ValueError('Expected `model` argument '
                     'to be a functional `Model` instance, '
                     'got a `Sequential` instance instead:', model)

  layer_map = {}  # Cache for created layers.
  tensor_map = {}  # Map {reference_tensor: (corresponding_tensor, mask)}
  if input_tensors is None:
    # Create placeholders to build the model on top of.
    input_tensors = []
    for layer in model._input_layers:
      input_tensor = Input(
          batch_shape=layer._batch_input_shape,
          dtype=layer.dtype,
          sparse=layer.sparse,
          name=layer.name)
      input_tensors.append(input_tensor)
      # Cache newly created input layer.
      newly_created_input_layer = input_tensor._keras_history[0]
      layer_map[layer] = newly_created_input_layer
  else:
    # Make sure that all input tensors come from a Keras layer.
    # If tensor comes from an input layer: cache the input layer.
    input_tensors = generic_utils.to_list(input_tensors)
    input_tensors_ = []
    for i, x in enumerate(input_tensors):
      if not K.is_keras_tensor(x):
        name = model._input_layers[i].name
        input_tensor = Input(tensor=x, name='input_wrapper_for_' + name)
        input_tensors_.append(input_tensor)
        # Cache newly created input layer.
        original_input_layer = x._keras_history[0]
        newly_created_input_layer = input_tensor._keras_history[0]
        layer_map[original_input_layer] = newly_created_input_layer
      else:
        input_tensors_.append(x)
    input_tensors = input_tensors_

  for x, y in zip(model.inputs, input_tensors):
    tensor_map[x] = (y, None)  # tensor, mask

  # Iterate over every node in the reference model, in depth order.
  depth_keys = list(model._nodes_by_depth.keys())
  depth_keys.sort(reverse=True)
  for depth in depth_keys:
    nodes = model._nodes_by_depth[depth]
    for node in nodes:
      # Recover the corresponding layer.
      layer = node.outbound_layer

      # Get or create layer.
      if layer not in layer_map:
        # Clone layer.
        new_layer = layer.__class__.from_config(layer.get_config())
        layer_map[layer] = new_layer
        layer = new_layer
      else:
        # Reuse previously cloned layer.
        layer = layer_map[layer]
        # Don't call InputLayer multiple times.
        if isinstance(layer, InputLayer):
          continue

      # Gather inputs to call the new layer.
      reference_input_tensors = node.input_tensors
      reference_output_tensors = node.output_tensors

      # If all previous input tensors are available in tensor_map,
      # then call node.inbound_layer on them.
      computed_data = []  # List of tuples (input, mask).
      for x in reference_input_tensors:
        if x in tensor_map:
          computed_data.append(tensor_map[x])

      if len(computed_data) == len(reference_input_tensors):
        # Call layer.
        if node.arguments:
          kwargs = node.arguments
        else:
          kwargs = {}
        if len(computed_data) == 1:
          computed_tensor, computed_mask = computed_data[0]
          if has_arg(layer.call, 'mask'):
            if 'mask' not in kwargs:
              kwargs['mask'] = computed_mask
          output_tensors = generic_utils.to_list(layer(computed_tensor,
                                                       **kwargs))
          output_masks = generic_utils.to_list(
              layer.compute_mask(computed_tensor, computed_mask))
          computed_tensors = [computed_tensor]
          computed_masks = [computed_mask]
        else:
          computed_tensors = [x[0] for x in computed_data]
          computed_masks = [x[1] for x in computed_data]
          if has_arg(layer.call, 'mask'):
            if 'mask' not in kwargs:
              kwargs['mask'] = computed_masks
          output_tensors = generic_utils.to_list(layer(computed_tensors,
                                                       **kwargs))
          output_masks = generic_utils.to_list(
              layer.compute_mask(computed_tensors, computed_masks))
        # Update tensor_map.
        for x, y, mask in zip(reference_output_tensors, output_tensors,
                              output_masks):
          tensor_map[x] = (y, mask)

  # Check that we did compute the model outputs,
  # then instantiate a new model from inputs and outputs.
  output_tensors = []
  for x in model.outputs:
    assert x in tensor_map, 'Could not compute output ' + str(x)
    tensor, _ = tensor_map[x]
    output_tensors.append(tensor)
  return Model(input_tensors, output_tensors, name=model.name)
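
As with the `Sequential` case, `tf.keras.models.clone_model` is the public wrapper that dispatches here for functional models. A short sketch covering both branches of `input_tensors` (the architecture is arbitrary):

import tensorflow as tf
from tensorflow.keras import layers, models

# A small functional model.
inp = layers.Input(shape=(4,))
hidden = layers.Dense(8, activation='relu')(inp)
original = models.Model(inp, layers.Dense(2)(hidden))

# input_tensors=None: fresh Input placeholders are created.
clone_a = tf.keras.models.clone_model(original)

# Explicit input_tensors: the clone is built on top of existing tensors.
new_inp = layers.Input(shape=(4,))
clone_b = tf.keras.models.clone_model(original, input_tensors=new_inp)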