Example #1
# Assumed imports (private Keras internals; in TF 2.x these live under
# tensorflow.python.keras, but exact paths vary across releases):
import tensorflow as tf
from tensorflow.python.keras import models
from tensorflow.python.keras.engine import functional
from tensorflow.python.util import nest


def _clone_model(model, input_tensors):
    """Clone a model's configuration and layers, but not its weights."""
    new_input_layers = {}  # Cache for created layers.
    # pylint: disable=protected-access
    if input_tensors is not None:
        # Make sure that all input tensors come from a Keras layer.
        input_tensors = tf.nest.flatten(input_tensors)
        for i, input_tensor in enumerate(input_tensors):
            if not tf.keras.backend.is_keras_tensor(input_tensor):
                raise ValueError('Expected a Keras tensor but got', input_tensor)
            original_input_layer = model._input_layers[i]
            newly_created_input_layer = input_tensor._keras_history.layer
            new_input_layers[original_input_layer] = newly_created_input_layer

    model_config, created_layers = models._clone_layers_and_model_config(
        model, new_input_layers, models._clone_layer)
    # pylint: enable=protected-access

    # Reconstruct model from the config, using the cloned layers.
    input_tensors, output_tensors, created_layers = (
        functional.reconstruct_from_config(model_config,
                                           created_layers=created_layers))

    new_model = tf.keras.Model(input_tensors, output_tensors, name=model.name)
    return new_model
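
A minimal usage sketch for `_clone_model`, assuming the function above is in
scope and that the replacement inputs are symbolic Keras tensors created with
`tf.keras.Input` (plain eager tensors would fail the `is_keras_tensor` check):

inputs = tf.keras.Input(shape=(4,))
outputs = tf.keras.layers.Dense(2)(inputs)
original = tf.keras.Model(inputs, outputs)

# New symbolic inputs; each must originate from a Keras layer.
new_inputs = tf.keras.Input(shape=(4,))
clone = _clone_model(original, new_inputs)  # fresh layers, fresh weights
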
def convert_to_compressed_phase_from_training_phase(model_training, config):
  """Convert a training-phase model to its compressed-phase equivalent."""
  helper = ConvertHelper(config)
  model_config, created_layers = _clone_layers_and_model_config(
      model_training, {}, helper.convert_layer_fn)

  # Reconstruct model from the config, using the cloned layers.
  input_tensors, output_tensors, created_layers = (
      functional.reconstruct_from_config(
          model_config,
          created_layers=created_layers))
  metrics_names = model_training.metrics_names
  model = tf.keras.Model(
      input_tensors, output_tensors, name=model_training.name)
  # Layers not directly tied to outputs of the Model, such as loss layers
  # created in `add_loss` and `add_metric`.
  ancillary_layers = [
      layer for layer in created_layers.values() if layer not in model.layers
  ]
  #### start hook ####
  ancillary_layers += config.layers
  #### end hook ####
  # pylint: disable=protected-access
  if ancillary_layers:
    new_nodes = nest.flatten([
        layer.inbound_nodes[1:]
        if functional._should_skip_first_node(layer)
        else layer.inbound_nodes for layer in created_layers.values()
    ])
    _insert_ancillary_layers(model, ancillary_layers, metrics_names, new_nodes)
  # pylint: enable=protected-access
  #
  # TODO(kimjaehong): set weight model_training => model.
  #
  ######## start hook2 ########

  orig_layers = {layer.name: layer for layer in model_training.layers}
  for key, layer in orig_layers.items():
    if isinstance(layer, CompressionModel):
      continue
    created_layers[key].set_weights(layer.get_weights())

  ######## end hook2 ########

  return model
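
A hedged usage sketch for the conversion above. The config type and the model
construction come from the surrounding module and are not a public API; the
names below are illustrative placeholders:

# `trained_model` is a functional training-phase model and `config` is the
# module's compression config object (both hypothetical here).
compressed = convert_to_compressed_phase_from_training_phase(
    trained_model, config)
# Every layer except the CompressionModel keeps its trained weights; they are
# copied over by layer name in the hook2 block above.
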
Example #3
# Assumed module-level names from keras/models.py (private, version-dependent):
# `Model`, `Sequential`, `Input`, `backend`, `nest`, `functional`, and the
# `_clone_layer` / `_insert_ancillary_layers` helpers.
def _clone_functional_model(model, input_tensors=None, layer_fn=_clone_layer):
    """Clone a functional `Model` instance.

  Model cloning is similar to calling a model on new inputs,
  except that it creates new layers (and thus new weights) instead
  of sharing the weights of the existing layers.

  Input layers are always cloned.

  Args:
      model: Instance of `Model`.
      input_tensors: optional list of input tensors
          to build the model upon. If not provided,
          placeholders will be created.
      layer_fn: callable to be applied on non-input layers in the model. By
          default it clones the layer. Another example is to preserve the layer
          to share the weights. This is required when we create a per-replica
          copy of the model with distribution strategy; we want the weights to
          be shared but still feed inputs separately so we create new input
          layers.

  Returns:
      An instance of `Model` reproducing the behavior
      of the original model, on top of new inputs tensors,
      using newly instantiated weights.

  Raises:
      ValueError: in case of invalid `model` argument value or `layer_fn`
      argument value.
  """
    if not isinstance(model, Model):
        raise ValueError(
            'Expected `model` argument '
            'to be a `Model` instance, got ', model)
    if isinstance(model, Sequential):
        raise ValueError(
            'Expected `model` argument '
            'to be a functional `Model` instance, '
            'got a `Sequential` instance instead:', model)
    if not model._is_graph_network:
        raise ValueError('Expected `model` argument '
                         'to be a functional `Model` instance, '
                         'but got a subclass model instead.')

    new_input_layers = {}  # Cache for created layers.
    if input_tensors is not None:
        # Make sure that all input tensors come from a Keras layer.
        input_tensors = nest.flatten(input_tensors)
        for i, input_tensor in enumerate(input_tensors):
            original_input_layer = model._input_layers[i]

            # Cache input layer. Create a new layer if the tensor is originally not
            # from a Keras layer.
            if not backend.is_keras_tensor(input_tensor):
                name = original_input_layer.name
                input_tensor = Input(tensor=input_tensor,
                                     name='input_wrapper_for_' + name)
                newly_created_input_layer = input_tensor._keras_history.layer
                new_input_layers[
                    original_input_layer] = newly_created_input_layer
            else:
                new_input_layers[original_input_layer] = original_input_layer

    if not callable(layer_fn):
        raise ValueError('Expected `layer_fn` argument to be a callable.')

    model_configs, created_layers = _clone_layers_and_model_config(
        model, new_input_layers, layer_fn)
    # Reconstruct model from the config, using the cloned layers.
    input_tensors, output_tensors, created_layers = (
        functional.reconstruct_from_config(model_configs,
                                           created_layers=created_layers))
    metrics_names = model.metrics_names
    model = Model(input_tensors, output_tensors, name=model.name)
    # Layers not directly tied to outputs of the Model, such as loss layers
    # created in `add_loss` and `add_metric`.
    ancillary_layers = [
        layer for layer in created_layers.values() if layer not in model.layers
    ]
    # TODO(b/162887610): This may need to adjust the inbound node index if the
    # created layers had already been used to define other models.
    if ancillary_layers:
        new_nodes = nest.flatten([
            layer.inbound_nodes[1:] if
            functional._should_skip_first_node(layer) else layer.inbound_nodes
            for layer in created_layers.values()
        ])
        _insert_ancillary_layers(model, ancillary_layers, metrics_names,
                                 new_nodes)
    return model
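
A short sketch of a non-default `layer_fn`, following the docstring's
weight-sharing use case (assuming `_clone_functional_model` above is in
scope):

def _share_layer(layer):
    # Return the existing layer object unchanged so its weights are shared;
    # only new input layers (and new call nodes) are created for the clone.
    return layer

inputs = tf.keras.Input(shape=(8,))
outputs = tf.keras.layers.Dense(3)(inputs)
original = tf.keras.Model(inputs, outputs)
shared_clone = _clone_functional_model(original, layer_fn=_share_layer)
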
def convert_to_original_from_compressed_phase(compressed_model, config):
  """Convert a compressed-phase model back to its original form."""
  helper = DecompressHelper(config)
  model_config, created_layers = _clone_layers_and_model_config(
      compressed_model, {}, helper.convert_layer_fn)

  model_config['layers'], created_layers = remove_none(
      model_config['layers'], created_layers)
  # Reconstruct model from the config, using the cloned layers.
  input_tensors, output_tensors, created_layers = (
      functional.reconstruct_from_config(
          model_config,
          created_layers=created_layers))
  metrics_names = compressed_model.metrics_names
  model = tf.keras.Model(
      input_tensors, output_tensors, name=compressed_model.name)
  # Layers not directly tied to outputs of the Model, such as loss layers
  # created in `add_loss` and `add_metric`.
  ancillary_layers = [
      layer for layer in created_layers.values() if layer not in model.layers
  ]
  # pylint: disable=protected-access
  if ancillary_layers:
    new_nodes = nest.flatten([
        layer.inbound_nodes[1:]
        if functional._should_skip_first_node(layer)
        else layer.inbound_nodes for layer in created_layers.values()
    ])
    _insert_ancillary_layers(model, ancillary_layers, metrics_names, new_nodes)
  # pylint: enable=protected-access
  #
  # TODO(kimjaehong): set weight compressed_model => model.
  #
  ######## start hook ########

  # TODO(kimjaehong): currently only supports conv / dense layers for kernel.
  # pytype: disable=attribute-error
  output_weights = helper.compression_model(tf.constant(0.))
  output_weight_map = config.output_weight_map
  output_weight_spec_keys, _ = dict_flatten(config.output_weight_spec_dict)

  orig_layers = {layer.name: layer for layer in compressed_model.layers}
  for key, layer in orig_layers.items():
    if isinstance(layer, CompressionModel):
      continue
    if key in created_layers:
      weights = layer.get_weights()
      to_layer = created_layers[key]
      if to_layer.name in output_weight_map:
        prepend_weights = []
        for weight_key in output_weight_map[to_layer.name]:
          tensor_idx = output_weight_spec_keys.index(
              output_weight_map[to_layer.name][weight_key])
          prepend_weights.append(output_weights[tensor_idx])
        # TODO(kimjaehong): Should it be numpy array?
        weights = prepend_weights + weights
      to_layer.set_weights(weights)
  # pytype: enable=attribute-error

  ######## end hook ########

  return model
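
A hedged round-trip sketch. The config object and the helpers it carries
(`DecompressHelper`, `output_weight_map`, and so on) are module-internal, and
whether one config instance serves both directions is an assumption here; the
calls below are illustrative only:

# Hypothetical round trip: training phase -> compressed phase -> original.
compressed = convert_to_compressed_phase_from_training_phase(
    trained_model, config)
restored = convert_to_original_from_compressed_phase(compressed, config)
# Kernels of mapped layers are rebuilt by running the helper's compression
# model and prepending its outputs to each layer's remaining weights.
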
def convert_from_model(
    model_orig,
    config,
    phase=CompressionModelPhase.training):
  """Convert a functional `Model` instance.

  Args:
      model_orig: Instance of `Model`.
      config: A `CompressionConfig` instance.
      phase: The `CompressionModelPhase` to build the model for.

  Returns:
      An instance of `Model`.

  Raises:
      ValueError: in case of an invalid `model_orig` or `config` argument
          value.
  """
  model_config, created_layers = _clone_layers_and_model_config(
      model_orig, {}, config.clone_layer)

  ############## start hook ##############
  # TODO(kimjaehong): This hook works for simple models,
  # but needs to be generalized.

  dummy_inbound_nodes = [[model_config['input_layers'][0]]]
  # TODO(kimjaehong): Find better name.
  dummy_compression_model_name = 'compression_model'

  model_config['layers'].insert(
      1,
      {
          'inbound_nodes': dummy_inbound_nodes,
          'name': dummy_compression_model_name
      })
  compression_model = CompressionModel(config, phase=phase)
  created_layers[dummy_compression_model_name] = compression_model

  num_losses = 0
  tensor_idx = 0
  for function_group in config.function_groups:
    num_losses += function_group.num_losses
    for layer, weight_key in function_group.kernels:
      for layer_cfg in model_config['layers']:
        if layer_cfg['name'] == layer.name:
          layer_cfg['inbound_nodes'][0][0][3][weight_key] = [
              dummy_compression_model_name, 0, tensor_idx]

      tensor_idx += 1

  output_len = len(model_config['output_layers'])

  for _ in range(num_losses):
    model_config['output_layers'].append(
        [dummy_compression_model_name, 0, tensor_idx])
    tensor_idx += 1
  ############## end hook ##############

  # Reconstruct model from the config, using the cloned layers.
  input_tensors, output_tensors, created_layers = (
      functional.reconstruct_from_config(
          model_config,
          created_layers=created_layers))
  metrics_names = model_orig.metrics_names
  loss_tensors = output_tensors[output_len:]
  output_tensors = output_tensors[:output_len]

  model = tf.keras.Model(input_tensors, output_tensors, name=model_orig.name)
  for loss_tensor in loss_tensors:
    model.add_loss(loss_tensor)

  # Layers not directly tied to outputs of the Model, such as loss layers
  # created in `add_loss` and `add_metric`.
  ancillary_layers = [
      layer for layer in created_layers.values() if layer not in model.layers
  ]
  #### start hook2 ####
  ancillary_layers += config.layers
  #### end hook2 ####
  # pylint: disable=protected-access
  if ancillary_layers:
    new_nodes = nest.flatten([
        layer.inbound_nodes[1:]
        if functional._should_skip_first_node(layer)
        else layer.inbound_nodes for layer in created_layers.values()
    ])
    _insert_ancillary_layers(model, ancillary_layers, metrics_names, new_nodes)
  # pylint: enable=protected-access
  return model
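
A hedged usage sketch for `convert_from_model`. Constructing a
`CompressionConfig` is module-specific and not shown; `base_model` is an
illustrative placeholder for an ordinary functional model:

training_model = convert_from_model(
    base_model, config, phase=CompressionModelPhase.training)
training_model.compile(optimizer='adam', loss='mse')
# The compression losses wired in through the hook are attached with
# `add_loss`, so they are picked up automatically during training.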