# Example #1
 def call_and_return_conditional_losses(inputs, *args, **kwargs):
   """Returns a `(call_output, conditional_losses)` tuple for the layer.

   Invokes the wrapped `layer_call` on `inputs`, then gathers the losses
   that depend on `inputs`: v1 layers/models report them via
   `get_losses_for`, while for v2 objects the input-dependent losses are
   the ones lacking the `_unconditional_loss` marker attribute.
   """
   outputs = layer_call(inputs, *args, **kwargs)
   if version_utils.is_v1_layer_or_model(layer):
     input_dependent_losses = layer.get_losses_for(inputs)
   else:
     input_dependent_losses = list(
         filter(lambda loss: not hasattr(loss, '_unconditional_loss'),
                layer.losses))
   return outputs, input_dependent_losses
# Example #2
def try_build_compiled_arguments(model):
    """Best-effort build of `model`'s compiled loss and metric containers.

    Only applies to v2 models whose `outputs` are known; building wires the
    `compiled_loss` / `compiled_metrics` containers to the output structure so
    attributes like `model.compile_metrics` are populated before training.

    Args:
        model: A compiled Keras model (v1 models and models without known
            outputs are skipped).
    """
    if (not version_utils.is_v1_layer_or_model(model)
            and model.outputs is not None):
        try:
            model.compiled_loss.build(model.outputs)
            model.compiled_metrics.build(model.outputs, model.outputs)
        # Bug fix: `except Exception` instead of a bare `except:` so that
        # KeyboardInterrupt/SystemExit still propagate; building remains
        # deliberately best-effort (warn, don't raise).
        except Exception:  # pylint: disable=broad-except
            logging.warning(
                'Compiled the loaded model, but the compiled metrics have yet to '
                'be built. `model.compile_metrics` will be empty until you train '
                'or evaluate the model.')
# Example #3
def load(path, compile=True):  # pylint: disable=redefined-builtin
    """Loads Keras objects from a SavedModel.

    Keras layers and models saved to the SavedModel come back as Keras
    objects; anything else is loaded as a plain trackable object (as with
    `tf.saved_model.load`). Only the object's weights, losses, and call
    function are currently retained, so the original optimizer and compiled
    loss/metric objects are rebuilt from the serialized training config when
    `compile=True`.

    Args:
      path: Path to SavedModel.
      compile: If true, compile the model after loading it.

    Returns:
      Object loaded from SavedModel.
    """
    # TODO(kathywu): Add saving/loading of optimizer, compiled losses and metrics.
    # TODO(kathywu): Add code to load from objects that contain all endpoints
    model = tf_load.load_internal(path, loader_cls=KerasObjectLoader)

    if compile and isinstance(model, training_lib.Model):
        # TODO(kathywu): Use compiled objects from SavedModel, instead of
        # creating new objects from the training config.
        # pylint: disable=protected-access
        training_config = model._serialized_attributes['metadata'].get(
            'training_config', None)
        # pylint: enable=protected-access
        if training_config is None:
            logging.warning(
                'No training configuration found in save file, so the '
                'model was *not* compiled. Compile it manually.')
        else:
            compile_args = saving_utils.compile_args_from_training_config(
                training_config)
            model.compile(**compile_args)
            is_v2_model = not version_utils.is_v1_layer_or_model(model)
            if is_v2_model and model.outputs is not None:
                model.compiled_metrics.build(model.outputs, model.outputs)
                model.compiled_loss.build(model.outputs)

    # Force variables and resources to initialize.
    if not context.executing_eagerly():
        session = backend.get_session()  # Variables are initialized by this call.
        session.run(ops.get_collection(ops.GraphKeys.TABLE_INITIALIZERS))

    return model
# Example #4
def load_model_from_hdf5(filepath, custom_objects=None, compile=True):  # pylint: disable=redefined-builtin
  """Loads a model saved via `save_model_to_hdf5`.

  Arguments:
      filepath: One of the following:
          - String, path to the saved model
          - `h5py.File` object from which to load the model
      custom_objects: Optional dictionary mapping names
          (strings) to custom classes or functions to be
          considered during deserialization.
      compile: Boolean, whether to compile the model
          after loading.

  Returns:
      A Keras model instance. If an optimizer was found
      as part of the saved model, the model is already
      compiled. Otherwise, the model is uncompiled and
      a warning will be displayed. When `compile` is set
      to False, the compilation is omitted without any
      warning.

  Raises:
      ImportError: if h5py is not available.
      ValueError: In case of an invalid savefile.
  """
  if h5py is None:
    raise ImportError('`load_model` requires h5py.')

  if not custom_objects:
    custom_objects = {}

  # Only close the file at the end if we opened it ourselves.
  opened_new_file = not isinstance(filepath, h5py.File)
  if opened_new_file:
    f = h5py.File(filepath, mode='r')
  else:
    f = filepath

  model = None
  try:
    # instantiate model
    model_config = f.attrs.get('model_config')
    if model_config is None:
      raise ValueError('No model found in config file.')
    # Robustness fix: h5py < 3 returns attribute strings as `bytes`, while
    # h5py >= 3 already returns `str`; decode only when needed.
    if hasattr(model_config, 'decode'):
      model_config = model_config.decode('utf-8')
    model_config = json.loads(model_config)
    model = model_config_lib.model_from_config(model_config,
                                               custom_objects=custom_objects)

    # set weights
    load_weights_from_hdf5_group(f['model_weights'], model.layers)

    if compile:
      # instantiate optimizer
      training_config = f.attrs.get('training_config')
      if training_config is None:
        logging.warning('No training configuration found in the save file, so '
                        'the model was *not* compiled. Compile it manually.')
        return model
      if hasattr(training_config, 'decode'):
        training_config = training_config.decode('utf-8')
      training_config = json.loads(training_config)

      # Compile model.
      model.compile(**saving_utils.compile_args_from_training_config(
          training_config, custom_objects))

      if not version_utils.is_v1_layer_or_model(model):
        model.compiled_metrics.build(model.outputs, model.outputs)
        model.compiled_loss.build(model.outputs)

      # Set optimizer weights.
      if 'optimizer_weights' in f:
        try:
          # Build optimizer slot variables so saved state can be restored.
          model.optimizer._create_all_weights(model.trainable_variables)  # pylint: disable=protected-access
        except (NotImplementedError, AttributeError):
          # Bug fix: the `{}` placeholder was previously never filled and was
          # logged verbatim; format in the optimizer.
          logging.warning(
              'Error when creating the weights of optimizer {}, making it '
              'impossible to restore the saved optimizer state. As a result, '
              'your model is starting with a freshly initialized optimizer.'
              .format(model.optimizer))

        optimizer_weight_values = load_optimizer_weights_from_hdf5_group(f)
        try:
          model.optimizer.set_weights(optimizer_weight_values)
        except ValueError:
          logging.warning('Error in loading the saved optimizer '
                          'state. As a result, your model is '
                          'starting with a freshly initialized '
                          'optimizer.')
  finally:
    if opened_new_file:
      f.close()
  return model