Example #1
def test_json_serialization(self):
  # Functional model whose output is a raw TF op (cast + divide).
  inputs = keras.Input(shape=(4,), dtype='uint8')
  outputs = math_ops.cast(inputs, 'float32') / 4.
  # Round-trip the architecture through JSON and rebuild the model.
  model = saving.model_from_json(keras.Model(inputs, outputs).to_json())
  self.assertAllEqual(
      self.evaluate(model(np.array([0, 64, 128, 192], np.uint8))),
      [0., 16., 32., 48.])
  model.summary()
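For reference, the test above checks that a functional model survives a `to_json()` / `model_from_json()` round trip. A minimal standalone sketch of the same round trip with the public `tf.keras` API (a plain `Dense` layer instead of the raw cast op, so no test harness or internal imports are needed) might look like this:

```python
import numpy as np
import tensorflow as tf

# Build a small functional model.
inputs = tf.keras.Input(shape=(4,), dtype='float32')
outputs = tf.keras.layers.Dense(2)(inputs)
model = tf.keras.Model(inputs, outputs)

# to_json() captures the architecture only; weights are not included.
json_config = model.to_json()

# Rebuild an identical (freshly initialized) model from the JSON string.
restored = tf.keras.models.model_from_json(json_config)

# Weights must be transferred separately.
restored.set_weights(model.get_weights())

x = np.ones((1, 4), dtype='float32')
print(np.allclose(model.predict(x), restored.predict(x)))  # True
```

Because `to_json()` stores only the architecture, the sketch copies the weights over explicitly before comparing outputs.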
Example #2
def load_from_saved_model(saved_model_path, custom_objects=None):
  """Loads a keras Model from a SavedModel created by `export_saved_model()`.

  This function reinstantiates model state by:
  1) loading model topology from json (this will eventually come
     from metagraph).
  2) loading model weights from checkpoint.

  Example:

  ```python
  import tensorflow as tf

  # Create a tf.keras model.
  model = tf.keras.Sequential()
  model.add(tf.keras.layers.Dense(1, input_shape=[10]))
  model.summary()

  # Save the tf.keras model in the SavedModel format.
  path = '/tmp/simple_keras_model'
  tf.keras.experimental.export_saved_model(model, path)

  # Load the saved keras model back.
  new_model = tf.keras.experimental.load_from_saved_model(path)
  new_model.summary()
  ```

  Args:
    saved_model_path: a string specifying the path to an existing SavedModel.
    custom_objects: Optional dictionary mapping names
        (strings) to custom classes or functions to be
        considered during deserialization.

  Returns:
    a keras.Model instance.
  """
  # restore model topology from json string
  model_json_filepath = os.path.join(
      compat.as_bytes(saved_model_path),
      compat.as_bytes(constants.ASSETS_DIRECTORY),
      compat.as_bytes(constants.SAVED_MODEL_FILENAME_JSON))
  model_json = file_io.read_file_to_string(model_json_filepath)
  model = model_from_json(model_json, custom_objects=custom_objects)

  # restore model weights
  checkpoint_prefix = os.path.join(
      compat.as_text(saved_model_path),
      compat.as_text(constants.VARIABLES_DIRECTORY),
      compat.as_text(constants.VARIABLES_FILENAME))
  model.load_weights(checkpoint_prefix)
  return model
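The `custom_objects` argument of this loader is passed straight through to `model_from_json`. As a rough illustration of why it is needed, a model containing a user-defined layer cannot be rebuilt from its JSON unless the class name is mapped back to the class; `ScaleLayer` below is a hypothetical layer invented for the sketch:

```python
import tensorflow as tf

class ScaleLayer(tf.keras.layers.Layer):
    """Hypothetical custom layer used only for illustration."""

    def __init__(self, factor=2.0, **kwargs):
        super().__init__(**kwargs)
        self.factor = factor

    def call(self, inputs):
        return inputs * self.factor

    def get_config(self):
        return {**super().get_config(), 'factor': self.factor}

inputs = tf.keras.Input(shape=(3,))
model = tf.keras.Model(inputs, ScaleLayer()(inputs))
json_config = model.to_json()

# Without the mapping, deserialization cannot resolve the name 'ScaleLayer'.
restored = tf.keras.models.model_from_json(
    json_config, custom_objects={'ScaleLayer': ScaleLayer})
```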
Example #3
def load_from_saved_model(saved_model_path):
  """Loads a keras.Model from a SavedModel created by keras export().

  This function reinstantiates model state by:
  1) loading model topology from json (this will eventually come
     from metagraph).
  2) loading model weights from checkpoint.

  Example:

  ```python
  import tensorflow as tf

  # Create a tf.keras model.
  model = tf.keras.Sequential()
  model.add(tf.keras.layers.Dense(1, input_shape=[10]))
  model.summary()

  # Save the tf.keras model in the SavedModel format.
  saved_to_path = tf.keras.experimental.export(
        model, '/tmp/my_simple_tf_keras_saved_model')

  # Load the saved keras model back.
  model_prime = tf.keras.experimental.load_from_saved_model(saved_to_path)
  model_prime.summary()
  ```

  Args:
    saved_model_path: a string specifying the path to an existing SavedModel.

  Returns:
    a keras.Model instance.
  """
  # restore model topology from json string
  model_json_filepath = os.path.join(
      compat.as_bytes(saved_model_path),
      compat.as_bytes(constants.ASSETS_DIRECTORY),
      compat.as_bytes(constants.SAVED_MODEL_FILENAME_JSON))
  model_json = file_io.read_file_to_string(model_json_filepath)
  model = model_from_json(model_json)

  # restore model weights
  checkpoint_prefix = os.path.join(
      compat.as_text(saved_model_path),
      compat.as_text(constants.VARIABLES_DIRECTORY),
      compat.as_text(constants.VARIABLES_FILENAME))
  model.load_weights(checkpoint_prefix)
  return model
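Spelled out with plain strings, the two paths the function assembles correspond to the usual SavedModel layout. The literal values below ('assets', 'saved_model.json', 'variables', 'variables') are what the constants are assumed to hold; the snippet itself does not define them:

```python
import os

saved_model_path = '/tmp/my_simple_tf_keras_saved_model'

# Architecture JSON written under the SavedModel assets directory.
model_json_filepath = os.path.join(saved_model_path, 'assets', 'saved_model.json')

# Checkpoint prefix for the weights.
checkpoint_prefix = os.path.join(saved_model_path, 'variables', 'variables')
```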
Example #4
    def load_model_local(self, model_sha1):
        """
        Load model from local filesystem
        """

        model = self.get_model(model_sha1)

        if model and model.get('model') and model.get(
                'class_indices') and model.get('status') == self.READY:
            logging.info(
                'Model {model_sha1} is already loaded'.format(
                    model_sha1=model_sha1))
            return model

        model_path = self.get_model_path(model_sha1)
        model_path_weights = os.path.join(model_path, 'model')
        model_path_json = os.path.join(model_path, 'model.json')
        model_class_indices = os.path.join(model_path, 'class_indices.json')

        if not os.path.exists(model_path) or not os.path.exists(
                model_path_json) or not os.path.exists(model_class_indices):
            logging.debug(
                'Not loading model {model_sha1}: not all paths exist'.format(
                    model_sha1=model_sha1))
            return model

        model = self.models[model_sha1] = {}

        with open(model_class_indices, "r") as json_file:
            model['class_indices'] = json.load(json_file)

        with open(model_path_json, "r") as json_file:
            model['model'] = model_from_json(json_file.read())

        model['model'].load_weights(model_path_weights)

        # Compile the model so the optimizer and metrics can be used
        model['model'].compile(optimizer=self.__get_optimizer(),
                               loss='categorical_crossentropy',
                               metrics=['categorical_accuracy', 'accuracy'])

        model['status'] = self.READY

        return model
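Stripped of the caching and status bookkeeping, the core of this loader is the model.json / weights / class_indices.json triple. A rough standalone sketch of that part (the directory layout mirrors the paths above; the 'adam' optimizer stands in for the private __get_optimizer() helper, which is not shown) could look like:

```python
import json
import os

from tensorflow.keras.models import model_from_json

def load_model_dir(model_path):
    """Minimal sketch: rebuild a model saved as model.json + weights + class_indices.json."""
    with open(os.path.join(model_path, 'class_indices.json')) as f:
        class_indices = json.load(f)

    # Restore the architecture from the JSON file.
    with open(os.path.join(model_path, 'model.json')) as f:
        model = model_from_json(f.read())

    # Weight checkpoint prefix named 'model', as in the loader above.
    model.load_weights(os.path.join(model_path, 'model'))

    # 'adam' is a placeholder for the private __get_optimizer() helper.
    model.compile(optimizer='adam',
                  loss='categorical_crossentropy',
                  metrics=['categorical_accuracy', 'accuracy'])
    return model, class_indices
```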