Example #1
  def testConvertTfKerasFunctionalModelWithWeightsSavedAsSavedModel(self):
    with tf.Graph().as_default(), tf.compat.v1.Session():
      model = self._createFunctionalModelWithWeights()
      old_model_json = json.loads(model.to_json())
      old_weights = model.get_weights()
      keras.experimental.export_saved_model(
          model, self._tmp_dir)

      # Convert the keras SavedModel to tfjs format.
      tfjs_output_dir = os.path.join(self._tmp_dir, 'tfjs')
      converter.dispatch_keras_saved_model_to_tensorflowjs_conversion(
          self._tmp_dir, tfjs_output_dir)

      # Verify the size of the weight file.
      weight_path = glob.glob(os.path.join(tfjs_output_dir, 'group*-*'))[0]
      weight_file_bytes = os.path.getsize(weight_path)
      model_weight_bytes = sum(w.size * 4 for w in model.get_weights())
      self.assertEqual(weight_file_bytes, model_weight_bytes)

    with tf.Graph().as_default(), tf.compat.v1.Session():
      # Load the converted model back.
      model_json_path = os.path.join(tfjs_output_dir, 'model.json')
      model_prime = keras_tfjs_loader.load_keras_model(model_json_path)
      new_weights = model_prime.get_weights()

      # Check the equality of the old and new model JSONs.
      self.assertEqual(old_model_json, json.loads(model_prime.to_json()))

      # Check the equality of the old and new weights.
      self.assertAllClose(old_weights, new_weights)
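The test relies on a _createFunctionalModelWithWeights helper that is not shown here. A minimal sketch of what such a fixture might look like (the layer sizes and names are assumptions, not the original helper; it reuses the test module's keras import):

  def _createFunctionalModelWithWeights(self):
    # Hypothetical fixture: a small functional model whose Dense layers carry
    # weights, so the converted weight file has a nonzero, predictable size.
    inputs = keras.layers.Input(shape=(4,))
    x = keras.layers.Dense(8, activation='relu')(inputs)
    outputs = keras.layers.Dense(2, activation='softmax')(x)
    return keras.Model(inputs=inputs, outputs=outputs)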
Example #2
  def testConvertTfKerasSequentialCompiledAndSavedAsSavedModel(self):
    with tf.Graph().as_default(), tf.compat.v1.Session():
      model = self._createSimpleSequentialModel()
      # Compile the model before saving.
      model.compile(
          loss='binary_crossentropy',
          optimizer=tf.compat.v1.train.GradientDescentOptimizer(2.5e-3))

      old_model_json = json.loads(model.to_json())
      old_weights = model.get_weights()
      keras.experimental.export_saved_model(model, self._tmp_dir)

      # Convert the keras SavedModel to tfjs format.
      tfjs_output_dir = os.path.join(self._tmp_dir, 'tfjs')
      converter.dispatch_keras_saved_model_to_tensorflowjs_conversion(
          self._tmp_dir, tfjs_output_dir)

      # Verify the size of the weight file.
      weight_path = glob.glob(os.path.join(tfjs_output_dir, 'group*-*'))[0]
      weight_file_bytes = os.path.getsize(weight_path)
      model_weight_bytes = sum(w.size * 4 for w in model.get_weights())
      self.assertEqual(weight_file_bytes, model_weight_bytes)

    with tf.Graph().as_default(), tf.compat.v1.Session():
      # Load the converted model back.
      model_json_path = os.path.join(tfjs_output_dir, 'model.json')
      model_prime = keras_tfjs_loader.load_keras_model(model_json_path)
      new_weights = model_prime.get_weights()

      # Check the equality of the old and new model JSONs.
      self.assertEqual(old_model_json, json.loads(model_prime.to_json()))

      # Check the equality of the old and new weights.
      self.assertAllClose(old_weights, new_weights)
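Both tests assert 4 bytes per weight value because unquantized weights are stored as float32. A standalone sketch of that arithmetic (the tiny Dense model is an assumption, not the actual test fixture):

import tensorflow as tf

# Each float32 value occupies 4 bytes, so the expected weight-file size is
# the total element count times 4.
model = tf.keras.Sequential([tf.keras.layers.Dense(3, input_shape=(4,))])
expected_bytes = sum(w.size * 4 for w in model.get_weights())
# kernel (4 * 3) + bias (3) = 15 float32 values -> 60 bytes.
assert expected_bytes == 60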
Example #3
def export_tfjs(keras_or_saved_model,
                output_dir,
                tflite_filepath=None,
                **kwargs):
  """Exports saved model to tfjs.

  https://www.tensorflow.org/js/guide/conversion?hl=en

  Args:
    keras_or_saved_model: A Keras model instance or a path to a SavedModel
      directory.
    output_dir: Output TF.js model dir.
    tflite_filepath: str, file path to an existing TFLite model. If set, its
      metadata is extracted and attached to the TF.js model.
    **kwargs: Other options.
  """
  # For a Keras model, create a SavedModel in a temp dir first. Otherwise,
  # convert the given SavedModel directory directly.
  is_keras = isinstance(keras_or_saved_model, tf.keras.Model)
  with _create_temp_dir(is_keras) as temp_dir_name:
    # Export keras model to saved model and then convert to TFJS.
    if is_keras:
      keras_or_saved_model.save(
          temp_dir_name, include_optimizer=False, save_format='tf')
      path = temp_dir_name
    else:
      path = keras_or_saved_model

    # Extract metadata if tflite_filepath is provided.
    if tflite_filepath:
      metadata_json = extract_tflite_metadata_json(tflite_filepath)
      metadata = json.loads(metadata_json)
      kwargs.update(metadata=metadata)

    tfjs_converter.dispatch_keras_saved_model_to_tensorflowjs_conversion(
        path, output_dir, **kwargs)
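A usage sketch for the function above (the toy model and output paths are assumptions, not part of the original module):

import tensorflow as tf

# With an in-memory Keras model: a temporary SavedModel is written first.
keras_model = tf.keras.Sequential([tf.keras.layers.Dense(1, input_shape=(3,))])
export_tfjs(keras_model, output_dir='/tmp/tfjs_from_keras')

# With an existing SavedModel directory, optionally copying metadata from a
# companion TFLite file into the TF.js model.
export_tfjs('/tmp/saved_model_dir', output_dir='/tmp/tfjs_from_saved_model',
            tflite_filepath='/tmp/model.tflite')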
Example #4
def export_tfjs(keras_saved_model, output_dir, **kwargs):
  """Exports saved model to tfjs.

  https://www.tensorflow.org/js/guide/conversion?hl=en

  Args:
    keras_saved_model: Path to a SavedModel exported from Keras.
    output_dir: Output TF.js model dir.
    **kwargs: Other options.
  """
  tfjs_converter.dispatch_keras_saved_model_to_tensorflowjs_conversion(
      keras_saved_model, output_dir, **kwargs)
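A minimal usage sketch for this variant, which expects a SavedModel directory already exported from Keras (the paths are assumptions):

# Paths are illustrative; the first argument must be an existing Keras SavedModel dir.
export_tfjs('/tmp/keras_saved_model', '/tmp/tfjs_out')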
Example #5
  def testConvertTfKerasSequentialSavedAsSavedModelWithQuantization(self):
    with tf.Graph().as_default(), tf.compat.v1.Session():
      model = self._createSimpleSequentialModel()
      keras.experimental.export_saved_model(
          model, self._tmp_dir)

      # Convert the keras SavedModel to tfjs format.
      tfjs_output_dir = os.path.join(self._tmp_dir, 'tfjs')
      converter.dispatch_keras_saved_model_to_tensorflowjs_conversion(
          self._tmp_dir, tfjs_output_dir, quantization_dtype=np.uint16)

      # Verify the size of the weight file.
      weight_path = glob.glob(os.path.join(tfjs_output_dir, 'group*-*'))[0]
      weight_file_bytes = os.path.getsize(weight_path)
      # Each uint16 number has 2 bytes.
      bytes_per_num = 2
      model_weight_bytes = sum(
          w.size * bytes_per_num for w in model.get_weights())
      self.assertEqual(weight_file_bytes, model_weight_bytes)
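The same size check generalizes to other quantization settings. A hedged sketch of the expected weight-file sizes by dtype, extrapolating from the 2-bytes-per-uint16 assertion above (the uint8 and unquantized float32 sizes follow the same per-value pattern and are assumptions here):

import numpy as np

# The number of stored values stays the same; only the bytes per value change.
num_params = sum(w.size for w in model.get_weights())
expected_weight_file_bytes = {
    None: num_params * 4,       # float32, no quantization
    np.uint16: num_params * 2,  # 16-bit quantization, as asserted above
    np.uint8: num_params * 1,   # 8-bit quantization
}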
Example #6
def export_tfjs(keras_or_saved_model, output_dir, **kwargs):
  """Exports saved model to tfjs.

  https://www.tensorflow.org/js/guide/conversion?hl=en

  Args:
    keras_or_saved_model: A Keras model instance or a path to a SavedModel
      directory.
    output_dir: Output TF.js model dir.
    **kwargs: Other options.
  """
  # For a Keras model, create a SavedModel in a temp dir first. Otherwise,
  # convert the given SavedModel directory directly.
  is_keras = isinstance(keras_or_saved_model, tf.keras.Model)
  with _create_temp_dir(is_keras) as temp_dir_name:
    if is_keras:
      keras_or_saved_model.save(
          temp_dir_name, include_optimizer=False, save_format='tf')
      path = temp_dir_name
    else:
      path = keras_or_saved_model
    tfjs_converter.dispatch_keras_saved_model_to_tensorflowjs_conversion(
        path, output_dir, **kwargs)