Example #1
def convert_model_saved(flags, folder, mode, weights_name='best_weights'):
    """Convert model to streaming and non streaming SavedModel.

    Args:
        flags: model and data settings
        folder: folder where converted model will be saved
        mode: inference mode
        weights_name: file name with model weights
    """
    tf.reset_default_graph()
    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True
    sess = tf.Session(config=config)
    tf.keras.backend.set_session(sess)
    tf.keras.backend.set_learning_phase(0)
    flags.batch_size = 1  # set batch size for inference
    model = models.MODELS[flags.model_name](flags)
    weights_path = os.path.join(flags.train_dir, weights_name)
    model.load_weights(weights_path).expect_partial()

    path_model = os.path.join(flags.train_dir, folder)
    if not os.path.exists(path_model):
        os.makedirs(path_model)
    try:
        # convert trained model to SavedModel
        utils.model_to_saved(model, flags, path_model, mode)
    except IOError as e:
        logging.warning('FAILED to write file: %s', e)
    except (ValueError, AttributeError, RuntimeError, TypeError,
            AssertionError) as e:
        logging.warning('WARNING: failed to convert to SavedModel: %s', e)
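For context, here is a minimal usage sketch (not part of the original source) showing how convert_model_saved could be called after training to export both graph variants. It assumes the mode constants live in kws_streaming.models.modes, as in the keyword-spotting streaming codebase; the folder names are illustrative.

from kws_streaming.models import modes  # assumed location of the Modes enum

def export_saved_models(flags):
    # Non-streaming graph: scores a whole utterance in one call.
    convert_model_saved(flags, 'non_stream', modes.Modes.NON_STREAM_INFERENCE)
    # Streaming graph with internal state: consumes one audio frame per call.
    convert_model_saved(flags, 'stream_state_internal',
                        modes.Modes.STREAM_INTERNAL_STATE_INFERENCE)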
Example #2
  def test_model_to_saved(self, model_name='dnn'):
    """SavedModel supports both stateless and stateful graphs."""
    params = model_params.HOTWORD_MODEL_PARAMS[model_name]
    params = model_flags.update_flags(params)

    # create model
    model = models.MODELS[params.model_name](params)
    utils.model_to_saved(model, params, FLAGS.test_tmpdir)
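The test above only checks that the conversion runs without raising. A hedged sketch of extra assertions one might add, assuming model_to_saved writes the SavedModel directly into the target directory (so the standard saved_model.pb and variables/ artifacts appear there) and that os is imported in the test module:

    # Hypothetical follow-up checks (not in the original test).
    self.assertTrue(
        os.path.isfile(os.path.join(FLAGS.test_tmpdir, 'saved_model.pb')))
    self.assertTrue(
        os.path.isdir(os.path.join(FLAGS.test_tmpdir, 'variables')))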
Example #3
  def test_model_to_saved(self):
    """SavedModel supports both stateless and stateful graphs."""
    utils.model_to_saved(self.model, self.flags, FLAGS.test_tmpdir)
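After any of these conversions, the exported model can be inspected with the standard SavedModel loader. A minimal sketch, assuming a hypothetical export path; 'serving_default' is the signature name typically produced for Keras models saved in the TF format, but the available signatures should be checked from the printed list.

import tensorflow as tf

loaded = tf.saved_model.load('/tmp/kws/non_stream')  # hypothetical export path
print(list(loaded.signatures.keys()))  # typically includes 'serving_default'
infer = loaded.signatures['serving_default']
print(infer.structured_outputs)  # output tensor spec(s) of the exported graph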