Example #1
    def testAssets(self):
        original_asset_file = os.path.join(self.get_temp_dir(), "hello.txt")
        _write_string_to_file(original_asset_file, "hello world")

        with tf.Graph().as_default() as graph:
            asset_tensor = tf.constant(original_asset_file, name="file")
            graph.add_to_collection(tf_v1.GraphKeys.ASSET_FILEPATHS,
                                    asset_tensor)
            saved_model_lib.add_signature("default", {},
                                          {"default": asset_tensor})

        handler = saved_model_lib.SavedModelHandler()
        handler.add_graph_copy(graph)

        export_dir = os.path.join(self.get_temp_dir(), "exported")
        handler.export(export_dir)

        # Check that asset file got written to the expected place:
        exported_asset_file = os.path.join(export_dir, "assets", "hello.txt")
        self.assertTrue(tf_v1.gfile.Exists(exported_asset_file))

        loaded_handler = saved_model_lib.load(export_dir)
        with _instantiate_meta_graph(loaded_handler).as_default():
            with tf_v1.Session() as sess:
                self.assertEqual(sess.run("file:0"),
                                 tf.compat.as_bytes(exported_asset_file))
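The test snippets in this section call a few module-level helpers that are not shown. Below is a minimal sketch of how they might look; the meta_graphs accessor on SavedModelHandler is an assumption, and the real helpers in the test file may differ.

import tensorflow as tf

tf_v1 = tf.compat.v1

def _write_string_to_file(path, contents):
  # Write `contents` to `path` via TensorFlow's filesystem API, so the
  # helper works on any filesystem TensorFlow supports.
  with tf_v1.gfile.Open(path, "w") as f:
    f.write(contents)

def _read_file_to_string(path):
  # Read the file at `path` and return its full contents.
  with tf_v1.gfile.Open(path, "r") as f:
    return f.read()

def _instantiate_meta_graph(saved_model_handler):
  # Import the handler's single MetaGraphDef into a fresh Graph.
  # Assumes the handler exposes its meta graphs as `meta_graphs`.
  meta_graph, = saved_model_handler.meta_graphs
  with tf.Graph().as_default() as graph:
    tf_v1.train.import_meta_graph(meta_graph)
  return graph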
Example #2
    def testWithMultipleAssetsWithSameBasename(self):
        tmp_asset_dir = os.path.join(self.get_temp_dir(), "asset")
        file_a = os.path.join(tmp_asset_dir, "a", "hello.txt")
        file_b = os.path.join(tmp_asset_dir, "b", "hello.txt")
        tf_v1.gfile.MakeDirs(os.path.dirname(file_a))
        tf_v1.gfile.MakeDirs(os.path.dirname(file_b))
        _write_string_to_file(file_a, "hello A")
        _write_string_to_file(file_b, "hello B")
        with tf.Graph().as_default() as graph:
            asset_a = tf.constant(file_a, name="file_a")
            asset_b = tf.constant(file_b, name="file_b")
            graph.add_to_collection(tf_v1.GraphKeys.ASSET_FILEPATHS, asset_a)
            graph.add_to_collection(tf_v1.GraphKeys.ASSET_FILEPATHS, asset_b)
            saved_model_lib.add_signature("default", {}, {"default": asset_a})

        export_dir = os.path.join(self.get_temp_dir(), "exported")
        handler = saved_model_lib.SavedModelHandler()
        handler.add_graph_copy(graph)
        handler.export(export_dir)
        tf_v1.gfile.DeleteRecursively(tmp_asset_dir)

        loaded_handler = saved_model_lib.load(export_dir)
        with _instantiate_meta_graph(loaded_handler).as_default():
            with tf_v1.Session() as sess:
                self.assertEqual(_read_file_to_string(sess.run("file_a:0")),
                                 "hello A")
                self.assertEqual(_read_file_to_string(sess.run("file_b:0")),
                                 "hello B")
Example #3
  def testWithMultipleAssetsWithSameBasename(self):
    tmp_asset_dir = os.path.join(self.get_temp_dir(), "asset")
    file_a = os.path.join(tmp_asset_dir, "a", "hello.txt")
    file_b = os.path.join(tmp_asset_dir, "b", "hello.txt")
    tf.gfile.MakeDirs(os.path.dirname(file_a))
    tf.gfile.MakeDirs(os.path.dirname(file_b))
    _write_string_to_file(file_a, "hello A")
    _write_string_to_file(file_b, "hello B")
    with tf.Graph().as_default() as graph:
      asset_a = tf.constant(file_a, name="file_a")
      asset_b = tf.constant(file_b, name="file_b")
      graph.add_to_collection(tf.GraphKeys.ASSET_FILEPATHS, asset_a)
      graph.add_to_collection(tf.GraphKeys.ASSET_FILEPATHS, asset_b)

    export_dir = os.path.join(self.get_temp_dir(), "exported")
    handler = saved_model_lib.SavedModelHandler()
    handler.add_graph_copy(graph)
    handler.export(export_dir)
    tf.gfile.DeleteRecursively(tmp_asset_dir)

    loaded_handler = saved_model_lib.load(export_dir)
    with _instantiate_meta_graph(loaded_handler).as_default():
      with tf.Session() as sess:
        self.assertEqual(_read_file_to_string(sess.run("file_a:0")), "hello A")
        self.assertEqual(_read_file_to_string(sess.run("file_b:0")), "hello B")
Example #4
  def __call__(self, path):
    module_def_path = get_module_proto_path(path)
    module_def_proto = module_def_pb2.ModuleDef()
    with tf_v1.gfile.Open(module_def_path, "rb") as f:
      module_def_proto.ParseFromString(f.read())

    if module_def_proto.format != module_def_pb2.ModuleDef.FORMAT_V3:
      raise ValueError("Unsupported module def format: %r" %
                       module_def_proto.format)

    required_features = set(module_def_proto.required_features)
    unsupported_features = (required_features - _MODULE_V3_SUPPORTED_FEATURES)

    if unsupported_features:
      raise ValueError("Unsupported features: %r" % list(unsupported_features))

    saved_model_handler = saved_model_lib.load(path)
    checkpoint_filename = saved_model_lib.get_variables_path(path)
    return _ModuleSpec(saved_model_handler, checkpoint_filename)
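The format and required_features checks are the forward-compatibility mechanism: a module serialized in an unknown format, or one declaring features this library version does not implement, is rejected at load time with a clear error rather than failing in some obscure way later.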
Example #5
  def __call__(self, path):
    module_def_path = get_module_proto_path(path)
    module_def_proto = module_def_pb2.ModuleDef()
    with tf.gfile.Open(module_def_path, "rb") as f:
      module_def_proto.ParseFromString(f.read())

    if module_def_proto.format != module_def_pb2.ModuleDef.FORMAT_V3:
      raise ValueError("Unsupported module def format: %r" %
                       module_def_proto.format)

    required_features = set(module_def_proto.required_features)
    unsupported_features = (required_features - _MODULE_V3_SUPPORTED_FEATURES)

    if unsupported_features:
      raise ValueError("Unsupported features: %r" % list(unsupported_features))

    saved_model_handler = saved_model_lib.load(path)
    checkpoint_filename = saved_model_lib.get_variables_path(path)
    return _ModuleSpec(saved_model_handler, checkpoint_filename)
Example #6
def create_module_spec_from_saved_model(saved_model_path,
                                        drop_collections=None):
    """Experimental: Create a ModuleSpec out of a SavedModel from TF1.

  Warning: Deprecated. This belongs to the hub.Module API and TF1 Hub format.
  For TF2, TensorFlow Hub ships plain SavedModels, removing the need for
  conversions like this.

  Define a ModuleSpec from a SavedModel. Note that this is not guaranteed to
  work in all cases, and it assumes the SavedModel follows some conventions:

  - The serialized SaverDef can be ignored; it can be reconstructed instead.
  - The init op and main op can be ignored and instead the module can be
    initialized by using the conventions followed by
    `tf.train.MonitoredSession`.

  Note that the set of supported features can grow over time and may have side
  effects that were not previously visible. The pattern followed to avoid
  surprises is to force users to declare which features to ignore (even
  features that are not yet supported).

  Note that this function creates a ModuleSpec that, when exported, produces a
  Module (based on a modified copy of the original SavedModel) and not a
  SavedModel.

  THIS FUNCTION IS DEPRECATED.

  Args:
    saved_model_path: Directory with the SavedModel to use.
    drop_collections: Additional list of collections to drop.

  Returns:
    A ModuleSpec.
  """
    saved_model_handler = saved_model_lib.load(saved_model_path)
    checkpoint_filename = saved_model_lib.get_variables_path(saved_model_path)

    drop_collections = (set(_ALWAYS_DROPPED_COLLECTIONS) |
                        (set(drop_collections) if drop_collections else set()))
    _drop_collections(saved_model_handler, drop_collections)

    return native_module._ModuleSpec(saved_model_handler, checkpoint_filename)  # pylint: disable=protected-access
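For orientation, a hedged usage sketch follows. The directory path and collection name are hypothetical; it assumes the function is exposed as hub.create_module_spec_from_saved_model alongside hub.Module in the TF1 Hub API.

import tensorflow_hub as hub

# "/tmp/my_saved_model" and "my_training_collection" are hypothetical.
spec = hub.create_module_spec_from_saved_model(
    "/tmp/my_saved_model",
    drop_collections=["my_training_collection"])
module = hub.Module(spec)  # instantiate the module in the current TF1 graph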
Example #7
def load_module_spec(path):
    """Loads a ModuleSpec from the filesystem.

  Args:
    path: string describing the location of a module. There are several
          supported path encoding schemes:
          a) URL location specifying an archived module
            (e.g. http://domain/module.tgz)
          b) Any filesystem location of a module directory (e.g. /module_dir
             for a local filesystem). All filesystem implementations provided
             by TensorFlow are supported.

  Returns:
    A ModuleSpec.

  Raises:
    ValueError: on unexpected values in the module spec.
    tf.OpError: on file handling exceptions.
  """
    path = compressed_module_resolver.get_default().get_module_path(path)
    module_def_path = _get_module_proto_path(path)
    module_def_proto = module_def_pb2.ModuleDef()
    with tf.gfile.Open(module_def_path, "rb") as f:
        module_def_proto.ParseFromString(f.read())

    if module_def_proto.format != module_def_pb2.ModuleDef.FORMAT_V3:
        raise ValueError("Unsupported module def format: %r" %
                         module_def_proto.format)

    required_features = set(module_def_proto.required_features)
    unsupported_features = (required_features - _MODULE_V3_SUPPORTED_FEATURES)

    if unsupported_features:
        raise ValueError("Unsupported features: %r" %
                         list(unsupported_features))

    saved_model_handler = saved_model_lib.load(path)
    checkpoint_filename = saved_model_lib.get_variables_path(path)
    return _ModuleSpec(saved_model_handler, checkpoint_filename)
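A hedged usage sketch (the tfhub.dev handle and local path are illustrative; it assumes the function is exposed as hub.load_module_spec):

import tensorflow_hub as hub

# Either a URL to an archived module or a local module directory works.
spec = hub.load_module_spec("https://tfhub.dev/google/nnlm-en-dim128/1")
# spec = hub.load_module_spec("/tmp/module_dir")  # local-directory variant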
Example #8
def load_module_spec(path):
  """Loads a ModuleSpec from the filesystem.

  Args:
    path: string describing the location of a module. There are several
          supported path encoding schemes:
          a) URL location specifying an archived module
            (e.g. http://domain/module.tgz)
          b) Any filesystem location of a module directory (e.g. /module_dir
             for a local filesystem). All filesystem implementations provided
             by TensorFlow are supported.

  Returns:
    A ModuleSpec.

  Raises:
    ValueError: on unexpected values in the module spec.
    tf.OpError: on file handling exceptions.
  """
  path = registry.resolver(path)
  module_def_path = _get_module_proto_path(path)
  module_def_proto = module_def_pb2.ModuleDef()
  with tf.gfile.Open(module_def_path, "rb") as f:
    module_def_proto.ParseFromString(f.read())

  if module_def_proto.format != module_def_pb2.ModuleDef.FORMAT_V3:
    raise ValueError("Unsupported module def format: %r" %
                     module_def_proto.format)

  required_features = set(module_def_proto.required_features)
  unsupported_features = (required_features - _MODULE_V3_SUPPORTED_FEATURES)

  if unsupported_features:
    raise ValueError("Unsupported features: %r" % list(unsupported_features))

  saved_model_handler = saved_model_lib.load(path)
  checkpoint_filename = saved_model_lib.get_variables_path(path)
  return _ModuleSpec(saved_model_handler, checkpoint_filename)
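The only functional difference from Example #7 is the first line: path resolution goes through the pluggable registry.resolver instead of invoking the compressed-module resolver directly, so whichever resolvers are registered with the library can handle the path.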
Example #9
  def testAssets(self):
    original_asset_file = os.path.join(self.get_temp_dir(), "hello.txt")
    _write_string_to_file(original_asset_file, "hello world")

    with tf.Graph().as_default() as graph:
      asset_tensor = tf.constant(original_asset_file, name="file")
      graph.add_to_collection(tf.GraphKeys.ASSET_FILEPATHS, asset_tensor)

    handler = saved_model_lib.SavedModelHandler()
    handler.add_graph_copy(graph)

    export_dir = os.path.join(self.get_temp_dir(), "exported")
    handler.export(export_dir)

    # Check that asset file got written to the expected place:
    exported_asset_file = os.path.join(export_dir, "assets", "hello.txt")
    self.assertTrue(tf.gfile.Exists(exported_asset_file))

    loaded_handler = saved_model_lib.load(export_dir)
    with _instantiate_meta_graph(loaded_handler).as_default():
      with tf.Session() as sess:
        self.assertEqual(sess.run("file:0"),
                         tf.compat.as_bytes(exported_asset_file))
Example #10
def create_module_spec_from_saved_model(saved_model_path,
                                        drop_collections=None):
  """Experimental: Create a ModuleSpec out of a SavedModel.

  Define a ModuleSpec from a SavedModel. Note that this is not guaranteed to
  work in all cases, and it assumes the SavedModel follows some conventions:

  - The serialized SaverDef can be ignored; it can be reconstructed instead.
  - The init op and main op can be ignored and instead the module can be
    initialized by using the conventions followed by
    `tf.train.MonitoredSession`.

  Note that the set of supported features can grow over time and may have side
  effects that were not previously visible. The pattern followed to avoid
  surprises is to force users to declare which features to ignore (even
  features that are not yet supported).

  Note that this function creates a ModuleSpec that, when exported, produces a
  Module (based on a modified copy of the original SavedModel) and not a
  SavedModel.

  Args:
    saved_model_path: Directory with the SavedModel to use.
    drop_collections: Additional list of collections to drop.

  Returns:
    A ModuleSpec.
  """
  saved_model_handler = saved_model_lib.load(saved_model_path)
  checkpoint_filename = saved_model_lib.get_variables_path(saved_model_path)

  drop_collections = (set(_ALWAYS_DROPPED_COLLECTIONS) |
                      (set(drop_collections) if drop_collections else set()))
  _drop_collections(saved_model_handler, drop_collections)

  return native_module._ModuleSpec(saved_model_handler, checkpoint_filename)  # pylint: disable=protected-access
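Note the unconditional union with _ALWAYS_DROPPED_COLLECTIONS: a baseline set of collections is always stripped from the SavedModel copy, regardless of what the caller passes in drop_collections.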
Example #11
  def _module_def_proto_to_module_spec(self, path):
    saved_model_handler = saved_model_lib.load(path)
    checkpoint_filename = saved_model_lib.get_variables_path(path)
    return _ModuleSpec(saved_model_handler, checkpoint_filename)
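This helper is the common tail of Examples #4, #5, #7, and #8: once the format and feature checks have passed, loading a module reduces to reading its SavedModel and locating the variables checkpoint.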