# Assumed imports for this excerpt (the surrounding test module also defines
# the _EXTRA_COLLECTION name used below):
#   import os
#   import tensorflow as tf
#   import tensorflow.compat.v1 as tf_v1
#   import tensorflow_hub as hub
def createSavedModel(self):
  """Exports a tiny y = x * w graph as a SavedModel and returns its path."""
  model_dir = os.path.join(self.get_temp_dir(), "saved_model")
  with tf.Graph().as_default():
    x = tf_v1.placeholder(dtype=tf.float32, shape=[None, 3])
    w = tf_v1.get_variable("weights", shape=[])
    y = x * w
    # Record y in an extra, test-defined collection (_EXTRA_COLLECTION is a
    # module-level constant in the surrounding test file).
    tf_v1.add_to_collection(_EXTRA_COLLECTION, y)

    init_op = tf_v1.assign(w, 2)

    with tf_v1.Session() as session:
      # Initialize w to 2 before exporting, so the SavedModel carries a
      # concrete value for the variable.
      session.run(init_op)
      tf_v1.saved_model.simple_save(
          session,
          model_dir,
          inputs={"x": x},
          outputs={"y": y},
      )
  return model_dir
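
The directory returned above can be reloaded with the plain TF1 SavedModel loader. The sketch below is not part of the original test; it assumes model_dir is the value returned by createSavedModel and relies on the "serving_default" signature that simple_save writes.

# Hedged usage sketch (not from the original test): reload the SavedModel
# written by createSavedModel and run its "serving_default" signature.
with tf.Graph().as_default(), tf_v1.Session() as session:
  meta_graph = tf_v1.saved_model.loader.load(
      session, [tf_v1.saved_model.tag_constants.SERVING], model_dir)
  signature = meta_graph.signature_def["serving_default"]
  x_name = signature.inputs["x"].name
  y_name = signature.outputs["y"].name
  result = session.run(y_name, feed_dict={x_name: [[1.0, 2.0, 3.0]]})
  # With w initialized to 2, result is [[2.0, 4.0, 6.0]].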
Example 2
def text_module_fn():
  # A tiny text-embedding module_fn: maps each input string to a fixed
  # 4-dimensional embedding via a string-to-index lookup table.
  embeddings = [
      ("", [0, 0, 0, 0]),  # OOV items are mapped to this embedding.
      ("hello world", [1, 2, 3, 4]),
      ("pair-programming", [5, 5, 5, 5]),
  ]
  # HashTable and KeyValueTensorInitializer are assumed to be imported from
  # TensorFlow's lookup ops elsewhere in the test module.
  keys = tf.constant([item[0] for item in embeddings], dtype=tf.string)
  indices = tf.constant(list(range(len(embeddings))), dtype=tf.int64)
  tbl_init = KeyValueTensorInitializer(keys, indices)
  table = HashTable(tbl_init, 0)  # Unknown keys fall back to index 0 ("").

  weights_initializer = tf.cast(
      tf.constant([item[1] for item in embeddings]), tf.float32)

  weights = tf_v1.get_variable(
      "weights", dtype=tf.float32, initializer=weights_initializer)

  text_tensor = tf_v1.placeholder(dtype=tf.string, name="text", shape=[None])
  indices_tensor = table.lookup(text_tensor)
  embedding_tensor = tf.gather(weights, indices_tensor)
  hub.add_signature(inputs=text_tensor, outputs=embedding_tensor)
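
text_module_fn follows the TF1-style Hub module_fn pattern, so a natural way to exercise it is through hub.create_module_spec and hub.Module. The sketch below is an assumption about intended usage, not code from the original test.

# Hedged sketch (assumed usage, not from the original test): wrap the
# module_fn into a Hub module spec and look up a few strings.
spec = hub.create_module_spec(text_module_fn)
with tf.Graph().as_default():
  embed = hub.Module(spec)
  embedded = embed(tf.constant(["hello world", "never seen before"]))
  with tf_v1.Session() as session:
    session.run(tf_v1.global_variables_initializer())
    session.run(tf_v1.tables_initializer())
    print(session.run(embedded))
    # "hello world" -> [1., 2., 3., 4.]; unknown strings hit the table's
    # default index 0 and get the empty-string row [0., 0., 0., 0.].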