Example #1
0
  def __init__(self, params, name=None):
    """Construct the Transformer's sub-layers.

    Args:
      params: hyperparameter object defining layer sizes, dropout values,
        etc.; must provide "vocab_size" and "hidden_size" keys.
      name: optional name for the model.
    """
    super().__init__(name=name)
    self.params = params
    # Encoder and decoder stacks are configured from the same
    # hyperparameter object.
    self.encoder_stack = EncoderStack(params)
    self.decoder_stack = DecoderStack(params)
    # NOTE(review): the layer name suggests the embedding weights are
    # reused for the pre-softmax projection — confirm in EmbeddingSharedWeights.
    self.embedding_softmax_layer = embedding_layer.EmbeddingSharedWeights(
        params["vocab_size"], params["hidden_size"])
Example #2
0
    def test_embedding_shared_weights(self):
        """Check EmbeddingSharedWeights config and output shapes in both modes."""
        vocab_size = 50
        hidden_size = 64
        length = 2
        layer = embedding_layer.EmbeddingSharedWeights(vocab_size, hidden_size)
        # Compare against the same locals used to build the layer instead of
        # repeating the magic numbers 50/64 — keeps the assertion in sync if
        # the fixture values ever change.
        self.assertDictEqual(layer.get_config(), {
            "vocab_size": vocab_size,
            "hidden_size": hidden_size,
        })

        # Embedding mode: integer ids map to hidden_size-dim vectors.
        idx = tf.ones([1, length], dtype="int32")
        y = layer(idx)
        self.assertEqual(y.shape, (1, length, hidden_size))

        # "linear" mode: hidden vectors map to vocab-size logits.
        x = tf.ones([1, length, hidden_size])
        output = layer(x, "linear")
        self.assertEqual(output.shape, (1, length, vocab_size))