Example #1
  def test_get_config(self):
    num_attention_heads = 2
    decoder_block = transformer.TransformerDecoderBlock(
        num_attention_heads=num_attention_heads,
        intermediate_size=32,
        intermediate_activation='relu',
        dropout_rate=0.1,
        attention_dropout_rate=0.1,
        use_bias=False,
        norm_first=True,
        norm_epsilon=1e-6,
        intermediate_dropout=0.1,
        attention_initializer=tf.keras.initializers.RandomUniform(
            minval=0., maxval=1.))
    decoder_block_config = decoder_block.get_config()
    new_decoder_block = transformer.TransformerDecoderBlock.from_config(
        decoder_block_config)
    # A round trip through get_config()/from_config() must preserve the
    # layer's configuration.
    self.assertEqual(decoder_block_config, new_decoder_block.get_config())
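These methods reference tf and transformer without showing the surrounding module. A minimal scaffold they could run under, assuming the TF Model Garden layout (the import path and class name are assumptions, not shown in the snippets):

import tensorflow as tf
from official.nlp.modeling.layers import transformer  # assumed import path


class TransformerDecoderBlockTest(tf.test.TestCase):
  # The example methods on this page would sit inside this class.
  ...


if __name__ == '__main__':
  tf.test.main()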
Example #2
  def test_decoder_block_with_cache(self):
    num_attention_heads = 2
    hidden_size = 16
    decoder_block = transformer.TransformerDecoderBlock(
        num_attention_heads=num_attention_heads,
        intermediate_size=32,
        intermediate_activation='relu',
        dropout_rate=0.1,
        attention_dropout_rate=0.1)
    # Forward path.
    dummy_tensor = tf.zeros([2, 4, 16], dtype=tf.float32)
    dummy_mask = tf.zeros([2, 4, 4], dtype=tf.float32)
    inputs = [dummy_tensor, dummy_tensor, dummy_mask, dummy_mask]
    # Start from an empty cache (initial decode length 0).
    cache = _create_cache(2, 0, num_attention_heads,
                          hidden_size // num_attention_heads)
    output, cache = decoder_block(inputs, cache)
    self.assertEqual(output.shape, (2, 4, hidden_size))
    # After the call the cache holds the keys/values for all 4 positions:
    # [batch, length, num_heads, head_size] = [2, 4, 2, 8].
    self.assertEqual(cache['value'].shape, (2, 4, 2, 8))
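The _create_cache helper is not shown in the snippet. A minimal sketch consistent with how it is called here and with the asserted (2, 4, 2, 8) cache shape (the original helper may differ in detail):

def _create_cache(batch_size, init_decode_length, num_heads, head_size):
  # Empty key/value caches shaped [batch, length, num_heads, head_size];
  # the decoder block appends new entries along the length axis.
  return {
      'key': tf.zeros([batch_size, init_decode_length, num_heads, head_size],
                      dtype=tf.float32),
      'value': tf.zeros([batch_size, init_decode_length, num_heads, head_size],
                        dtype=tf.float32),
  }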
Example #3
  def test_use_bias_norm_first(self):
    num_attention_heads = 2
    hidden_size = 16
    # norm_first=True applies layer normalization before the attention and
    # feed-forward sublayers (pre-LN); use_bias=False drops the bias terms
    # from the attention projections.
    decoder_block = transformer.TransformerDecoderBlock(
        num_attention_heads=num_attention_heads,
        intermediate_size=32,
        intermediate_activation='relu',
        dropout_rate=0.1,
        attention_dropout_rate=0.1,
        use_bias=False,
        norm_first=True,
        norm_epsilon=1e-6,
        intermediate_dropout=0.1,
        attention_initializer=tf.keras.initializers.RandomUniform(
            minval=0., maxval=1.))
    # Forward path.
    dummy_tensor = tf.zeros([2, 4, 16], dtype=tf.float32)
    dummy_mask = tf.zeros([2, 4, 4], dtype=tf.float32)
    inputs = [dummy_tensor, dummy_tensor, dummy_mask, dummy_mask]
    output, _ = decoder_block(inputs)
    self.assertEqual(output.shape, (2, 4, hidden_size))
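The same pre-layer-norm block can also be exercised outside a test class. A minimal usage sketch under the assumed imports above; both masks are identical dummies here, mirroring the tests, so their ordering is not distinguished:

block = transformer.TransformerDecoderBlock(
    num_attention_heads=2,
    intermediate_size=32,
    intermediate_activation='relu',
    dropout_rate=0.1,
    attention_dropout_rate=0.1,
    use_bias=False,
    norm_first=True,
    norm_epsilon=1e-6)
targets = tf.random.uniform([2, 4, 16])  # [batch, target_length, hidden]
memory = tf.random.uniform([2, 4, 16])   # encoder output with matching width
mask = tf.ones([2, 4, 4])                # attends over all 4 positions
output, _ = block([targets, memory, mask, mask])
print(output.shape)  # (2, 4, 16)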