Example #1
  def test_transfomer_kq_shared_bottleneck(self, is_kq_shared):
    # Parameterized over whether the key and query share the bottleneck
    # projection; either way the output shape should match the input shape.
    feature = tf.random.uniform([2, 3, 512])
    layer = mobile_bert_encoder.TransformerLayer(
        key_query_shared_bottleneck=is_kq_shared)
    output = layer(feature)
    output_shape = output.shape.as_list()
    expected_shape = [2, 3, 512]
    self.assertListEqual(output_shape, expected_shape)
Example #2
  def test_transfomer_return_attention_score(self):
    # With return_attention_scores=True the layer also returns the attention
    # scores, shaped [batch, num_heads, seq_length, seq_length].
    sequence_length = 5
    num_attention_heads = 8
    feature = tf.random.uniform([2, sequence_length, 512])
    layer = mobile_bert_encoder.TransformerLayer(
        num_attention_heads=num_attention_heads)
    _, attention_score = layer(feature, return_attention_scores=True)
    expected_shape = [2, num_attention_heads, sequence_length, sequence_length]
    self.assertListEqual(attention_score.shape.as_list(), expected_shape)
Example #3
  def test_transfomer_with_mask(self):
    # Passing a [batch, seq_length, seq_length] attention mask should not
    # change the output shape.
    feature = tf.random.uniform([2, 3, 512])
    input_mask = [[[0., 0., 1.], [0., 0., 1.], [0., 0., 1.]],
                  [[0., 1., 1.], [0., 1., 1.], [0., 1., 1.]]]
    input_mask = np.asarray(input_mask)
    layer = mobile_bert_encoder.TransformerLayer()
    output = layer(feature, input_mask)
    output_shape = output.shape.as_list()
    expected_shape = [2, 3, 512]
    self.assertListEqual(output_shape, expected_shape)
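
The snippets above are test methods, so they assume a surrounding tf.test.TestCase class, the usual TensorFlow and NumPy imports, and the mobile_bert_encoder module from the TensorFlow Model Garden. The sketch below shows one way that scaffolding might look; the import path and the parameterized decorator for Example #1's is_kq_shared argument are assumptions, not taken from the examples themselves.

from absl.testing import parameterized
import numpy as np
import tensorflow as tf

# Assumed import path; adjust to wherever mobile_bert_encoder lives in your
# copy of the TensorFlow Model Garden.
from official.nlp.modeling.networks import mobile_bert_encoder


class TransformerLayerTest(parameterized.TestCase, tf.test.TestCase):

  # Example #1 takes an extra is_kq_shared argument, so it would be driven by
  # a parameterized decorator, e.g.:
  #   @parameterized.named_parameters(('kq_shared', True),
  #                                   ('kq_not_shared', False))
  # Examples #2 and #3 are plain test methods and can be pasted in directly.
  ...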