Example #1
    def create_and_check_mobilebert_for_pretraining(
        self, config, input_ids, token_type_ids, input_mask, sequence_labels, token_labels, choice_labels
    ):
        # Build the TF pretraining model (masked-LM + next-sentence-prediction heads).
        model = TFMobileBertForPreTraining(config=config)
        inputs = {"input_ids": input_ids, "attention_mask": input_mask, "token_type_ids": token_type_ids}
        result = model(inputs)
        # Masked-LM logits: one vocabulary-sized distribution per token.
        self.parent.assertEqual(
            result.prediction_logits.shape, (self.batch_size, self.seq_length, self.vocab_size)
        )
        # Next-sentence-prediction logits: a binary score pair per example.
        self.parent.assertEqual(result.seq_relationship_logits.shape, (self.batch_size, 2))
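
This helper depends on the surrounding tester class (`self.parent`, `self.batch_size`, and so on). Below is a minimal standalone sketch of the same shape check, assuming `transformers` and TensorFlow are installed; the small `MobileBertConfig` hyperparameter values are illustrative assumptions, not the official defaults.

import tensorflow as tf
from transformers import MobileBertConfig, TFMobileBertForPreTraining

# Deliberately tiny config so the sketch runs quickly; values are illustrative only.
config = MobileBertConfig(
    vocab_size=99,
    hidden_size=32,
    embedding_size=32,
    num_hidden_layers=2,
    num_attention_heads=4,
    intermediate_size=37,
)
model = TFMobileBertForPreTraining(config=config)

batch_size, seq_length = 2, 7
inputs = {
    "input_ids": tf.random.uniform((batch_size, seq_length), maxval=config.vocab_size, dtype=tf.int32),
    "attention_mask": tf.ones((batch_size, seq_length), dtype=tf.int32),
    "token_type_ids": tf.zeros((batch_size, seq_length), dtype=tf.int32),
}
result = model(inputs)

# Same shape checks as the test helper above.
assert result.prediction_logits.shape == (batch_size, seq_length, config.vocab_size)
assert result.seq_relationship_logits.shape == (batch_size, 2)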
Example #2
    def test_inference_masked_lm(self):
        model = TFMobileBertForPreTraining.from_pretrained("google/mobilebert-uncased")
        input_ids = tf.constant([[0, 1, 2, 3, 4, 5]])
        output = model(input_ids)[0]

        # Prediction logits have shape (batch_size, sequence_length, vocab_size).
        expected_shape = [1, 6, 30522]
        self.assertEqual(output.shape, expected_shape)

        # Reference logits for the first three positions and vocabulary entries,
        # compared within a small tolerance.
        expected_slice = tf.constant([[
            [-4.5919547, -9.248295, -9.645256],
            [-6.7306175, -6.440284, -6.6052837],
            [-7.2743506, -6.7847915, -6.024673],
        ]])
        tf.debugging.assert_near(output[:, :3, :3], expected_slice, atol=1e-4)
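
Usage note: these snippets come from the TF MobileBERT test module in the Hugging Face Transformers repository (in recent releases, tests/models/mobilebert/test_modeling_tf_mobilebert.py; the exact path varies by version). Integration tests like the one above are typically decorated with @slow and only execute when the RUN_SLOW environment variable is set, e.g. roughly:

RUN_SLOW=1 python -m pytest -k test_inference_masked_lm tests/models/mobilebert/test_modeling_tf_mobilebert.py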