def create_and_check_bert_for_pretraining(
    self, config, input_ids, token_type_ids, input_mask, sequence_labels, token_labels, choice_labels
):
    """Instantiate TFBertForPreTraining and verify the shapes of both head outputs."""
    model = TFBertForPreTraining(config=config)
    model_inputs = {
        "input_ids": input_ids,
        "attention_mask": input_mask,
        "token_type_ids": token_type_ids,
    }
    outputs = model(model_inputs)
    # MLM head: one logit per vocabulary entry at every sequence position.
    self.parent.assertEqual(
        outputs.prediction_logits.shape, (self.batch_size, self.seq_length, self.vocab_size)
    )
    # NSP head: binary next-sentence classification per example.
    self.parent.assertEqual(outputs.seq_relationship_logits.shape, (self.batch_size, 2))
def create_and_check_bert_for_pretraining(
    self, config, input_ids, token_type_ids, input_mask, sequence_labels, token_labels, choice_labels
):
    """Instantiate TFBertForPreTraining and verify the shapes of both head outputs.

    NOTE(review): this method name is defined twice in this file; only the
    definition that appears last survives class creation. The duplicate
    should be removed once confirmed against the rest of the test class.
    """
    model = TFBertForPreTraining(config=config)
    inputs = {"input_ids": input_ids, "attention_mask": input_mask, "token_type_ids": token_type_ids}
    result = model(inputs)
    # Read the named output attributes instead of tuple-unpacking the model
    # output, matching the sibling implementation in this file; this also
    # avoids the .numpy() round-trips the old code needed for list comparison.
    self.parent.assertEqual(result.prediction_logits.shape, (self.batch_size, self.seq_length, self.vocab_size))
    self.parent.assertEqual(result.seq_relationship_logits.shape, (self.batch_size, 2))
def test_inference_masked_lm(self):
    """Smoke-test TFBertForPreTraining inference against a pinned tiny checkpoint.

    Asserts both the output shape and a fixed slice of the prediction logits,
    so any regression in the forward pass changes the compared values.
    """
    model = TFBertForPreTraining.from_pretrained("lysandre/tiny-bert-random")
    input_ids = tf.constant([[0, 1, 2, 3, 4, 5]])
    output = model(input_ids)[0]
    expected_shape = [1, 6, 10]
    self.assertEqual(output.shape, expected_shape)
    # Reference values recorded from a known-good run of this checkpoint.
    # (Removed a leftover debug print of the slice.)
    expected_slice = tf.constant(
        [
            [
                [0.03706957, 0.10124919, 0.03616843],
                [-0.06099961, 0.02266058, 0.00601412],
                [-0.06066202, 0.05684517, 0.02038802],
            ]
        ]
    )
    tf.debugging.assert_near(output[:, :3, :3], expected_slice, atol=1e-4)