def _test_TFFunnel(self, size, large=False):
    """Round-trip a TFFunnel question-answering checkpoint through the harness.

    Loads the tokenizer and TF QA model for *size*, encodes a fixed
    question/context pair as TF tensors, pads to a sequence length of 128
    via ``self.spec_and_pad``, and hands everything to ``self.run_test``,
    which compares the exported model's ``start_logits``/``end_logits``
    against the original within ``rtol=1e-5``.

    NOTE(review): ``large`` is accepted for signature parity with sibling
    tests but is unused here.
    """
    # Local import keeps transformers out of module import time.
    from transformers import FunnelTokenizer, TFFunnelForQuestionAnswering

    qa_tokenizer = FunnelTokenizer.from_pretrained(size)
    qa_model = TFFunnelForQuestionAnswering.from_pretrained(size)

    encoded = qa_tokenizer(
        "Who was Jim Henson?",
        "Jim Henson was a nice puppet",
        return_tensors='tf',
    )
    spec, padded_inputs = self.spec_and_pad(encoded, 128)

    self.run_test(
        qa_model,
        padded_inputs,
        input_signature=spec,
        outputs=["start_logits", "end_logits"],
        rtol=1e-5,
    )
def create_and_check_for_question_answering(
    self,
    config,
    input_ids,
    token_type_ids,
    input_mask,
    sequence_labels,
    token_labels,
    choice_labels,
):
    """Instantiate a TFFunnel QA head from *config* and verify logit shapes.

    Runs a forward pass with the prepared ids/mask/type-ids and asserts
    (via ``self.parent``) that both start and end logits come back with
    shape ``(batch_size, seq_length)``. The ``*_labels`` arguments are
    part of the shared checker signature and are not used here.
    """
    qa_head = TFFunnelForQuestionAnswering(config=config)

    outputs = qa_head(
        {
            "input_ids": input_ids,
            "attention_mask": input_mask,
            "token_type_ids": token_type_ids,
        }
    )

    # Both logit tensors must be one score per token position.
    expected_shape = (self.batch_size, self.seq_length)
    self.parent.assertEqual(outputs.start_logits.shape, expected_shape)
    self.parent.assertEqual(outputs.end_logits.shape, expected_shape)