def answer():
    # Function-local imports, following the original style; `app` is expected to
    # be a module-level Flask application (wired up below).
    from flask import request
    from transformers import AutoTokenizer, MobileBertForQuestionAnswering, pipeline

    modelname = "csarron/mobilebert-uncased-squad-v2"
    model = MobileBertForQuestionAnswering.from_pretrained(modelname)
    tokenizer = AutoTokenizer.from_pretrained(modelname)
    nlp = pipeline("question-answering", model=model, tokenizer=tokenizer)

    with open("merkel.txt", "r") as file:
        context = file.read()

    response_bert = nlp({
        "question": request.json["question"],
        "context": context
    })

    answer = response_bert.get("answer", "No answer available.")
    confidence = response_bert.get("score")
    # Only apply float formatting when a score is actually present.
    confidence_text = f"{confidence:.4f}" if confidence is not None else "No confidence available"

    response = app.response_class(
        response=f"{answer} (Confidence: {confidence_text})",
        status=200,
        mimetype="text/plain")
    return response
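# A minimal sketch of the surrounding Flask application that the answer() view
# above relies on. The route path "/answer" and the port are assumptions, not
# taken from the original snippet; only the view function itself is given above.
from flask import Flask

app = Flask(__name__)

# Register the view defined above as a POST endpoint; Flask derives the
# endpoint name from the function name.
app.add_url_rule("/answer", view_func=answer, methods=["POST"])

if __name__ == "__main__":
    app.run(port=5000)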
def create_and_check_mobilebert_for_question_answering(
    self, config, input_ids, token_type_ids, input_mask, sequence_labels, token_labels, choice_labels
):
    model = MobileBertForQuestionAnswering(config=config)
    model.to(torch_device)
    model.eval()
    result = model(
        input_ids,
        attention_mask=input_mask,
        token_type_ids=token_type_ids,
        start_positions=sequence_labels,
        end_positions=sequence_labels,
    )
    self.parent.assertEqual(result.start_logits.shape, (self.batch_size, self.seq_length))
    self.parent.assertEqual(result.end_logits.shape, (self.batch_size, self.seq_length))
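# A standalone sketch of the same shape check outside the test harness,
# assuming only torch and a recent transformers release. The tiny config value
# and dummy inputs are illustrative, not taken from the test suite above.
import torch
from transformers import MobileBertConfig, MobileBertForQuestionAnswering

config = MobileBertConfig(num_hidden_layers=2)  # shrink the model for a quick check
model = MobileBertForQuestionAnswering(config)
model.eval()

batch_size, seq_length = 2, 16
input_ids = torch.randint(0, config.vocab_size, (batch_size, seq_length))
attention_mask = torch.ones_like(input_ids)

with torch.no_grad():
    outputs = model(input_ids, attention_mask=attention_mask)

# One start logit and one end logit per token, as asserted in the test above.
assert outputs.start_logits.shape == (batch_size, seq_length)
assert outputs.end_logits.shape == (batch_size, seq_length)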
def create_and_check_mobilebert_for_question_answering(
    self, config, input_ids, token_type_ids, input_mask, sequence_labels, token_labels, choice_labels
):
    model = MobileBertForQuestionAnswering(config=config)
    model.to(torch_device)
    model.eval()
    # Older tuple-style output: (loss, start_logits, end_logits).
    loss, start_logits, end_logits = model(
        input_ids,
        attention_mask=input_mask,
        token_type_ids=token_type_ids,
        start_positions=sequence_labels,
        end_positions=sequence_labels,
    )
    result = {
        "loss": loss,
        "start_logits": start_logits,
        "end_logits": end_logits,
    }
    self.parent.assertListEqual(list(result["start_logits"].size()), [self.batch_size, self.seq_length])
    self.parent.assertListEqual(list(result["end_logits"].size()), [self.batch_size, self.seq_length])
    self.check_loss_output(result)
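# A sketch of what the start/end logits checked above are used for at inference
# time: extracting an answer span by hand instead of via the pipeline. It uses
# the same checkpoint as the Flask endpoint above; the question and context
# strings are illustrative placeholders, and downloading the model requires a
# network connection.
import torch
from transformers import AutoTokenizer, MobileBertForQuestionAnswering

model_name = "csarron/mobilebert-uncased-squad-v2"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = MobileBertForQuestionAnswering.from_pretrained(model_name)
model.eval()

question = "Who was the chancellor of Germany?"
context = "Angela Merkel served as the chancellor of Germany from 2005 to 2021."

inputs = tokenizer(question, context, return_tensors="pt")
with torch.no_grad():
    outputs = model(**inputs)

# Take the argmax of the start logits and of the end logits as the answer span.
# This is a simplification: the question-answering pipeline searches over valid
# start/end pairs rather than taking independent argmaxes.
start = int(outputs.start_logits.argmax())
end = int(outputs.end_logits.argmax())
answer_tokens = inputs["input_ids"][0, start : end + 1]
print(tokenizer.decode(answer_tokens, skip_special_tokens=True))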