def model_load(self, path, s3_model_url='https://distilbert-finetuned-model.s3.eu-west-2.amazonaws.com/pytorch_model.bin'):
    """Load a fine-tuned DistilBERT question-answering model and tokenizer.

    Parameters
    ----------
    path : str
        Directory containing ``config.json`` and the tokenizer files.
    s3_model_url : str, optional
        URL of the ``pytorch_model.bin`` weights. Previously hard-coded;
        now a keyword parameter (defaulting to the original URL) so other
        checkpoints can be loaded without editing this method.

    Returns
    -------
    tuple
        ``(model, tokenizer)``.
    """
    # Fetch the weights to a local file; presumably download_model returns
    # the local filesystem path of the downloaded file — TODO confirm.
    path_to_model = download_model(s3_model_url, model_name="pytorch_model.bin")
    config = DistilBertConfig.from_pretrained(path + "/config.json")
    tokenizer = DistilBertTokenizer.from_pretrained(path, do_lower_case=self.do_lower_case)
    # from_tf=False: weights are a native PyTorch checkpoint, not TensorFlow.
    model = DistilBertForQuestionAnswering.from_pretrained(path_to_model, from_tf=False, config=config)
    return model, tokenizer
def model_load(self, path):
    """Build a question-answering pipeline from the model files at *path*.

    Downloads ``pytorch_model.bin`` from cloud storage first. The returned
    download location is not used here — presumably ``download_model``
    places the file where *path* expects it; verify against the caller.

    Parameters
    ----------
    path : str
        Directory (or model identifier) passed to the transformers
        ``pipeline`` as both model and tokenizer source.

    Returns
    -------
    The transformers question-answering pipeline.
    """
    s3_model_url = 'https://storage.googleapis.com/bertpepper/pepperqa/pytorch_model.bin'
    # Side effect only: ensure the weights exist locally before building
    # the pipeline. (Leftover debug print and the unused local binding of
    # the download path were removed.)
    download_model(s3_model_url, model_name="pytorch_model.bin")
    qa_pipeline = pipeline("question-answering", model=path, tokenizer=path)
    return qa_pipeline
# NOTE(review): this chunk begins mid-method — it reads like the tail of a
# BERT-style Model.forward(). The enclosing def and the locals
# extended_attention_mask / head_mask / encoder_hidden_states /
# encoder_extended_attention_mask are defined before this view; confirm
# against the full file.
embedding_output = self.embeddings(
    input_ids=input_ids, position_ids=position_ids, token_type_ids=token_type_ids
)
encoder_outputs = self.encoder(
    embedding_output,
    attention_mask=extended_attention_mask,
    head_mask=head_mask,
    encoder_hidden_states=encoder_hidden_states,
    encoder_attention_mask=encoder_extended_attention_mask,
)
# First element of the encoder output is the per-token hidden states.
sequence_output = encoder_outputs[0]
pooled_output = self.pooler(sequence_output)
outputs = (sequence_output, pooled_output,) + encoder_outputs[
    1:
]  # add hidden_states and attentions if they are here
return outputs  # sequence_output, pooled_output, (hidden_states), (attentions)

if __name__ == "__main__":
    import logging
    from download import download_model
    logging.basicConfig(level=logging.INFO)
    # Fetch the pretrained checkpoint, load it, and log the architecture.
    download_model('medical_character_bert')
    path = "pretrained-models/medical_character_bert/"
    model = CharacterBertModel.from_pretrained(path)
    logging.info('%s', model)
def download(uid, path):
    """Create *path* (with parents) and try to fetch model *uid* into it.

    Best-effort: any failure is reported on stdout instead of being
    raised, and the function returns None either way.
    """
    target = str(path)
    path.mkdir(parents=True, exist_ok=True)
    try:
        download_model(uid, target)
    except Exception as exc:
        print(f"Failed to download {uid}: {exc}")