Example #1
0
def modelsConfig_qa(model):
    """Load a locally stored question-answering pipeline by display name.

    Parameters
    ----------
    model : str
        Display name of the model to load. Must be one of the keys of
        ``local_dirs`` below.

    Returns
    -------
    The question-answering pipeline returned by ``qa_pipeline``.

    Raises
    ------
    Exception
        If *model* is not a supported name.
    """
    # Display name -> directory name under ./models/.
    # NOTE(review): the multilingual entry drops the "mrm8488/" org prefix,
    # presumably because local model folders are stored flat — confirm
    # against the actual ./models/ layout.
    local_dirs = {
        "distilbert-base-cased-distilled-squad":
            "distilbert-base-cased-distilled-squad",
        "bert-large-uncased-whole-word-masking-finetuned-squad":
            "bert-large-uncased-whole-word-masking-finetuned-squad",
        "mrm8488/bert-multi-cased-finetuned-xquadv1 [multilingual]":
            "bert-multi-cased-finetuned-xquadv1",
    }
    if model not in local_dirs:
        raise Exception("Not a valid model")
    # Both model weights and tokenizer live in the same local directory.
    path = f'./models/{local_dirs[model]}/'
    return qa_pipeline('question-answering', model=path, tokenizer=path)
Example #2
0
def modelsConfig_qa(model):
    """Load a question-answering model by display name.

    AllenNLP models are fetched from their public archive URLs via
    ``Predictor.from_path``; Hugging Face models are loaded through
    ``qa_pipeline``.

    Parameters
    ----------
    model : str
        Display name of the model to load.

    Returns
    -------
    Either an AllenNLP ``Predictor`` or a Hugging Face pipeline,
    depending on the selected model.

    Raises
    ------
    Exception
        If *model* is not a supported name.
    """
    # Display name -> AllenNLP public archive URL.
    allennlp_urls = {
        "ELMo-BiDAF (Trained on SQuAD)":
            "https://storage.googleapis.com/allennlp-public-models/bidaf-elmo-model-2020.03.19.tar.gz",
        # NOTE(review): "BiDAG" looks like a typo for "BiDAF", but the key is
        # part of the caller-facing interface, so it is kept byte-for-byte.
        "BiDAG (Trained on SQuAD)":
            "https://storage.googleapis.com/allennlp-public-models/bidaf-model-2020.03.19.tar.gz",
        "Transformer QA (Trained on SQuAD)":
            "https://storage.googleapis.com/allennlp-public-models/transformer-qa-2020-05-26.tar.gz",
    }
    # Display name -> Hugging Face model identifier.
    huggingface_names = {
        "distilbert-base-cased-distilled-squad":
            "distilbert-base-cased-distilled-squad",
        "bert-large-uncased-whole-word-masking-finetuned-squad":
            "bert-large-uncased-whole-word-masking-finetuned-squad",
        "mrm8488/bert-multi-cased-finetuned-xquadv1 [multilingual]":
            "mrm8488/bert-multi-cased-finetuned-xquadv1",
    }
    if model in allennlp_urls:
        return Predictor.from_path(allennlp_urls[model])
    if model in huggingface_names:
        return qa_pipeline("question-answering", model=huggingface_names[model])
    raise Exception("Not a valid model")
Example #3
0
from question_generation.pipelines import pipeline as qg_pipeline
from transformers import pipeline as qa_pipeline
from transformers import AutoTokenizer, AutoModelWithLMHead

import os

# Eagerly instantiate the question-answering pipeline from a local model
# directory at import time. NOTE(review): the return value is discarded —
# presumably this call only warms/validates the local model files; confirm
# whether the pipeline object should instead be kept in a variable.
model_name_qa = 'distilbert-base-cased-distilled-squad'
qa_pipeline('question-answering',
            model=f'./models/{model_name_qa}/',
            tokenizer=f'./models/{model_name_qa}/')

# Same eager load for the end-to-end question-generation pipeline.
# NOTE(review): the directory name contains spaces, parentheses, and
# brackets as written — verify a folder with exactly this name exists
# under ./models/.
model_name_qg = 'Question generation (without answer supervision) [small]'
qg_pipeline("e2e-qg",
            model=f'./models/{model_name_qg}/',
            tokenizer=f'./models/{model_name_qg}/')