Example #1
from question_generation.pipelines import pipeline as qg_pipeline


def modelsConfig_qg(model):
    """Return an end-to-end question-generation pipeline for the selected model name."""
    ## Question Generation:
    if model == "Question generation (without answer supervision) [small]":
        model_selected = qg_pipeline("e2e-qg",
                                     model="valhalla/t5-small-e2e-qg")

    elif model == "Question generation (without answer supervision) [base]":
        model_selected = qg_pipeline("e2e-qg",
                                     model="valhalla/t5-base-e2e-qg")

    else:
        raise ValueError(f"Not a valid model: {model}")

    return model_selected
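
A minimal usage sketch (an assumption, not part of the original snippet): the "e2e-qg" pipeline returned by modelsConfig_qg is callable directly on a passage of text and yields a list of generated questions. The passage and variable names below are purely illustrative.

# Hypothetical usage of modelsConfig_qg:
nlp = modelsConfig_qg("Question generation (without answer supervision) [small]")
questions = nlp(
    "Python is an interpreted, high-level programming language created by "
    "Guido van Rossum and first released in 1991."
)
print(questions)  # a list of generated question strings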
Example #2
from question_generation.pipelines import pipeline as qg_pipeline
from transformers import pipeline as qa_pipeline
from transformers import AutoTokenizer, AutoModelWithLMHead

import os

# Both pipelines are loaded from local directories under ./models/, which are
# expected to already contain the downloaded model and tokenizer files.
model_name_qa = 'distilbert-base-cased-distilled-squad'
qa_nlp = qa_pipeline('question-answering',
                     model=f'./models/{model_name_qa}/',
                     tokenizer=f'./models/{model_name_qa}/')

model_name_qg = 'Question generation (without answer supervision) [small]'
qg_nlp = qg_pipeline("e2e-qg",
                     model=f'./models/{model_name_qg}/',
                     tokenizer=f'./models/{model_name_qg}/')
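
A hedged usage sketch, assuming the two pipelines above are bound to qa_nlp and qg_nlp and that the local ./models/ directories exist: the transformers question-answering pipeline takes a question/context pair, while the e2e-qg pipeline takes raw text. The context and question below are only examples.

# Hypothetical usage of the two pipelines loaded above.
context = "The Eiffel Tower was completed in 1889 and is located in Paris."

answer = qa_nlp(question="When was the Eiffel Tower completed?", context=context)
print(answer["answer"], answer["score"])  # extracted answer span and its confidence

questions = qg_nlp(context)  # generate questions from the same passage
print(questions)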