def download_with_model_type(datapath, model_type, version):
    """
    Download a light_whoami model of the given type, skipping if already built.

    :param datapath: root data directory
    :param model_type: which light_whoami model variant to fetch
    :param version: version tag used for the built-marker check
    """
    target_dir = os.path.join(get_model_dir(datapath), 'light_whoami', model_type)
    if built(target_dir, version):
        return
    options = {'datapath': datapath, 'model_type': model_type}
    download_models(
        options, ['model.tgz'], 'light_whoami', version=version, use_model_type=True
    )
def download(datapath):
    """Download the SeeKeR `bart_sq_gen` model (v1.0) unless already built."""
    model_type = 'bart_sq_gen'
    version = 'v1.0'
    target_dir = os.path.join(get_model_dir(datapath), 'sea', model_type)
    if built(target_dir, version):
        return
    options = {'datapath': datapath, 'model_type': model_type}
    download_models(
        options, [f'model_{version}.tgz'], 'sea', version=version, use_model_type=True
    )
def download(datapath):
    """Download the MSC `summsc_rag3B` model (v0.1) unless already built."""
    model_type = 'summsc_rag3B'
    version = 'v0.1'
    target_dir = os.path.join(get_model_dir(datapath), 'msc', model_type)
    if built(target_dir, version):
        return
    options = {'datapath': datapath, 'model_type': model_type}
    download_models(
        options,
        [f'model_{version}.tar.gz'],
        'msc',
        version=version,
        use_model_type=True,
    )
def download(datapath):
    """Download the pretrained_transformers bundle (v3.0) unless already built."""
    model_name = 'pretrained_transformers'
    version = 'v3.0'
    target_dir = os.path.join(get_model_dir(datapath), model_name)
    if built(target_dir, version):
        return
    download_models(
        {'datapath': datapath},
        ['pretrained_transformers.tgz'],
        model_name,
        version=version,
        use_model_type=False,
    )
def download(datapath):
    """Download the BlenderBot2 memory decoder model (v1.0) unless already built."""
    model_type = 'memory_decoder'
    version = 'v1.0'
    target_dir = os.path.join(get_model_dir(datapath), 'blenderbot2', model_type)
    if built(target_dir, version):
        return
    options = {'datapath': datapath, 'model_type': model_type}
    download_models(
        options, ['model.tgz'], 'blenderbot2', version=version, use_model_type=True
    )
def download(datapath):
    """Download the hallucination `bart_rag_dpr_poly` model (v1.0) unless built."""
    model_type = 'bart_rag_dpr_poly'
    version = 'v1.0'
    target_dir = os.path.join(get_model_dir(datapath), 'hallucination', model_type)
    if built(target_dir, version):
        return
    options = {'datapath': datapath, 'model_type': model_type}
    download_models(
        options, ['model.tgz'], 'hallucination', version=version, use_model_type=True
    )
def download(datapath):
    """Download the tutorial transformer generator model (v1) unless built."""
    model_name = 'tutorial_transformer_generator'
    version = 'v1'
    target_dir = os.path.join(get_model_dir(datapath), model_name)
    if built(target_dir, version):
        return
    download_models(
        {'datapath': datapath},
        ['tutorial_transformer_generator_v1.tar.gz'],
        model_name,
        version=version,
        use_model_type=False,
    )
def download(datapath):
    """Download the `tod_base_no_api` model (v1.0) unless already built."""
    model_type = 'tod_base_no_api'
    version = 'v1.0'
    # Note: the target directory nests the model type under a same-named folder.
    target_dir = os.path.join(get_model_dir(datapath), 'tod_base_no_api', model_type)
    if built(target_dir, version):
        return
    options = {'datapath': datapath, 'model_type': model_type}
    download_models(
        options,
        ['model.tar.gz'],
        'tod',
        version=version,
        path='aws',
        use_model_type=True,
    )
def download(datapath):
    """
    Download the hallucination WoW passage/index archives (v1.0).

    Unlike the sibling helpers, the built-marker is checked on the parent
    `hallucination` directory rather than on the model-type subdirectory.
    """
    parent_dir = os.path.join(get_model_dir(datapath), 'hallucination')
    version = 'v1.0'
    if built(parent_dir, version):
        return
    options = {'datapath': datapath, 'model_type': 'wow_passages'}
    archives = ['wow_articles.paragraphs.tgz', 'exact.tgz', 'compressed.tgz']
    download_models(
        options, archives, 'hallucination', version=version, use_model_type=True
    )
def download(datapath):
    """Download the hallucination exact wiki index (v1.0) unless already built."""
    model_type = 'wiki_index_exact'
    version = 'v1.0'
    target_dir = os.path.join(get_model_dir(datapath), 'hallucination', model_type)
    if built(target_dir, version):
        return
    options = {'datapath': datapath, 'model_type': model_type}
    download_models(
        options, ['exact.tgz'], 'hallucination', version=version, use_model_type=True
    )
def download(datapath):
    """Download the dodecadialogue bundle (v1.0) unless already built."""
    model_name = 'dodecadialogue'
    version = 'v1.0'
    target_dir = os.path.join(get_model_dir(datapath), model_name)
    if built(target_dir, version):
        return
    download_models(
        {'datapath': datapath},
        ['dodecadialogue.tgz'],
        model_name,
        version=version,
        use_model_type=False,
    )
def download(datapath):
    """
    Download the pretrained_transformers archive (v1.0), then derive the
    bi-encoder and poly-encoder base models from each cross-encoder checkpoint.

    Note: unlike the other helpers, this always calls ``download_models``
    (no ``built`` guard) and always re-creates the derived models.
    """
    model_name = 'pretrained_transformers'
    target_dir = os.path.join(get_model_dir(datapath), model_name)
    download_models(
        {'datapath': datapath},
        ['pretrained_transformers_v1.tar.gz'],
        model_name,
        version='v1.0',
        use_model_type=False,
    )
    print('Creating base models for bi and polyencoders')
    for pretrained_type in ('reddit', 'wikito'):
        cross_path = os.path.join(target_dir, f'cross_model_huge_{pretrained_type}.mdl')
        bi_path = os.path.join(target_dir, f'bi_model_huge_{pretrained_type}.mdl')
        poly_path = os.path.join(target_dir, f'poly_model_huge_{pretrained_type}.mdl')
        create_bi_model(cross_path, bi_path)
        create_poly_model(cross_path, poly_path)
def download(datapath):
    """
    Download the DPR multiset checkpoint for the hallucination project (v1.0).

    Fixes a bug in the original: ``path=path`` referenced an undefined name,
    so any fresh download raised ``NameError``. The source path now points at
    the public DPR retriever checkpoint host, matching the ``hf_bert_base.cp``
    filename. NOTE(review): confirm this URL against the project's
    ``download_models`` conventions before release.
    """
    model_type = 'multiset_dpr'
    version = 'v1.0'
    target_dir = os.path.join(get_model_dir(datapath), 'hallucination', model_type)
    if built(target_dir, version):
        return
    opt = {'datapath': datapath, 'model_type': model_type}
    fnames = ['hf_bert_base.cp']
    # Source host for the DPR retriever checkpoint (not the default model zoo).
    path = 'http://dl.fbaipublicfiles.com/dpr/checkpoint/retriever'
    download_models(
        opt,
        fnames,
        'hallucination',
        version=version,
        use_model_type=True,
        path=path,
    )
def download(datapath):
    """Download the sensitive topics classifier (v1) unless already built."""
    model_name = 'sensitive_topics_classifier'
    version = 'v1'
    target_dir = os.path.join(get_model_dir(datapath), model_name)
    if built(target_dir, version):
        return
    download_models(
        {'datapath': datapath},
        ['sensitive_topics_classifier2.tgz'],
        model_name,
        version=version,
        use_model_type=False,
        flatten_tar=True,
    )
def download(datapath):
    """Download the archives used by the unit-test models (v6.1) unless built."""
    model_name = 'unittest'
    version = 'v6.1'
    target_dir = os.path.join(get_model_dir(datapath), model_name)
    if built(target_dir, version):
        return
    archives = [
        'seq2seq.tar.gz',
        'transformer_ranker.tar.gz',
        'transformer_generator2.tar.gz',
        'memnn.tar.gz',
        'apex_v1.tar.gz',
        'test_bytelevel_bpe_v2.tar.gz',
        'beam_blocking1.tar.gz',
        'context_blocking1.tar.gz',
    ]
    download_models({'datapath': datapath}, archives, model_name, version=version)
def download(datapath, model_name):
    """
    Download the shared dodecadialogue base bundle (v2.0), then the
    task-specific model named by ``model_name`` (v1.0); each step is skipped
    when its built-marker is already present.

    :param datapath: root data directory
    :param model_name: which dodecadialogue task model to fetch
    """
    base_dir = os.path.join(get_model_dir(datapath), 'dodecadialogue')

    # Step 1: shared base archive for all dodecadialogue models.
    base_version = 'v2.0'
    if not built(base_dir, base_version):
        download_models(
            {'datapath': datapath},
            ['dodecadialogue_v2.tgz'],
            'dodecadialogue',
            version=base_version,
            use_model_type=False,
        )

    # Step 2: the requested task-specific model.
    task_version = 'v1.0'
    task_dir = os.path.join(base_dir, model_name)
    if not built(task_dir, task_version):
        download_models(
            {'datapath': datapath, 'model_type': model_name},
            [f'{model_name}.tgz'],
            'dodecadialogue',
            version=task_version,
            use_model_type=True,
        )