Example #1
from transformers import (TFAlbertModel, TFBertModel, TFDistilBertModel,
                          TFGPT2Model, TFRobertaModel, TFTransfoXLModel,
                          TFXLNetModel)


def get_transformer(bert_model_type, output_hidden_states=False):
    """Return the TF transformer backbone matching `bert_model_type`.

    `get_bert_config` and `BERT_MODEL_FILE` are assumed to be defined
    elsewhere in the project; checkpoints that ship only PyTorch weights
    are converted on load with `from_pt=True`.
    """
    config = get_bert_config(bert_model_type, output_hidden_states)
    if bert_model_type in [
            'bert-base-uncased', 'bert-base-cased', 'bert-large-uncased',
            'bert-large-uncased-whole-word-masking',
            'bert-large-uncased-whole-word-masking-finetuned-squad'
    ]:
        return TFBertModel.from_pretrained(BERT_MODEL_FILE[bert_model_type],
                                           config=config)
    elif bert_model_type in [
            'prod-bert-base-uncased', 'tune_bert-base-uncased_nsp'
    ]:
        return TFBertModel.from_pretrained(BERT_MODEL_FILE[bert_model_type],
                                           config=config,
                                           from_pt=True)
    elif bert_model_type in [
            'roberta-base', 'roberta-large', 'roberta-large-mnli',
            'distilroberta-base'
    ]:
        return TFRobertaModel.from_pretrained(BERT_MODEL_FILE[bert_model_type],
                                              config=config)
    elif bert_model_type in ['prod-roberta-base-cased']:
        return TFRobertaModel.from_pretrained(BERT_MODEL_FILE[bert_model_type],
                                              config=config,
                                              from_pt=True)
    elif bert_model_type in ['xlnet-base-cased']:
        return TFXLNetModel.from_pretrained(BERT_MODEL_FILE[bert_model_type],
                                            config=config)
    elif bert_model_type in [
            'albert-base-v1', 'albert-large-v1', 'albert-xlarge-v1',
            'albert-xxlarge-v1'
    ]:
        return TFAlbertModel.from_pretrained(BERT_MODEL_FILE[bert_model_type],
                                             config=config)
    elif bert_model_type in ['gpt2', 'gpt2-medium']:
        return TFGPT2Model.from_pretrained(BERT_MODEL_FILE[bert_model_type],
                                           config=config)
    elif bert_model_type in ['transfo-xl']:
        return TFTransfoXLModel.from_pretrained(
            BERT_MODEL_FILE[bert_model_type], config=config)
    elif bert_model_type in [
            'distilbert-base-uncased',
            'distilbert-base-uncased-distilled-squad'
    ]:
        return TFDistilBertModel.from_pretrained(
            BERT_MODEL_FILE[bert_model_type], config=config)
    else:
        raise ValueError(
            f'`bert_model_type` not understood: {bert_model_type}')
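
A minimal usage sketch for `get_transformer`, assuming TensorFlow and transformers are installed and that `BERT_MODEL_FILE` and `get_bert_config` cover 'bert-base-uncased'; the token ids below are toy values:

import tensorflow as tf

# Build the backbone and run a toy batch; the token ids are illustrative only.
model = get_transformer('bert-base-uncased', output_hidden_states=True)
input_ids = tf.constant([[101, 2023, 2003, 1037, 3231, 102]])
outputs = model(input_ids)
print(outputs[0].shape)  # last hidden state: (batch_size, seq_len, hidden_size)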
Example #2
    # Constructor of a text encoder that wraps Transformer-XL; `logger`, `sys`,
    # and `SequenceReducer` are provided by the enclosing module.
    def __init__(self,
                 pretrained_model_name_or_path='transfo-xl-wt103',
                 reduce_output='sum',
                 trainable=True,
                 **kwargs):
        super().__init__()
        try:
            from transformers import TFTransfoXLModel
        except ModuleNotFoundError:
            logger.error(
                ' transformers is not installed. '
                'In order to install all text feature dependencies run '
                'pip install ludwig[text]')
            sys.exit(-1)

        self.transformer = TFTransfoXLModel.from_pretrained(
            pretrained_model_name_or_path)
        self.reduce_output = reduce_output
        self.reduce_sequence = SequenceReducer(reduce_mode=reduce_output)
        self.transformer.trainable = trainable
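
A sketch of how the encoder above might use the stored transformer and reducer in its forward pass; the method name, signature, and return format are assumptions, not taken from the original snippet:

    def call(self, inputs, training=None, mask=None):
        # Hypothetical forward pass: run Transformer-XL, then reduce the last
        # hidden state over the sequence dimension with the configured reducer.
        transformer_outputs = self.transformer(inputs, training=training)
        hidden = transformer_outputs[0]  # (batch_size, seq_len, hidden_size)
        return {'encoder_output': self.reduce_sequence(hidden)}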
Example #3
    def test_model_from_pretrained(self):
        for model_name in TF_TRANSFO_XL_PRETRAINED_MODEL_ARCHIVE_LIST[:1]:
            model = TFTransfoXLModel.from_pretrained(model_name)
            self.assertIsNotNone(model)
Example #4
    def test_model_from_pretrained(self):
        for model_name in list(
                TF_TRANSFO_XL_PRETRAINED_MODEL_ARCHIVE_MAP.keys())[:1]:
            model = TFTransfoXLModel.from_pretrained(model_name,
                                                     cache_dir=CACHE_DIR)
            self.assertIsNotNone(model)
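
The same check can also be run outside the test class; 'transfo-xl-wt103' (the default in Example #2) is the canonical Transformer-XL checkpoint, and its weights are downloaded on first use:

from transformers import TFTransfoXLModel

# Standalone smoke test: load the pretrained TF Transformer-XL model.
model = TFTransfoXLModel.from_pretrained('transfo-xl-wt103')
assert model is not None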