def create_and_check_squeezebert_model(
    self, config, input_ids, input_mask, sequence_labels, token_labels, choice_labels
):
    model = SqueezeBertModel(config=config)
    model.to(torch_device)
    model.eval()
    # Run a forward pass both with and without the attention mask.
    result = model(input_ids, input_mask)
    result = model(input_ids)
    self.parent.assertEqual(result.last_hidden_state.shape, (self.batch_size, self.seq_length, self.hidden_size))
def test_model_from_pretrained(self):
    for model_name in SQUEEZEBERT_PRETRAINED_MODEL_ARCHIVE_LIST[:1]:
        model = SqueezeBertModel.from_pretrained(model_name)
        self.assertIsNotNone(model)
from transformers import SqueezeBertTokenizer, SqueezeBertModel
import torch

tokenizer = SqueezeBertTokenizer.from_pretrained('squeezebert/squeezebert-mnli-headless')
model = SqueezeBertModel.from_pretrained('squeezebert/squeezebert-mnli-headless')

inputs = tokenizer("Hello, my dog is cute", return_tensors="pt")
outputs = model(**inputs)

# Hidden states of the last layer, shape (batch_size, sequence_length, hidden_size).
last_hidden_states = outputs.last_hidden_state