# Example #1
# 0
def base_lm():
    """Demonstrate masked-language-model inference with DeBERTa-base.

    Downloads ``microsoft/deberta-base`` (network access required), runs a
    single forward pass with a masked sentence and its unmasked version as
    labels, and prints the resulting loss and logits.

    Returns:
        None. Output is written to stdout via ``print``.
    """
    from transformers import DebertaTokenizer, DebertaForMaskedLM
    import torch
    tokenizer = DebertaTokenizer.from_pretrained('microsoft/deberta-base')
    model = DebertaForMaskedLM.from_pretrained('microsoft/deberta-base')
    # Input has one masked position; labels are the same sentence fully
    # spelled out so the model can compute a masked-LM loss.
    inputs = tokenizer("The capital of France is [MASK].", return_tensors="pt")
    labels = tokenizer("The capital of France is Paris.",
                       return_tensors="pt")["input_ids"]
    # Passing labels= makes the model return a loss alongside the logits.
    outputs = model(**inputs, labels=labels)
    loss = outputs.loss
    logits = outputs.logits
    print(loss)
    print(logits)
    # NOTE(review): the nested function below looks like a test-class method
    # pasted in by mistake — it references `self.parent`, `self.batch_size`,
    # `self.seq_length`, `self.vocab_size`, and a global `torch_device`, none
    # of which exist in this scope, and it is never called from base_lm.
    # It is dead code here; confirm intent before relying on it (or move it
    # back into its ModelTester class).
    def create_and_check_deberta_for_masked_lm(
        self, config, input_ids, token_type_ids, input_mask, sequence_labels, token_labels, choice_labels
    ):
        model = DebertaForMaskedLM(config=config)
        model.to(torch_device)  # NOTE(review): `torch_device` is undefined in this file's visible scope.
        model.eval()
        result = model(input_ids, attention_mask=input_mask, token_type_ids=token_type_ids, labels=token_labels)

        self.parent.assertEqual(result.logits.shape, (self.batch_size, self.seq_length, self.vocab_size))