Example #1
 def create_and_check_electra_for_sequence_classification(
     self,
     config,
     input_ids,
     token_type_ids,
     input_mask,
     sequence_labels,
     token_labels,
     choice_labels,
     fake_token_labels,
 ):
     config.num_labels = self.num_labels
     model = ElectraForSequenceClassification(config)
     model.to(torch_device)
     model.eval()
     # Older transformers API: the forward pass returns a plain tuple,
     # so loss and logits can be unpacked directly.
     loss, logits = model(input_ids,
                          attention_mask=input_mask,
                          token_type_ids=token_type_ids,
                          labels=sequence_labels)
     result = {
         "loss": loss,
         "logits": logits,
     }
     self.parent.assertListEqual(list(result["logits"].size()),
                                 [self.batch_size, self.num_labels])
     self.check_loss_output(result)
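
Note: Example #1 targets the pre-4.0 transformers API, where the forward pass returns a plain (loss, logits) tuple. Since transformers v4, models return a ModelOutput object by default (pass return_dict=False to recover the tuple behavior), which is the style the next example exercises.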
Example #2
 def create_and_check_electra_for_sequence_classification(
     self,
     config,
     input_ids,
     token_type_ids,
     input_mask,
     sequence_labels,
     token_labels,
     choice_labels,
     fake_token_labels,
 ):
     config.num_labels = self.num_labels
     model = ElectraForSequenceClassification(config)
     model.to(torch_device)
     model.eval()
     # Newer transformers API: the call returns a ModelOutput with named attributes.
     result = model(input_ids, attention_mask=input_mask, token_type_ids=token_type_ids, labels=sequence_labels)
     self.parent.assertEqual(result.logits.shape, (self.batch_size, self.num_labels))
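
Both test methods above depend on the surrounding tester harness (self.parent, torch_device, and the prepared input tensors). As a minimal standalone sketch of the same forward pass and shape check, assuming the google/electra-small-discriminator checkpoint and a two-label head (both illustrative choices, not taken from the tests):

 import torch
 from transformers import ElectraForSequenceClassification, ElectraTokenizerFast

 # Illustrative checkpoint and label count; the tests above use a tiny random config instead.
 tokenizer = ElectraTokenizerFast.from_pretrained("google/electra-small-discriminator")
 model = ElectraForSequenceClassification.from_pretrained(
     "google/electra-small-discriminator", num_labels=2)
 model.eval()

 inputs = tokenizer("ELECTRA tells real tokens from replaced ones.", return_tensors="pt")
 with torch.no_grad():
     outputs = model(**inputs, labels=torch.tensor([1]))

 # Same check as the test: logits come out as (batch_size, num_labels).
 assert outputs.logits.shape == (1, 2)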
Example #3
 def __init__(self,
              config: ElectraConfig,
              embeddings,
              discriminator=None,
              embed_layer=None):
     super().__init__()
     # Embedding table over the ELECTRA vocabulary; the last vocabulary slot
     # doubles as the padding index.
     self.embed_layer = nn.Embedding(num_embeddings=config.vocab_size,
                                     embedding_dim=config.embedding_size,
                                     padding_idx=config.vocab_size - 1)
     if embed_layer:
         # Restore saved embedding weights from a checkpoint path.
         self.embed_layer.load_state_dict(torch.load(embed_layer))
     else:
         # Otherwise adopt the embedding tensor passed in directly.
         self.embed_layer.weight = nn.Parameter(embeddings)
     if discriminator:
         # Load a pretrained discriminator by name or path.
         self.discriminator = ElectraForSequenceClassification.from_pretrained(
             discriminator, config=config)
     else:
         # Or build one from scratch with the given config.
         self.discriminator = ElectraForSequenceClassification(config)
     self.softmax = nn.Softmax(dim=1)
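
The embedding layer above can be initialized two ways: from a state dict saved on disk (embed_layer is a file path) or from an in-memory tensor (embeddings). A minimal sketch isolating both paths, with the sizes and the embed_layer.pt filename chosen purely for illustration:

 import torch
 import torch.nn as nn

 vocab_size, embedding_size = 100, 8
 embed = nn.Embedding(vocab_size, embedding_size, padding_idx=vocab_size - 1)

 # Path 1: adopt an in-memory tensor directly (the `embeddings` argument).
 pretrained = torch.randn(vocab_size, embedding_size)
 embed.weight = nn.Parameter(pretrained)

 # Path 2: round-trip through a saved state dict (the `embed_layer` path argument).
 torch.save(embed.state_dict(), "embed_layer.pt")
 restored = nn.Embedding(vocab_size, embedding_size, padding_idx=vocab_size - 1)
 restored.load_state_dict(torch.load("embed_layer.pt"))
 assert torch.equal(embed.weight, restored.weight)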