Example #1
 def __init__(self):
     super(LargeModel, self).__init__()
     dim = 15     # embedding dimension
     n = 4 * 100  # number of embedding rows (vocabulary size)
     self.emb = nn.Embedding(n, dim)
     self.lin1 = nn.Linear(dim, 1)
     # Embedding lookup followed by a linear projection to a single value.
     self.seq = nn.Sequential(
         self.emb,
         self.lin1,
     )
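For context, here is a minimal, self-contained sketch of how this module could be instantiated and driven; the imports, the class wrapper, and the call through `self.seq` are assumptions, since the snippet only shows the constructor.

 import torch
 import torch.nn as nn

 class LargeModel(nn.Module):
     # Constructor as in Example #1 above, abridged to the Sequential pipeline.
     def __init__(self):
         super().__init__()
         self.seq = nn.Sequential(nn.Embedding(4 * 100, 15), nn.Linear(15, 1))

 model = LargeModel()
 tokens = torch.randint(0, 400, (2, 7))  # batch of 2 sequences of 7 token indices
 out = model.seq(tokens)                 # (2, 7) -> (2, 7, 15) -> (2, 7, 1)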
Example #2
 def __init__(self,
              n_token: int,
              n_head: int = 8,
              d_model: int = 512,
              d_ff: int = 2048):
     super().__init__()
     # ValueChoice declares a set of candidate values (here, the dropout rate
     # and the number of encoder layers) to be picked by the search machinery.
     p_dropout = nn.ValueChoice([0.1, 0.2, 0.3, 0.4, 0.5],
                                label='p_dropout')
     n_layer = nn.ValueChoice([5, 6, 7, 8, 9], label='n_layer')
     self.encoder = nn.TransformerEncoder(
         nn.TransformerEncoderLayer(d_model, n_head, d_ff, p_dropout),
         n_layer)
     self.d_model = d_model
     self.decoder = nn.Linear(d_model, n_token)
     self.embeddings = nn.Embedding(n_token, d_model)
     self.position = PositionalEncoding(d_model)
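As written, this constructor cannot be using plain `torch.nn`, since `ValueChoice` is not a PyTorch module; the API matches NNI's Retiarii model-space wrappers, and `PositionalEncoding` has to come from the surrounding project. A sketch of the imports this snippet appears to assume (the module paths are my assumption, based on the NNI 2.x API):

 # Assumed imports: ValueChoice and the wrapped Transformer layers come from
 # NNI's Retiarii API rather than from torch.nn.
 import nni.retiarii.nn.pytorch as nn

 # PositionalEncoding is user code (e.g. the module from the PyTorch
 # word_language_model example); this import path is hypothetical.
 from model import PositionalEncoding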
Example #3
 def __init__(self, config):
     super(SNLIClassifier, self).__init__()
     self.config = config
     self.embed = nn.Embedding(config.n_embed, config.d_embed)
     self.projection = Linear(config.d_embed, config.d_proj)
     self.encoder = Encoder(config)
     self.dropout = nn.Dropout(p=config.dp_ratio)
     self.relu = nn.ReLU()
     # Classifier input width: 2 * d_hidden, doubled again when the encoder
     # is bidirectional.
     seq_in_size = 2 * config.d_hidden
     if self.config.birnn:
         seq_in_size *= 2
     lin_config = [seq_in_size] * 2
     # MLP head: equal-width hidden layers with ReLU and dropout, followed by
     # a final projection to d_out classes.
     self.out = nn.Sequential(Linear(*lin_config), self.relu,
                              self.dropout, Linear(*lin_config),
                              self.relu, self.dropout,
                              Linear(*lin_config), self.relu,
                              self.dropout,
                              Linear(seq_in_size, config.d_out))
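`Linear` and `Encoder` are not defined in the snippet; they are assumed to be imported or defined elsewhere in the project (`Linear` may simply be `torch.nn.Linear` imported directly). The constructor only reads a fixed set of attributes from `config`; the sketch below lists them with purely illustrative values:

 from types import SimpleNamespace

 # Attribute names are taken from the constructor above; values are illustrative.
 config = SimpleNamespace(
     n_embed=10000,  # number of embedding rows (vocabulary size)
     d_embed=300,    # embedding dimension
     d_proj=300,     # projection dimension
     dp_ratio=0.2,   # dropout probability
     d_hidden=512,   # encoder hidden size
     birnn=True,     # bidirectional encoder doubles the classifier input width
     d_out=3,        # number of output classes
 )
 # model = SNLIClassifier(config)  # requires Encoder and Linear from the project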
Example #4
 def __init__(self, config):
     super(SNLIClassifier, self).__init__()
     # Same architecture as Example #3, configured from a plain dict instead
     # of an attribute-style config object.
     self.embed = nn.Embedding(config["n_embed"], config["d_embed"])
     self.projection = Linear(config["d_embed"], config["d_proj"])
     self.encoder = Encoder(config)
     self.dropout = nn.Dropout(p=config["dp_ratio"])
     self.relu = nn.ReLU()
     seq_in_size = 2 * config["d_hidden"]
     if config["birnn"]:
         seq_in_size *= 2
     lin_config = [seq_in_size] * 2
     self.out = nn.Sequential(Linear(*lin_config), self.relu,
                              self.dropout, Linear(*lin_config),
                              self.relu, self.dropout,
                              Linear(*lin_config), self.relu,
                              self.dropout,
                              Linear(seq_in_size, config["d_out"]))
     # Flags stored on the module for later use (e.g. in forward()).
     self.fix_emb = config["fix_emb"]
     self.project = config["projection"]
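The dict-driven variant reads the same keys plus `fix_emb` and `projection`. A sketch of the expected configuration dictionary, with the key names taken from the constructor and illustrative values:

 # Key names are taken from the constructor above; values are illustrative.
 config = {
     "n_embed": 10000,    # number of embedding rows (vocabulary size)
     "d_embed": 300,      # embedding dimension
     "d_proj": 300,       # projection dimension
     "dp_ratio": 0.2,     # dropout probability
     "d_hidden": 512,     # encoder hidden size
     "birnn": True,       # bidirectional encoder doubles the classifier input width
     "d_out": 3,          # number of output classes
     "fix_emb": True,     # stored as self.fix_emb; name suggests freezing embeddings
     "projection": True,  # stored as self.project; name suggests applying the projection
 }
 # model = SNLIClassifier(config)  # requires Encoder and Linear from the project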