Example #1
    def __init__(self, dim, n_labels):
        super().__init__()

        # Per-label parameters: a diagonal bilinear term (one dim-vector
        # per label), linear weights over the concatenated pair of
        # states, and a per-label bias.
        self.label_U_diag = create_parameter(n_labels, dim)
        self.label_W = create_parameter(n_labels, 2 * dim)
        self.label_b = create_parameter(n_labels)
        self.n_labels = n_labels
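
These snippets all call a create_parameter helper that is not shown. Below is a minimal sketch of what it plausibly does (a learnable nn.Parameter of the given shape; the small-normal initialization is an assumption, not the original scheme), followed by a hypothetical scoring method, written as it might appear on the module, inferred purely from the parameter shapes:

import torch
import torch.nn as nn

def create_parameter(*shape):
    # Hypothetical stand-in for the helper used in these snippets:
    # a learnable tensor of the given shape. The initialization
    # scheme here is an assumption.
    param = nn.Parameter(torch.empty(*shape))
    nn.init.normal_(param, mean=0.0, std=0.01)
    return param

def label_scores(self, head, dep):
    # Sketch of a diagonal-bilinear label scorer consistent with the
    # shapes above; the original forward pass is not shown.
    # head, dep: (batch, dim) -> scores: (batch, n_labels)
    bilinear = (head.unsqueeze(1) * self.label_U_diag * dep.unsqueeze(1)).sum(-1)
    linear = torch.cat([head, dep], dim=-1) @ self.label_W.t()
    return bilinear + linear + self.label_b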
Example #2
    def __init__(self, lstm_type, char_vocab, char_emb_size, word_emb_size,
                 hidden_size, ff_dropout, recurrent_dropout,
                 dropout_char_linear, emb_dropout_type):
        super().__init__()
        # Character-level LSTM that composes character embeddings into a
        # word-level representation.
        self.char_lstm = EnhancedLSTM(lstm_type,
                                      char_emb_size,
                                      hidden_size,
                                      num_layers=1,
                                      ff_dropout=ff_dropout,
                                      recurrent_dropout=recurrent_dropout,
                                      bidirectional=False)
        self.char_embedding = nn.Embedding(len(char_vocab), char_emb_size)
        self.char_transform = torch.nn.Linear(hidden_size, word_emb_size)
        self.dropout_char_linear = dropout_char_linear
        self.locked_dropout = LockedDropout()

        if emb_dropout_type == "replace":
            # Learned token that stands in for dropped word embeddings.
            self.drop_token = create_parameter(1, word_emb_size)
        elif emb_dropout_type == "zero":
            # Registered as a buffer so the zero token moves with the
            # module across devices.
            self.register_buffer("drop_token", torch.zeros(1, word_emb_size))
        else:
            raise ValueError(
                f"Unsupported embedding dropout type: {emb_dropout_type}")
Example #3
    def __init__(self, dim):
        super().__init__()
        # Biaffine edge scorer: a full bilinear matrix, linear weights
        # over the concatenated pair of states, and a scalar bias.
        self.edge_U = create_parameter(dim, dim)
        self.edge_W = create_parameter(1, 2 * dim)
        self.edge_b = create_parameter(1)
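
A plausible forward pass for this edge scorer is the standard biaffine form score(h, d) = h^T U d + W [h; d] + b. The class name and forward method below are inferred from the parameter shapes, not taken from the source; create_parameter is the sketch from Example #1:

import torch
import torch.nn as nn

class EdgeScorer(nn.Module):
    # Hypothetical wrapper around the __init__ above, added to show how
    # the three parameters combine into a single arc score.
    def __init__(self, dim):
        super().__init__()
        self.edge_U = create_parameter(dim, dim)
        self.edge_W = create_parameter(1, 2 * dim)
        self.edge_b = create_parameter(1)

    def forward(self, head, dep):
        # head, dep: (batch, dim) -> one score per head/dependent pair: (batch,)
        bilinear = (head @ self.edge_U * dep).sum(-1)
        linear = torch.cat([head, dep], dim=-1) @ self.edge_W.t()
        return bilinear + linear.squeeze(-1) + self.edge_b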