Example 1
    def __init__(self, n_enc_vocab, n_dec_vocab, n_hidden,
                 n_layers=1, bidirectional=False, linearTransform=True):
        super(Seq2SeqLSTM_Attention, self).__init__()

        self.n_hidden = n_hidden
        # A bidirectional LSTM doubles the effective hidden dimension.
        self.num_directions = 2 if bidirectional else 1

        # Encoder and decoder LSTMs; nb is presumably the project's layer
        # library (its import is not shown in this snippet).
        self.encoder = nb.LSTM(input_size=n_enc_vocab, hidden_size=n_hidden, bidirectional=bidirectional)
        self.decoder = nb.LSTM(input_size=n_dec_vocab, hidden_size=n_hidden, bidirectional=bidirectional)

        # Attention over the encoder outputs, applied at each decoding step.
        self.attention = nb.AttentionTwo(n_dec_vocab, n_hidden,
                                         n_layers=n_layers, bidirectional=bidirectional,
                                         linearTransform=linearTransform)
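
nb.AttentionTwo is project-specific and its definition is not part of this snippet. As a rough illustration of what such a layer typically computes, here is a minimal sketch of dot-product attention between a decoder hidden state and the encoder outputs in plain PyTorch; the class name and tensor names are assumptions for illustration, not the repository's actual API.

import torch
import torch.nn as nn
import torch.nn.functional as F

class DotProductAttention(nn.Module):
    # Hypothetical stand-in for nb.AttentionTwo: scores each encoder time
    # step against the current decoder state and returns a context vector.
    def forward(self, dec_hidden, enc_outputs):
        # dec_hidden:  [batch, n_hidden]
        # enc_outputs: [batch, seq_len, n_hidden]
        scores = torch.bmm(enc_outputs, dec_hidden.unsqueeze(2))   # [batch, seq_len, 1]
        weights = F.softmax(scores.squeeze(2), dim=1)              # [batch, seq_len]
        context = torch.bmm(weights.unsqueeze(1), enc_outputs)     # [batch, 1, n_hidden]
        return context.squeeze(1), weights                         # [batch, n_hidden]

context, weights = DotProductAttention()(torch.randn(4, 128), torch.randn(4, 10, 128))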
Example 2
    def __init__(self, emb_dim,
                 n_class, n_hidden, n_layers=1, bidirectional=False, linearTransform=True):
        super(BiLSTM_Attention, self).__init__()

        self.n_hidden = n_hidden
        # A bidirectional LSTM doubles the effective hidden dimension.
        self.num_directions = 2 if bidirectional else 1

        # Single LSTM over the embedded inputs; nb is presumably the project's
        # layer library (its import is not shown in this snippet).
        self.lstm = nb.LSTM(emb_dim, n_hidden, bidirectional=bidirectional)
        # Attention pools the per-step LSTM outputs before the n_class-way
        # classification head.
        self.attention = nb.AttentionOne(n_class, n_hidden,
                                         n_layers=n_layers, bidirectional=bidirectional,
                                         linearTransform=linearTransform)
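
nb.AttentionOne is likewise not defined in this snippet. In classifiers like BiLSTM_Attention, such a layer commonly pools the per-step LSTM outputs into a single vector with learned attention weights and then projects to n_class logits; the sketch below illustrates that pattern in plain PyTorch as an assumption about the design, not the actual nb implementation.

import torch
import torch.nn as nn
import torch.nn.functional as F

class AttentionPooling(nn.Module):
    # Hypothetical stand-in for nb.AttentionOne: learns a per-step score,
    # pools the LSTM outputs by attention weight, then classifies.
    def __init__(self, n_class, n_hidden, num_directions=2):
        super().__init__()
        dim = n_hidden * num_directions
        self.score = nn.Linear(dim, 1, bias=False)  # attention score per step
        self.out = nn.Linear(dim, n_class)          # classification head

    def forward(self, lstm_outputs):
        # lstm_outputs: [batch, seq_len, n_hidden * num_directions]
        weights = F.softmax(self.score(lstm_outputs).squeeze(2), dim=1)
        pooled = torch.bmm(weights.unsqueeze(1), lstm_outputs).squeeze(1)
        return self.out(pooled), weights  # logits: [batch, n_class]

logits, weights = AttentionPooling(n_class=2, n_hidden=64)(torch.randn(4, 12, 128))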