Code example #1
0
    def __init__(self, n_enc_vocab, n_dec_vocab, n_hidden,
                 n_layers=1, bidirectional=False, linearTransform=True):
        """Build an LSTM encoder/decoder pair with an attention head.

        Args:
            n_enc_vocab: input size of the encoder LSTM.
            n_dec_vocab: input size of the decoder LSTM; also passed to the
                attention module.
            n_hidden: hidden-state size shared by encoder and decoder.
            n_layers: forwarded to the attention module only.
            bidirectional: if truthy, both LSTMs run in two directions.
            linearTransform: forwarded to the attention module.
        """
        super(Seq2SeqLSTM_Attention, self).__init__()

        self.n_hidden = n_hidden
        # Fix: was `2 if bidirectional is True else 1`, which left
        # num_directions at 1 for truthy non-bool values (e.g. 1) even though
        # the same value makes the LSTMs below bidirectional. Plain truthiness
        # keeps the count consistent with the RNNs' actual directionality.
        self.num_directions = 2 if bidirectional else 1

        # NOTE(review): `nb` is a project-local module — assumed to mirror the
        # torch.nn LSTM interface; confirm against its definition.
        self.encoder = nb.LSTM(input_size=n_enc_vocab, hidden_size=n_hidden,
                               bidirectional=bidirectional)
        self.decoder = nb.LSTM(input_size=n_dec_vocab, hidden_size=n_hidden,
                               bidirectional=bidirectional)

        self.attention = nb.AttentionTwo(n_dec_vocab, n_hidden,
                                         n_layers=n_layers,
                                         bidirectional=bidirectional,
                                         linearTransform=linearTransform)
Code example #2
0
    def __init__(self, n_enc_vocab, n_dec_vocab, n_hidden, bidirectional,
                 linearTransform):
        """Build a GRU encoder/decoder pair with an attention head.

        Args:
            n_enc_vocab: input size of the encoder GRU.
            n_dec_vocab: input size of the decoder GRU; also passed to the
                attention module.
            n_hidden: hidden-state size shared by encoder and decoder.
            bidirectional: if truthy, both GRUs run in two directions.
            linearTransform: forwarded to the attention module.
        """
        super(Seq2SeqBiGRU_Attention, self).__init__()

        self.n_hidden = n_hidden
        # Fix: was `2 if bidirectional is True else 1`, which left
        # num_directions at 1 for truthy non-bool values (e.g. 1) even though
        # the same value makes the GRUs below bidirectional. Plain truthiness
        # keeps the count consistent with the RNNs' actual directionality.
        self.num_directions = 2 if bidirectional else 1

        # NOTE(review): `nb` is a project-local module — assumed to mirror the
        # torch.nn GRU interface; confirm against its definition.
        self.encoder = nb.GRU(input_size=n_enc_vocab,
                              hidden_size=n_hidden,
                              bidirectional=bidirectional)
        self.decoder = nb.GRU(input_size=n_dec_vocab,
                              hidden_size=n_hidden,
                              bidirectional=bidirectional)

        self.attention = nb.AttentionTwo(n_dec_vocab,
                                         n_hidden,
                                         bidirectional=bidirectional,
                                         linearTransform=linearTransform)