Example #1
    def __init__(self,
                 sequence_length,
                 nb_chars,
                 nb_per_word,
                 embedding_dim,
                 rnn_dim,
                 rnn_layers,
                 rnn_unit='gru',
                 dropout=0.1):
        super(Charrepresentation, self).__init__()
        self.sequence_length = sequence_length
        self.nb_chars = nb_chars
        self.nb_per_word = nb_per_word
        self.embedding_dim = embedding_dim
        self.rnn_dim = rnn_dim
        if rnn_unit == 'gru':
            self.rnn = nn.GRU(embedding_dim,
                              rnn_dim,
                              rnn_layers,
                              bias=False,
                              batch_first=True,
                              dropout=dropout,
                              bidirectional=False)
        elif rnn_unit == 'lstm':
            self.rnn = nn.LSTM(embedding_dim,
                               rnn_dim,
                               rnn_layers,
                               bias=False,
                               batch_first=True,
                               dropout=dropout,
                               bidirectional=False)

        self.embedding = nn.Embedding(nb_chars, embedding_dim)
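
A minimal usage sketch, assuming torch is already imported; the sizes are illustrative only, and since the class as given defines no forward, this just exercises its submodules:

# Hypothetical sizes; only exercises the submodules defined above.
char_rnn = Charrepresentation(sequence_length=20, nb_chars=100, nb_per_word=8,
                              embedding_dim=16, rnn_dim=32, rnn_layers=2)
chars = torch.randint(0, 100, (4 * 20, 8))        # [batch*seq_len x nb_per_word]
out, h = char_rnn.rnn(char_rnn.embedding(chars))  # out: [batch*seq_len x 8 x 32]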
Example #2
    def __init__(self, input_size, hidden_size, embedding_size, batch_size,
                 output_size, pretrained_embedding, num_layers, padding_idx,
                 bidirectional):
        super().__init__()

        # TODO: add asserts

        self.input_size = input_size
        self.hidden_size = hidden_size
        self.embedding_size = embedding_size
        self.batch_size = batch_size
        self.output_size = output_size
        self.pretrained_embedding = pretrained_embedding
        self.num_layers = num_layers
        self.padding_idx = padding_idx
        self.directions = 2 if bidirectional else 1

        self.embedding_layer = nn.Embedding(self.input_size,
                                            self.embedding_size,
                                            padding_idx=self.padding_idx)
        self.embedding_layer.weight.data.copy_(
            self.pretrained_embedding.weight.data)
        self.rnn = nn.LSTM(input_size=self.embedding_size,
                           hidden_size=self.hidden_size,
                           num_layers=self.num_layers,
                           bidirectional=bidirectional)
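
The pretrained_embedding argument is expected to be an existing nn.Embedding whose weights get copied in; a sketch of building one from a weight matrix (sizes illustrative, not from the original):

# Illustrative stand-in for real pretrained vectors (e.g. GloVe)
weights = torch.randn(5000, 300)  # vocab_size x embedding_size
pretrained = nn.Embedding.from_pretrained(weights, padding_idx=0)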
Example #3
 def __init__(self, input_size, hidden_size, num_layers, num_keys):
     super(Model, self).__init__()
     self.hidden_size = hidden_size
     self.num_layers = num_layers
     self.lstm = nn.LSTM(input_size,
                         hidden_size,
                         num_layers,
                         batch_first=True)
     self.fc = nn.Linear(hidden_size, num_keys)
     self.first = True
Example #4
 def __init__(self, input_size, hidden_size, num_layers, num_classes):
     super(BiLstm, self).__init__()
     self.hidden_size = hidden_size
     self.num_layers = num_layers
     self.lstm = nn.LSTM(input_size,
                         hidden_size,
                         num_layers,
                         batch_first=True,
                         bidirectional=True)
     self.fc = nn.Linear(hidden_size * 2, num_classes)
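
The final layer takes hidden_size * 2 features because a bidirectional LSTM concatenates the forward and backward outputs; a hedged forward sketch (not part of the original):

def forward(self, x):
    # x: [batch x seq_len x input_size] since batch_first=True
    out, _ = self.lstm(x)          # out: [batch x seq_len x hidden_size*2]
    return self.fc(out[:, -1, :])  # classify from the last time step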
Example #5
    def __init__(self, input_size, hidden_size, num_layers, num_classes):
        super(RNN, self).__init__()
        self.hidden_size = hidden_size
        self.num_layers = num_layers
        self.lstm = nn.LSTM(input_size, hidden_size, num_layers, batch_first=True)

        # Using information from every hidden state would require
        # nn.Linear(hidden_size * sequence_length, num_classes).
        # Instead, we can use information from only the last state:
        self.fc = nn.Linear(hidden_size, num_classes)
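
A sketch of the forward pass matching the comment above; the flattening variant would need the larger Linear and a fixed sequence_length (hypothetical, not in the original):

def forward(self, x):
    out, _ = self.lstm(x)  # out: [batch x seq_len x hidden_size]
    # Variant 1: flatten all hidden states (pairs with the larger Linear)
    # return self.fc(out.reshape(out.size(0), -1))
    # Variant 2: last hidden state only (pairs with the Linear above)
    return self.fc(out[:, -1, :])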
Example #6
 def __init__(self,
              input_size,
              hidden_size=100,
              rnn_unit='gru',
              dropout=0.1):
     super(ContextRepresentation, self).__init__()
     if rnn_unit == 'gru':
         self.rnn = nn.GRU(input_size,
                           hidden_size,
                           bias=False,
                           batch_first=True,
                           dropout=dropout,
                           bidirectional=True)
      elif rnn_unit == 'lstm':
          self.rnn = nn.LSTM(input_size,
                            hidden_size,
                            bias=False,
                            batch_first=True,
                            dropout=dropout,
                            bidirectional=True)
Example #7
import torch
import torch.nn as nn

# nn.LSTM has no default input_size or hidden_size; the values here are illustrative
model = nn.LSTM(input_size=10, hidden_size=20)
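
With concrete sizes in place, a call looks like this (shapes illustrative; batch_first defaults to False):

x = torch.randn(5, 3, 10)      # [seq_len x batch x input_size]
output, (h_n, c_n) = model(x)  # output: [5 x 3 x 20]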
Example #8
 def forward(self, x, context, prev_state, embed):
     # context: [b x 1 x hidden*2]
     embedded = embed(x)  # embedded: [b x 1 x emb] if x: [b x 1]
     rnn_input = torch.cat([context, embedded], dim=2)
     # use the LSTM built once in __init__ (see the sketch below), seeded with prev_state
     output, h = self.lstm(rnn_input, prev_state)
     return output, h
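
For this forward to work, the LSTM must be built once in __init__ rather than on every call; a hedged sketch of a matching constructor (names and sizes assumed, not from the original):

 def __init__(self, emb_dim, hidden_size):
     super().__init__()
     # input is cat(context, embedded) -> emb_dim + hidden_size * 2 features
     self.lstm = nn.LSTM(emb_dim + hidden_size * 2, hidden_size, batch_first=True)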