Example #1
    def __init__(self,
                 input_size,
                 hidden_size,
                 num_layers,
                 dropout_rate=0,
                 dropout_output=False,
                 rnn_type=nn.LSTM,
                 concat_layers=False,
                 use_tanh=1,
                 bidirectional=True,
                 res_net=False,
                 get_all_layers=False):  # no padding during training; this still needs changing
        super(StackedBRNN, self).__init__()
        self.dropout_output = dropout_output
        self.dropout_rate = dropout_rate
        self.num_layers = num_layers
        self.res_net = res_net
        self.concat_layers = concat_layers
        self.get_all_layers = get_all_layers
        self.rnns = nn.ModuleList()
        for i in range(num_layers):
            # every layer after the first consumes the bidirectional output
            input_size = input_size if i == 0 else 2 * hidden_size
            #self.rnns.append(rnn_type(input_size, hidden_size,
            #                          num_layers=1,
            #                          bidirectional=True))
            self.rnns.append(
                MF.SRUCell(input_size,
                           hidden_size,
                           dropout=dropout_rate,
                           rnn_dropout=dropout_rate,
                           use_tanh=use_tanh,
                           bidirectional=bidirectional))
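
The forward pass for this constructor isn't shown in the example. A minimal sketch of how such a stack is typically driven, assuming seq-first tensors and with nn.LSTM standing in for MF.SRUCell so it runs without the external sru package:

    import torch
    import torch.nn as nn
    import torch.nn.functional as F

    def stacked_forward(rnns, x, concat_layers=False, dropout_rate=0.0, training=True):
        # x: (seq_len, batch, input_size); SRU, like LSTM, defaults to seq-first input
        outputs = [x]
        for rnn in rnns:
            inp = outputs[-1]
            if dropout_rate > 0:
                inp = F.dropout(inp, p=dropout_rate, training=training)
            out, _ = rnn(inp)
            outputs.append(out)
        # concatenate every layer's hidden states, or keep only the last layer
        return torch.cat(outputs[1:], dim=2) if concat_layers else outputs[-1]

    rnns = nn.ModuleList(
        [nn.LSTM(16 if i == 0 else 2 * 8, 8, bidirectional=True) for i in range(2)])
    y = stacked_forward(rnns, torch.randn(5, 3, 16))  # -> (5, 3, 16)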
Example #2
    def __init__(self,
                 input_size,
                 hidden_size,
                 num_layers,
                 dropout_rate=0,
                 dropout_output=False,
                 rnn_type=nn.LSTM,
                 concat_layers=False,
                 padding=False):
        super(StackedBRNN, self).__init__()
        self.padding = padding
        self.dropout_output = dropout_output
        self.dropout_rate = dropout_rate
        self.num_layers = num_layers
        self.concat_layers = concat_layers
        self.rnns = nn.ModuleList()
        self.lns = nn.ModuleList()
        for i in range(num_layers):
            input_size = input_size if i == 0 else 2 * hidden_size
            #self.rnns.append(rnn_type(input_size, hidden_size,
            #                          num_layers=1,
            #                          bidirectional=True))
            self.rnns.append(
                MF.SRUCell(input_size,
                           hidden_size,
                           dropout=dropout_rate,
                           rnn_dropout=dropout_rate,
                           use_tanh=1,
                           bidirectional=True))

            # one norm per layer, sized for the bidirectional (2 * hidden_size) output
            self.lns.append(LayerNorm(d_hid=2 * hidden_size))
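
The forward pass is likewise omitted here. A minimal sketch of how the per-layer norms might be interleaved with the RNN stack, assuming nn.LayerNorm in place of the custom LayerNorm(d_hid=...) and nn.LSTM in place of MF.SRUCell:

    import torch
    import torch.nn as nn

    hidden_size = 8
    rnns = nn.ModuleList(
        [nn.LSTM(16 if i == 0 else 2 * hidden_size, hidden_size, bidirectional=True)
         for i in range(2)])
    lns = nn.ModuleList([nn.LayerNorm(2 * hidden_size) for _ in range(2)])

    x = torch.randn(5, 3, 16)  # (seq_len, batch, input_size)
    for rnn, ln in zip(rnns, lns):
        x, _ = rnn(x)
        x = ln(x)  # normalize each layer's 2 * hidden_size output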
Example #3
    def __init__(self,
                 input_size,
                 hidden_size,
                 num_layers=1,
                 dropout=0,
                 bidirectional=True,
                 batch_first=True):
        super(SRU, self).__init__()
        # num_layers and batch_first are accepted but never forwarded to the cell
        self.sru = custom_nn.SRUCell(n_in=input_size,
                                     n_out=hidden_size,
                                     dropout=dropout,
                                     rnn_dropout=0,
                                     use_tanh=1,
                                     use_selu=0,
                                     bidirectional=bidirectional,
                                     layer_norm=False,
                                     rescale=False)
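
This wrapper stores only the cell. A hypothetical forward that simply delegates to it, with nn.LSTM standing in for custom_nn.SRUCell so the sketch is self-contained:

    import torch
    import torch.nn as nn

    class SRU(nn.Module):
        def __init__(self, input_size, hidden_size, bidirectional=True):
            super().__init__()
            self.sru = nn.LSTM(input_size, hidden_size, bidirectional=bidirectional)

        def forward(self, x):
            # seq-first input, matching the SRU convention
            out, _ = self.sru(x)
            return out

    out = SRU(16, 8)(torch.randn(5, 3, 16))  # -> (5, 3, 16)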