Example #1
def __init__(self, hidden_size):
    # Three bias-free Linear links, all initialised with Glorot (Xavier) normal
    # weights; in_size=None lets Chainer infer the input dimension on first use.
    super(Attention, self).__init__(
        xh=L.Linear(in_size=None, out_size=hidden_size, initialW=GlorotNormal(), nobias=True),
        hh=L.Linear(in_size=None, out_size=hidden_size, initialW=GlorotNormal(), nobias=True),
        hw=L.Linear(hidden_size, 1, initialW=GlorotNormal(), nobias=True),
    )
    self.hidden_size = hidden_size
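
All of these snippets are constructors lifted out of larger Chainer models, so their imports are not shown. A minimal sketch of what they appear to assume (an inference from the names used, not part of the original code):

import six
import chainer.links as L
from chainer import link, variable            # used only in Example #4
from chainer.initializers import GlorotNormal, HeNormal
from chainer.utils import argument            # used only in Example #4
# Example #4's "rnn" alias points at Chainer's n_step_gru function module;
# its exact import path depends on the Chainer version, so it is left out here.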
Example #2
def __init__(self, item_size, hidden_size):
    super(AttReDecoder, self).__init__(
        re=L.Linear(None, 2, initialW=GlorotNormal(), nobias=True),
        re_att=Attention(hidden_size),
        r_att=Attention(hidden_size),
        e_att=Attention(hidden_size),
        fy=L.Linear(None, item_size, initialW=GlorotNormal(), nobias=True)
    )
Example #3
def __init__(self, hidden_size):
    super(SelectiveGate, self).__init__(
        xh=L.Linear(in_size=None,
                    out_size=hidden_size,
                    initialW=GlorotNormal(),
                    nobias=True),
        hh=L.Linear(in_size=None,
                    out_size=hidden_size,
                    initialW=GlorotNormal(),
                    nobias=True),
    )
    self.hidden_size = hidden_size
Example #4
def __init__(self, n_layers, in_size, out_size, dropout, use_bi_direction,
             **kwargs):
    argument.check_unexpected_kwargs(
        kwargs,
        use_cudnn='use_cudnn argument is not supported anymore. '
        'Use chainer.using_config')
    argument.assert_kwargs_empty(kwargs)

    # One child Link per (layer, direction) pair. Each holds six Glorot-normal
    # weight matrices and six zero biases: w0-w2 act on the layer input and
    # w3-w5 on the previous hidden state, the layout n_step_gru / n_step_bigru
    # expect.
    weights = []
    direction = 2 if use_bi_direction else 1
    for i in six.moves.range(n_layers):
        for di in six.moves.range(direction):
            weight = link.Link()
            with weight.init_scope():
                for j in six.moves.range(6):
                    if i == 0 and j < 3:
                        w_in = in_size
                    elif i > 0 and j < 3:
                        # Upper layers receive the (possibly bidirectional)
                        # output of the layer below.
                        w_in = out_size * direction
                    else:
                        w_in = out_size
                    w = variable.Parameter(GlorotNormal(),
                                           (out_size, w_in))
                    b = variable.Parameter(0, (out_size,))
                    setattr(weight, 'w%d' % j, w)
                    setattr(weight, 'b%d' % j, b)
            weights.append(weight)

    super(NStepGRUBase, self).__init__(*weights)

    self.n_layers = n_layers
    self.dropout = dropout
    self.out_size = out_size
    self.direction = direction
    self.rnn = rnn.n_step_bigru if use_bi_direction else rnn.n_step_gru
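
The constructor above only builds the parameter containers. A minimal sketch of how they are typically consumed, assuming the class keeps Chainer's stock NStepRNNBase forward convention (the real forward method is not part of this example):

def forward(self, hx, xs):
    # Collect each (layer, direction) child's six weights and six biases in the
    # order n_step_gru / n_step_bigru expect them.
    ws = [[getattr(w, 'w%d' % j) for j in range(6)] for w in self]
    bs = [[getattr(w, 'b%d' % j) for j in range(6)] for w in self]
    # hx: initial hidden states, xs: list of per-sequence input Variables.
    hy, ys = self.rnn(self.n_layers, self.dropout, hx, ws, bs, xs)
    return hy, ys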
Example #5
def __init__(self, item_size, embed_size, hidden_size):
    # Glorot-initialised embedding (ignore_label=-1 treats -1 ids as padding),
    # a single bidirectional GRU layer with dropout 0.5, and a selective gate
    # over the 2 * hidden_size bidirectional states.
    super(NStepSelBiGRUEncoder, self).__init__(
        xe=L.EmbedID(item_size, embed_size, initialW=GlorotNormal(), ignore_label=-1),
        gru=NStepBiGRU(1, embed_size, hidden_size, 0.5),
        sel=SelectiveGate(2 * hidden_size)
    )
    self.hidden_size = hidden_size
Example #6
def __init__(self, output_dim, init_weights=False, filter_height=1):
    super(Alex, self).__init__()
    self.output_dim = output_dim
    self.filter_height = filter_height
    with self.init_scope():
        if init_weights:
            # Glorot-normal weights for the convolutions, He-normal for the
            # fully connected layers; in_channels/in_size stay None so Chainer
            # infers them on the first forward pass.
            self.conv1 = L.Convolution2D(None,  96, (1, 3), 1, (0, 1), initialW=GlorotNormal())
            self.conv2 = L.Convolution2D(None, 256, (filter_height, 3), 1, (0, 1), initialW=GlorotNormal())
            self.conv3 = L.Convolution2D(None, 384, (1, 3), 1, (0, 1), initialW=GlorotNormal())
            self.conv4 = L.Convolution2D(None, 384, (1, 3), 1, (0, 1), initialW=GlorotNormal())
            self.conv5 = L.Convolution2D(None, 256, (1, 3), 1, (0, 1), initialW=GlorotNormal())
            self.fc6 = L.Linear(None, 4096, initialW=HeNormal())
            self.fc7 = L.Linear(None, 4096, initialW=HeNormal())
            self.fc8 = L.Linear(None, output_dim, initialW=HeNormal())
        else:
            # Without init_weights, fall back to the library's default initialisers.
            self.conv1 = L.Convolution2D(None,  96, (1, 3), 1, (0, 1))
            self.conv2 = L.Convolution2D(None, 256, (filter_height, 3), 1, (0, 1))
            self.conv3 = L.Convolution2D(None, 384, (1, 3), 1, (0, 1))
            self.conv4 = L.Convolution2D(None, 384, (1, 3), 1, (0, 1))
            self.conv5 = L.Convolution2D(None, 256, (1, 3), 1, (0, 1))
            self.fc6 = L.Linear(None, 4096)
            self.fc7 = L.Linear(None, 4096)
            self.fc8 = L.Linear(None, output_dim)
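
A hypothetical instantiation, just to show which initialiser ends up where (the forward pass of Alex is not part of this example):

# With init_weights=True the five convolutions get Glorot-normal weights and
# the three fully connected layers He-normal weights; because in_channels and
# in_size are None, the arrays are only allocated on the first forward pass.
model = Alex(output_dim=10, init_weights=True, filter_height=1)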
Example #7
def __init__(self, item_size):
    super(GruDecoder, self).__init__(
        fy=L.Linear(None, item_size, initialW=GlorotNormal(), nobias=True))