Example #1
 def forward_batch(self, x, mask):
     """
     :param x: (batch, length, dim)
     :param mask: (batch, length, )
     :return: (batch, length, hidden_dim)
     """
     # conv_after_length = length - kernel + 2 * padding_size + 1
     new_x = x
     if self.padding_size > 0:
         # (batch, padding_size + length + padding_size, dim)
         new_x = temporal_padding_3d(x, (self.padding_size, self.padding_size))
         # (batch, conv_after_length)
         mask = temporal_padding_mask(mask, kernel_size=self.kernel_size, padding_size=self.padding_size)
     elif self.padding_size == 0:
         # (batch, conv_after_length)
         mask = temporal_padding_mask(mask, kernel_size=self.kernel_size, padding_size=0)
     else:
         raise RuntimeError("padding_size must be >= 0")
     # NOTE: unlike forward() in Example #9, this batch version has no
     # safe-padding fallback for sentences shorter than the kernel size,
     # so it assumes length >= kernel_size.
     # (batch, conv_after_length, hidden_dim)
     conv_result = self.forward_conv_batch(new_x)
     pooling_result = get_pooling_batch(conv_result, mask, self.pooling)
     dropout_out = dropout_from_layer(pooling_result, self.dropout)
     return self.act.activate(dropout_out + self.b)
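Every example in this listing calls a `dropout_from_layer` helper that is not itself shown. As a rough guide, a minimal Theano implementation might look like the sketch below; the `_srng` stream and the absence of test-time rescaling are assumptions (classic Theano tutorial code scales the weights at prediction time instead):

    import theano
    from theano.tensor.shared_randomstreams import RandomStreams

    _srng = RandomStreams(seed=42)  # hypothetical shared random stream

    def dropout_from_layer(layer, p):
        # Zero each activation independently with probability p; when
        # p == 0 the input passes through unchanged.
        if p > 0:
            mask = _srng.binomial(n=1, p=1 - p, size=layer.shape,
                                  dtype=theano.config.floatX)
            layer = layer * mask
        return layer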
Example #2
 def forward_batch(self, x):
     """
     :param x: (batch, dim)
     """
     dropout_x = dropout_from_layer(x, self.dropout)
     # (batch, in) (in, hidden) + (hidden, ) -> (batch, hidden); the bias broadcasts over the batch
     return self.act.activate(T.dot(dropout_x, self.W.T) + self.b)
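The transpose in `T.dot(dropout_x, self.W.T)` implies that the weight matrix is stored as `(hidden_dim, in_dim)`. A self-contained shape check under that assumption (all names and sizes are illustrative):

    import numpy as np
    import theano
    import theano.tensor as T

    in_dim, hidden_dim = 64, 32  # hypothetical sizes
    W = theano.shared(np.zeros((hidden_dim, in_dim), dtype=theano.config.floatX))
    x = T.matrix("x")   # (batch, in_dim)
    h = T.dot(x, W.T)   # (batch, in_dim) (in_dim, hidden_dim) -> (batch, hidden_dim)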
Example #3
 def forward_sequence_batch(self, x, mask):
     """
     :param x: (batch, max_len, dim)
     :param mask: (batch, max_len)
     :return: (batch, max_len, 2 * hidden_dim)
     """
     dropout_x = dropout_from_layer(x, self.dropout)
     return T.concatenate([
         self.forward_scan_batch(dropout_x, mask),
         self.backward_scan_batch(dropout_x, mask),
     ], axis=2)
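Each scan produces a `(batch, max_len, hidden_dim)` tensor, and concatenating on `axis=2` stacks the two directions into a single bidirectional representation. A minimal shape check with stand-in tensors:

    import numpy as np
    import theano
    import theano.tensor as T

    h_fwd = T.tensor3("h_fwd")  # (batch, max_len, hidden_dim)
    h_bwd = T.tensor3("h_bwd")  # (batch, max_len, hidden_dim)
    h_bi = T.concatenate([h_fwd, h_bwd], axis=2)

    f = theano.function([h_fwd, h_bwd], h_bi.shape)
    batch = np.zeros((2, 5, 3), dtype=theano.config.floatX)
    print(f(batch, batch))  # [2 5 6] -> (batch, max_len, 2 * hidden_dim)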
Example #4
 def forward_sequence(self, x):
     """
     :param x: (length, dim)
     :return: (length, 2 * hidden_dim)
     """
     dropout_x = dropout_from_layer(x, self.dropout)
     return T.concatenate([
         self.forward_scan(dropout_x),
         self.backward_scan(dropout_x),
     ], axis=1)
Example #5
 def forward_sequence_batch(self, x, mask):
     """
     :param x: (batch, max_len, dim)
     :param mask: (batch, max_len)
     """
     dropout_x = dropout_from_layer(x, self.dropout)
     return self.forward_scan_batch(dropout_x, mask)
Example #6
 def forward_batch(self, x):
     """
     :param x: (batch, dim)
     """
     # (batch, in) (in, hidden) + (hidden, ) -> (batch, hidden); the bias broadcasts over the batch
     output = self.act.activate(T.dot(x, self.W.T) + self.b)
     return dropout_from_layer(output, self.dropout)
Example #7
 def forward_batch(self, x):
     """
     :param x: (batch, in)
     """
     x_dropout = dropout_from_layer(x, self.dropout)
     # (batch, in) (in, in) + (in, ) -> (batch, in)
     t = self.transform_gate.activate(T.dot(x_dropout, self.W_T.T) + self.b_T)
     # (batch, in) (in, in) + (in, ) -> (batch, in)
     z_t = self.act.activate(T.dot(x_dropout, self.W_H.T) + self.b_H)
     # (batch, in) * (batch, in) + (batch, in) * (batch, in) -> (batch, in)
     return t * z_t + (1 - t) * x_dropout
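Examples #7 and #8 implement a highway layer: the transform gate `t` interpolates between the transformed input `z_t` and the raw input, `y = t * H(x) + (1 - t) * x`. One detail these snippets do not show is initialization; the Highway Networks paper (Srivastava et al., 2015) recommends starting the transform-gate bias at a negative value so the layer initially carries its input through. A hypothetical sketch of that choice (not part of the original code):

    import numpy as np
    import theano

    in_dim = 128  # hypothetical layer width
    # sigmoid(-2) is about 0.12, so the gate starts mostly "carry",
    # which makes deep highway stacks trainable from the start.
    b_T = theano.shared(np.full(in_dim, -2.0, dtype=theano.config.floatX),
                        name="b_T")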
Example #8
 def forward(self, x):
     """
     :param x: (in, )
     """
     x_dropout = dropout_from_layer(x, self.dropout)
     # (in, in) (in, ) + (in, ) -> (in, )
     t = self.transform_gate.activate(T.dot(self.W_T, x_dropout) + self.b_T)
     # (in, in) (in, ) + (in, ) -> (in, )
     z_t = self.act.activate(T.dot(self.W_H, x_dropout) + self.b_H)
     # (in, ) * (in, ) + (in, ) * (in, ) -> (in, )
     return t * z_t + (1 - t) * x_dropout
Example #9
 def forward(self, x):
     """
     :param x: (length, dim)
     :return: (hidden_dim, )
     """
     if self.padding_size > 0:
         # (padding_size + length + padding_size, dim)
         x = temporal_padding_2d(x, (self.padding_size, self.padding_size))
     # If the kernel size is greater than the sentence length,
     # pad zeros at the end of the sentence so the convolution stays valid.
     safe_x = temporal_padding_2d(x, (0, self.kernel_size - x.shape[0]))
     x = ifelse(T.gt(self.kernel_size - x.shape[0], 0),
                safe_x,
                x)
     conv_result = self.forward_conv(x)
     pooling_result = get_pooling(conv_result, self.pooling)
     dropout_out = dropout_from_layer(pooling_result, self.dropout)
     return self.act.activate(dropout_out + self.b)
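`temporal_padding_2d` is only used, never defined, in these examples. Judging from its call sites, it zero-pads along the time axis; a plausible reimplementation (a guess, not the original) is:

    import theano.tensor as T

    def temporal_padding_2d(x, padding):
        # Zero-pad a (length, dim) matrix along the time axis.
        # `padding` is a (pad_left, pad_right) pair; both may be symbolic.
        pad_left, pad_right = padding
        left = T.zeros((pad_left, x.shape[1]), dtype=x.dtype)
        right = T.zeros((pad_right, x.shape[1]), dtype=x.dtype)
        return T.concatenate([left, x, right], axis=0)

Note that `ifelse` (from `theano.ifelse`) evaluates lazily, so when the sentence is already longer than the kernel, the `safe_x` branch, whose right padding would be negative, is never actually computed.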
Example #10
 def forward(self, input):
     dropout_input = dropout_from_layer(input, self.dropout)
     return T.nnet.softmax(T.dot(dropout_input, self.W) + self.b)
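Here the weights are used without a transpose (`T.dot(dropout_input, self.W)`), so `W` must be stored as `(in_dim, n_classes)`, the opposite layout from Examples #2 and #6. A self-contained sketch of the same output layer with dropout omitted (sizes are hypothetical):

    import numpy as np
    import theano
    import theano.tensor as T

    in_dim, n_classes = 16, 3
    W = theano.shared(np.zeros((in_dim, n_classes), dtype=theano.config.floatX))
    b = theano.shared(np.zeros(n_classes, dtype=theano.config.floatX))
    x = T.matrix("x")                        # (batch, in_dim)
    probs = T.nnet.softmax(T.dot(x, W) + b)  # (batch, n_classes)
    predict = theano.function([x], T.argmax(probs, axis=1))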
Example #11
 def forward(self, input):
     dropout_input = dropout_from_layer(input, self.dropout)
     return T.nnet.sigmoid(T.dot(dropout_input, self.W) + self.b[0])
Example #12
 def forward(self, x):
     """
     :param x: (dim, )
     """
     dropout_x = dropout_from_layer(x, self.dropout)
     return self.act.activate(T.dot(self.W, dropout_x) + self.b)
Example #13
 def forward_sequence(self, x):
     dropout_x = dropout_from_layer(x, self.dropout)
     return self.forward_scan(dropout_x)
Example #14
 def forward(self, x):
     """
     :param x: (dim, )
     """
     output = self.act.activate(T.dot(self.W, x) + self.b)
     return dropout_from_layer(output, self.dropout)