예제 #1
0
 def output(self, x):
     """Return a one-hot tensor for the integer index matrix *x*.

     x: integer matrix, assumed shape (batch, seq) -- TODO confirm with caller.
     Returns a tensor of shape (x.shape[0], x.shape[1], self.vocab_size).
     """
     if self.on_memory:
         # Gather precomputed one-hot rows for every index, then restore
         # the original 2-D layout as a (batch, seq, vocab) tensor.
         ret_tensor = self.onehot_list[x.flatten()].reshape(
             (x.shape[0], x.shape[1], self.vocab_size))
     else:
         # Build the one-hot tensor symbolically (presumably a Theano op).
         ret_tensor = onehot_tensor(x, self.vocab_size)
     # Fixed: compare against None with identity (`is not`), not `!=` (PEP 8).
     if self.zero_index is not None:
         # Zero out positions holding the padding index so they carry no signal.
         mask = T.neq(x, self.zero_index)
         ret_tensor *= mask[:, :, None]
     return ret_tensor
예제 #2
0
 def output(self, x):
     """One-hot encode the integer index matrix *x*.

     x: integer matrix, assumed (batch, seq) -- TODO confirm.
     Returns a (x.shape[0], x.shape[1], vocab_size) tensor; padding
     positions (zero_index) are zeroed out when zero_index is set.
     """
     if self.on_memory:
         # Fast path: rows were precomputed; gather then reshape back to 3-D.
         ret_tensor = self.onehot_list[x.flatten()].reshape(
             (x.shape[0], x.shape[1], self.vocab_size))
     else:
         ret_tensor = onehot_tensor(x, self.vocab_size)
     # Fixed: `is not None` instead of `!= None` (PEP 8 identity comparison).
     if self.zero_index is not None:
         mask = T.neq(x, self.zero_index)
         # Broadcast the (batch, seq) mask over the vocab axis.
         ret_tensor *= mask[:, :, None]
     return ret_tensor
예제 #3
0
 def compute_tensor(self, x):
     """Return the one-hot tensor for integer index array *x* (1-D or 2-D).

     1-D input yields shape (len(x), vocab_size); 2-D input yields
     (x.shape[0], x.shape[1], vocab_size). Positions equal to
     self.zero_index are zeroed; an optional self.mask is applied too.
     """
     if self.cached:
         if x.ndim == 1:
             # 1-D: direct row gather, no reshape needed.
             ret_tensor = self.onehot_list[x]
         else:
             # 2-D: gather rows for the flattened indices, then restore
             # the (batch, seq, vocab) layout.
             ret_tensor = self.onehot_list[x.flatten()].reshape(
                 (x.shape[0], x.shape[1], self.vocab_size))
     else:
         ret_tensor = onehot_tensor(x, self.vocab_size)
     # Fixed: identity comparison with None (`is not None`), per PEP 8.
     if self.zero_index is not None:
         # Zero out padding positions; broadcast mask over the vocab axis.
         mask = T.neq(x, self.zero_index)
         if x.ndim == 1:
             ret_tensor *= mask[:, None]
         else:
             ret_tensor *= mask[:, :, None]
     # NOTE(review): truthiness of self.mask is ambiguous if it is a numpy
     # array; kept as-is to preserve behavior -- confirm mask is None or a
     # symbolic/scalar value, and consider `is not None` if so.
     if self.mask:
         if x.ndim == 1:
             ret_tensor *= self.mask[:, None]
         else:
             ret_tensor *= self.mask[:, :, None]
     return ret_tensor
예제 #4
0
 def compute_tensor(self, x):
     """One-hot encode *x*, which may be a 1-D vector or 2-D matrix of indices.

     Returns (len(x), vocab_size) for 1-D input, or
     (x.shape[0], x.shape[1], vocab_size) for 2-D input.
     """
     if self.cached:
         if x.ndim == 1:
             ret_tensor = self.onehot_list[x]
         else:
             # Gather precomputed rows, then reshape back to (batch, seq, vocab).
             ret_tensor = self.onehot_list[x.flatten()].reshape(
                 (x.shape[0], x.shape[1], self.vocab_size))
     else:
         ret_tensor = onehot_tensor(x, self.vocab_size)
     # Fixed: `is not None` replaces `!= None` (PEP 8 identity comparison).
     if self.zero_index is not None:
         # Suppress padding positions; mask broadcasts over the vocab axis.
         mask = T.neq(x, self.zero_index)
         if x.ndim == 1:
             ret_tensor *= mask[:, None]
         else:
             ret_tensor *= mask[:, :, None]
     # NOTE(review): `if self.mask:` is a truthiness test and would raise on a
     # numpy array; preserved as-is -- verify mask's type before tightening.
     if self.mask:
         if x.ndim == 1:
             ret_tensor *= self.mask[:, None]
         else:
             ret_tensor *= self.mask[:, :, None]
     return ret_tensor
예제 #5
0
파일: onehot_embed.py 프로젝트: 52nlp/deepy
 def output(self, x):
     """Return the one-hot representation of the integer index matrix *x*."""
     # Symbolic fallback when rows are not held in memory.
     if not self.on_memory:
         return onehot_tensor(x, self.vocab_size)
     # Fast path: gather precomputed one-hot rows for the flattened
     # indices, then restore the (batch, seq, vocab) layout.
     flat_rows = self.onehot_list[x.flatten()]
     target_shape = (x.shape[0], x.shape[1], self.vocab_size)
     return flat_rows.reshape(target_shape)