def output(self, x):
    """Return the one-hot encoding of the integer index matrix *x*.

    Parameters
    ----------
    x : integer array/tensor, assumed shape (batch, seq) — the reshape
        below requires exactly two leading dimensions.

    Returns
    -------
    Tensor of shape (batch, seq, vocab_size). When ``self.zero_index``
    is set, positions holding that index are zeroed out (padding mask).
    """
    if self.on_memory:
        # Row lookup in the precomputed one-hot table, then restore
        # the (batch, seq, vocab) layout.
        ret_tensor = self.onehot_list[x.flatten()].reshape(
            (x.shape[0], x.shape[1], self.vocab_size))
    else:
        ret_tensor = onehot_tensor(x, self.vocab_size)
    # Fix: compare against the None singleton with `is not`, never `!=`
    # (PEP 8; `!=` dispatches to __ne__ and is unreliable for 0-valued
    # or array-like zero_index settings).
    if self.zero_index is not None:
        mask = T.neq(x, self.zero_index)
        ret_tensor *= mask[:, :, None]
    return ret_tensor
def output(self, x):
    """One-hot encode the 2-D index matrix *x*.

    Parameters
    ----------
    x : integer array/tensor, assumed (batch, seq) — the reshape needs
        exactly two leading dimensions.

    Returns
    -------
    Tensor of shape (batch, seq, vocab_size); entries equal to
    ``self.zero_index`` (if set) are masked to all-zero rows.
    """
    if self.on_memory:
        # Precomputed-table lookup, then reshape back to (batch, seq, vocab).
        ret_tensor = self.onehot_list[x.flatten()].reshape(
            (x.shape[0], x.shape[1], self.vocab_size))
    else:
        ret_tensor = onehot_tensor(x, self.vocab_size)
    # Fix: `is not None` instead of `!= None` — singleton comparison per
    # PEP 8, and robust against __ne__ overloads.
    if self.zero_index is not None:
        mask = T.neq(x, self.zero_index)
        ret_tensor *= mask[:, :, None]
    return ret_tensor
def compute_tensor(self, x):
    """One-hot encode index input *x*, optionally applying padding masks.

    Parameters
    ----------
    x : integer array/tensor, 1-D (seq,) or 2-D (batch, seq) — both
        ranks are handled explicitly below.

    Returns
    -------
    Tensor of shape (seq, vocab_size) or (batch, seq, vocab_size).
    Positions equal to ``self.zero_index`` are zeroed, and an external
    ``self.mask`` (if set) is multiplied in as well.
    """
    if self.cached:
        if x.ndim == 1:
            # Direct row lookup for a flat index vector.
            ret_tensor = self.onehot_list[x]
        else:
            # Flatten, look up, then restore (batch, seq, vocab) layout.
            ret_tensor = self.onehot_list[x.flatten()].reshape(
                (x.shape[0], x.shape[1], self.vocab_size))
    else:
        ret_tensor = onehot_tensor(x, self.vocab_size)
    # Fix: `is not None` rather than `!= None` (PEP 8 singleton
    # comparison; also correct when zero_index could compare oddly).
    if self.zero_index is not None:
        mask = T.neq(x, self.zero_index)
        if x.ndim == 1:
            ret_tensor *= mask[:, None]
        else:
            ret_tensor *= mask[:, :, None]
    # NOTE(review): truthiness test kept as-is for behavior parity, but
    # it is fragile if self.mask can be an array/tensor (ambiguous or
    # always-true truth value) — confirm mask is None-or-tensor and
    # consider `is not None` here too.
    if self.mask:
        if x.ndim == 1:
            ret_tensor *= self.mask[:, None]
        else:
            ret_tensor *= self.mask[:, :, None]
    return ret_tensor
def compute_tensor(self, x):
    """One-hot encode *x* (1-D or 2-D index input) with optional masking.

    Parameters
    ----------
    x : integer array/tensor of rank 1 (seq,) or rank 2 (batch, seq).

    Returns
    -------
    Tensor with a trailing ``vocab_size`` axis appended to *x*'s shape.
    Entries equal to ``self.zero_index`` are zeroed; ``self.mask`` (if
    set) is multiplied in afterwards.
    """
    if self.cached:
        if x.ndim == 1:
            # Flat index vector: direct table lookup.
            ret_tensor = self.onehot_list[x]
        else:
            # 2-D indices: look up flattened, then reshape back.
            ret_tensor = self.onehot_list[x.flatten()].reshape(
                (x.shape[0], x.shape[1], self.vocab_size))
    else:
        ret_tensor = onehot_tensor(x, self.vocab_size)
    # Fix: singleton comparison must use `is not None`, not `!= None`
    # (PEP 8; `!=` goes through __ne__ and can misfire for array-likes).
    if self.zero_index is not None:
        mask = T.neq(x, self.zero_index)
        if x.ndim == 1:
            ret_tensor *= mask[:, None]
        else:
            ret_tensor *= mask[:, :, None]
    # NOTE(review): `if self.mask:` preserved for behavior parity, but
    # truthiness of an array/tensor mask is ambiguous — verify mask is
    # None-or-tensor; `is not None` would be safer.
    if self.mask:
        if x.ndim == 1:
            ret_tensor *= self.mask[:, None]
        else:
            ret_tensor *= self.mask[:, :, None]
    return ret_tensor
def output(self, x):
    """Return the one-hot representation of the index matrix *x*.

    Uses the precomputed lookup table when ``self.on_memory`` is set;
    otherwise delegates to ``onehot_tensor``. Assumes *x* has two
    leading dimensions (batch, seq) in the table-lookup path.
    """
    if not self.on_memory:
        # No cached table: build the one-hot tensor symbolically.
        return onehot_tensor(x, self.vocab_size)
    # Flatten, look the rows up, then restore (batch, seq, vocab) layout.
    flat_rows = self.onehot_list[x.flatten()]
    target_shape = (x.shape[0], x.shape[1], self.vocab_size)
    return flat_rows.reshape(target_shape)