Example No. 1
def forward_cpu(self, x: tensor) -> tensor:
    # Element-wise tanh of the input, computed on the host with NumPy.
    data = np.tanh(x.host_data)
    if self.inplace:
        # Overwrite the input tensor and cache it for the backward pass.
        self.cache = [x, x]
        x.host_data = data
        return x
    else:
        # Otherwise write the result into a fresh tensor of the same shape.
        y = zeros_like(x)
        self.cache = [x, y]
        y.host_data = data
        return y
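For reference, the same math can be checked with plain NumPy. The sketch below is illustrative only (bare arrays instead of the library's tensor class) and also shows the gradient formula that caching the output enables, since d tanh(x)/dx = 1 - tanh(x)^2.

import numpy as np

# NumPy-only sketch (no tensor class): tanh forward plus the backward
# step that the cached output makes cheap.
x = np.array([[-2.0, 0.0, 2.0]])
y = np.tanh(x)                # forward, same as forward_cpu's data
dy = np.ones_like(y)          # upstream gradient
dx = dy * (1.0 - y ** 2)      # backward uses the cached output y
print(y)
print(dx)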
Example No. 2
def forward_cpu(self, x: tensor) -> tensor:
    # Boolean mask of the positive entries; multiplying by it zeroes the rest.
    tmp = x.host_data > 0
    data = x.host_data * tmp
    if self.inplace:
        # Cache the input and the mask, then overwrite the input in place.
        self.cache = [x, tmp, x]
        x.host_data = data
        return x
    else:
        # Otherwise write the result into a fresh tensor of the same shape.
        y = zeros_like(x)
        self.cache = [x, tmp, y]
        y.host_data = data
        return y
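The boolean mask cached here is exactly what the ReLU backward pass needs. A minimal NumPy-only sketch of both directions (plain arrays, no tensor class):

import numpy as np

# NumPy-only sketch of ReLU: the boolean mask from the forward pass is
# reused to gate the gradient in the backward pass.
x = np.array([[-1.0, 0.5, 2.0]])
mask = x > 0                  # tmp in forward_cpu
y = x * mask                  # forward: max(x, 0)
dy = np.ones_like(y)          # upstream gradient
dx = dy * mask                # backward: gradient flows only where x > 0
print(y)
print(dx)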
Example No. 3
def _backward_cpu(self, dx: tensor, dy: tensor) -> tensor:
    # Permute the incoming gradient with the stored axes order.
    dx.host_data = np.transpose(dy.host_data, self.axes)
    return dx
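As a standalone illustration: the gradient of y = transpose(x, axes) is dy permuted by the inverse permutation, i.e. np.argsort(axes); for a plain 2-D transpose the permutation is its own inverse, so applying the same axes works. The axes value below is hypothetical.

import numpy as np

# Transpose gradient sketch: permute dy back with the inverse permutation.
axes = (2, 0, 1)                       # hypothetical forward permutation
inv_axes = tuple(np.argsort(axes))     # (1, 2, 0)
x = np.arange(24.0).reshape(2, 3, 4)
y = np.transpose(x, axes)              # forward, shape (4, 2, 3)
dy = np.ones_like(y)                   # upstream gradient
dx = np.transpose(dy, inv_axes)        # backward restores x's shape
assert dx.shape == x.shape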
Example No. 4
def softmax_util_cpu(x: tensor, y: tensor) -> tensor:
    # Row-wise softmax; subtracting the per-row max keeps exp() from overflowing.
    eX = np.exp((x.host_data.T - np.max(x.host_data, axis=1)).T)
    y.host_data = (eX.T / eX.sum(axis=1)).T
    return y
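A quick NumPy-only check of the stabilization trick used above: shifting each row by its maximum before exponentiating leaves the softmax unchanged but prevents overflow, even for very large inputs.

import numpy as np

# Both rows are the same distribution shifted by a constant, so they
# should produce identical softmax outputs, each summing to 1.
x = np.array([[1.0, 2.0, 3.0],
              [1000.0, 1001.0, 1002.0]])
eX = np.exp((x.T - np.max(x, axis=1)).T)
y = (eX.T / eX.sum(axis=1)).T
print(y)                  # identical rows, no overflow
print(y.sum(axis=1))      # each row sums to 1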
Example No. 5
def _backward_cpu(self, dx: tensor, dy: tensor) -> tensor:
    # Rescale the upstream gradient by 1 / (1 - prob), the inverted-dropout
    # scaling, then zero the entries that were dropped in the forward pass.
    dx.host_data = dy.host_data / (1 - self.prob)
    dx.host_data[self.mask.host_data] = 0
    return dx
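A minimal NumPy sketch of inverted dropout consistent with this backward pass, assuming self.mask marks the units that were dropped in the forward pass and self.prob is the drop probability (the prob value and shapes below are made up for illustration):

import numpy as np

# Inverted dropout sketch: the same mask and the same 1 / (1 - prob)
# scaling are applied in both the forward and the backward pass.
prob = 0.5
rng = np.random.default_rng(0)
x = rng.standard_normal((2, 4))
mask = rng.random(x.shape) < prob      # True where the unit is dropped
y = x / (1 - prob)                     # scale the survivors at train time
y[mask] = 0                            # forward: drop
dy = np.ones_like(y)                   # upstream gradient
dx = dy / (1 - prob)                   # backward: same scaling...
dx[mask] = 0                           # ...and the same dropped positions
print(dx)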