def bprop(self):
    """Backward pass of the VAE KL term KL(q(z|x) || N(0, I)).

    Gradients (chain-ruled with the upstream gradient ``self.out_grad``):
        d/dmu        = mu * out_grad
        d/dlog_sigma = 0.5 * (exp(log_sigma) - 1) * out_grad
    """
    # BUG FIX: the original multiplied mu.out_grad by out_grad twice
    # (once inside ca.multiply and once again with `*= self.out_grad`),
    # producing mu * out_grad**2. The sibling implementations of this
    # same backward pass apply the upstream gradient exactly once.
    ca.multiply(self.mu.out, self.out_grad, self.mu.out_grad)
    ca.exp(self.log_sigma.out, out=self.log_sigma.out_grad)
    self.log_sigma.out_grad -= 1
    self.log_sigma.out_grad *= 0.5
    self.log_sigma.out_grad *= self.out_grad
 def bprop(self):
     """Backward pass of the KL term with a broadcast-shaped upstream grad.

     d/dmu     = mu * g
     d/dlogvar = 0.5 * (exp(logvar) - 1) * g
     """
     # Reshape the incoming gradient so it lines up with mu/logvar.
     g = ca.reshape(self.grad_array, self.bcast_shape)
     ca.multiply(self.mu.array, g, self.mu.grad_array)
     dlv = self.logvar.grad_array
     ca.exp(self.logvar.array, out=dlv)
     dlv -= 1
     dlv *= 0.5
     dlv *= g
 def bprop(self):
     """Propagate the KL-divergence gradient into mu and logvar buffers."""
     upstream = ca.reshape(self.grad_array, self.bcast_shape)
     # mu gradient: mu * upstream, written straight into the grad buffer.
     ca.multiply(self.mu.array, upstream, self.mu.grad_array)
     # logvar gradient: 0.5 * (exp(logvar) - 1) * upstream, built in place.
     out_buf = self.logvar.grad_array
     ca.exp(self.logvar.array, out=out_buf)
     out_buf -= 1
     out_buf *= 0.5
     out_buf *= upstream
Exemple #4
0
 def fprop(self):
     """Numerically stable row-wise softmax of x into self.array.

     y_i = exp(x_i - max(x)) / sum_j exp(x_j - max(x))
     """
     # Shift by the per-row max so exp() cannot overflow.
     shift = ca.amax(self.x.array, axis=1, keepdims=True)
     ca.subtract(self.x.array, shift, self.array)
     ca.exp(self.array, self.array)
     # Reuse the (n, 1) shift buffer to hold the row sums.
     ca.sum(self.array, axis=1, keepdims=True, out=shift)
     self.array /= shift
 def fprop(self):
     """Per-row KL(q || N(0, I)) = -0.5 * sum(1 + log_sigma - mu^2 - exp(log_sigma))."""
     # Accumulate the bracketed term in a scratch buffer.
     acc = self.mu.out**2
     ca.negative(acc, acc)
     acc += 1
     acc += self.log_sigma.out
     acc -= ca.exp(self.log_sigma.out)
     # Reduce over the latent dimension, keeping a (n, 1) column.
     ca.sum(acc, axis=1, keepdims=True, out=self.out)
     self.out *= -0.5
 def fprop(self):
     """Scalar KL divergence of the approximate posterior from N(0, I).

     out = -0.5 * sum(1 + log_sigma - mu^2 - exp(log_sigma))
     """
     term = self.mu.out**2
     ca.negative(term, term)
     term += 1
     term += self.log_sigma.out
     term -= ca.exp(self.log_sigma.out)
     # Full reduction: the result is a scalar, not an array buffer.
     self.out = ca.sum(term)
     self.out *= -0.5
 def fprop(self):
     """KL(q || N(0, I)) reduced along self.axis into self.array.

     array = -0.5 * sum(1 + logvar - mu^2 - exp(logvar), axis)
     """
     scratch = self.mu.array**2
     ca.negative(scratch, scratch)
     scratch += 1
     scratch += self.logvar.array
     scratch -= ca.exp(self.logvar.array)
     ca.sum(scratch, axis=self.axis, out=self.array)
     self.array *= -0.5
 def fprop(self):
     """Fully reduced KL term: -0.5 * sum(1 + log_sigma - mu^2 - exp(log_sigma))."""
     inner = self.mu.array**2
     ca.negative(inner, inner)
     inner += 1
     inner += self.log_sigma.array
     inner -= ca.exp(self.log_sigma.array)
     # ca.sum with no axis collapses to a scalar.
     self.array = ca.sum(inner)
     self.array *= -0.5
 def fprop(self):
     """Forward pass of the Gaussian KL penalty, reduced along self.axis."""
     # Build -(mu^2) + 1 + logvar - exp(logvar) in one scratch array.
     body = self.mu.array**2
     ca.negative(body, body)
     body += self.logvar.array
     body += 1
     body -= ca.exp(self.logvar.array)
     # Reduce and apply the -0.5 factor in place.
     ca.sum(body, axis=self.axis, out=self.array)
     self.array *= -0.5
Exemple #10
0
 def fprop(self):
     """Scalar VAE regularizer: -0.5 * sum(1 + log_sigma - mu^2 - exp(log_sigma))."""
     neg_mu_sq = self.mu.out**2
     ca.negative(neg_mu_sq, neg_mu_sq)
     neg_mu_sq += self.log_sigma.out
     neg_mu_sq += 1
     neg_mu_sq -= ca.exp(self.log_sigma.out)
     total = ca.sum(neg_mu_sq)
     total *= -0.5
     self.out = total
 def fprop(self):
     """Compute the total KL divergence from N(0, I) as a scalar in self.array."""
     kl_body = self.mu.array**2
     ca.negative(kl_body, kl_body)
     kl_body += self.log_sigma.array
     kl_body += 1
     kl_body -= ca.exp(self.log_sigma.array)
     total = ca.sum(kl_body)
     total *= -0.5
     self.array = total
Exemple #12
0
 def bprop(self):
     """Softplus backward pass: x_grad = sigmoid(x) * upstream grad.

     sigmoid(x) = 1 / (1 + exp(-x)), built in place in x.out_grad.
     """
     buf = self.x.out_grad
     ca.negative(self.x.out, buf)
     ca.exp(buf, buf)
     buf += 1
     ca.divide(1.0, buf, out=buf)
     buf *= self.out_grad
Exemple #13
0
 def fprop(self):
     """Softplus forward pass: out = log(1 + exp(x)), computed in place."""
     dst = self.out
     ca.exp(self.x.out, dst)
     dst += 1
     # TODO: use log1p() for better accuracy near zero
     ca.log(dst, dst)
 def bprop(self):
     """Backward pass of the VAE KL term.

     d/dmu        = mu * grad
     d/dlog_sigma = 0.5 * (exp(log_sigma) - 1) * grad
     """
     ca.multiply(self.mu.array, self.grad_array, self.mu.grad_array)
     ds = self.log_sigma.grad_array
     ca.exp(self.log_sigma.array, out=ds)
     ds -= 1
     ds *= 0.5
     ds *= self.grad_array
Exemple #15
0
 def fprop(self, x):
     """Softplus forward pass; caches x so bprop can recompute sigmoid(x)."""
     self._tmp_x = x
     return ca.log(ca.exp(x) + 1.0)
Exemple #16
0
 def fprop(self):
     """Elementwise exponential: write exp(x) into the preallocated output."""
     ca.exp(self.x.out, self.out)
Exemple #17
0
def softmax(x):
    """Row-wise softmax, shifted by the per-row max for numerical stability."""
    shifted = x - ca.amax(x, axis=1, keepdims=True)
    numer = ca.exp(shifted)
    denom = ca.sum(numer, axis=1, keepdims=True)
    return numer / denom
Exemple #18
0
 def bprop(self):
     """Backward of exp: d/dx exp(x) = exp(x), scaled by the upstream grad."""
     g = self.x.out_grad
     ca.exp(self.x.out, out=g)
     g *= self.out_grad
Exemple #19
0
 def bprop(self, y_grad):
     """Softplus backward: sigmoid of the cached input times the upstream grad."""
     sig = 1.0 / (1.0 + ca.exp(-self._tmp_x))
     return sig * y_grad
Exemple #20
0
 def fprop(self, x):
     """Softplus activation; stashes the input for the backward pass."""
     self._tmp_x = x
     e = ca.exp(x)
     return ca.log(1.0 + e)
Exemple #21
0
 def bprop(self, y_grad):
     """Return sigmoid(cached x) * y_grad — the softplus derivative chain rule."""
     denom = 1.0 + ca.exp(-self._tmp_x)
     return 1.0 / denom * y_grad
Exemple #22
0
 def fprop(self):
     """Forward pass: out <- exp(x), reusing the output buffer."""
     ca.exp(self.x.out, self.out)
 def bprop(self):
     """Gradients of the Gaussian KL penalty.

     mu gradient:        mu * grad
     log_sigma gradient: 0.5 * (exp(log_sigma) - 1) * grad
     """
     ca.multiply(self.mu.array, self.grad_array, self.mu.grad_array)
     ls_grad = self.log_sigma.grad_array
     ca.exp(self.log_sigma.array, out=ls_grad)
     ls_grad -= 1
     ls_grad *= 0.5
     ls_grad *= self.grad_array
Exemple #24
0
 def fprop(self):
     """Elementwise exponential of x, stored in the preallocated array."""
     ca.exp(self.x.array, self.array)
Exemple #25
0
 def bprop(self):
     """Chain rule through exp: x_grad = exp(x) * upstream grad."""
     grad_buf = self.x.out_grad
     ca.exp(self.x.out, out=grad_buf)
     grad_buf *= self.out_grad
Exemple #26
0
 def bprop(self):
     """Backward of exp: scale the incoming gradient by exp(x)."""
     dx = self.x.grad_array
     ca.exp(self.x.array, out=dx)
     dx *= self.grad_array
Exemple #27
0
 def fprop(self):
     """Compute exp(x) elementwise into self.array."""
     ca.exp(self.x.array, out=self.array)
Exemple #28
0
def softmax(x):
    """Stable softmax over axis 1: subtract the row max before exponentiating."""
    z = ca.exp(x - ca.amax(x, axis=1, keepdims=True))
    row_sums = ca.sum(z, axis=1, keepdims=True)
    return z / row_sums
Exemple #29
0
 def bprop(self):
     """Backward pass of KL(q || N(0, I)).

     d/dmu        = mu * out_grad
     d/dlog_sigma = 0.5 * (exp(log_sigma) - 1) * out_grad
     """
     ca.multiply(self.mu.out, self.out_grad, self.mu.out_grad)
     d_ls = self.log_sigma.out_grad
     ca.exp(self.log_sigma.out, out=d_ls)
     d_ls -= 1
     d_ls *= 0.5
     d_ls *= self.out_grad
Exemple #30
0
 def bprop(self):
     """Propagate the gradient through exp: x_grad = exp(x) * grad."""
     ca.exp(self.x.array, out=self.x.grad_array)
     # In-place scale by the upstream gradient.
     self.x.grad_array *= self.grad_array