Example no. 1
0
  def test_grad_ob_aux_return(self):
    """`bm.grad` on a Base object with both `has_aux` and `return_value`.

    Checks the triple (grads, loss, aux) for a list of grad_vars and for a
    single grad_var.
    """
    class Target(bp.Base):
      def __init__(self):
        super(Target, self).__init__()
        self.a = bm.TrainVar(bm.ones(10))
        self.b = bm.TrainVar(bm.random.randn(10))
        self.c = bm.TrainVar(bm.random.uniform(size=10))

      def __call__(self):
        loss = bm.sum(self.a + self.b + self.c)
        aux = (bm.sin(100), bm.exp(0.1))
        return loss, aux

    # Seed before construction: __init__ draws random values for b and c.
    bm.random.seed(0)
    model = Target()
    grad_fn = bm.grad(model,
                      grad_vars=[model.a, model.b],
                      dyn_vars=model.vars(),
                      has_aux=True,
                      return_value=True)
    gradients, loss, aux = grad_fn()
    # d(loss)/d(var) is 1 everywhere for a plain sum.
    assert all((g == 1.).all() for g in gradients)
    assert loss == bm.sum(model.a + model.b + model.c)
    assert aux[0] == bm.sin(100)
    assert aux[1] == bm.exp(0.1)

    # Same check with a single grad_var instead of a list.
    model = Target()
    grad_fn = bm.grad(model, grad_vars=model.a, dyn_vars=model.vars(),
                      has_aux=True, return_value=True)
    gradient, loss, aux = grad_fn()
    assert (gradient == 1.).all()
    assert loss == bm.sum(model.a + model.b + model.c)
    assert aux[0] == bm.sin(100)
    assert aux[1] == bm.exp(0.1)
Example no. 2
0
 def derivative(self, u, t, Iext):
     """Right-hand side du/dt: divisively-normalized rates feed back through
     the connection matrix; membrane relaxes with time constant ``self.tau``.
     """
     squared = bm.square(u)
     # Divisive normalization of the firing rate.
     rate = squared / (1.0 + self.k * bm.sum(squared))
     recurrent = bm.dot(self.conn_mat, rate)
     return (-u + recurrent + Iext) / self.tau
Example no. 3
0
 def update(self, _t, _dt):
   """One Euler step: refresh normalized firing rates, integrate the membrane
   potential with the recurrent + external input, then clear the input buffer.
   """
   sq = bm.square(self.u)
   # Divisive normalization of the firing rate.
   self.r.value = sq / (1.0 + self.k * bm.sum(sq))
   recurrent = bm.dot(self.conn_mat, self.r)
   du = (-self.u + recurrent + self.input) / self.tau
   self.u.value = self.u + du * _dt
   # External input is consumed each step.
   self.input[:] = 0.
Example no. 4
0
 def update(self, _t, _dt):
     """One Euler step where the recurrent interaction is computed as a 2-D
     circular convolution via FFT, then the input buffer is cleared.
     """
     sq = bm.square(self.u)
     # Divisive normalization of the firing rate.
     self.r.value = sq / (1.0 + self.k * bm.sum(sq))
     # Convolution theorem: multiply in frequency space, transform back.
     rate_ft = bm.fft.fft2(self.r)
     kernel_ft = bm.fft.fft2(self.conn_mat)
     interaction = bm.real(bm.fft.ifft2(rate_ft * kernel_ft))
     du = (-self.u + self.input + interaction) / self.tau
     self.u.value = self.u + du * _dt
     # External input is consumed each step.
     self.input[:] = 0.
Example no. 5
0
  def test_grad_pure_func_return1(self):
    """`bm.grad` with `return_value=True` on a plain function yields the pair
    (gradients, loss value)."""
    def loss_fn(a, b, c):
      return bm.sum(a + b + c)

    bm.random.seed(1)
    ones = bm.ones(10)
    gauss = bm.random.randn(10)
    unif = bm.random.uniform(size=10)
    grad_fn = bm.grad(loss_fn, return_value=True)
    gradients, value = grad_fn(ones, gauss, unif)
    # d(sum)/da is 1 everywhere.
    assert (gradients == 1.).all()
    assert value == bm.sum(ones + gauss + unif)
Example no. 6
0
  def test_grad_func_return_aux1(self):
    """`bm.grad` with both `return_value` and `has_aux` on a plain function
    yields the triple (gradients, loss value, aux data)."""
    def loss_fn(a, b, c):
      aux_data = (bm.sin(100), bm.exp(0.1))
      return bm.sum(a + b + c), aux_data

    bm.random.seed(1)
    ones = bm.ones(10)
    gauss = bm.random.randn(10)
    unif = bm.random.uniform(size=10)
    grad_fn = bm.grad(loss_fn, return_value=True, has_aux=True)
    gradients, value, aux = grad_fn(ones, gauss, unif)
    # d(sum)/da is 1 everywhere.
    assert (gradients == 1.).all()
    assert value == bm.sum(ones + gauss + unif)
    assert aux[0] == bm.sin(100)
    assert aux[1] == bm.exp(0.1)
Example no. 7
0
 def __call__(self):
   """Scalar loss: the sum of the three trainable variables."""
   total = self.a + self.b + self.c
   return bm.sum(total)
Example no. 8
0
 def call(a, b, c):
   """Return (loss, aux): the summed inputs plus two fixed auxiliary values."""
   aux = (bm.sin(100), bm.exp(0.1))
   return bm.sum(a + b + c), aux
Example no. 9
0
    # Loss function: sum of the three array arguments.
    def call(a, b, c): return bm.sum(a + b + c)

    # Seed BrainPy's RNG for reproducible random inputs (snippet truncated here).
    bm.random.seed(1)
Example no. 10
0
 def __call__(self, d):
   """Return (loss, aux): trainable vars plus twice the external input `d`,
   with two fixed auxiliary values."""
   loss = bm.sum(self.a + self.b + self.c + 2 * d)
   aux = (bm.sin(100), bm.exp(0.1))
   return loss, aux
Example no. 11
0
 def __call__(self, d):
   """Scalar loss: trainable vars plus twice the external input `d`."""
   total = self.a + self.b + self.c + 2 * d
   return bm.sum(total)