def test_not_keep_graph(self):
    x = chainer.Variable(numpy.array([1], numpy.float32))
    y, = functions.Sigmoid().apply((x,))
    reporter = chainer.Reporter()
    with self._scope(False):
        reporter.report({'y': y})
    # With keep_graph_on_report explicitly disabled, the reported
    # variable must be unchained from its creator.
    self.assertIsNone(reporter.observation['y'].creator)
def test_keep_graph_default(self):
    x = chainer.Variable(numpy.array([1], numpy.float32))
    y, = functions.Sigmoid().apply((x,))
    reporter = chainer.Reporter()
    with self._scope(None):
        reporter.report({'y': y})
    # The default configuration does not keep the graph, so the
    # creator is pruned here as well.
    self.assertIsNone(reporter.observation['y'].creator)
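
# Both tests rely on a `_scope` helper from the same test class. A minimal
# sketch of what it presumably looks like, assuming it simply wraps
# chainer.using_config: None means "leave the global default alone", any
# other value overrides keep_graph_on_report for the duration of the block.
import contextlib

import chainer


@contextlib.contextmanager
def _scope(self, flag):  # (method of the test case above)
    if flag is None:
        # Run under whatever the current global configuration says.
        yield
    else:
        with chainer.using_config('keep_graph_on_report', flag):
            yield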
def backward(self, inputs, grad_outputs):
    xp = cuda.get_array_module(*inputs)
    x, t = inputs
    gloss = grad_outputs[0]
    # Recompute sigmoid(x) and apply the sigmoid cross entropy
    # gradient: dL/dx = gloss * (sigmoid(x) - t).
    y, = f.Sigmoid(self.use_cudnn).forward((x,))
    gx = gloss * (y - t)
    # No gradient flows to the target t.
    return gx, None
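
# Why (y - t) is the entire gradient: for the sigmoid cross entropy loss
# L = -(t * log(s) + (1 - t) * log(1 - s)) with s = sigmoid(x), the chain
# rule collapses to dL/dx = s - t. A quick numerical sanity check of that
# identity in plain numpy (independent of the class above):
import numpy


def _sigmoid(x):
    return 1.0 / (1.0 + numpy.exp(-x))


def _loss(x, t):
    s = _sigmoid(x)
    return -(t * numpy.log(s) + (1 - t) * numpy.log(1 - s))


x = numpy.array([-2.0, 0.5, 3.0])
t = numpy.array([0.0, 1.0, 1.0])
eps = 1e-6
numeric = (_loss(x + eps, t) - _loss(x - eps, t)) / (2 * eps)
analytic = _sigmoid(x) - t
assert numpy.allclose(numeric, analytic, atol=1e-5)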
def __call__(self, x):
    self.flag_history.append(configuration.config.keep_graph_on_report)
    h = self.l1(x)
    if self.i == 0:
        # First call exercises the new-style FunctionNode interface.
        h, = functions.Sigmoid().apply((h,))
    else:
        # Later calls exercise the old-style Function interface.
        h = functions.Tanh()(h)
    h = self.l2(h)
    self.i += 1
    return h
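
# A minimal, self-contained sketch of a chain like the one above, assuming
# l1/l2 are plain Linear links of hypothetical sizes: flag_history records
# the keep_graph_on_report value that was active during each forward pass.
import numpy
import chainer
import chainer.links as L
from chainer import configuration, functions


class FlagRecorder(chainer.Chain):

    def __init__(self):
        super(FlagRecorder, self).__init__()
        with self.init_scope():
            self.l1 = L.Linear(3, 3)
            self.l2 = L.Linear(3, 1)
        self.flag_history = []

    def __call__(self, x):
        self.flag_history.append(configuration.config.keep_graph_on_report)
        return self.l2(functions.sigmoid(self.l1(x)))


model = FlagRecorder()
x = numpy.random.rand(4, 3).astype(numpy.float32)
with chainer.using_config('keep_graph_on_report', True):
    model(x)
with chainer.using_config('keep_graph_on_report', False):
    model(x)
assert model.flag_history == [True, False]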
def func(self, c_prev1, c_prev2, x1, x2):
    # Reference math (numpy version) that this chainer graph mirrors:
    #
    # self.a1 = numpy.tanh(a1)
    # self.i1 = _sigmoid(i1)
    # self.f1 = _sigmoid(f1)
    #
    # self.a2 = numpy.tanh(a2)
    # self.i2 = _sigmoid(i2)
    # self.f2 = _sigmoid(f2)
    #
    # self.o = _sigmoid(o1 + o2)
    # self.c = self.a1 * self.i1 + self.a2 * self.i2 + \
    #     self.f1 * c_prev1 + self.f2 * c_prev2
    #
    # h = self.o * numpy.tanh(self.c)
    #
    # Assumes the two-input SLSTM form implied by the reference math:
    # c_prev1/c_prev2 are the previous cell states and x1/x2 the stacked
    # gate inputs, one per branch.
    a1, i1, f1, o1 = chainer_extract_gates(x1)
    a2, i2, f2, o2 = chainer_extract_gates(x2)
    self.c = F.tanh(a1) * F.sigmoid(i1) + F.sigmoid(f1) * c_prev1 + \
        F.tanh(a2) * F.sigmoid(i2) + F.sigmoid(f2) * c_prev2
    self.o = F.sigmoid(o1 + o2)
    h = self.o * F.tanh(self.c)
    return h
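
# The hand-written forward above should agree with chainer's built-in
# F.slstm, which takes the same (c_prev1, c_prev2, x1, x2) arguments and
# returns the new cell and hidden states. A shape-level cross-check,
# assuming batch size 2 and 3 hidden units (so each gate input has
# 4 * 3 columns):
import numpy
import chainer.functions as F

c1 = numpy.random.rand(2, 3).astype(numpy.float32)
c2 = numpy.random.rand(2, 3).astype(numpy.float32)
x1 = numpy.random.rand(2, 12).astype(numpy.float32)
x2 = numpy.random.rand(2, 12).astype(numpy.float32)
c, h = F.slstm(c1, c2, x1, x2)
assert c.shape == (2, 3) and h.shape == (2, 3)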
def check_backward(self, x_data, y_grad, use_cudnn=True):
    # Old-style interface: the cuDNN preference is passed to the
    # function constructor.
    gradient_check.check_backward(
        functions.Sigmoid(use_cudnn), x_data, y_grad)
def check_backward(self, x_data, y_grad, use_cudnn='always'):
    # New-style interface: cuDNN use is controlled through the global
    # configuration instead of a constructor argument.
    with chainer.using_config('use_cudnn', use_cudnn):
        gradient_check.check_backward(
            functions.Sigmoid(), x_data, y_grad,
            **self.check_backward_options)
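
# A sketch of how such a check is typically driven, assuming the usual
# chainer.testing idiom of parameterizing over the cuDNN preference (the
# class name, shapes, and parameter values here are illustrative, not
# taken from the source):
import unittest

import numpy
import chainer
import chainer.functions as F
from chainer import gradient_check, testing


@testing.parameterize(
    {'use_cudnn': 'always'},
    {'use_cudnn': 'never'},
)
class TestSigmoidBackward(unittest.TestCase):

    def setUp(self):
        self.x = numpy.random.uniform(-1, 1, (3, 2)).astype(numpy.float32)
        self.gy = numpy.random.uniform(-1, 1, (3, 2)).astype(numpy.float32)

    def test_backward_cpu(self):
        with chainer.using_config('use_cudnn', self.use_cudnn):
            gradient_check.check_backward(F.sigmoid, self.x, self.gy)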