def forward(self, x):
    """Run one step of the recurrent cell and return the new hidden state."""
    if self.h is None:
        # Lazily allocate the hidden state the first time a batch arrives,
        # sized to match the incoming batch dimension.
        n = x.data.shape[0]
        self.h = Variable(np.zeros((n, self.hidden_size)))
    pre_activation = self.l1(x) + self.l2(self.h)
    self.h = F.tanh(pre_activation)
    return self.h
def forward(x):
    """Forward pass: embed -> tanh -> linear -> sigmoid.

    Relies on the module-level ``model`` plus the free functions
    ``tanh`` and ``sigmoid`` defined elsewhere in this file.
    """
    h = model.embed(x)
    h = tanh(h)
    h = model.linear(h)
    return sigmoid(h)
def test_forward(self):
    # F.tanh applied to a random scalar Variable should agree elementwise
    # with NumPy's reference tanh.
    x = Variable(np.random.rand(1))
    actual = F.tanh(x).data
    expected = np.tanh(x.data)
    self.assertTrue(np.allclose(actual, expected))
def sigmoid(x):
    """Logistic sigmoid expressed through tanh.

    Uses the identity sigmoid(x) = 0.5 * (tanh(x / 2) + 1).  The original
    omitted the halving of the argument, which made this compute
    sigmoid(2 * x) instead of sigmoid(x).
    """
    return 0.5 * (tanh(x * 0.5) + 1)
import numpy as np
import heapq
import matplotlib.pyplot as plt

import chainer0
from chainer0 import Function, Variable
import chainer0.functions as F
from chainer0.computational_graph import get_dot_graph

x = Variable(np.array([1.0]), name='x')
y = F.tanh(x)
y.backward()

# Repeated double backprop: each iteration differentiates the previous
# gradient graph, so gx accumulates a higher-order derivative of tanh.
for _ in range(3):
    gx = x.grad_var
    x.cleargrad()
    gx.backward()

# NOTE(review): the collapsed source is ambiguous about whether the dump
# happens per-iteration or once at the end; assuming a single dump of the
# final gradient graph — confirm against the original layout.
txt = get_dot_graph(gx)
print(txt)
def forward(self, x):
    """Three-layer MLP: two tanh-activated hidden layers, linear output."""
    h1 = F.tanh(self.l1(x))
    h2 = F.tanh(self.l2(h1))
    return self.l3(h2)