def test_LinearSoftmax(self):
    """Softmax stacked on a single-output Linear must produce exactly 1.

    Softmax over a single logit is always 1 regardless of the weights,
    so this pins the layer-composition plumbing rather than the math.
    """
    model = Seq()
    # Use 'ones' initialization for determinism, consistent with the
    # sibling tests (test_LinearSigmoid, test_Linear); the assertion
    # outcome is the same either way.
    model.add(Linear(2, 1, initialize='ones'))
    model.add(Softmax())
    data = np.array([2., 3.])
    out = model.forward(data)
    self.assertEqual(out, 1.)
def test_LinearSigmoid(self):
    """Sigmoid saturates to ~1 for the large pre-activation built here.

    With all-ones weights the Linear pre-activation is 2 + 3 + bias,
    and sigmoid of that rounds to 1.00 at two decimal places.
    """
    net = Seq()
    net.add(Linear(2, 1, initialize='ones'))
    net.add(Sigmoid())
    x = np.array([2., 3.])
    y = net.forward(x)
    self.assertEqual(round(y, 2), 1.)
def test_LinearLayerNumericalGradientCheck(self):
    """Analytic backward gradient must agree with a numerical estimate.

    Runs forward on a random 3-vector, then backpropagates a ones
    upstream gradient and compares against finite differences
    (summed over the two output rows to match the backward shape).
    """
    x = np.random.rand(3)
    net = Seq()
    net.add(Linear(3, 2, initialize='ones'))
    # Numerical estimate first: calc() invokes forward() internally,
    # which also primes any cached state backward() relies on.
    estimated = numerical_gradient.calc(net.forward, x)
    analytic = net.backward(np.array([1, 1]))
    estimated = np.sum(estimated, axis=0)
    numerical_gradient.assert_are_similar(analytic, estimated)
def test_Linear(self):
    """Linear with all-ones weights (plus bias) maps [2, 2] to 5."""
    net = Seq()
    net.add(Linear(2, 1, initialize='ones'))
    # 2*1 + 2*1 + 1 (bias) = 5
    y = net.forward(np.array([2., 2.]))
    self.assertEqual(y, np.array([5]))