def test_calc_delta(self):
    """Check that NLL-after-softmax and direct cross-entropy agree on loss and gradient."""
    softmax = SoftMaxLayer()
    net = Sequential([softmax])
    x = np.array([15.0, 10.0, 2.0])
    y = net.forward(x)
    self.assertEqual(y.shape, (3,))

    # Loss path 1: softmax output fed into negative log-likelihood.
    nll = NegativeLogLikelihoodLoss()
    t = np.array([0.0, 0.0, 1.0])
    self.assertEqual(y.shape, t.shape)
    J1 = nll.loss(y, t)
    self.assertEqual(J1.shape, (3,))
    assert_almost_equal(J1, [0.0, 0.0, 13.0067176], decimal=5)

    # Loss path 2: cross-entropy applied directly to the pre-softmax input.
    cel = CrossEntropyLoss()
    t = np.array([0.0, 0.0, 1.0])
    J2 = cel.loss(x, t)
    self.assertEqual(J2.shape, (3,))
    assert_almost_equal(J2, [0.0, 0.0, 13.0067176], decimal=5)

    # Gradient path 1: NLL gradient propagated back through the softmax layer.
    delta_in = -nll.dJdy_gradient(y, t)
    assert_almost_equal(delta_in, [0.0, 0.0, 445395.349996])
    delta_out1 = net.backward(delta_in)
    assert_almost_equal(delta_out1, [-0.9933049, -0.0066928, 0.9999978], decimal=5)

    # Gradient path 2: cross-entropy gradient w.r.t. x — must match path 1.
    delta_out2 = -cel.dJdy_gradient(x, t)
    assert_almost_equal(delta_out2, [-0.9933049, -0.0066928, 0.9999978], decimal=5)
def test_variable_dict(self):
    """A graph fed by a list and the same graph behind VariableDictLayer must match."""
    xv = np.array([0.5, 0.1, 0.5])
    yv = np.array([0.2, 0.4, 0.5])
    valin = ['x', 'y']
    x = Input(valin, 'x')
    y = Input(valin, 'y')
    xyv = [xv, yv]

    Wxv = np.array([[2.1, 3.1, 2.2], [2.2, 3.2, 4.2], [2.2, 5.2, 4.2]])
    Wyv = np.array([[2.1, 2.1, 2.2], [1.6, 1.2, 6.2], [2.1, 3.1, 2.2]])
    Wx = MWeight(3, 3, weights=Wxv)
    Wy = MWeight(3, 3, weights=Wyv)

    # net takes inputs as a positional list; netDict accepts a {'x': .., 'y': ..} dict.
    net = ComputationalGraphLayer(Sigmoid(Wx.dot(x)) + Tanh(Wy.dot(y)))
    netDict = Sequential(VariableDictLayer(valin), net)

    # Forward/backward through the list-input graph, checked against numeric gradients.
    out = net.forward(xyv)
    self.assertEqual(out.shape, (3,))
    assert_almost_equal(out, sigmoid(Wxv.dot(xv)) + np.tanh(Wyv.dot(yv)))
    dJdy = net.backward(np.array([1.0, 1.0, 1.0]))
    self.assertEqual(len(dJdy), 2)
    for ind, _ in enumerate(dJdy):
        self.assertEqual(dJdy[ind].shape, xyv[ind].shape)
        assert_almost_equal(dJdy[ind], np.sum(net.numeric_gradient(xyv)[ind], 0))

    # Same checks through the dict-input wrapper; keys map back to list positions.
    auxdict = {'x': 0, 'y': 1}
    out = netDict.forward({'x': xv, 'y': yv})
    self.assertEqual(out.shape, (3,))
    assert_almost_equal(out, sigmoid(Wxv.dot(xv)) + np.tanh(Wyv.dot(yv)))
    dJdy = netDict.backward(np.array([1.0, 1.0, 1.0]))
    self.assertEqual(len(dJdy), 2)
    for key in dJdy:
        self.assertEqual(dJdy[key].shape, xyv[auxdict[key]].shape)
        assert_almost_equal(
            dJdy[key], np.sum(net.numeric_gradient(xyv)[auxdict[key]], 0))
def test_SigmoidLayer(self):
    """Sigmoid at 0 is 0.5 forward and has derivative 0.25 backward."""
    net = Sequential([SigmoidLayer()])

    out = net.forward(np.array([0]))
    self.assertEqual(out.shape, (1,))
    assert_array_equal(out, np.array([0.5]))

    grad = net.backward(np.array([1]))
    self.assertEqual(grad.shape, (1,))
    assert_array_equal(grad, np.array([0.25]))
def test_LinearLayer(self):
    """Forward/backward through one and then two stacked all-ones linear layers."""
    net = Sequential([LinearLayer(5, 6, 'ones')])
    sample = np.array([2.0, 1.0, 2.0, 3.0, 4.0])

    # Ones weights + ones bias: each output is sum(inputs) + 1 = 13.
    out = net.forward(sample)
    self.assertEqual(out.shape, (6,))
    assert_array_equal(out, np.array([13.0, 13.0, 13.0, 13.0, 13.0, 13.0]))

    # Stack a second layer: each output is 6 * 13 + 1 = 79.
    net.add(LinearLayer(6, 2, 'ones'))
    out = net.forward(sample)
    self.assertEqual(out.shape, (2,))
    assert_array_equal(out, np.array([79.0, 79.0]))

    # Backprop of [2, 3]: every input receives (2 + 3) * 6 = 30.
    grad = net.backward(np.array([2.0, 3.0]))
    self.assertEqual(grad.shape, (5,))
    assert_array_equal(grad, np.array([30.0, 30.0, 30.0, 30.0, 30.0]))