def test_numeric_gradient(self):
    """Analytic NLL gradient must agree with a finite-difference estimate.

    Uses random predictions/targets; the loss is evaluated first because
    `numeric_gradient` perturbs the last forward pass.
    """
    loss_fn = NegativeLogLikelihoodLoss()
    prediction = np.random.rand(2)
    target = np.random.rand(2)
    loss_fn.loss(prediction, target)
    finite_diff = loss_fn.numeric_gradient(prediction)
    analytic = loss_fn.dJdy_gradient(prediction, target)
    # The numeric gradient comes back as a Jacobian; its diagonal is the
    # element-wise derivative that dJdy_gradient returns.
    assert_almost_equal(np.diag(finite_diff), analytic, decimal=5)
def test_calc_delta(self):
    """Softmax + NLL must produce the same backward delta as CrossEntropyLoss.

    CrossEntropyLoss fuses softmax and NLL, so its gradient w.r.t. the raw
    logits should equal the delta obtained by back-propagating the NLL
    gradient through an explicit SoftMaxLayer.
    """
    softmax = SoftMaxLayer()
    net = Sequential([softmax])
    x = np.array([15.0, 10.0, 2.0])
    y = net.forward(x)
    self.assertEqual(y.shape, (3, ))

    # Loss computed on softmax probabilities.
    nll = NegativeLogLikelihoodLoss()
    t = np.array([0.0, 0.0, 1.0])
    self.assertEqual(y.shape, t.shape)
    J1 = nll.loss(y, t)
    self.assertEqual(J1.shape, (3, ))
    assert_almost_equal(J1, [0.0, 0.0, 13.0067176], decimal=5)

    # Same loss computed directly on the raw logits via the fused loss.
    cel = CrossEntropyLoss()
    t = np.array([0.0, 0.0, 1.0])
    J2 = cel.loss(x, t)
    self.assertEqual(J2.shape, (3, ))
    assert_almost_equal(J2, [0.0, 0.0, 13.0067176], decimal=5)

    # Path 1: NLL gradient pushed back through the explicit softmax layer.
    delta_in = -nll.dJdy_gradient(y, t)
    assert_almost_equal(delta_in, [0.0, 0.0, 445395.349996])
    delta_out1 = net.backward(delta_in)
    assert_almost_equal(delta_out1, [-0.9933049, -0.0066928, 0.9999978], decimal=5)

    # Path 2: fused cross-entropy gradient taken directly on the logits.
    delta_out2 = -cel.dJdy_gradient(x, t)
    assert_almost_equal(delta_out2, [-0.9933049, -0.0066928, 0.9999978], decimal=5)
def test_calc_loss(self):
    """NLL of a softmax output must match the known per-class reference values."""
    net = Sequential([SoftMaxLayer()])
    logits = np.array([15.0, 10.0, 2.0])
    probs = net.forward(logits)
    self.assertEqual(probs.shape, (3, ))

    target = np.array([0.0, 0.0, 1.0])
    self.assertEqual(probs.shape, target.shape)

    loss_fn = NegativeLogLikelihoodLoss()
    J = loss_fn.loss(probs, target)
    # Loss is element-wise: only the hot class contributes a non-zero term.
    self.assertEqual(J.shape, (3, ))
    assert_almost_equal(J, [0.0, 0.0, 13.0067176], decimal=5)