def test_backward1(self):
    """Numerically verify log_softmax gradients on a small fixed batch."""
    data = np.array([[-1, 0, 1, 2], [2, 0, 1, -1]])
    # gradient_check applies the callable directly; no lambda wrapper needed.
    self.assertTrue(gradient_check(F.log_softmax, data))
 def test_backward2(self):
     """Numerically verify log_softmax gradients on a random 10x10 input."""
     inputs = np.random.randn(10, 10)
     # Pass the function object itself rather than wrapping it in a lambda.
     self.assertTrue(gradient_check(F.log_softmax, inputs))
 def test_forward1(self):
     """Forward log_softmax output matches the Chainer reference implementation."""
     x = np.array([[-1, 0, 1, 2], [2, 0, 1, -1]], np.float32)
     actual = F.log_softmax(x)
     expected = CF.log_softmax(x)
     # Compare element-wise within floating-point tolerance.
     self.assertTrue(np.allclose(actual.data, expected.data))