def test_forward3(self):
     """softmax_simple should match Chainer's softmax on a random 3-D float32 array."""
     np.random.seed(0)  # fixed seed so the comparison is reproducible
     data = np.random.rand(10, 10, 10).astype('f')
     expected = CF.softmax(data, axis=1)
     actual = F.softmax_simple(Variable(data))
     self.assertTrue(np.allclose(actual.data, expected.data))
# Example #2
# 0
import numpy as np

from dezero import Variable, as_variable
import dezero.functions as F
from dezero.models import MLP


def softmax1d(x):
    """Return exp(x) normalized by its sum, as a dezero Variable.

    NOTE(review): no max-subtraction is done, so large inputs may
    overflow in exp — matches the original behavior.
    """
    v = as_variable(x)
    exp_v = F.exp(v)
    return exp_v / F.sum(exp_v)


# Demo: run a small MLP and turn its outputs into probabilities.
# MLP((10, 3)): one hidden layer of 10 units, 3 output classes
# (input size is not given here; presumably inferred from the first
# forward pass — TODO confirm against dezero.models.MLP).
model = MLP((10, 3))

# Single 2-feature sample; softmax1d normalizes the 3 raw scores.
x = Variable(np.array([[0.2, -0.4]]))
y = model(x)
p = softmax1d(y)
print(y)
print(p)

# A 4-sample batch with integer class labels for each row.
x = np.array([[0.2, -0.4], [0.3, 0.5], [1.3, -3.2], [2.1, 0.3]])
t = np.array([2, 0, 1, 0])

# Batched softmax over the model's raw scores.
y = model(x)
p = F.softmax_simple(y)
print(y)
print(p)

# Cross-entropy against the labels, then backprop through the model.
loss = F.softmax_cross_entropy_simple(y, t)
loss.backward()
print(loss)
 def test_forward1(self):
     """softmax_simple should agree with Chainer's softmax on a small 2-D array."""
     data = np.array([[0, 1, 2], [0, 2, 4]], np.float32)
     expected = CF.softmax(data, axis=1)
     actual = F.softmax_simple(Variable(data))
     self.assertTrue(np.allclose(actual.data, expected.data))
 def test_backward3(self):
     """Numerical gradient check of softmax_simple on a random 3-D input."""
     np.random.seed(0)  # fixed seed for a reproducible check
     data = np.random.rand(10, 10, 10)
     self.assertTrue(gradient_check(lambda v: F.softmax_simple(v, axis=1), data))
 def test_backward1(self):
     """Numerical gradient check of softmax_simple on a small 2-D input."""
     data = np.array([[0, 1, 2], [0, 2, 4]])
     self.assertTrue(gradient_check(lambda v: F.softmax_simple(v, axis=1), data))