import numpy as np
import chainer.functions as CF
import dezero.functions as F


def test_forward1(self):
    # DeZero's simple implementation should match Chainer's reference op.
    x = np.array([[-1, 0, 1, 2], [2, 0, 1, -1]], np.float32)
    t = np.array([3, 0]).astype(np.int32)
    y = F.softmax_cross_entropy_simple(x, t)
    y2 = CF.softmax_cross_entropy(x, t)
    res = np.allclose(y.data, y2.data)
    self.assertTrue(res)
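For reference, the value these tests compare can be reproduced directly in NumPy. This is a minimal sketch of mean softmax cross entropy over a batch, written from the definition; it is not DeZero's or Chainer's actual implementation, and softmax_cross_entropy_ref is an illustrative name:

import numpy as np

def softmax_cross_entropy_ref(x, t):
    # Numerically stable log-softmax: shift by the row-wise max first.
    x = x - x.max(axis=1, keepdims=True)
    log_p = x - np.log(np.exp(x).sum(axis=1, keepdims=True))
    # Negative log-probability of the correct class, averaged over the batch.
    return -log_p[np.arange(len(t)), t].mean()

x = np.array([[-1, 0, 1, 2], [2, 0, 1, -1]], np.float32)
t = np.array([3, 0], np.int32)
print(softmax_cross_entropy_ref(x, t))  # should match both libraries above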
Example #2
import numpy as np
from dezero import Variable, as_variable
import dezero.functions as F
from dezero.models import MLP


def softmax1d(x):
    # Naive softmax for a 1-D input: exponentiate, then normalize.
    x = as_variable(x)
    y = F.exp(x)
    sum_y = F.sum(y)
    return y / sum_y


# MLP with one hidden layer of 10 units and 3 output classes.
model = MLP((10, 3))

x = Variable(np.array([[0.2, -0.4]]))
y = model(x)
p = softmax1d(y)  # probabilities for the single sample
print(y)
print(p)

# Batch of four samples with integer class labels.
x = np.array([[0.2, -0.4], [0.3, 0.5], [1.3, -3.2], [2.1, 0.3]])
t = np.array([2, 0, 1, 0])

y = model(x)
p = F.softmax_simple(y)  # row-wise softmax over the batch
print(y)
print(p)

loss = F.softmax_cross_entropy_simple(y, t)
loss.backward()
print(loss)
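One caveat worth noting: softmax1d above exponentiates the raw logits, so large inputs can overflow exp. A common fix is to subtract a constant (the maximum) first, which cancels in the ratio and leaves the result unchanged. A sketch, reusing the imports above; softmax1d_stable is my name, not part of DeZero:

def softmax1d_stable(x):
    x = as_variable(x)
    # Subtracting a constant shifts the logits but cancels in the ratio,
    # so the output is identical while exp() stays in a safe range.
    c = x.data.max()  # plain float; no gradient flows through the shift
    y = F.exp(x - c)
    return y / F.sum(y)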
import numpy as np
from dezero import Variable
import dezero.functions as F
from dezero.utils import gradient_check


def test_backward3(self):
    # Gradient check on random logits over 10 classes.
    N, CLS_NUM = 100, 10
    x = np.random.randn(N, CLS_NUM)
    t = np.random.randint(0, CLS_NUM, (N,))
    f = lambda x: F.softmax_cross_entropy_simple(x, t)
    self.assertTrue(gradient_check(f, x))


def test_backward1(self):
    x = np.array([[-1, 0, 1, 2], [2, 0, 1, -1]], np.float32)
    t = np.array([3, 0]).astype(np.int32)
    f = lambda x: F.softmax_cross_entropy_simple(x, Variable(t))
    self.assertTrue(gradient_check(f, x))
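gradient_check compares the gradient from backprop against a numerical estimate. The idea is central differences: perturb each input element by ±eps and measure the slope of the loss. A self-contained NumPy sketch of that idea (numerical_grad here is illustrative, not DeZero's exact utility):

import numpy as np

def numerical_grad(f, x, eps=1e-4):
    # Central-difference estimate of df/dx for a scalar-valued f.
    grad = np.zeros_like(x)
    for idx in np.ndindex(*x.shape):
        tmp = x[idx]
        x[idx] = tmp + eps
        y1 = f(x)
        x[idx] = tmp - eps
        y2 = f(x)
        grad[idx] = (y1 - y2) / (2 * eps)
        x[idx] = tmp  # restore the original value
    return grad

# Sanity check on f(x) = sum(x**2), whose gradient is 2*x.
x = np.random.randn(3, 4)
print(np.allclose(numerical_grad(lambda v: (v ** 2).sum(), x), 2 * x))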
Example #5
import math
import numpy as np
import dezero
import dezero.functions as F
from dezero import optimizers
from dezero.models import MLP

# Hyperparameters (max_epoch was missing in the original snippet).
max_epoch = 300
batch_size = 30
hidden_size = 10
lr = 1.0

x, t = dezero.datasets.get_spiral(train=True)
model = MLP((hidden_size, 3))
optimizer = optimizers.SGD(lr).setup(model)

data_size = len(x)
max_iter = math.ceil(data_size / batch_size)

for epoch in range(max_epoch):
    # Shuffle the data indices once per epoch.
    index = np.random.permutation(data_size)
    sum_loss = 0

    for i in range(max_iter):
        # Slice out one mini-batch.
        batch_index = index[i * batch_size:(i + 1) * batch_size]
        batch_x = x[batch_index]
        batch_t = t[batch_index]

        y = model(batch_x)
        loss = F.softmax_cross_entropy_simple(y, batch_t)
        model.cleargrads()
        loss.backward()
        optimizer.update()

        sum_loss += float(loss.data) * len(batch_t)

    # Report the average loss over the epoch.
    avg_loss = sum_loss / data_size
    print('epoch %d, loss %.2f' % (epoch + 1, avg_loss))
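After training, the natural next step is to check the model on the held-out split. A short sketch, assuming get_spiral(train=False) returns the test split with the same (x, t) layout as above; accuracy is computed with plain NumPy rather than any DeZero helper:

x_test, t_test = dezero.datasets.get_spiral(train=False)
with dezero.no_grad():  # no graph construction needed at evaluation time
    y_test = model(x_test)
pred = y_test.data.argmax(axis=1)  # predicted class per sample
print('test accuracy: %.3f' % (pred == t_test).mean())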