# Example #1
def evaluate(model, data, batch_size=100, num_classes=5):
    """Compute the mean loss and accuracy of ``model`` over a dataset.

    Iterates over the data in mini-batches and accumulates per-batch loss
    and accuracy *weighted by batch length*, so the final division by the
    dataset size yields exact dataset-wide means even when the last batch
    is shorter than ``batch_size``.

    Parameters
    ----------
    model : object exposing ``forward(x)`` that returns class scores/logits.
    data : mapping with keys ``'x'`` (inputs) and ``'y'`` (integer labels).
    batch_size : int, evaluation mini-batch size (default 100, the
        previously hard-coded value).
    num_classes : int, number of label classes for one-hot encoding
        (default 5, the previously hard-coded value).

    Returns
    -------
    tuple : (mean_loss, mean_accuracy) over the whole dataset.

    Raises
    ------
    ValueError
        If the dataset is empty (previously a ZeroDivisionError).
    """
    x_data = data['x']
    y_data = data['y']

    size = len(x_data)
    if size == 0:
        raise ValueError("evaluate() received an empty dataset")

    correct = 0
    loss_value = 0
    loss = SoftmaxCrossEntropyLoss('loss')
    for start_idx in range(0, size, batch_size):
        end_idx = min(start_idx + batch_size, size)
        x = np.array(x_data[start_idx:end_idx])
        y = y_data[start_idx:end_idx]

        ans = model.forward(x)
        output = softmax(ans)

        # Weight each batch by its length so dividing by `size` below gives
        # the exact dataset mean (the final batch may be short).
        loss_value += len(y) * loss.forward(ans, onehot_encoding(y, num_classes))
        correct += len(y) * calculate_acc(output, y)

    return loss_value / size, correct / size
# Example #2
    # Training loop fragment: the enclosing function/scope (providing `epoch`,
    # `model`, `loss`, `size`, `batch_size`, `x_data`, `y_data`, `val_data`,
    # `global_step`, `logs`) is defined outside this excerpt.
    for i in range(epoch):

        for start_idx in range(0, size, batch_size):
            end_idx = min(start_idx + batch_size, size)
            x = np.array(x_data[start_idx:end_idx])
            label = y_data[start_idx:end_idx]
            # One-hot encode integer labels into 5 classes for the loss.
            y = onehot_encoding(label, 5)

            # NOTE(review): redundant — `x` is already an ndarray from the
            # np.array() call above.
            x = np.array(x)

            # NOTE(review): this initialization is dead code — the values are
            # immediately overwritten by evaluate() on the next line.
            val_loss, val_acc = 0, 0
            # NOTE(review): running a full validation pass on EVERY training
            # step is expensive; presumably intended per-epoch — confirm.
            val_loss, val_acc = evaluate(model, val_data)

            # Forward pass and training metrics on the current batch.
            # NOTE(review): no backward/update call is visible in this
            # excerpt — presumably it occurs in lines outside this view.
            ans = model.forward(x)
            train_acc = calculate_acc(softmax(ans), label)
            train_loss = loss.forward(ans, y)

            # Collect metrics for this step; `global_step` is not visibly
            # incremented in this excerpt — verify it advances elsewhere.
            log_dict = {
                'step': global_step,
                'train_loss': train_loss,
                'train_acc': train_acc,
                'val_loss': val_loss,
                'val_acc': val_acc
            }

            logs.append(log_dict)

            # Per-step console progress report.
            msg = 'epoch: %d  steps: %d \n train_loss: %.3f   train_acc: %.3f \n valid_loss: %.3f   valid_acc: %.3f' % (
                i, global_step, train_loss, train_acc, val_loss, val_acc)
            print(msg)