def test():
    l = 0
    correct = 0
    for i, (x, label) in enumerate(testloader):
        # Wrap the batch in our own Tensors; no gradients are needed for evaluation.
        x = Tensor(x.numpy(), requires_grad=False)
        label = Tensor(label.numpy(), requires_grad=False).reshape((x.size(0), 1, 1))
        x_ = model(x)
        # Build one-hot style targets of shape (batch, 1, OUTPUT_SIZE).
        x_label = Tensor((x.size(0), 1, OUTPUT_SIZE), requires_grad=False)
        x_label.put_(label, 2, 1.0)
        # Accuracy: compare the predicted class index against the label.
        preds = x_.argmax(2).reshape((x.size(0), 1, 1))
        correct += (preds == label).sum(axis=(0, 1, 2)).item()
        loss = criterion(x_, x_label, axis=(0, 1, 2))
        l += loss.item()
        optim.zero_grad()
    return l / len(testset), correct / len(testset)
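# A hedged sketch, not part of the original: the x_label construction above is assumed to
# behave like one-hot encoding, i.e. put_(indices, dim, value) writes `value` at the given
# class indices along `dim`. In plain numpy that would look roughly like this:
import numpy as np

def one_hot_3d(labels, num_classes):
    # labels: integer array of shape (batch,); returns floats of shape (batch, 1, num_classes)
    out = np.zeros((labels.shape[0], 1, num_classes), dtype=np.float32)
    out[np.arange(labels.shape[0]), 0, labels] = 1.0
    return out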
def test():
    l = 0
    correct = 0
    for i, (x, label) in enumerate(testloader):
        # Flatten each input to a vector of INPUT_SIZE before wrapping it; no gradients needed.
        x = Tensor(x.view(-1, INPUT_SIZE).numpy(), requires_grad=False)
        label = Tensor(label.numpy().reshape(label.size(0), 1), requires_grad=False)
        x_ = model(x)
        # Build one-hot style targets of shape (batch, OUTPUT_SIZE).
        x_label = Tensor((x_.shape[0], OUTPUT_SIZE), requires_grad=False)
        x_label.put_(label, 1, 1.0)
        # Accuracy: compare the predicted class index against the label.
        preds = x_.argmax(1).reshape((label.shape[0], 1))
        correct += (preds == label).sum().item()
        loss = criterion(x_, x_label, axis=(0, 1))
        l += loss.item()
    return l / len(testset), correct / len(testset)
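# A minimal usage sketch, not from the original: EPOCHS and train() are assumptions here,
# only illustrating how the average loss and accuracy returned by test() might be consumed.
EPOCHS = 10
for epoch in range(EPOCHS):
    train()                          # assumed per-epoch training routine
    test_loss, test_acc = test()     # average loss and accuracy over the test set
    print(f"epoch {epoch}: test loss {test_loss:.4f}, test accuracy {test_acc:.4f}")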