# Example #1
def eval_model(epoch):
    """Run the model over the test set and print accuracy and mean loss.

    Relies on module-level `model`, `test_loader`, and `num_epochs`.
    `epoch` is the zero-based epoch index, used only for the report line.
    """
    n_correct = 0
    n_seen = 0
    loss_sum = 0
    for batch_images, batch_labels in test_loader:
        inputs = Variable(batch_images)
        targets = Variable(batch_labels)
        logits = model(inputs)
        # Predicted class = arg-max over the class dimension of the raw scores.
        guesses = np.argmax(logits.data, 1)
        # Accumulate the summed (not averaged) batch loss so it can be
        # normalised by the total sample count at the end.
        loss_sum += F.cross_entropy(logits, targets, size_average=False).data
        n_seen += batch_labels.shape[0]
        n_correct += (guesses == batch_labels).sum()

    print('\rEpoch [{}/{}], Test Accuracy: {}%  Loss: {:.4f}'.format(
        epoch + 1, num_epochs, 100 * n_correct / n_seen, loss_sum / n_seen))
# Example #2
        # NOTE(review): fragment — the enclosing `def` line, the
        # `for images, labels in test_loader:` header, and the
        # correct/total/loss initialisation were cut off by the
        # example extraction; see Example #1 for the full function.
        images = Variable(images)
        labels_var = Variable(labels)
        outputs = model(images)
        # Predicted class index per sample (arg-max over class scores).
        predicted = np.argmax(outputs.data, 1)
        # Summed (not averaged) batch loss; normalised by `total` below.
        loss += F.cross_entropy(outputs, labels_var, size_average=False).data
        total += labels.shape[0]
        correct += (predicted == labels).sum()

    print('\rEpoch [{}/{}], Test Accuracy: {}%  Loss: {:.4f}'.format(
        epoch + 1, num_epochs, 100 * correct / total, loss / total))


# Main training loop: one full pass over train_loader per epoch,
# followed by a test-set evaluation via eval_model().
for epoch in range(num_epochs):
    model.train()
    for step, (batch_x, batch_y) in enumerate(train_loader):
        # NOTE(review): requires_grad=True on the inputs matches the
        # original example — presumably what this toy autograd expects;
        # confirm whether input gradients are actually needed.
        batch_x = Variable(batch_x, requires_grad=True)
        batch_y = Variable(batch_y)

        # Forward pass: class scores and batch loss.
        logits = model(batch_x)
        loss = F.cross_entropy(logits, batch_y)

        # Backward pass: clear stale gradients, backprop, apply the update.
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()

        print('\rEpoch [{}/{}], Step [{}/{}], Loss: {:.4f}'.format(
            epoch + 1, num_epochs, step + 1, len(train_loader), loss.data),
              end=' ')
    eval_model(epoch)
# Example #3
import bintorch
from bintorch.autograd import Variable
import bintorch.nn.functional as F
import autograd.numpy as np

# Minimal bintorch autograd demo: forward a tiny graph through
# cross-entropy, backpropagate, and inspect one leaf's gradient.
labels = Variable(np.array((1, 3, 4, 3, 3)), requires_grad=False)
constant = Variable(np.zeros((5, 5)), requires_grad=False)

left = Variable(np.ones((5, 5)), requires_grad=True)
right = Variable(np.ones((5, 5)), requires_grad=True)

# Sum mixes trainable leaves with a non-trainable zero term;
# gradients should flow only to `left` and `right`.
scores = left + right + constant

loss = F.cross_entropy(scores, labels)

loss.backward()

# Scalar loss value, then the gradient w.r.t. one trainable leaf.
print(loss.data)

print(left.grad)