Example #1
0
# Load the id -> name label mapping and the dataset, then split off a test set.
labels = label_names()
dataset = get_dataset_as_torch_dataset()

# Hold out 500 examples for testing; the remainder is used for training.
trainset, testset = torch.utils.data.random_split(dataset, [len(dataset) - 500, 500])

trainloader = torch.utils.data.DataLoader(trainset, batch_size=20, shuffle=True)
# BUG FIX: the test loader previously iterated `trainset`, so "test" metrics were
# computed on training data. It must wrap `testset`. Shuffling adds nothing during
# evaluation (batch_size=500 covers the whole test split), so keep it deterministic.
testloader = torch.utils.data.DataLoader(testset, batch_size=500, shuffle=False)

# Human-readable class names, in label-id order (assumes `labels` maps id -> name).
classes = tuple(labels.values())

model = SimpleModel()
loss_fn = torch.nn.CrossEntropyLoss()
# NOTE(review): `learning_rate` is never used — the optimizer below is built with
# lr=1e-2. Kept for backward compatibility; confirm which rate is intended.
learning_rate = 1e-4
# optimizer = torch.optim.SGD(model.parameters(), lr=1e-2, momentum=0.9)
optimizer = torch.optim.Adagrad(model.parameters(), lr=1e-2, lr_decay=1e-5)


for epoch in range(EPOCHS):  # loop over the dataset multiple times
    print(f"EPOCH {epoch + 1}/{EPOCHS}", end='')

    # Per-epoch accumulators: summed batch loss and correct-prediction count.
    running_loss = 0.0
    correct_train = 0
    for i, data in enumerate(trainloader, 0):
        # get the inputs; data is a list of [inputs, labels]
        # NOTE(review): this rebinds the module-level `labels` dict (defined above)
        # inside the loop — harmless here but easy to trip over later.
        inputs, labels = data

        # zero the parameter gradients
        optimizer.zero_grad()

        # forward + backward + optimize
Example #2
0
# Preview one batch of images with their labels.
# BUG FIX: `dataiter.next()` is the Python-2-style iterator method, which does not
# exist in Python 3 (and was removed from PyTorch's DataLoader iterator); the
# built-in next() is the correct call.
images, labels = next(dataiter)
# 0.1307 / 0.3081 are presumably the normalization mean/std being passed so the
# grid can be un-normalized for display — TODO confirm against imshow()'s signature.
imshow(torchvision.utils.make_grid(images), 0.1307, 0.3081)
print(labels)

# Get Device: prefer CUDA when available, otherwise fall back to CPU.
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

# Model
from models import SimpleModel
model = SimpleModel().to(device)

# Loss function
criterion = nn.CrossEntropyLoss()

# Optimizer
optimizer = optim.SGD(model.parameters(), lr=learning_rate)

# Number of optimizer steps per epoch (= number of batches in the loader).
num_steps = len(train_loader)

for epoch in range(num_epochs):

    # ---------- TRAINING ----------
    # set model to training
    # (enables training-mode behavior for layers like dropout/batch-norm, if present)
    model.train()

    # Accumulates the summed batch losses over this epoch.
    total_loss = 0

    for i, (images, labels) in enumerate(train_loader):
        # Move the batch to the selected device (GPU when available).
        images, labels = images.to(device), labels.to(device)

        # Zero gradients