Пример #1
0
def test_with_maml(dataset, learner, checkpoint, steps, loss_fn):
    """Evaluate *learner* wrapped in MAML on *dataset*.

    Args:
        dataset: evaluation dataset passed straight to ``model.eval``.
        learner: inner model to wrap with MAML.
        checkpoint: path to saved weights; if falsy, evaluation runs on a
            randomly initialized model (a warning is printed).
        steps: number of inner-loop adaptation steps for MAML.
        loss_fn: loss function handed to MAML.
    """
    print("[*] Testing...")
    model = MAML(learner, steps=steps, loss_function=loss_fn)
    model.to(device)  # NOTE(review): `device` appears to be a module-level global — confirm
    if checkpoint:
        model.restore(checkpoint, resume_training=False)
    else:
        print("[!] You are running inference on a randomly initialized model!")
    # isinstance() instead of `type(...) is ...` so subclasses of
    # OmniglotDataset also get accuracy computed (exact-type check would
    # silently skip them).
    model.eval(dataset, compute_accuracy=isinstance(dataset, OmniglotDataset))
    print("[*] Done!")
Пример #2
0
    # --- meta-train step: adapt on the current training batch -------------
    model.train()
    loss, acc = adaptation(model,
                           optimizer,
                           trainbatch,
                           loss_fn,
                           lr=0.01,
                           train_step=5,
                           train=True,
                           device=device)

    train_loss_log.append(loss.item())
    train_acc_log.append(acc)

    # --- meta-test step: evaluate on a held-out batch ---------------------
    # Use the builtin next(); iterator `.next()` is the Python-2 protocol
    # (and an old-PyTorch DataLoader alias) and raises AttributeError on
    # modern Python 3 / PyTorch iterators.
    evalbatch = next(evaliter)
    model.eval()
    testloss, testacc = test(model,
                             evalbatch,
                             loss_fn,
                             lr=0.01,
                             train_step=10,
                             device=device)

    test_loss_log.append(testloss.item())
    test_acc_log.append(testacc)

    print(
        "Epoch {}: train_loss = {:.4f}, train_acc = {:.4f}, test_loss = {:.4f}, test_acc = {:.4f}"
        .format(epoch, loss.item(), acc, testloss.item(), testacc))

# Persist the final weights. NOTE(review): plain string concatenation
# assumes `model_path` already ends with a path separator — confirm;
# os.path.join would be the safer construction.
torch.save(model.state_dict(), model_path + 'model.pth')