Example no. 1
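            # log elapsed training time and an estimated time remaining (ETA) for the rest of the run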
            'time': format_time(time.time() - st_time),
            'eta': format_time((time.time() - st_time) / (epoch + 1) *
                               (args.epochs - epoch - 1)),
        })
        print(row)
        logger.writerow(row)
    # end for epoch
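    # persist the trained CNN weights; the else-branch below reloads this checkpoint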
    torch.save({
        'model': cnn.state_dict(),
    }, ckpt_cnn_filename)
    logger.close()
else:
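    # skip training and restore the weights from the saved checkpoint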
    print("\n Loading pretrained model...")
    cnn.load_state_dict(torch.load(ckpt_cnn_filename)['model'])
    cnn = cnn.cuda()

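# evaluate on the test set; test() returns accuracy as a fraction, so scale to a percentage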
val_acc = test(test_loader) * 100.0
print(val_acc)
with open(log_filename, 'a') as f:
    f.write(
        "\n==================================================================================================="
    )

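# append the final result to a per-run text file, creating it first if it does not exist yet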
eval_results_fullpath = ckpt_directory + "/test_result_" + test_id + ".txt"
if not os.path.isfile(eval_results_fullpath):
    eval_results_logging_file = open(eval_results_fullpath, "w")
    eval_results_logging_file.close()
with open(eval_results_fullpath, 'a') as eval_results_logging_file:
    eval_results_logging_file.write(
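The format_time() helper called above is not shown in this snippet. A minimal sketch of one plausible implementation, assuming it only turns an elapsed duration in seconds into an "H:MM:SS" string:

def format_time(seconds):
    # Hypothetical helper: only its call sites appear in the snippet above.
    # Convert a duration in seconds into an "H:MM:SS" string.
    seconds = int(seconds)
    hours, rest = divmod(seconds, 3600)
    minutes, secs = divmod(rest, 60)
    return "%d:%02d:%02d" % (hours, minutes, secs)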
Example no. 2
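        # run the forward pass with gradient tracking disabled during evaluation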
        with torch.no_grad():
            pred = cnn(images)
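        # when a distillation teacher is in use, the model apparently returns a
        # (features, logits) pair; keep only the logits for the accuracy check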
        if teacher is not None:
            fs, pred = pred

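        # take the argmax class prediction and accumulate running accuracy counts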
        pred = torch.max(pred.data, 1)[1]
        total += labels.size(0)
        correct += (pred == labels).sum().item()

    val_acc = correct / total
    cnn.train()
    return val_acc

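# evaluation-only mode: load the checkpoint given by args.resume, report test accuracy, and stop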
if args.test:
    cnn.load_state_dict(torch.load(args.resume))
    print(test(test_loader))
    exit()

# train
best_acc = 0.0
st_time = time.time()
for epoch in range(args.epochs):
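    # reset the per-epoch running statistics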
    loss_avg = {}
    correct = 0.
    total = 0.
    cnt_ft = {}
    for i, (images, labels) in enumerate(train_loader):
        images, labels = images.cuda(), labels.cuda()

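        # clear gradients accumulated from the previous step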
        cnn.zero_grad()