# NOTE(review): this fragment was recovered from a single collapsed source line;
# the indentation below is reconstructed from the code's semantics and should be
# confirmed against the original file.

# --- tail of the per-batch validation loop (its `for` header lies outside this
# view); these four statements presumably run once per validation batch ---
logits = model(images)
loss = F.cross_entropy(logits, labels)
# Per-batch accuracy: fraction of correct top-1 predictions in this batch.
val_accs.append((logits.argmax(1) == labels).float().mean().item())
val_losses.append(loss.item())

# --- per-epoch validation reporting ---
if CALLBACK:
    # NOTE(review): the callback logs `last_avg` at step `epoch + 1`, while the
    # print below reports `avg` at `epoch` -- confirm this mismatch is intended.
    CALLBACK.scalar('Val_Loss', epoch + 1, val_losses.last_avg)
    CALLBACK.scalar('Val_Acc', epoch + 1, val_accs.last_avg)
print('Epoch {} | Validation | Loss: {:.4f}, Accs: {:.4f}'.format(epoch, val_losses.avg, val_accs.avg))

# Periodic checkpoint every `args.save_freq` epochs; the validation accuracy is
# embedded in the filename and tensors are moved to CPU (cpu=True).
if not (epoch + 1) % args.save_freq:
    save_checkpoint(
        model.state_dict(),
        os.path.join(args.save_folder,
                     args.save_name + 'acc{}_{}.pth'.format(val_accs.avg, (epoch + 1))),
        cpu=True)

# Validation accuracy of the last completed epoch, kept for later reporting.
valacc_final = val_accs.avg

# --- test-set evaluation: accumulate loss/accuracy without gradient tracking ---
test_accs = AverageMeter()
test_losses = AverageMeter()
with torch.no_grad():
    for i, batch_data in enumerate(tqdm.tqdm(test_loader, ncols=80)):
        # NOTE(review): this loader yields dict batches ('image'/'label_idx'),
        # unlike the tuple batches elsewhere in the file -- confirm the loader
        # really uses these keys.
        images, labels = batch_data['image'].to(DEVICE), batch_data['label_idx'].to(DEVICE)
        logits = model(images)
        loss = F.cross_entropy(logits, labels)
        test_accs.append((logits.argmax(1) == labels).float().mean().item())
        test_losses.append(loss.item())
# NOTE(review): recovered from a collapsed source line; indentation below is
# reconstructed from the code's semantics -- confirm against the original file.

# --- per-batch validation pass over val_loader ---
for i, (images, labels) in enumerate(tqdm.tqdm(val_loader, ncols=80)):
    images, labels = images.to(DEVICE), labels.to(DEVICE)
    logits = model(images)
    loss = F.cross_entropy(logits, labels)
    # Per-batch top-1 accuracy accumulated into the running meter.
    val_accs.append((logits.argmax(1) == labels).float().mean().item())
    val_losses.append(loss.item())

print('Epoch {} | Validation | Loss: {:.4f}, Accs: {:.4f}'.format(
    epoch, val_losses.avg, val_accs.avg))

# Track the best validation accuracy seen so far and remember its weights
# (ties resolve to the later epoch because of `>=`).
if val_accs.avg >= best_acc:
    best_acc = val_accs.avg
    best_epoch = epoch
    # NOTE(review): state_dict() returns tensors that alias the live model
    # parameters; if training continues after this point, best_dict will follow
    # the updated weights. Consider copy.deepcopy -- confirm against the caller.
    best_dict = model.state_dict()

# Periodic checkpoint every `args.save_freq` epochs (tensors moved to CPU).
if not (epoch + 1) % args.save_freq:
    save_checkpoint(model.state_dict(), os.path.join(
        args.save_folder, args.save_name + '_{}.pth'.format(epoch + 1)), cpu=True)

# --- after training: restore the best-epoch weights and evaluate on the test
# set. NOTE(review): the dedent out of the enclosing epoch loop is not visible
# in this fragment; the statements below presumably run once, after all epochs.
model.load_state_dict(best_dict)
test_accs = AverageMeter()
test_losses = AverageMeter()
with torch.no_grad():
    for i, (images, labels) in enumerate(tqdm.tqdm(test_loader, ncols=80)):
        # NOTE(review): the loop body is truncated at the end of this fragment;
        # the remainder lies outside this view.