# NOTE(review): this fragment looks like a whitespace-mangled excerpt of a
# per-batch training loop — it reads `epoch`, `i`, `batch`, `train_loader`,
# `model`, `optimizer`, `writer`, `tl`, `ta`, `label` from an enclosing scope
# that is not visible here. Statements are kept byte-identical and re-wrapped
# one per line; the original loop nesting could NOT be recovered from this
# view — confirm indentation against the full file before relying on it.

# Monotonic global step counter used as the x-axis for TensorBoard scalars.
global_count = global_count + 1
# Move the episode to GPU when available; assumes `batch` is a
# (data, labels) pair from the DataLoader — TODO confirm against the loader.
if torch.cuda.is_available():
    data, _ = [_.cuda() for _ in batch]
else:
    data = batch[0]
# Episode layout: the first shot*way samples are the support set,
# the remainder the query set.
p = args.shot * args.way
data_shot, data_query = data[:p], data[p:]
logits = model(data_shot, data_query)
loss = F.cross_entropy(logits, label)
acc = count_acc(logits, label)
# Log per-step loss/accuracy against the global step counter.
writer.add_scalar('data/loss', float(loss), global_count)
writer.add_scalar('data/acc', float(acc), global_count)
print('epoch {}, train {}/{}, loss={:.4f} acc={:.4f}'.format(
    epoch, i, len(train_loader), loss.item(), acc))
# Accumulate running epoch averages (tl/ta presumably Averager
# instances — created outside this view; verify).
tl.add(loss.item())
ta.add(acc)
# Standard optimization step: clear grads, backprop, update weights.
optimizer.zero_grad()
loss.backward()
optimizer.step()
# --- post-epoch: collapse the epoch averagers to floats and switch the
# --- model to eval mode for validation.
tl = tl.item()
ta = ta.item()
model.eval()
vl = Averager()
va = Averager()
# Validation query labels: class indices 0..way-1, repeated once per
# query sample (matches the episode layout above).
label = torch.arange(args.way).repeat(args.query)
# ---- Final evaluation: 10000 randomly sampled test episodes ----
# Reports the running mean accuracy per episode and a confidence
# interval over all episodes at the end.
test_set = Dataset('test', args)
sampler = CategoriesSampler(
    test_set.label, 10000, args.way, args.shot + args.query)
loader = DataLoader(
    test_set, batch_sampler=sampler, num_workers=8, pin_memory=True)

# Per-episode accuracies, consumed by compute_confidence_interval below.
test_acc_record = np.zeros((10000,))

# Restore the trained weights and freeze the model for inference.
model.load_state_dict(torch.load(args.model_path)['params'])
model.eval()

ave_acc = Averager()

# Query labels: class indices 0..way-1, repeated once per query sample.
cuda_ok = torch.cuda.is_available()
label = torch.arange(args.way).repeat(args.query)
label = label.type(torch.cuda.LongTensor if cuda_ok else torch.LongTensor)

# The support set occupies the first way*shot samples of every episode
# (loop-invariant, so computed once up front).
k = args.way * args.shot

with torch.no_grad():
    for i, batch in enumerate(loader, 1):
        data = batch[0].cuda() if cuda_ok else batch[0]
        data_shot, data_query = data[:k], data[k:]
        logits, _ = model(data_shot, data_query)
        acc = count_acc(logits, label)
        ave_acc.add(acc)
        test_acc_record[i - 1] = acc
        print('batch {}: {:.2f}({:.2f})'.format(
            i, ave_acc.item() * 100, acc * 100))

m, pm = compute_confidence_interval(test_acc_record)
print('Test Acc {:.4f} + {:.4f}'.format(m, pm))