def val(args, model=None, current_epoch=0):
    """Run classification evaluation on the validation set.

    Computes top-1/top-5 accuracy over ``val_loader`` and saves each batch's
    top-5 predicted labels via ``SAVE_ATTEN``.

    Args:
        args: parsed command-line namespace; fields used here are
            ``tencrop`` (string flag ``'True'``/other), ``num_classes``,
            and ``save_atten_dir``.
        model: optional pre-built model; when ``None`` one is created with
            ``get_model(args)``.
        current_epoch: accepted for interface compatibility with the
            training loop; not used in this function.

    Side effects: prints final Top1/Top5 averages; writes prediction files
    under ``args.save_atten_dir``.
    """
    top1 = AverageMeter()
    top5 = AverageMeter()
    top1.reset()
    top5.reset()

    if model is None:
        model = get_model(args)
    model.eval()

    _, val_loader = data_loader(args, test_path=True)
    save_atten = SAVE_ATTEN(save_dir=args.save_atten_dir)

    global_counter = 0
    for idx, dat in tqdm(enumerate(val_loader)):
        img_path, img, label_in = dat
        global_counter += 1

        if args.tencrop == 'True':
            # Ten-crop testing: fold the crop dimension into the batch and
            # replicate each label once per crop.  Was a hard-coded 10; use
            # the unpacked crop count so a different crop scheme still works.
            bs, ncrops, c, h, w = img.size()
            img = img.view(-1, c, h, w)
            label = label_in.repeat(ncrops, 1).view(-1)
        else:
            label = label_in

        img, label = img.cuda(), label.cuda()
        img_var, label_var = Variable(img), Variable(label)

        logits = model(img_var, label_var)
        logits0 = F.softmax(logits[0], dim=1)
        if args.tencrop == 'True':
            # Average the softmax scores over the ten crops of each image.
            logits0 = logits0.view(bs, ncrops, -1).mean(1)

        # Classification accuracy against the original (un-replicated) labels.
        prec1_1, prec5_1 = Metrics.accuracy(logits0.cpu().data, label_in.long(), topk=(1, 5))
        top1.update(prec1_1[0], img.size(0))
        top5.update(prec5_1[0], img.size(0))

        # Rank all classes by score and persist the top-5 predictions.
        _, pred_labels = torch.topk(logits0, k=args.num_classes, dim=1)
        pred_np_labels = pred_labels.cpu().data.numpy()
        save_atten.save_top_5_pred_labels(pred_np_labels[:, :5], img_path, global_counter)

    print('Top1:', top1.avg, 'Top5:', top5.avg)
def val(args, model=None, current_epoch=0):
    """Evaluate the model on the validation set and dump localization maps.

    Depending on ``args.onehot`` this either accumulates a mAP score
    (multi-label setting, via ``cal_mAP``) or top-1/top-5 accuracy
    (single-label setting).  For every batch it also extracts the model's
    localization maps and writes masked/heatmap images through
    ``SAVE_ATTEN.get_masked_img``.

    Args:
        args: parsed namespace; fields read here: ``tencrop``, ``onehot``
            (both compared against the string ``'True'``).
        model: optional pre-built model; when None, ``get_model(args)`` is
            called (which returns a (model, extra) tuple here).
        current_epoch: unused; kept for signature compatibility.
    """
    top1 = AverageMeter()
    top5 = AverageMeter()
    top1.reset()
    top5.reset()

    if model is None:
        model, _ = get_model(args)
    model.eval()
    train_loader, val_loader = data_loader(args, test_path=True)
    # Attention/heatmap outputs go to a fixed relative directory.
    save_atten = SAVE_ATTEN(save_dir='../save_bins/')

    global_counter = 0
    # prob/gt accumulate prediction scores and ground truth across batches
    # for the mAP computation in the onehot branch.
    prob = None
    gt = None
    for idx, dat in tqdm(enumerate(val_loader)):
        img_path, img, label_in = dat
        global_counter += 1

        if args.tencrop == 'True':
            # Ten-crop testing: merge crops into the batch dimension and
            # replicate labels accordingly.
            bs, ncrops, c, h, w = img.size()
            img = img.view(-1, c, h, w)
            label_input = label_in.repeat(10, 1)
            label = label_input.view(-1)
        else:
            label = label_in

        img, label = img.cuda(), label.cuda()
        img_var, label_var = Variable(img), Variable(label)
        logits = model(img_var, label_var)
        logits0 = logits[0]
        logits0 = F.softmax(logits0, dim=1)
        if args.tencrop == 'True':
            # Average softmax scores over the ten crops of each image.
            logits0 = logits0.view(bs, ncrops, -1).mean(1)

        # Calculate classification results
        if args.onehot == 'True':
            # Multi-label evaluation: running mAP over all batches so far.
            val_mAP, prob, gt = cal_mAP(logits0, label_var, prob, gt)
            # print val_mAP
        else:
            # Single-label evaluation: top-1/top-5 accuracy against the
            # original (un-replicated) labels.
            prec1_1, prec5_1 = Metrics.accuracy(logits0.cpu().data, label_in.long(), topk=(1, 5))
            # prec3_1, prec5_1 = Metrics.accuracy(logits[1].data, label.long(), topk=(1,5))
            top1.update(prec1_1[0], img.size()[0])
            top5.update(prec5_1[0], img.size()[0])

        # model.module.save_erased_img(img_path)
        # Pull the localization maps produced during the forward pass.
        last_featmaps = model.module.get_localization_maps()
        np_last_featmaps = last_featmaps.cpu().data.numpy()

        # Save 100 sample masked images by heatmaps
        # if idx < 100/args.batch_size:
        save_atten.get_masked_img(img_path, np_last_featmaps, label_in.numpy(),
                                  size=(0, 0), maps_in_dir=True, only_map=True)

        # save_atten.save_heatmap_segmentation(img_path, np_last_featmaps, label.cpu().numpy(),
        #                                      save_dir='./save_bins/heatmaps', size=(0,0), maskedimg=True)

        # save_atten.get_masked_img(img_path, np_last_featmaps, label_in.numpy(),size=(0,0),
        #                           maps_in_dir=True, save_dir='../heatmaps',only_map=True )

        # np_scores, pred_labels = torch.topk(logits0,k=args.num_classes,dim=1)
        # # print pred_labels.size(), label.size()
        # pred_np_labels = pred_labels.cpu().data.numpy()
        # save_atten.save_top_5_pred_labels(pred_np_labels[:,:5], img_path, global_counter)
        # # pred_np_labels[:,0] = label.cpu().numpy() #replace the first label with gt label
        # # save_atten.save_top_5_atten_maps(np_last_featmaps, pred_np_labels, img_path)

    if args.onehot == 'True':
        print val_mAP
        print 'AVG:', np.mean(val_mAP)
    else:
        print('Top1:', top1.avg, 'Top5:', top5.avg)
from ss_datalayer import SSDatalayer
from oneshot import *
from utils.Restore import restore
from utils import AverageMeter
from utils.save_atten import SAVE_ATTEN
from utils.segscorer import SegScorer
from utils import Metrics

# Pin GPU ordering to the PCI bus and restrict this process to GPU 1.
os.environ['CUDA_DEVICE_ORDER'] = 'PCI_BUS_ID'
os.environ['CUDA_VISIBLE_DEVICES'] = '1'

# Working directory of the process (used as the root for snapshot paths).
ROOT_DIR = '/'.join(os.getcwd().split('/'))
print ROOT_DIR

# Module-level attention saver with its default output directory.
save_atten = SAVE_ATTEN()

SNAPSHOT_DIR = os.path.join(ROOT_DIR, 'snapshots_1way1shot_heat')
DISP_INTERVAL = 20


def get_arguments():
    # Builds the command-line parser for the one-shot evaluation script.
    # NOTE(review): definition continues beyond this chunk of the file.
    parser = argparse.ArgumentParser(description='OneShot')
    parser.add_argument("--arch", type=str, default='onemodel_sg_one')
    parser.add_argument("--disp_interval", type=int, default=100)
    parser.add_argument(
        "--snapshot_dir", type=str, default='/home/liruimin/SG-One-master/snapshots_1way1shot_heat')
    parser.add_argument("--lr", type=float, default=1e-5)