def main():
    """Extract AMDIM features for all support/query images and score queries.

    Parses CLI args, then either resumes from a cached feature checkpoint
    (when ``--load_checkpoint`` is set and the log dir exists) or runs the
    pretrained AmdimNet encoder over the dataset and caches the results.

    Returns:
        (logits, dataset): per-query class logits — negative euclidean
        distance to each class prototype, scaled by ``args.temperature``
        (larger is more similar) — and the dataset object.
    """
    args = parser.parse_args()
    if not args.out_name:
        # Default the run name to the parent directory of the data path.
        args.out_name = osp.basename(osp.dirname(args.data_path))
    pprint(vars(args))

    if args.load_checkpoint and os.path.isdir(osp.join(args.logs_dir, args.out_name)):
        # Resume: features were already extracted and cached on a prior run.
        features_dict, labels_dict, dataset = load_checkpoint(args)
    else:
        print("start new process")
        print(args.load_checkpoint, os.path.isdir(osp.join(args.logs_dir, args.out_name)))
        print("model init...")
        # NOTE(review): checkpoint path is hard-coded; consider promoting to a CLI arg.
        model_weight = torch.load('./pretrained/modelEncoder_Ness_MINI_ProtoNet_MINI_5shot_10way_max_acc.pth')
        model = AmdimNet(ndf=args.ndf, n_rkhs=args.rkhs, n_depth=args.nd)
        model_dict = model.state_dict()
        # Strip DataParallel's 'module.' prefix and keep only keys the model knows.
        pretrained_dict = {
            k.replace('module.', ''): v
            for k, v in model_weight['model'].items()
            if k.replace('module.', '') in model_dict
        }
        model_dict.update(pretrained_dict)
        model.load_state_dict(model_dict)
        model.eval()
        model = model.cuda()

        print("dataset init...")
        dataset = Dataset(args)
        dataloader = DataLoader(dataset, batch_size=args.batch_size, pin_memory=True)

        features_dict = {}
        labels_dict = {}
        print("extracting features")
        with torch.no_grad():
            # The old enumerate(dataloader, 1) index was never used; iterating
            # the loader directly also lets tqdm report a total.
            for batch in tqdm(dataloader):
                transformed_imgs, paths, labels = batch
                transformed_imgs = transformed_imgs.cuda()
                features_batch = model(transformed_imgs)
                for path, feature, label in zip(paths, features_batch, labels):
                    features_dict[path] = feature.cpu()
                    # Query images carry an empty-string label; store None instead.
                    labels_dict[path] = label if label else None
        save_checkpoint(args, features_dict, labels_dict, dataset)
        print("saving checkpoints log complete")

    support_features = torch.stack([features_dict[path] for path in dataset.support])
    query_features = torch.stack([features_dict[path] for path in dataset.query])
    print(torch.tensor(dataset.class_lens))

    # Cumulative class sizes give [0, n1, n1+n2, ...]: per-class slice
    # boundaries into the contiguously ordered support features.
    indices = torch.cumsum(torch.tensor(dataset.class_lens), dim=0)
    indices = torch.cat([torch.tensor([0]), indices])
    # Class prototypes: mean support feature per class (renamed from the
    # original typo 'mean_support_featers').
    mean_support_features = torch.stack([
        torch.mean(support_features[indices[i]:indices[i + 1]], dim=0)
        for i in range(len(indices) - 1)
    ])
    # euclidean_metric returns negative distances, so this acts as similarity.
    logits = euclidean_metric(query_features, mean_support_features) / args.temperature
    print(logits.shape)
    return logits, dataset
parser.add_argument('--gpu', default='0') # AMDIM Modelrd parser.add_argument('--ndf', type=int, default=256) parser.add_argument('--rkhs', type=int, default=2048) parser.add_argument('--nd', type=int, default=10) parser.add_argument('--trlog_checkpoint', type=str, default=None) parser.add_argument('--model_checkpoint', type=str, default=None) parser.add_argument('--load_train_checkpoint', action='store_true') args = parser.parse_args() sys.stdout = Logger(osp.join(args.save_path, 'log.txt'), append=args.load_train_checkpoint) pprint(vars(args)) set_gpu(args.gpu) if args.dataset == 'MiniImageNet': # Handle MiniImageNet from feat.dataloader.mini_imagenet import MiniImageNet as Dataset elif args.dataset == 'CUB': from feat.dataloader.cub import CUB as Dataset elif args.dataset == 'TieredImageNet': from feat.dataloader.tiered_imagenet import tieredImageNet as Dataset else: raise ValueError('Non-supported Dataset.') trainset = Dataset('train', args) train_sampler = CategoriesSampler(trainset.label, 100, args.way,