def main():
    cfg = configdataset(test_dataset, data_root)

    # global retrieval: rank the whole database by global-descriptor similarity
    ranks = global_search(GLOBAL_FEATURE_PATH)
    reportMAP(test_dataset, cfg, ranks)

    # re-rank the top candidates with geometric verification on local features
    # _, ranks_after_gv = rerankGV(cfg, LOCAL_FEATURE_PATH, ranks)
    _, ranks_after_gv = rerankGV_mulprocess(cfg, LOCAL_FEATURE_PATH, ranks)
    # np.save("ranks_after_gv.npy", ranks_after_gv)
    reportMAP(test_dataset, cfg, ranks_after_gv)
    print("Done!")
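# A minimal sketch of what global_search could look like, assuming
# GLOBAL_FEATURE_PATH points to a pickle holding "query" (nq x d) and
# "db" (n x d) descriptor matrices; the keys and file layout are
# assumptions for illustration, not this repo's actual format. It only
# shows the cosine-similarity ranking the rest of the pipeline expects.
import pickle
import numpy as np

def global_search_sketch(global_feature_path):
    with open(global_feature_path, "rb") as fin:
        feats = pickle.load(fin)
    query = feats["query"]  # assumed key
    db = feats["db"]        # assumed key
    # L2-normalize so the dot product equals cosine similarity
    query = query / np.linalg.norm(query, axis=1, keepdims=True)
    db = db / np.linalg.norm(db, axis=1, keepdims=True)
    sim = np.dot(db, query.T)        # n x nq similarity matrix
    return np.argsort(-sim, axis=0)  # ranks[:, q] = database ids sorted for query q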
def rankASMK():
    cfg = configdataset(test_dataset, data_root)

    # global retrieval baseline
    ranks = global_search(GLOBAL_FEATURE_PATH)
    reportMAP(test_dataset, cfg, ranks)

    # load precomputed ASMK similarity scores (num_queries x num_database)
    with open(ASMK_SCORE_PATH, "rb") as fin:
        scores = pickle.load(fin)
    print("scores", scores.shape)

    # re-order the top NUM_RERANK candidates of each query by ASMK score
    ranks = ranks.T  # one row per query
    ranks_after = ranks.copy()  # copy so the original ranking is not overwritten in place
    for i in range(ranks.shape[0]):
        asmk_scores = scores[i, ranks[i, :NUM_RERANK]]
        ranks_after[i, :NUM_RERANK] = ranks[i, np.argsort(-asmk_scores)]
    reportMAP(test_dataset, cfg, ranks_after.T)
    print("Done!")
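# A tiny self-contained check of the top-K re-ordering used in rankASMK,
# with made-up scores (not real ASMK output): only the first NUM_RERANK
# entries of a row are permuted, the tail is left untouched.
import numpy as np

def _rerank_topk_demo(num_rerank=3):
    ranks_row = np.array([4, 1, 7, 0, 2])  # database ids, best first
    fake_scores = np.array([0.1, 0.9, 0.2, 0.5, 0.3, 0.8, 0.4, 0.7])  # score per db id
    topk_scores = fake_scores[ranks_row[:num_rerank]]
    ranks_row[:num_rerank] = ranks_row[np.argsort(-topk_scores)]
    print(ranks_row)  # [1 7 4 0 2] -> top-3 re-ordered by score, rest unchanged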
#---------------------------------------------------------------------
# Read images
#---------------------------------------------------------------------

def pil_loader(path):
    # open path as file to avoid ResourceWarning
    # (https://github.com/python-pillow/Pillow/issues/835)
    with open(path, 'rb') as f:
        img = Image.open(f)
        return img.convert('RGB')


print('>> {}: Processing test dataset...'.format(test_dataset))

# config file for the dataset
# separates query image list from database image list, if revisited protocol used
cfg = configdataset(test_dataset, os.path.join(data_root, 'datasets'))

# query images (cropped to the annotated query bounding box)
for i in np.arange(cfg['nq']):
    qim = pil_loader(cfg['qim_fname'](cfg, i)).crop(cfg['gnd'][i]['bbx'])
    ##------------------------------------------------------
    ## Perform image processing here, eg, feature extraction
    ##------------------------------------------------------
    print('>> {}: Processing query image {}'.format(test_dataset, i + 1))

# database images
for i in np.arange(cfg['n']):
    im = pil_loader(cfg['im_fname'](cfg, i))
    ##------------------------------------------------------
    ## Perform image processing here, eg, feature extraction
    ##------------------------------------------------------
    print('>> {}: Processing database image {}'.format(test_dataset, i + 1))
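# One possible way to fill the feature-extraction placeholders above,
# sketched with a generic torchvision backbone. The model choice, input
# size, and pooling are assumptions for illustration (requires a recent
# torchvision), not the descriptors used elsewhere in this repo.
import torch
import torchvision.models as models
import torchvision.transforms as transforms

_backbone = torch.nn.Sequential(
    *list(models.resnet50(weights=models.ResNet50_Weights.DEFAULT).children())[:-1]
).eval()
_to_tensor = transforms.Compose([
    transforms.Resize((224, 224)),
    transforms.ToTensor(),
])

def extract_global_descriptor(pil_image):
    # average-pooled backbone activations, L2-normalized
    with torch.no_grad():
        feat = _backbone(_to_tensor(pil_image).unsqueeze(0)).flatten(1)
    return torch.nn.functional.normalize(feat, dim=1).squeeze(0).numpy()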
# accumulate the per-image global descriptors into one database matrix
# (this fragment sits inside a loop over database images, where gd is
# the descriptor of image i)
if i == 0:
    code = gd
else:
    code = np.concatenate((code, gd), axis=0)

code = np.array(code)

# standardize the database descriptors (zero mean, unit variance per dimension)
code -= np.mean(code, axis=0)
code /= np.std(code, axis=0)
print(code.shape)

# similarity search: dot product against the query descriptors, then sort
sim = np.dot(code, query.T)
ranks = np.argsort(-sim, axis=0)

dataset = 'roxford5k'
INPUT_PATH = '/home/yangyc/revisitop-master/data/datasets/'
cfg = configdataset(dataset, INPUT_PATH)
gnd = cfg['gnd']

# evaluate ranks
ks = [1, 5, 10]

# search for easy ('easy' images are positives, 'junk' and 'hard' are ignored)
gnd_t = []
for i in range(len(gnd)):
    g = {}
    g['ok'] = np.concatenate([gnd[i]['easy']])
    g['junk'] = np.concatenate([gnd[i]['junk'], gnd[i]['hard']])
    gnd_t.append(g)
mapE, apsE, mprE, prsE = compute_map(ranks, gnd_t, ks)
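# The block above only scores the Easy setup. The revisited ground truth
# also defines Medium and Hard setups from the same 'easy', 'hard', and
# 'junk' lists; a sketch following the same pattern:

# search for easy & hard (Medium)
gnd_t = []
for i in range(len(gnd)):
    g = {}
    g['ok'] = np.concatenate([gnd[i]['easy'], gnd[i]['hard']])
    g['junk'] = np.concatenate([gnd[i]['junk']])
    gnd_t.append(g)
mapM, apsM, mprM, prsM = compute_map(ranks, gnd_t, ks)

# search for hard (Hard)
gnd_t = []
for i in range(len(gnd)):
    g = {}
    g['ok'] = np.concatenate([gnd[i]['hard']])
    g['junk'] = np.concatenate([gnd[i]['junk'], gnd[i]['easy']])
    gnd_t.append(g)
mapH, apsH, mprH, prsH = compute_map(ranks, gnd_t, ks)

print('>> {}: mAP E: {}, M: {}, H: {}'.format(
    dataset,
    np.around(mapE * 100, decimals=2),
    np.around(mapM * 100, decimals=2),
    np.around(mapH * 100, decimals=2)))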
# Check, and, if necessary, download the distractor dataset
download_distractors(data_root)

# Set up the dataset name
distractors_dataset = 'revisitop1m'

# ---------------------------------------------------------------------
# Read images
# ---------------------------------------------------------------------

def pil_loader(path):
    # avoid crashing on truncated (corrupted) images
    ImageFile.LOAD_TRUNCATED_IMAGES = True
    # open path as file to avoid ResourceWarning
    # (https://github.com/python-pillow/Pillow/issues/835)
    with open(path, 'rb') as f:
        img = Image.open(f)
        return img.convert('RGB')


print('>> {}: Processing dataset...'.format(distractors_dataset))

# config file for the dataset
cfg = configdataset(distractors_dataset, os.path.join(data_root, 'datasets'))

for i in np.arange(cfg['n']):
    im = pil_loader(cfg['im_fname'](cfg, i))
    ##------------------------------------------------------
    ## Perform image processing here, eg, feature extraction
    ##------------------------------------------------------
    print('>> {}: Processing image {}'.format(distractors_dataset, i + 1))
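# A hedged sketch of persisting descriptors for the 1M distractors
# instead of discarding them: accumulate and checkpoint with numpy.
# extract_global_descriptor and the output path are illustrative
# assumptions, not part of this repo.
def process_distractors_sketch(cfg, out_path='revisitop1m_descriptors.npy'):
    descs = []
    for i in np.arange(cfg['n']):
        im = pil_loader(cfg['im_fname'](cfg, i))
        descs.append(extract_global_descriptor(im))  # hypothetical extractor
        if (i + 1) % 10000 == 0:
            # checkpoint periodically so a crash does not lose all work
            np.save(out_path, np.vstack(descs))
    np.save(out_path, np.vstack(descs))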