def main():
    """Run the IsoH pipeline: seed RNGs, train the hash function on video
    features, generate binary codes, and report retrieval mAP.

    Reads paths/settings from the module-level ``opt`` dict and logs
    progress via the module-level ``logger``.
    """
    # basic setting: seed both RNGs so runs are reproducible
    random.seed(opt['seed'])
    np.random.seed(opt['seed'])

    # create approach
    isoh = IsoHAlgo(__DIM)
    logger.info('creating isoh method done')

    # load features (keyed features plus the dataset mean for centering)
    featurepath = os.path.join(opt['featurepath'], 'videos-features.h5')
    features, mean_feature = load_features(featurepath)

    # train isoh on the same file loaded as a dense array
    # NOTE(review): features are read twice (dict/keyed form and array form);
    # presumably load_features(dtype='array') stacks them — confirm.
    train_features = load_features(featurepath, dtype='array')
    isoh.learn_hash_function(train_features)

    # generate binary codes for all features
    codes = isoh.generate_codes(features, mean_feature=mean_feature)
    logger.info('generating codes done')

    # load groundtruth and unlabeled-keys
    gnds = load_groundtruth('test_groundtruth')
    unlabeled_keys = get_video_id('unlabeled-data')
    logger.info('loading gnds and unlabeled keys done. #query: {}'.format(len(gnds)))

    # calculate mean average precision over the Hamming ranking
    # (renamed from `map` to avoid shadowing the builtin)
    mean_ap = calc_hamming_ranking(codes, unlabeled_keys, gnds)
    logger.info('map: {:.4f}'.format(mean_ap))
    logger.info('all done')
'''learning procedure'''
logger.info('start training procedure')
start_t = time.time()
# NOTE(review): dlfh_algo is given only the database label matrix, so the
# binary codes are presumably learned from labels alone (DLFH-style) —
# confirm against its definition.
db_image_codes, db_text_codes = dlfh_algo(train_labels=database_label)

'''out-of-sample extension'''
# fit linear projections mapping raw image/text features onto the
# learned database codes
wx = linear_hash(database_image, db_image_codes)
wy = linear_hash(database_text, db_text_codes)
end_t = time.time() - start_t  # elapsed training time in seconds

'''start encoding'''
# binarize the test features through the learned projections
test_image_codes = np.sign(test_image.dot(wx))
test_text_codes = np.sign(test_text.dot(wy))

# evaluation settings: precision is reported at top-100
param = {'topk': [100]}
# cross-modal retrieval: image queries against text database, and vice versa
metrici2t = calc_hamming_ranking(test_image_codes, db_text_codes, test_label, database_label, param)
metrict2i = calc_hamming_ranking(test_text_codes, db_image_codes, test_label, database_label, param)
mapi2t = metrici2t['map']
mapt2i = metrict2i['map']
topprei2t = float(metrici2t['topkpre'][0])
toppret2i = float(metrict2i['topkpre'][0])
logger.info('map@i2t: {:>.4f}, map@t2i: {:>.4f}.'.format(mapi2t, mapt2i))
logger.info('precision@i2t: {:>.4f}, precision@t2i: {:>.4f}.'.format(topprei2t, toppret2i))