SAVE_DIR = RESULT_DIR + 'metric/save/%s/' % FILE_ID
LOG_DIR = RESULT_DIR + 'metric/log/'
CSV_DIR = RESULT_DIR + 'metric/csv/'
PKL_DIR = RESULT_DIR + 'metric/pkl/'
BOARD_DIR = RESULT_DIR + 'metric/board/%s/' % FILE_ID

create_muldir(SAVE_DIR, BOARD_DIR, LOG_DIR, CSV_DIR, PKL_DIR)

# load data
datasetmanager = DATASETMANAGER_DICT[args.dataset]
dm_train, dm_val, dm_test = datasetmanager(args.ltype, nsclass=args.nsclass)
for v in [dm_train, dm_val, dm_test]:
    v.print_shape()

# build the metric model and set up its training ops
model = DeepMetric(dm_train, dm_val, dm_test, LOG_DIR + FILE_ID + '.log', args)
model.build()
model.set_up_train()

# restore a previous checkpoint if one exists; otherwise train from scratch
try:
    model.restore(save_dir=SAVE_DIR)
except AttributeError:
    model.initialize()
    model.train(epoch=args.epoch, save_dir=SAVE_DIR, board_dir=BOARD_DIR)
    model.restore(save_dir=SAVE_DIR)

# evaluate recall/precision at k on the test split
model.prepare_test()
performance = model.test_metric(K_SET)
key_set = [
    'te_tr_recall_at_k',
    'te_tr_precision_at_k',
    'te_te_recall_at_k',
    'te_te_precision_at_k',
]
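# create_muldir is assumed to be a small filesystem helper from the repo's
# utils module; it is not defined in these fragments. A minimal sketch, under
# the assumption that it only needs to create each directory if it is missing:
import os

def create_muldir(*dirs):
    """Create every directory in `dirs`, ignoring ones that already exist."""
    for d in dirs:
        os.makedirs(d, exist_ok=True)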
# (fragment) the two assignments below appear to sit inside a loop over
# candidate runs (file_id); the surrounding loop is not shown here
best_performance = performance
best_file_id = file_id
print("best performance : {} from {}".format(best_performance, best_file_id))

# keep the csv/pkl results and checkpoint directory of the best run
copy_file(CSV_DIR + best_file_id + '.csv', copy_dst_csv)
copy_file(PKL_DIR + best_file_id + '.pkl', copy_dst_pkl)
copy_dir(RESULT_DIR + 'metric/save/' + best_file_id, SAVE_DIR)

# load data
datasetmanager = DATASETMANAGER_DICT[args.dataset]
dm_train, dm_val, dm_test = datasetmanager(args.ltype, nsclass=args.nsclass)
for v in [dm_train, dm_val, dm_test]:
    v.print_shape()

# rebuild the metric model and restore the best checkpoint
model = DeepMetric(dm_train, dm_val, dm_test, LOG_DIR + FILE_ID + '.log', args)
model.build()
model.set_up_train()
model.restore(save_dir=SAVE_DIR)
model.prepare_test()

# evaluate the 'th' and 'vq' variants for each activation level and
# persist the results per activate_k
for activate_k in ACTIVATE_K_SET:
    performance_th = model.test_th(activate_k, K_SET)
    performance_vq = model.test_vq(activate_k, K_SET)
    write_pkl({'th': performance_th, 'vq': performance_vq},
              path=PKL_DIR + FILE_ID + '_{}.pkl'.format(activate_k))
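# write_pkl, copy_file, and copy_dir above are also assumed to come from the
# repo's utils; their exact implementations are not shown. A minimal sketch,
# assuming write_pkl(content, path=...) pickles a dict and the copy helpers
# wrap shutil:
import os
import pickle
import shutil

def write_pkl(content, path):
    """Serialize `content` (e.g. the {'th': ..., 'vq': ...} dict) to `path`."""
    with open(path, 'wb') as f:
        pickle.dump(content, f)

def copy_file(src, dst):
    """Copy a single result file (csv/pkl) to the destination path."""
    shutil.copy(src, dst)

def copy_dir(src, dst):
    """Copy a checkpoint directory, replacing the destination if it exists."""
    if os.path.exists(dst):
        shutil.rmtree(dst)
    shutil.copytree(src, dst)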
LOG_DIR = RESULT_DIR + 'exp1/log/'
CSV_DIR = RESULT_DIR + 'exp1/csv/'
PKL_DIR = RESULT_DIR + 'exp1/pkl/'
BOARD_DIR = RESULT_DIR + 'exp1/board/%s/' % HASH_FILE_ID

create_muldir(SAVE_DIR, LOG_DIR, CSV_DIR, PKL_DIR, HASH_SAVE_DIR, BOARD_DIR)

# load data
datasetmanager = DATASETMANAGER_DICT[args.dataset]
dm_train, dm_val, dm_test = datasetmanager(args.hltype, nsclass=args.nsclass)
for v in [dm_train, dm_val, dm_test]:
    v.print_shape()

# rebuild the metric model and restore its pretrained weights
model = DeepMetric(dm_train, dm_val, dm_test, LOG_DIR + HASH_FILE_ID + '.log', args)
model.build()
model.restore(save_dir=SAVE_DIR)
model.prepare_test()

# add the hash components on top of the restored metric model
model.build_hash()
model.set_up_train_hash()

# restore a previous hash checkpoint if one exists; otherwise train the hash part
try:
    model.restore_hash(save_dir=HASH_SAVE_DIR)
except AttributeError:
    model.initialize()
    model.train_hash(args.epoch, save_dir=HASH_SAVE_DIR, board_dir=BOARD_DIR)
    model.restore_hash(save_dir=HASH_SAVE_DIR)

# evaluate hash-based retrieval
model.prepare_test_hash()
performance_hash = model.test_hash_metric(args.k, K_SET)
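# The fragments above only write results to disk (the last one ends with
# performance_hash still in memory). A hedged reader for the per-activate_k
# pickles produced by the second fragment, assuming each file holds the
# {'th': ..., 'vq': ...} dict written there and that pkl_dir, file_id, and
# activate_k_set match the values used when writing:
import pickle

def load_activate_k_results(pkl_dir, file_id, activate_k_set):
    """Load the {'th': ..., 'vq': ...} result dicts, keyed by activate_k."""
    results = {}
    for activate_k in activate_k_set:
        path = pkl_dir + file_id + '_{}.pkl'.format(activate_k)
        with open(path, 'rb') as f:
            results[activate_k] = pickle.load(f)
    return results

# hypothetical usage:
# results = load_activate_k_results(PKL_DIR, FILE_ID, ACTIVATE_K_SET)
# for activate_k, perf in results.items():
#     print(activate_k, sorted(perf['th'].keys()), sorted(perf['vq'].keys()))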