# NOTE(review): this chunk was stored with every statement collapsed onto one
# line; the indentation below is a reconstruction.  Loop extents were inferred
# from the '{}'.format(activate_k) in the pickle path — confirm against the
# original script.

# Tail of the dataset-shape dump (the `for v in [...]` header precedes this chunk).
v.print_shape()

# Build the metric model and restore already-trained weights; this section
# only evaluates, it never trains.
model = DeepMetric(dm_train, dm_val, dm_test, LOG_DIR + FILE_ID + '.log', args)
model.build()
model.set_up_train()
model.restore(save_dir=SAVE_DIR)
model.prepare_test()

# Evaluate thresholding ('th') and vector-quantization ('vq') retrieval for
# each activation budget, pickling one {'th', 'vq'} result pair per activate_k.
for activate_k in ACTIVATE_K_SET:
    performance_th = model.test_th(activate_k, K_SET)
    performance_vq = model.test_vq(activate_k, K_SET)
    write_pkl(
        {'th': performance_th, 'vq': performance_vq},
        path=PKL_DIR + FILE_ID + '_{}.pkl'.format(activate_k))

# CSV with two rows: row 0 = thresholding metrics, row 1 = VQ metrics.
# (Content rows are added in the part of the script that follows this chunk.)
cwrite = CsvWriter2(2)
key_set = [
    'train_nmi',
    'test_nmi',
    'te_tr_suf',
    'te_te_suf',
    'te_tr_precision_at_k',
    'te_te_precision_at_k',
    'te_tr_recall_at_k',
    'te_te_recall_at_k',
]
for key in key_set:
    cwrite.add_header(0, str(key) + "_th")
    cwrite.add_header(1, str(key) + "_vq")
# Hash-layer evaluation: restore a trained hash model if a checkpoint exists,
# otherwise train one first, then dump retrieval metrics to pickle + CSV.
# NOTE(review): reconstructed from a single collapsed line; the AttributeError
# as the "missing checkpoint" signal comes from the project's restore_hash
# implementation — confirm.
model.set_up_train_hash()
try:
    model.restore_hash(save_dir=HASH_SAVE_DIR)
except AttributeError:
    # No usable checkpoint — train from scratch, then reload the result.
    model.initialize()
    model.train_hash(args.epoch, save_dir=HASH_SAVE_DIR, board_dir=BOARD_DIR)
    model.restore_hash(save_dir=HASH_SAVE_DIR)
model.prepare_test_hash()
performance_hash = model.test_hash_metric(args.k, K_SET)

# Release the model and dataset references before post-processing.
model.delete()
del model
del dm_train
del dm_val
del dm_test

write_pkl(performance_hash, path=PKL_DIR + HASH_FILE_ID + '.pkl')

# One CSV row of test-side metrics.  The original duplicated the 'suf' and
# 'at_k' branches (both called listformat) and dead-initialized content=''
# before every assignment; both cleaned up here.
cwrite = CsvWriter2(1)
key_set = [
    'test_nmi',
    'te_te_suf',
    'te_te_precision_at_k',
    'te_te_recall_at_k',
]
for key in key_set:
    cwrite.add_header(0, str(key))
    # List-valued metrics ('suf', 'at_k') are flattened for CSV output;
    # scalar metrics pass through unchanged.
    if 'suf' in key or 'at_k' in key:
        content = listformat(performance_hash[key])
    else:
        content = performance_hash[key]
    cwrite.add_content(0, content)
cwrite.write(CSV_DIR + HASH_FILE_ID + '.csv')
# NOTE(review): reconstructed from a single collapsed line — indentation of the
# try/except and loop bodies is inferred; confirm against the original script.

# --- Load data -------------------------------------------------------------
# Pick the dataset manager for the requested dataset and split it three ways.
datasetmanager = DATASETMANAGER_DICT[args.dataset]
dm_train, dm_val, dm_test = datasetmanager(args.ltype, nsclass=args.nsclass)
for v in [dm_train, dm_val, dm_test]:
    v.print_shape()

# --- Build / train ---------------------------------------------------------
model = DeepMetric(dm_train, dm_val, dm_test, LOG_DIR + FILE_ID + '.log', args)
model.build()
model.set_up_train()
try:
    model.restore(save_dir=SAVE_DIR)
except AttributeError:
    # No checkpoint to restore: train from scratch and reload the result.
    model.initialize()
    model.train(epoch=args.epoch, save_dir=SAVE_DIR, board_dir=BOARD_DIR)
    model.restore(save_dir=SAVE_DIR)

# --- Evaluate --------------------------------------------------------------
model.prepare_test()
performance = model.test_metric(K_SET)

# One CSV row holding the four recall/precision@k metrics, plus a pickle of
# the full performance dict.
cwrite = CsvWriter2(1)
for key in ['te_tr_recall_at_k', 'te_tr_precision_at_k',
            'te_te_recall_at_k', 'te_te_precision_at_k']:
    cwrite.add_header(0, str(key))
    cwrite.add_content(0, listformat(performance[key]))
cwrite.write(CSV_DIR + FILE_ID + '.csv')
write_pkl(performance, path=PKL_DIR + FILE_ID + '.pkl')
# Train the hash model from scratch (no restore attempt in this variant),
# then evaluate and dump the train-side retrieval metrics.
# NOTE(review): reconstructed from a single collapsed line — confirm loop
# extents against the original script.
model.initialize()
model.train_hash(args.epoch, save_dir=HASH_SAVE_DIR, board_dir=BOARD_DIR)
model.restore_hash(save_dir=HASH_SAVE_DIR)
model.prepare_test_hash()
performance_hash = model.test_hash_metric(args.k, K_SET)

# Release the model and dataset references before post-processing.
model.delete()
del model
del dm_train
del dm_val
del dm_test

write_pkl(performance_hash, path=PKL_DIR + HASH_FILE_ID + '_train.pkl')

# CSV of the train-side ('te_tr_*') metrics.  The original duplicated the
# 'suf' and 'at_k' branches (both called listformat) and dead-initialized
# content='' before every assignment; both cleaned up here.
cwrite = CsvWriter2(1)
key_set = [
    'train_nmi',
    'te_tr_suf',
    'te_tr_precision_at_k',
    'te_tr_recall_at_k',
]
for key in key_set:
    cwrite.add_header(0, str(key))
    # List-valued metrics are flattened for CSV output; scalars pass through.
    if 'suf' in key or 'at_k' in key:
        content = listformat(performance_hash[key])
    else:
        content = performance_hash[key]
    cwrite.add_content(0, content)
cwrite.write(CSV_DIR + HASH_FILE_ID + '_train.csv')

# NOTE(review): the same performance_hash dict is pickled again under the
# '_test' id; the test-side CSV section presumably follows this chunk —
# verify the duplicate pickle is intentional.
write_pkl(performance_hash, path=PKL_DIR + HASH_FILE_ID + '_test.pkl')
cwrite = CsvWriter2(1)