def test_dump_and_partial_load(self):
    result_num = 100
    confs, result_data, histories = random_benchmark_conf_data(
        result_num, 2000000, hist_size=1500000)
    br = BenchmarkResult(result_dict=result_data,
                         benchmark_configs=confs,
                         histories=histories)
    br.dump("./results_all", dump_configs=True,
            dump_histories=True, max_mb_per_file=5)

    br_loaded = BenchmarkResult.load("./results_all")
    loaded_dict = br_loaded.get_result_dict()
    self.assertEqual(br.get_result_dict(), loaded_dict)

    loaded_configs_idx = list(range(10, 20))
    processed_files = br_loaded.load_benchmark_configs(
        config_idx_list=loaded_configs_idx)
    loaded_confs = br_loaded.get_benchmark_configs()
    self.assertEqual(len(loaded_confs), 10)
    # 2 MB per config, max 5 MB per file -> 2 configs per file -> 10/2 = 5 files
    self.assertEqual(len(processed_files), 5)
    for conf_idx in loaded_configs_idx:
        self.assertEqual(br_loaded.get_benchmark_config(conf_idx), confs[conf_idx])

    loaded_configs_idx = list(range(10, 27))
    processed_files = br_loaded.load_histories(config_idx_list=loaded_configs_idx)
    loaded_histories = br_loaded.get_histories()
    # one more than specified, since it shares a file with a requested index
    self.assertEqual(len(loaded_histories), 18)
    # 1.5 MB per history, max 5 MB per file -> 3 histories per file -> 17/3 -> 6 files
    self.assertEqual(len(processed_files), 6)
    for conf_idx in loaded_configs_idx:
        self.assertEqual(br_loaded.get_history(conf_idx), histories[conf_idx])
def merge_checkpoint_benchmark_results(checkpoint_dir):
    checkpoint_files = glob.glob(
        os.path.join(checkpoint_dir, "**/*.ckpnt"), recursive=True)
    merged_result = BenchmarkResult()
    # define the merged filename up front so the cleanup loop below can
    # always compare against it, even if nothing gets dumped
    merged_result_filename = os.path.join(checkpoint_dir, "merged_results.ckpnt")

    # merge all checkpoints with new results
    for checkpoint_file in checkpoint_files:
        logging.info("Loading checkpoint {}".format(
            os.path.abspath(checkpoint_file)))
        next_result = BenchmarkResult.load(
            os.path.abspath(checkpoint_file),
            load_configs=True, load_histories=True)
        merged_result.extend(next_result)

    # dump merged result
    if len(merged_result.get_result_dict()) > 0:
        logging.info("Dumping merged result")
        merged_result.dump(merged_result_filename,
                           dump_configs=True, dump_histories=True)

    # delete old checkpoints, keeping only the merged result
    for checkpoint_file in checkpoint_files:
        if checkpoint_file == merged_result_filename:
            continue
        os.remove(checkpoint_file)
        logging.info(
            "Removed old checkpoint file {}".format(checkpoint_file))
    return merged_result
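# A minimal usage sketch (not from the original source): assuming a directory
# of per-worker checkpoint files written by distributed benchmark runs, the
# path "./checkpoints" below is purely illustrative.
if __name__ == "__main__":
    merged = merge_checkpoint_benchmark_results("./checkpoints")
    print("Merged {} benchmark results".format(len(merged.get_result_dict())))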
def test_dump_and_load_benchmark_configs(self):
    result_num = 100
    confs, result_data, _ = random_benchmark_conf_data(result_num, 2000000)
    br = BenchmarkResult(result_dict=result_data, benchmark_configs=confs)
    br.dump("./results_with_confs", dump_configs=True, max_mb_per_file=5)

    br_loaded = BenchmarkResult.load("./results_with_confs")
    br_loaded.load_benchmark_configs(config_idx_list=list(range(0, result_num)))
    loaded_confs = br_loaded.get_benchmark_configs()
    self.assertEqual(confs, loaded_confs)

    loaded_dict = br_loaded.get_result_dict()
    self.assertEqual(br.get_result_dict(), loaded_dict)
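# Hypothetical sketch of the test fixture assumed above. The real
# random_benchmark_conf_data helper is not shown in this excerpt; the only
# assumption made here is that it returns (configs, result_rows, histories)
# whose per-item payloads are roughly conf_size / hist_size bytes, so that
# the max_mb_per_file packing arithmetic in the tests works out.
import random
import string

def random_benchmark_conf_data(num_results, conf_size, hist_size=None):
    def _payload(num_bytes):
        # roughly num_bytes of random ASCII data
        return "".join(random.choices(string.ascii_letters, k=num_bytes))
    confs = [{"config_idx": idx, "payload": _payload(conf_size)}
             for idx in range(num_results)]
    result_rows = [{"config_idx": idx, "success": True}
                   for idx in range(num_results)]
    histories = {idx: _payload(hist_size)
                 for idx in range(num_results)} if hist_size else None
    return confs, result_rows, histories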