def _get_info_from_the_folder(folder_path):
    """Load the experiment arguments and per-rank runtime records from one folder.

    Args:
        folder_path: path to an experiment folder containing ``arguments.pickle``
            plus one sub-path per worker rank.

    Returns:
        A ``(folder_path, info)`` tuple where ``info`` maps
        ``"arguments"`` to the parsed ``arguments.pickle``,
        ``"single_records"`` to the parsed records of the first rank, and
        ``"multi_records"`` to the parsed records of every rank.
    """
    print("process the folder: {}".format(folder_path))
    arguments_path = os.path.join(folder_path, "arguments.pickle")

    # Collect runtime json info for one rank; archives and pickle files are
    # filtered out. NOTE(review): substring matching (".tar" not in ...) also
    # drops any path merely containing these substrings — confirm intended.
    # Fix: sorted() accepts any iterable, so no intermediate list is needed.
    sub_folder_paths = sorted(
        sub_folder_path
        for sub_folder_path in list_files(folder_path)
        if ".tar" not in sub_folder_path and "pickle" not in sub_folder_path
    )

    # Return the information.
    # Fix: os.path.join(x) with a single argument is a no-op; use the path as-is.
    # NOTE(review): raises IndexError when no sub-folders survive the filter —
    # confirm callers guarantee at least one rank.
    return (
        folder_path,
        {
            "arguments": _get_arguments(load_pickle(arguments_path)),
            # single worker records.
            "single_records": _parse_runtime_infos(sub_folder_paths[0]),
            # multiple workers records.
            "multi_records": [
                _parse_runtime_infos(sub_folder_path)
                for sub_folder_path in sub_folder_paths
            ],
        },
    )
def _get_info_from_the_folder(folder_path):
    """Gather arguments plus runtime records for a single experiment folder.

    Returns a ``(folder_path, info)`` pair: ``info["arguments"]`` is parsed
    from ``arguments.pickle``, ``info["single_records"]`` parses the first
    rank's ``log.json``, and ``info["averaged_records"]`` parses every
    rank's ``log.json`` jointly.

    NOTE(review): this redefines ``_get_info_from_the_folder`` declared
    earlier in the file; the later definition wins at import time — confirm
    the earlier one is dead code.
    """
    print("process the folder: {}".format(folder_path))
    arguments_path = os.path.join(folder_path, "arguments.pickle")

    # One candidate path per worker rank; archives and pickle files are skipped.
    candidates = [
        path
        for path in list_files(folder_path)
        if ".tar" not in path and "pickle" not in path
    ]
    candidates.sort()

    info = {
        "arguments": _get_arguments(load_pickle(arguments_path)),
        # single worker records.
        "single_records": _parse_runtime_info(
            os.path.join(candidates[0], "log.json")
        ),
        # records w.r.t. averaged model.
        "averaged_records": _parse_averaged_info(
            [os.path.join(path, "log.json") for path in candidates]
        ),
    }
    return folder_path, info
def get_pickle_info(root_data_path, experiments):
    """Load and concatenate the pickled results of several experiments.

    Args:
        root_data_path: root directory holding one sub-directory per experiment.
        experiments: iterable of experiment sub-directory names.

    Returns:
        The concatenation (via ``+``) of every loaded pickle payload; the
        payloads are assumed to support ``+`` (presumably lists of records —
        TODO confirm against the pickling side).

    Raises:
        TypeError: if no pickle file is found at all (empty reduce).
    """
    # NOTE(security): load_pickle deserializes arbitrary pickle data — only
    # use on trusted, locally produced files.
    file_paths = []
    for experiment in experiments:
        # Fix: hoist the repeated os.path.join(root_data_path, experiment);
        # also avoid shadowing-prone name "file" for the loop variable.
        experiment_dir = os.path.join(root_data_path, experiment)
        file_paths += [
            os.path.join(experiment_dir, file_name)
            for file_name in os.listdir(experiment_dir)
            if "pickle" in file_name
        ]

    # Fix: dict comprehension instead of dict(generator); reduce accepts any
    # iterable, so the intermediate list() is unnecessary.
    results = {path: load_pickle(path) for path in file_paths}
    return functools.reduce(lambda a, b: a + b, results.values())