def main(src_folder, target_folder, save_single_pickle, with_eval_stats):
    """Compile tasks from src_folder and serialize them into target_folder.

    Args:
        src_folder: Folder containing task scripts to load.
        target_folder: Destination folder for serialized tasks (created if
            missing).
        save_single_pickle: If True, write all tasks as one lzma-compressed
            TaskCollection file named phyre.settings.TASK_PICKLE_NAME;
            otherwise save each task individually via _save_task in a thread
            pool.
        with_eval_stats: If True, attach precomputed evaluation statistics
            to the loaded tasks.
    """
    # exist_ok=True avoids the check-then-create race of the original
    # `if not os.path.exists(...)` guard.
    os.makedirs(target_folder, exist_ok=True)
    if with_eval_stats:
        eval_stats = phyre.eval_task_complexity.load_all_eval_stats()
    else:
        eval_stats = None
    tasks = phyre.loader.load_tasks_from_folder(src_folder,
                                                eval_stats=eval_stats)
    if save_single_pickle:
        path = os.path.join(target_folder, phyre.settings.TASK_PICKLE_NAME)
        # Sort by taskId so the dump is deterministic across runs.
        task_collection = task_if.TaskCollection(
            tasks=sorted(tasks.values(), key=lambda task: task.taskId))
        with lzma.open(path, 'w') as stream:
            stream.write(phyre.simulator.serialize(task_collection))
    else:
        # Saving is I/O-bound, so a small thread pool overlaps the writes.
        with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor:
            futures = [
                executor.submit(_save_task, task_id, thrift_task,
                                target_folder)
                for task_id, thrift_task in tasks.items()
            ]
            for future in futures:
                task_id = future.result()
                print("Saved task", task_id)
def load_compiled_task_dict() -> Dict[str, task_if.Task]:
    """Helper function to load the default task dump."""
    dump_path = phyre.settings.TASK_DIR / phyre.settings.TASK_PICKLE_NAME
    with lzma.open(dump_path) as stream:
        raw = stream.read()
    collection = phyre.simulator.deserialize(task_if.TaskCollection(), raw)
    # Index the collection by task id for O(1) lookup.
    tasks_by_id = {}
    for task in collection.tasks:
        tasks_by_id[task.taskId] = task
    return tasks_by_id
def main(src_folder=str(phyre.settings.TASK_SCRIPTS_DIR),
         target_folder=phyre.settings.TASK_DIR,
         save_single_pickle=True,
         with_eval_stats=False):
    """Compile tasks from src_folder and serialize them into target_folder.

    Args:
        src_folder: Folder containing task scripts to load.
        target_folder: Destination folder for serialized tasks (created if
            missing).
        save_single_pickle: If True, group tasks per pickle file (one
            lzma-compressed TaskCollection per file name produced by
            phyre.loader.task_id_to_pickle); otherwise save each task
            individually via _save_task in a thread pool.
        with_eval_stats: If True, attach precomputed evaluation statistics
            to the loaded tasks.
    """
    # exist_ok=True avoids the check-then-create race of the original
    # `if not os.path.exists(...)` guard.
    os.makedirs(target_folder, exist_ok=True)
    if with_eval_stats:
        eval_stats = phyre.eval_task_complexity.load_all_eval_stats()
    else:
        eval_stats = None
    tasks = phyre.loader.load_tasks_from_folder(src_folder,
                                                eval_stats=eval_stats)
    if save_single_pickle:
        # Group tasks by their destination pickle file name.
        per_file = collections.defaultdict(list)
        for task in tasks.values():
            per_file[phyre.loader.task_id_to_pickle(task.taskId)].append(task)
        for fname, task_list in per_file.items():
            # Sort by taskId so each dump is deterministic across runs.
            collection = task_if.TaskCollection(
                tasks=sorted(task_list, key=lambda task: task.taskId))
            path = os.path.join(target_folder, fname)
            with lzma.open(path, 'w') as stream:
                stream.write(phyre.simulator.serialize(collection))
    else:
        # Saving is I/O-bound, so a small thread pool overlaps the writes.
        with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor:
            futures = [
                executor.submit(_save_task, task_id, thrift_task,
                                target_folder)
                for task_id, thrift_task in tasks.items()
            ]
            for future in futures:
                task_id = future.result()
                print("Saved task", task_id)
def load_compiled_task_dict(
        task_ids: Optional[Sequence[str]] = None) -> Dict[str, task_if.Task]:
    """Helper function to load the default task dump."""
    if task_ids is None:
        # No filter: read every pickle shard in the task directory.
        paths = phyre.settings.TASK_DIR.glob("*.bin.lzma")
    else:
        # Only touch the shards that can contain the requested ids;
        # frozenset de-duplicates shard file names.
        shard_names = frozenset(map(task_id_to_pickle, task_ids))
        paths = [phyre.settings.TASK_DIR / name for name in shard_names]
    data = {}
    for path in paths:
        with lzma.open(path) as stream:
            payload = stream.read()
        collection = phyre.simulator.deserialize(task_if.TaskCollection(),
                                                 payload)
        for task in collection.tasks:
            data[task.taskId] = task
    if task_ids is not None:
        # A requested id that no shard provided is a caller error.
        missing = frozenset(task_ids).difference(data)
        if missing:
            raise RuntimeError('Unknown task ids: %s' % missing)
    return data
def load_compiled_task_dict():
    """Load the default task dump and index template ids to modifications.

    Task ids look like "<template>:<modification>" — TODO confirm every id
    in the dump has exactly this two-part form.

    Returns:
        Tuple (task_map_all, phyre_task):
            task_map_all: dict mapping each template id to the list of its
                modification ids, in dump order.
            phyre_task: list of the split id pairs, i.e.
                [template_id, modification_id] per task, in dump order.
    """
    my_path = os.path.abspath(os.path.dirname(__file__))
    path = os.path.join(my_path, "../../../../data/tasks.bin.lzma")
    phyre_task = []
    task_map_all = {}
    with lzma.open(path) as stream:
        collection = phyre.simulator.deserialize(task_if.TaskCollection(),
                                                 stream.read())
    for task in collection.tasks:
        # Keep the split result in its own name — the original rebound the
        # loop variable `task` here, which made the appends misleading.
        parts = task.taskId.split(":")
        template_id = parts[0]
        mod_id = parts[1]
        phyre_task.append(parts)
        # setdefault replaces the manual `if key not in dict` grouping.
        task_map_all.setdefault(template_id, []).append(mod_id)
    return task_map_all, phyre_task