def launch_generator(pool: multiprocessing.pool.Pool, generation: Generation):
    """Submit an asynchronous game-generation job for *generation* to *pool*.

    On successful submission the generation is marked STATE_STARTED.
    If decoding the stored payload or submitting the job fails, the
    generation is marked STATE_ERROR and committed immediately so it is
    not left in an indeterminate state (matching the sibling
    launch_generator implementation's error handling).
    """
    try:
        options = restricted_loads(generation.options)
        logging.info(f"Generating {generation.id} for {len(options)} players")
        meta = restricted_loads(generation.meta)
        pool.apply_async(
            gen_game,
            (options,),
            {"race": meta["race"], "sid": generation.id, "owner": generation.owner},
            handle_generation_success,
            handle_generation_failure,
        )
    except Exception as e:
        # Record the failure so callers polling generation.state see it.
        generation.state = STATE_ERROR
        commit()
        logging.exception(e)
    else:
        generation.state = STATE_STARTED
def launch_generator(pool: multiprocessing.pool.Pool, generation: Generation):
    """Kick off asynchronous generation of the game described by *generation*.

    Decodes the stored meta (JSON) and options (restricted pickle), then
    hands the job to *pool*. On any failure the generation is flagged
    STATE_ERROR and committed; otherwise it is flagged STATE_STARTED.
    """
    try:
        meta = json.loads(generation.meta)
        options = restricted_loads(generation.options)
        logging.info(f"Generating {generation.id} for {len(options)} players")
        job_kwargs = {
            "meta": meta,
            "sid": generation.id,
            "owner": generation.owner,
        }
        pool.apply_async(
            gen_game,
            (options,),
            job_kwargs,
            handle_generation_success,
            handle_generation_failure,
        )
    except Exception as e:
        # Persist the error state right away so the failure is visible.
        generation.state = STATE_ERROR
        commit()
        logging.exception(e)
    else:
        generation.state = STATE_STARTED
def _pack_sarcs(tmp_dir: Path, hashes: dict, pool: multiprocessing.pool.Pool):
    """Repack extracted SARC folders under *tmp_dir*, nested archives first.

    Folders inside an "options" subtree are skipped. Plain SARC folders
    (any util.SARC_EXTS suffix except ".pack") are packed before ".pack"
    folders so nested archives are rebuilt before their containers.

    Args:
        tmp_dir: Root of the extracted mod tree.
        hashes: Hash table forwarded to _pack_sarc.
        pool: Worker pool used to pack folders in parallel.
    """

    def _eligible_dirs(suffix_pred) -> set:
        # Directories outside any "options" subtree whose suffix satisfies
        # *suffix_pred*; factored out of two near-identical comprehensions.
        return {
            d
            for d in tmp_dir.rglob("**/*")
            if d.is_dir()
            and "options" not in d.relative_to(tmp_dir).parts
            and suffix_pred(d.suffix)
        }

    packer = partial(_pack_sarc, hashes=hashes, tmp_dir=tmp_dir)

    sarc_folders = _eligible_dirs(lambda s: s != ".pack" and s in util.SARC_EXTS)
    if sarc_folders:
        pool.map(packer, sarc_folders)

    pack_folders = _eligible_dirs(lambda s: s == ".pack")
    if pack_folders:
        pool.map(packer, pack_folders)
def _clean_sarcs(tmp_dir: Path, hashes: dict, pool: multiprocessing.pool.Pool):
    """Strip unmodified entries from SARCs under *tmp_dir* and refresh the pack log.

    Runs _clean_sarc_file over every candidate SARC outside the "options"
    subtree, then re-scans (cleaning may delete files outright) and writes
    the survivors to logs/packs.json — or removes that log when none remain.

    Args:
        tmp_dir: Root of the extracted mod tree.
        hashes: Hash table forwarded to _clean_sarc_file.
        pool: Worker pool used to clean files in parallel.
    """

    def _candidate_sarcs() -> set:
        # SARC files (by CLEAN_EXTS suffix) outside any "options" subtree.
        return {
            file
            for file in tmp_dir.rglob("**/*")
            if file.suffix in CLEAN_EXTS
            and "options" not in file.relative_to(tmp_dir).parts
        }

    sarc_files = _candidate_sarcs()
    if sarc_files:
        print("Creating partial packs...")
        pool.map(partial(_clean_sarc_file, hashes=hashes, tmp_dir=tmp_dir), sarc_files)

    # Re-scan: cleaning can delete sarcs entirely, so the first set is stale.
    sarc_files = _candidate_sarcs()
    if sarc_files:
        print("Updating pack log...")
        # Every member of sarc_files already satisfies the CLEAN_EXTS filter,
        # so the original's redundant re-filter (final_packs) — and the dead
        # unlink branch it guarded — are dropped.
        (tmp_dir / "logs").mkdir(parents=True, exist_ok=True)
        (tmp_dir / "logs" / "packs.json").write_text(
            dumps(
                {
                    util.get_canon_name(file.relative_to(tmp_dir)): str(
                        file.relative_to(tmp_dir)
                    )
                    for file in sarc_files
                },
                indent=2,
            )
        )
    else:
        # No packs survived cleaning; remove a stale log if one exists.
        try:
            (tmp_dir / "logs" / "packs.json").unlink()
        except FileNotFoundError:
            pass