def beet_default(ctx: Context):
    """Build every lib_* directory as a subproject, then merge the mcmeta
    files the worker sends back into a single root mcmeta on ctx.data."""
    for lib in sorted(ctx.directory.glob("lib_*")):
        ctx.require(subproject({
            "data_pack": {
                "load": [lib.name],
            },
            "pipeline": ["demo.export_mcmeta"],
        }))

    with ctx.worker(bridge) as channel:
        # Request mcmeta files from the worker
        channel.send(None)

        merged = JsonFile({
            "pack": {
                "pack_format": 7,
                "description": "This is the root",
            },
            "custom_data": 0,
            "libs": [],
        })

        # Accumulate custom_data and collect each lib's full mcmeta payload.
        for mcmeta in channel:
            merged.data["custom_data"] += mcmeta.data["custom_data"]
            merged.data["libs"].append(mcmeta.data)

        ctx.data.mcmeta = merged
def build_modules(ctx: Context):
    """Discover gm4_* modules, bump patch versions, and build release packs.

    Reads each module's pack.mcmeta for site metadata, diffs against the last
    released commit to decide which modules changed, then either:
      * builds one combined development pack when dev modules are requested, or
      * writes meta.json to OUTPUT and builds every (changed) module as a
        zipped data pack through a subproject.
    """
    version = os.getenv("VERSION", "1.18")
    prefix = int(os.getenv("PATCH_PREFIX", 0))

    # Modules requested for local development ("+"-separated ids in ctx.meta).
    dev: list[str] = [
        f"gm4_{id}" for id in ctx.meta.get("dev", "").split("+") if id
    ]
    modules = [{"id": p.name} for p in sorted(ctx.directory.glob("gm4_*"))]
    if "gm4_all" in dev:
        dev = [m["id"] for m in modules if m["id"] != "gm4_template_pack"]

    print(f"[GM4] Found {len(modules)} modules")
    head = run(["git", "rev-parse", "HEAD"])

    # Load the previous release's metadata to detect which modules changed.
    try:
        with open(f"{RELEASE}/{version}/meta.json", "r") as f:
            meta = json.load(f)
        released_modules = meta["modules"]
        last_commit = meta["last_commit"]
    except Exception:
        # First build for this version, or unreadable/incomplete meta.json:
        # treat every module as new. Was a bare `except:`; narrowed so
        # KeyboardInterrupt/SystemExit are no longer swallowed.
        released_modules = []
        last_commit = None

    if 'pull_request_base' in ctx.meta:
        last_commit = ctx.meta.get('pull_request_base')

    with open("contributors.json", "r") as f:
        contributors: dict[str, dict] = {
            entry["name"]: entry for entry in json.load(f)
        }

    changed_modules = set()

    for module in modules:
        id = module["id"]
        try:
            # Read-only open: the previous "r+" mode was never written to
            # and failed needlessly on read-only checkouts.
            with open(f"{id}/pack.mcmeta", "r") as f:
                meta: dict = json.load(f)
            module["name"] = meta.get("module_name", id)
            module["description"] = meta.get("site_description", "")
            module["categories"] = meta.get("site_categories", [])
            module["libraries"] = meta.get("libraries", [])
            module["requires"] = [
                f"gm4_{id}" for id in meta.get("required_modules", [])
            ]
            module["recommends"] = [
                f"gm4_{id}" for id in meta.get("recommended_modules", [])
            ]
            module["wiki_link"] = meta.get("wiki_link", "")
            module["video_link"] = meta.get("video_link", "")
            module["credits"] = meta.get("credits", {})
            module["hidden"] = meta.get("hidden", False)
            module["important_note"] = meta.get("important_note", False)
        except Exception:
            # Missing or malformed pack.mcmeta: mark the module invalid.
            module["id"] = None
            continue

        if dev:
            # Development builds don't bump patch numbers.
            continue

        # Non-empty git diff output (or no previous commit) => module changed.
        diff = run([
            "git", "diff", last_commit, "--shortstat", "--",
            BASE, *module["libraries"], id
        ]) if last_commit else True
        if diff:
            print('Diff', last_commit, id, diff)
            changed_modules.add(id)

        released: dict | None = next(
            (m for m in released_modules if m["id"] == id), None)
        if not diff and released:
            # Unchanged since the last release: keep the released patch.
            module["patch"] = released["patch"]
        else:
            patch = released["patch"] if released else prefix
            if not ctx.meta.get("pull_request_base"):
                # Only bump on real builds, not on PR preview builds.
                patch += 1
            print(f"Updating {id} -> {patch}")
            module["patch"] = patch

    if dev:
        # Build one combined pack containing the dev modules, their required
        # modules, and all libraries either of those depend on.
        libs = set()
        packs = set()
        for d in dev:
            packs.add(d)
            dev_module = next((m for m in modules if m["id"] == d), None)
            if dev_module is None:
                raise ValueError(f"Module '{d}' not found")
            libs.update(dev_module["libraries"])
            for r in dev_module["requires"]:
                packs.add(r)
                # StopIteration here means a module requires an id that
                # doesn't exist in the workspace.
                required = next(m for m in modules if m["id"] == r)
                libs.update(required["libraries"])
        ctx.require(load(data_pack=[BASE, *libs, *packs]))
        print(
            f"[GM4] Generated development pack: [{' '.join([*list(packs), *list(libs)])}]"
        )
    else:
        module_updates = [{k: m[k] for k in ["id", "name", "patch"]}
                          for m in modules if m["id"]]

        os.makedirs(OUTPUT, exist_ok=True)
        with open(f"{OUTPUT}/meta.json", "w") as f:
            out = {
                "last_commit": head,
                "modules": [m for m in modules if m.get("id") is not None],
                "contributors": contributors,
            }
            json.dump(out, f, indent=2)
            f.write('\n')

        for module in modules:
            id = module["id"]
            if not id:
                continue
            # On PR builds, only rebuild modules that actually changed.
            if ctx.meta.get("pull_request_base") and id not in changed_modules:
                continue
            ctx.require(
                subproject({
                    "id": id,
                    "data_pack": {
                        "name": f"{id}_{version.replace('.', '_')}",
                        "load": [BASE, *module["libraries"], id],
                        "zipped": True,
                    },
                    "output": OUTPUT,
                    "pipeline": ["gm4.module_updates", "gm4.populate_credits"],
                    "meta": {
                        "module_updates": module_updates,
                        "contributors": contributors,
                    }
                }))
            print(f"Generated {id}")
def beet_default(ctx: Context):
    """Build the "src" directory as a nested data-pack subproject."""
    config = {"data_pack": {"load": ["src"]}}
    ctx.require(subproject(config))
def build_modules(ctx: Context):
    """Discover gm4_* resource-pack modules and build each as a zipped pack.

    Reads each module's pack.mcmeta for site metadata, writes meta.json
    (module list + contributors) to OUTPUT, then builds one zipped resource
    pack per valid module through a subproject.
    """
    version = os.getenv("VERSION", "1.18")
    modules = [{"id": p.name} for p in sorted(ctx.directory.glob("gm4_*"))]
    print(f"[GM4] Found {len(modules)} modules")
    head = run(["git", "rev-parse", "HEAD"])

    with open("contributors.json", "r") as f:
        contributors: dict[str, dict] = {
            entry["name"]: entry for entry in json.load(f)
        }

    for module in modules:
        id = module["id"]
        try:
            # Read-only open: the previous "r+" mode was never written to
            # and failed needlessly on read-only checkouts.
            with open(f"{id}/pack.mcmeta", "r") as f:
                meta: dict = json.load(f)
            if "module_id" not in meta:
                # Not a real module pack (e.g. a template dir); skip it.
                module["id"] = None
                continue
            module["name"] = meta.get("module_name", id)
            module["description"] = meta.get("site_description", "")
            module["categories"] = meta.get("site_categories", [])
            module["libraries"] = meta.get("libraries", [])
            module["requires"] = [
                f"gm4_{id}" for id in meta.get("required_modules", [])
            ]
            module["recommends"] = [
                f"gm4_{id}" for id in meta.get("recommended_modules", [])
            ]
            module["wiki_link"] = meta.get("wiki_link", "")
            module["video_link"] = meta.get("video_link", "")
            module["credits"] = meta.get("credits", {})
            module["hidden"] = meta.get("hidden", False)
        except Exception:
            # Missing or malformed pack.mcmeta: mark the module invalid.
            # Was a bare `except:`; narrowed so Ctrl-C still aborts the build.
            module["id"] = None

    os.makedirs(OUTPUT, exist_ok=True)
    with open(f"{OUTPUT}/meta.json", "w") as f:
        out = {
            "last_commit": head,
            "modules": [m for m in modules if m.get("id") is not None],
            "contributors": contributors,
        }
        json.dump(out, f, indent=2)
        f.write('\n')

    for module in modules:
        id = module["id"]
        if not id:
            continue
        ctx.require(
            subproject({
                "id": id,
                "resource_pack": {
                    "name": f"{id}_{version.replace('.', '_')}",
                    "load": [*module["libraries"], id],
                    "zipped": True,
                },
                "output": OUTPUT,
                "pipeline": ["gm4.populate_credits"],
                "meta": {
                    "contributors": contributors,
                }
            }))
        print(f"Generated {id}")
def beet_default(ctx: Context):
    """Delegate the build to the "@demo" packaged subproject."""
    demo = subproject("@demo")
    ctx.require(demo)
def build_modules(ctx: Context):
    """Discover gm4_* modules, bump patch versions, and build zipped packs.

    Diffs each module against the last released commit: unchanged modules keep
    their released patch number, changed or new modules get a bumped one.
    Writes meta.json to OUTPUT and builds each valid module via a subproject.
    """
    version = os.getenv("VERSION", "1.18")
    prefix = int(os.getenv("PATCH_PREFIX", 0))
    modules = [{"id": p.name} for p in sorted(ctx.directory.glob("gm4_*"))]
    print(f"[GM4] Found {len(modules)} modules")
    head = run(["git", "rev-parse", "HEAD"])

    # Load the previous release's metadata to detect which modules changed.
    try:
        with open(f"{RELEASE}/{version}/meta.json", "r") as f:
            meta = json.load(f)
        released_modules = meta["modules"]
        last_commit = meta["last_commit"]
    except Exception:
        # First build for this version, or unreadable/incomplete meta.json:
        # treat every module as new. Was a bare `except:`; narrowed so
        # KeyboardInterrupt/SystemExit are no longer swallowed.
        released_modules = []
        last_commit = None

    print(f"version={version} HEAD={head} last={last_commit}")

    for module in modules:
        id = module["id"]
        try:
            with open(f"{id}/pack.mcmeta", "r") as f:
                meta: dict = json.load(f)
            module["name"] = meta.get("module_name", id)
            module["description"] = meta.get("site_description", "")
            module["categories"] = meta.get("site_categories", [])
            module["libraries"] = meta.get("libraries", [])
            module["hidden"] = meta.get("hidden", False)
        except Exception:
            # Missing or malformed pack.mcmeta: mark the module invalid.
            module["id"] = None
            continue

        # Non-empty git diff output (or no previous commit) => module changed.
        diff = run([
            "git", "diff", last_commit, "--shortstat", "--",
            BASE, *module["libraries"], id
        ]) if last_commit else True
        released: dict | None = next(
            (m for m in released_modules if m["id"] == id), None)
        if not diff and released:
            # Unchanged since the last release: keep the released patch.
            module["patch"] = released["patch"]
        else:
            new_patch = released["patch"] + 1 if released else prefix
            module["patch"] = new_patch
            print(f"Updating {id} -> {new_patch}")

    module_updates = [{k: m[k] for k in ["id", "name", "patch"]}
                      for m in modules if m["id"]]

    os.makedirs(OUTPUT, exist_ok=True)
    with open(f"{OUTPUT}/meta.json", "w") as f:
        out = {
            "last_commit": head,
            "modules": [m for m in modules if m.get("id") is not None],
        }
        json.dump(out, f, indent=2)
        f.write('\n')

    for module in modules:
        id = module["id"]
        if not id:
            continue
        ctx.require(
            subproject({
                "id": id,
                "data_pack": {
                    "name": f"{id}_{version.replace('.', '_')}",
                    "load": [BASE, *module["libraries"], id],
                    "zipped": True,
                },
                "output": OUTPUT,
                "pipeline": ["gm4.module_updates"],
                "meta": {
                    "module_updates": module_updates
                }
            }))
        print(f"Generated {id}")
def beet_default(ctx: Context):
    """Build the nested beet config file packaged alongside this module."""
    nested_config = f"@{__name__}/beet-nested.yml"
    ctx.require(subproject(nested_config))