def extract_whd(entry):
    if entry_is_notwhdl(entry):
        dest = get_whd_dir(entry)
        util.lha_extract(entry["archive_path"], dest)
    elif "archive_path" in entry and util.is_file(entry["archive_path"]):
        dest = get_whd_dir(entry)
        util.lha_extract(entry["archive_path"], dest)
        info_path = os.path.join(dest, entry["slave_dir"] + ".info")
        if util.is_file(info_path):
            os.remove(info_path)
    else:
        print("whdload archive not found:", entry["id"])

def verify():
    vars = ["AGSCONTENT", "AGSDEST", "AGSTEMP", "FSUAEBIN", "FSUAEROM"]
    for var in vars:
        if os.getenv(var) is None:
            raise IOError(
                "missing {} environment variable - check .env!".format(var))
    if not util.is_dir(content()):
        raise IOError("AGSCONTENT is not a directory - check .env!")
    if not util.is_file(util.path(os.getenv("FSUAEBIN"))):
        raise IOError("FSUAEBIN is not a file - check .env!")
    if not util.is_file(util.path(os.getenv("FSUAEROM"))):
        raise IOError("FSUAEROM is not a file - check .env!")
    return True

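# Illustrative sketch only: the environment verify() above expects, expressed as
# os.environ assignments. The variable names come from verify(); the example paths
# are hypothetical stand-ins for whatever .env provides.
import os

os.environ.setdefault("AGSCONTENT", "/home/user/ags-content")    # content root (checked as a directory)
os.environ.setdefault("AGSDEST", "/home/user/ags-dest")          # build destination
os.environ.setdefault("AGSTEMP", "/tmp/ags")                     # scratch directory
os.environ.setdefault("FSUAEBIN", "/usr/local/bin/fs-uae")       # FS-UAE binary (checked as a file)
os.environ.setdefault("FSUAEROM", "/home/user/roms/kick31.rom")  # Kickstart ROM (checked as a file)
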
def main(): parser = argparse.ArgumentParser() parser.add_argument("image", metavar="FILE", help="image") parser.add_argument("-v", "--verbose", dest="verbose", action="store_true", default=False, help="verbose output") try: args = parser.parse_args() if not util.is_file(args.image): raise IOError("file doesn't exist: " + args.image) return 1 (stats, r) = analyze(args.image) if args.verbose: print(stats, "->") print(r) return 0 # except Exception as err: except IOError as err: print("error - {}".format(err)) sys.exit(1)
def index_whdload_archives(basedir):
    basedir += os.sep
    print("enumerating archives..", end="", flush=True)
    count = 0
    d = {}
    for r, _, f in os.walk(basedir):
        for file in f:
            if file.endswith(".lha"):
                count += 1
                if count % 100 == 0:
                    print(".", end="", flush=True)
                path = util.path(r, file)
                db_path = path.split(basedir)[1]
                slave_category = db_path.split(os.sep)[0]
                if slave_category in ["game", "demo", "mags"]:
                    arc = LhaFile(path)
                    for n in arc.namelist():
                        n = n.replace("\\", "/")
                        if n.lower().endswith(".slave"):
                            if len(n.split("/")) > 2:
                                pass  # skip slaves nested more than one directory below the archive root
                            else:
                                slave_id = slave_category + "--" + n[:-6].replace(
                                    "/", "--").lower()
                                slave_ver = "v1.0"
                                try:
                                    verstr = file[:-4].split("_")[1]
                                    if verstr.startswith("v"):
                                        slave_ver = verstr
                                except Exception:
                                    pass
                                d[slave_id] = {
                                    "id": slave_id,
                                    "archive_path": db_path,
                                    "slave_path": n,
                                    "slave_version": slave_ver
                                }
                elif slave_category in [
                        "game-notwhdl", "demo-notwhdl", "mags-notwhdl"
                ]:
                    slave_id = slave_category + "--" + os.path.splitext(
                        os.path.basename(path))[0].lower()
                    if util.is_file(path.replace(".lha", ".run")):
                        d[slave_id] = {
                            "id": slave_id,
                            "archive_path": db_path,
                            "slave_path": None,
                            "slave_version": None
                        }
    print("\n", flush=True)
    return d

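# Illustrative sketch only (not used by the indexer): how index_whdload_archives()
# above derives a slave id from the category directory and a .slave path inside an
# archive. The example archive content is made up.
def demo_slave_id(slave_category, slave_path_in_archive):
    # drop the ".slave" suffix (6 chars), join path components with "--", lowercase
    return slave_category + "--" + slave_path_in_archive[:-6].replace("/", "--").lower()

assert demo_slave_id("game", "Turrican2/Turrican2.Slave") == "game--turrican2--turrican2"
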
def extract_whd(entry):
    arc_path = get_archive_path(entry)
    if not arc_path:
        print(" > WARNING: content archive not found:", entry["id"])
    else:
        dest = get_whd_dir(entry)
        if entry_is_notwhdl(entry):
            util.lha_extract(arc_path, dest)
        else:
            util.lha_extract(arc_path, dest)
            info_path = util.path(dest, entry["slave_dir"] + ".info")
            if util.is_file(info_path):
                os.remove(info_path)

def main():
    global g_args, g_db, g_out_dir, g_clone_dir
    parser = argparse.ArgumentParser()
    parser.add_argument("-c", "--config", dest="config_file", required=True,
                        metavar="FILE",
                        type=lambda x: util.argparse_is_file(parser, x),
                        help="configuration file")
    parser.add_argument("-o", "--out_dir", dest="out_dir", metavar="FILE",
                        type=lambda x: util.argparse_is_dir(parser, x),
                        help="output directory")
    parser.add_argument("-b", "--base_hdf", dest="base_hdf", metavar="FILE",
                        help="base HDF image")
    parser.add_argument("-a", "--ags_dir", dest="ags_dir", metavar="FILE",
                        type=lambda x: util.argparse_is_dir(parser, x),
                        help="AGS2 configuration directory")
    parser.add_argument(
        "-d", "--add_dir", dest="add_dirs", action="append",
        help="add dir to amiga filesystem (example 'DH1:Music::~/Amiga/Music')")
    parser.add_argument("--all_games", dest="all_games", action="store_true",
                        default=False, help="include all games in database")
    parser.add_argument("--all_demos", dest="all_demos", action="store_true",
                        default=False, help="include all demos in database")
    parser.add_argument(
        "--all_versions", dest="all_versions", action="store_true",
        default=False,
        help="include all non-redundant versions of titles (if --all_games)")
    parser.add_argument("--no_autolists", dest="no_autolists",
                        action="store_true", default=False,
                        help="don't add any auto-lists")
    parser.add_argument("--no_img", dest="no_img", action="store_true",
                        default=False, help="don't copy screenshots")
    parser.add_argument("--ecs_versions", dest="ecs", action="store_true",
                        default=False,
                        help="prefer OCS/ECS versions (if --all_games)")
    parser.add_argument("--force_ntsc", dest="ntsc", action="store_true",
                        default=False, help="force NTSC video mode")
    parser.add_argument("-v", "--verbose", dest="verbose", action="store_true",
                        default=False, help="verbose output")

    try:
        paths.verify()
        g_args = parser.parse_args()
        g_db = util.get_db(g_args.verbose)

        if g_args.out_dir:
            g_out_dir = g_args.out_dir
        g_clone_dir = util.path(g_out_dir, "tmp")
        if util.is_dir(g_clone_dir):
            shutil.rmtree(g_clone_dir)
        util.make_dir(util.path(g_clone_dir, "DH0"))

        config_base_name = os.path.splitext(
            os.path.basename(g_args.config_file))[0]
        data_dir = "data"
        if not util.is_dir(data_dir):
            raise IOError("data dir doesn't exist: " + data_dir)

        # extract base image
        base_hdf = g_args.base_hdf
        if not base_hdf:
            base_hdf = util.path(paths.content(), "base", "base.hdf")
        if not util.is_file(base_hdf):
            raise IOError("base HDF doesn't exist: " + base_hdf)
        if g_args.verbose:
            print("extracting base HDF image... ({})".format(base_hdf))
        extract_base_image(base_hdf, get_boot_dir())

        # parse menu
        menu = None
        if g_args.verbose:
            print("parsing menu...")
        menu = util.yaml_load(g_args.config_file)
        if not isinstance(menu, list):
            raise ValueError("config file not a list: " + g_args.config_file)

        # copy base AGS2 config, create database
        if g_args.verbose:
            print("building AGS2 database...")
        base_ags2 = g_args.ags_dir
        if not base_ags2:
            base_ags2 = util.path("data", "ags2")
        if not util.is_dir(base_ags2):
            raise IOError("AGS2 configuration directory doesn't exist: " +
                          base_ags2)
        if g_args.verbose:
            print(" > using configuration: " + base_ags2)
        util.copytree(base_ags2, get_ags2_dir())

        if menu:
            ags_create_tree(menu)
        if g_args.all_games:
            ags_add_all("Game")
        if g_args.all_demos:
            ags_add_all("Demo")
            ags_add_all("Mags")
        if not g_args.no_autolists:
            ags_create_autoentries()
        create_vadjust_dats()

        # extract whdloaders
        if g_args.verbose:
            print("extracting {} content archives...".format(
                len(g_entries.items())))
        extract_entries(g_entries)

        # copy extra files
        config_extra_dir = util.path(os.path.dirname(g_args.config_file),
                                     config_base_name)
        if util.is_dir(config_extra_dir):
            if g_args.verbose:
                print("copying configuration extras...")
            util.copytree(config_extra_dir, g_clone_dir)

        # copy additional directories
        if g_args.add_dirs:
            if g_args.verbose:
                print("copying additional directories...")
            for s in g_args.add_dirs:
                d = s.split("::")
                if util.is_dir(d[0]):
                    dest = util.path(g_clone_dir, d[1].replace(":", "/"))
                    print(" > copying '" + d[0] + "' to '" + d[1] + "'")
                    util.copytree(d[0], dest)
                else:
                    print(" > WARNING: '" + d[1] + "' doesn't exist")

        # build PFS container
        build_pfs(config_base_name, g_args.verbose)

        # set up cloner environment
        cloner_adf = util.path("data", "cloner", "boot.adf")
        cloner_cfg = util.path("data", "cloner", "template.fs-uae")
        clone_script = util.path(os.path.dirname(g_args.config_file),
                                 config_base_name) + ".clonescript"
        if util.is_file(cloner_adf) and util.is_file(
                cloner_cfg) and util.is_file(clone_script):
            if g_args.verbose:
                print("copying cloner config...")
            shutil.copyfile(cloner_adf, util.path(g_clone_dir, "boot.adf"))
            # create config from template
            with open(cloner_cfg, 'r') as f:
                cfg = f.read()
            cfg = cfg.replace("<config_base_name>", config_base_name)
            cfg = cfg.replace("$AGSTEMP", paths.tmp())
            cfg = cfg.replace("$AGSDEST", util.path(os.getenv("AGSDEST")))
            cfg = cfg.replace("$FSUAEROM", util.path(os.getenv("FSUAEROM")))
            open(util.path(g_clone_dir, "cfg.fs-uae"), mode="w").write(cfg)
            # copy clone script and write fs-uae metadata
            shutil.copyfile(clone_script, util.path(g_clone_dir, "clone"))
            open(util.path(g_clone_dir, "clone.uaem"),
                 mode="w").write("-s--rwed 2020-02-02 22:22:22.00")
        else:
            print("WARNING: cloner config files not found")

        # clean output directory
        for r, _, f in os.walk(g_clone_dir):
            for name in f:
                path = util.path(r, name)
                if name == ".DS_Store":
                    os.remove(path)
        return 0

    # except Exception as err:
    except IOError as err:
        print("error - {}".format(err))
        sys.exit(1)

def build_pfs(config_base_name, verbose):
    if verbose:
        print("building PFS container...")
    pfs3_bin = util.path("data", "pfs3", "pfs3.bin")
    if not util.is_file(pfs3_bin):
        raise IOError("PFS3 filesystem doesn't exist: " + pfs3_bin)

    if verbose:
        print(" > calculating partition sizes...")
    block_size = 512
    heads = 4
    sectors = 63
    cylinder_size = block_size * heads * sectors
    fs_overhead = 1.0718
    num_cyls_rdb = 1
    total_cyls = num_cyls_rdb

    partitions = []  # (partition name, cylinders)
    for f in sorted(os.listdir(g_clone_dir)):
        if util.is_dir(util.path(g_clone_dir, f)) and is_amiga_devicename(f):
            mb_free = 100 if f == "DH0" else 50
            cyls = int(
                fs_overhead *
                (util.get_dir_size(util.path(g_clone_dir, f), block_size)[2] +
                 (mb_free * 1024 * 1024))) // cylinder_size
            partitions.append(("DH" + str(len(partitions)), cyls))
            total_cyls += cyls

    out_hdf = util.path(g_out_dir, config_base_name + ".hdf")
    if util.is_file(out_hdf):
        os.remove(out_hdf)

    if verbose:
        print(" > creating pfs container ({}MB)...".format(
            (total_cyls * cylinder_size) // (1024 * 1024)))
    r = subprocess.run([
        "rdbtool", out_hdf, "create",
        "chs={},{},{}".format(total_cyls + 1, heads, sectors), "+", "init",
        "rdb_cyls={}".format(num_cyls_rdb)
    ])

    if verbose:
        print(" > adding filesystem...")
    r = subprocess.run(["rdbtool", out_hdf, "fsadd", pfs3_bin, "fs=PFS3"],
                       stdout=subprocess.PIPE)

    if verbose:
        print(" > adding partitions...")
    # add boot partition
    part = partitions.pop(0)
    if verbose:
        print(" > " + part[0])
    r = subprocess.run([
        "rdbtool", out_hdf, "add", "name={}".format(part[0]),
        "start={}".format(num_cyls_rdb), "size={}".format(part[1]),
        "fs=PFS3", "block_size={}".format(block_size),
        "max_transfer=0x0001FE00", "mask=0x7FFFFFFE", "num_buffer=300",
        "bootable=True"
    ], stdout=subprocess.PIPE)

    # add subsequent partitions
    for part in partitions:
        if verbose:
            print(" > " + part[0])
        r = subprocess.run(["rdbtool", out_hdf, "free"],
                           stdout=subprocess.PIPE,
                           universal_newlines=True)
        free = make_tuple(r.stdout.splitlines()[0])
        free_start = int(free[0])
        free_end = int(free[1])
        part_start = free_start
        part_end = part_start + part[1]
        if part_end > free_end:
            part_end = free_end
        r = subprocess.run([
            "rdbtool", out_hdf, "add", "name={}".format(part[0]),
            "start={}".format(part_start), "end={}".format(part_end),
            "fs=PFS3", "block_size={}".format(block_size),
            "max_transfer=0x0001FE00", "mask=0x7FFFFFFE", "num_buffer=300"
        ], stdout=subprocess.PIPE)
    return

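# Worked example (illustrative only) of the partition sizing in build_pfs() above:
# sizes are rounded to whole "cylinders" of heads * sectors * block_size bytes
# (floor division), with a fixed filesystem overhead factor and per-partition
# free-space headroom. The 300 MB payload below is a made-up figure.
def _partition_size_example():
    block_size, heads, sectors = 512, 4, 63
    cylinder_size = block_size * heads * sectors      # 129024 bytes per cylinder
    fs_overhead = 1.0718
    dir_size_bytes = 300 * 1024 * 1024                # hypothetical DH0 payload
    mb_free = 100                                     # DH0 gets 100 MB of headroom
    cyls = int(fs_overhead * (dir_size_bytes + mb_free * 1024 * 1024)) // cylinder_size
    return cyls                                       # 3484 cylinders, roughly 429 MB
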
def ags_create_entry(name,
                     entry,
                     path,
                     rank=None,
                     only_script=False,
                     prefix=None,
                     options=None):
    global g_entry_for_path
    max_w = AGS_LIST_WIDTH
    note = None

    # apply override options
    if isinstance(options, dict):
        if entry and isinstance(entry, dict):
            entry.update(options)
        elif "note" in options and isinstance(options["note"], str):
            note = options["note"]
    if isinstance(entry, dict) and isinstance(entry.get("note"), str):
        note = entry["note"]

    # skip if entry already added at path
    path_id = "{}{}".format(entry["id"] if (entry and entry["id"]) else name,
                            path)
    if path_id in g_entry_for_path:
        return
    else:
        g_entry_for_path.add(path_id)

    # fix path name
    path_prefix = get_ags2_dir()
    if path != path_prefix:
        path_suffix = path.split(path_prefix + "/")[-1]
        path = path_prefix + "/" + "/".join(
            list(map(ags_fix_filename, path_suffix.split("/"))))

    # base name
    title = rank + ". " if rank else ""
    if prefix:
        title = prefix + " - " + title
    if options and options.get("unavailable", False):
        title += ags_fix_filename(name.replace("-", " "))
    elif entry and "title_short" in entry:
        if only_script:
            title = ags_fix_filename(entry["title_short"]).replace(" ", "_")
        else:
            title += ags_fix_filename(entry["title_short"])
            if len(title) > max_w:
                title = title.replace(", The", "")
    else:
        title += name

    # prevent name clash
    title = title.strip()
    if util.is_file(util.path(path, title) + ".run"):
        if entry.get("category", "").lower() == "demo":
            title += " (" + entry.get("publisher") + ")"
        else:
            title += " (" + entry.get("hardware", "").replace(
                "/ECS", "").replace("AGA/CD32", "CD32").replace(
                    "OCS/CDTV", "CDTV").replace("/", "-") + ")"
    if only_script:
        title = title.replace(" ", "_")
    if len(title) > max_w:
        title = title[:max_w - 2].strip() + ".."
    if util.is_file(util.path(path, title) + ".run"):
        suffix = 1
        while suffix <= 10:
            title = title[:-1] + str(suffix)
            suffix += 1
            if not util.is_file(util.path(path, title) + ".run"):
                break

    base_path = util.path(path, title)
    util.make_dir(path)

    # create runfile
    runfile = None
    if get_amiga_whd_dir(entry) is not None or entry_is_notwhdl(entry):
        # videomode
        whd_vmode = "NTSC" if util.parse_int(entry.get("ntsc", 0)) > 0 else "PAL"
        if g_args.ntsc:
            whd_vmode = "NTSC"
        # vadjust
        vadjust_scale = util.parse_int(entry.get("scale", 0))
        if not vadjust_scale:
            vadjust_scale = 0
        vadjust_vofs = util.parse_int(entry.get("v_offset", 0))
        if not vadjust_vofs:
            vadjust_vofs = 0
        vadjust_vofs = min(max(vadjust_vofs, VADJUST_MIN), VADJUST_MAX)

        if entry_is_notwhdl(entry):
            runfile_path = get_archive_path(entry).replace(".lha", ".run")
            if util.is_file(runfile_path):
                runfile = "set{}\n".format(whd_vmode.lower())
                runfile += "setvadjust {} {}\n".format(vadjust_vofs,
                                                       vadjust_scale)
                with open(runfile_path, 'r') as f:
                    runfile += f.read()
                runfile += "setvmode $AGSVMode\n"
                runfile += "setvadjust\n"
        else:
            whd_entrypath = get_amiga_whd_dir(entry)
            if whd_entrypath:
                whd_slave = get_whd_slavename(entry)
                # extra arguments
                whd_cargs = "BUTTONWAIT"
                if entry.get("slave_args"):
                    whd_cargs += " " + entry["slave_args"]
                whd_qtkey = "" if "QuitKey=" in whd_cargs else "$whdlqtkey"
                runfile = "cd \"{}\"\n".format(whd_entrypath)
                runfile += "IF NOT EXISTS ENV:whdlspdly\n"
                runfile += " echo 200 >ENV:whdlspdly\n"
                runfile += "ENDIF\n"
                runfile += "IF NOT EXISTS ENV:whdlqtkey\n"
                runfile += " echo \"\" >ENV:whdlqtkey\n"
                runfile += "ENDIF\n"
                runfile += "IF EXISTS ENV:whdlvmode\n"
                runfile += " whdload >NIL: \"{}\" $whdlvmode {} SplashDelay=$whdlspdly {}\n".format(
                    whd_slave, whd_cargs, whd_qtkey)
                runfile += "ELSE\n"
                runfile += " setvadjust {} {}\n".format(vadjust_vofs,
                                                        vadjust_scale)
                runfile += " whdload >NIL: \"{}\" {} {} SplashDelay=$whdlspdly {}\n".format(
                    whd_slave, whd_vmode, whd_cargs, whd_qtkey)
                runfile += " setvadjust\n"
                runfile += "ENDIF\n"
    else:
        runfile = "echo \"Title not available.\"" + "\n" + "wait 2"

    if runfile:
        if util.is_file(base_path + ".run"):
            print(" > AGS2 clash:", entry["id"], "-", base_path + ".run")
        else:
            open(base_path + ".run", mode="w",
                 encoding="latin-1").write(runfile)

    if only_script:
        return

    # note
    if options and options.get("unavailable", False):
        note = "Title: " + name.replace("-", " ") + "\n\n"
        note += "Content is unavailable."
        open(base_path + ".txt", mode="w", encoding="latin-1").write(note)
    elif entry:
        open(base_path + ".txt", mode="w",
             encoding="latin-1").write(ags_make_note(entry, note))

    # image
    if not g_args.no_img and entry and "id" in entry and util.is_file(
            util.path("data", "img", entry["id"] + ".iff")):
        shutil.copyfile(util.path("data", "img", entry["id"] + ".iff"),
                        base_path + ".iff")
    return

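# For reference, the .run script emitted by ags_create_entry() above for a plain
# WHDLoad title has the shape of the AmigaDOS script below. The WHD directory,
# slave name and PAL video mode are hypothetical example values; vadjust offset
# and scale default to 0 0.
#
#   cd "WHD:G/Turrican2"
#   IF NOT EXISTS ENV:whdlspdly
#    echo 200 >ENV:whdlspdly
#   ENDIF
#   IF NOT EXISTS ENV:whdlqtkey
#    echo "" >ENV:whdlqtkey
#   ENDIF
#   IF EXISTS ENV:whdlvmode
#    whdload >NIL: "Turrican2.Slave" $whdlvmode BUTTONWAIT SplashDelay=$whdlspdly $whdlqtkey
#   ELSE
#    setvadjust 0 0
#    whdload >NIL: "Turrican2.Slave" PAL BUTTONWAIT SplashDelay=$whdlspdly $whdlqtkey
#    setvadjust
#   ENDIF
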
def get_archive_path(entry):
    if entry_valid(entry):
        arc_path = util.path(paths.titles(), entry["archive_path"])
        return arc_path if util.is_file(arc_path) else None
    else:
        return None

def main(): parser = argparse.ArgumentParser() parser.add_argument( "--make-sqlite", dest="make_sqlite", action="store_true", default=False, help= "make sqlite db from cvs, if none none exists or if cvs is newer than existing" ) parser.add_argument("-v", "--verbose", dest="verbose", action="store_true", default=False, help="verbose output") try: args = parser.parse_args() db = util.get_db(args.verbose) if args.make_sqlite: db.close() return 0 arc_dir = os.path.join("data", "whdl") if not util.is_dir(arc_dir): raise IOError("whdl archive dir missing:", arc_dir) # remove missing archive_paths from db for r in db.cursor().execute("SELECT * FROM titles"): if r["archive_path"] and not util.is_file(r["archive_path"]): print("archive removed:", r["id"]) print(" >>", r["archive_path"]) db.cursor().execute( "UPDATE titles SET archive_path=NULL,slave_path=NULL,slave_version=NULL WHERE id=?;", (r["id"], )) print() # enumerate whdl archives, correlate with db for _, arc in index_whdload_archives(arc_dir).items(): rows = db.cursor().execute( "SELECT * FROM titles WHERE (id = ?) OR (id LIKE ?);", ( arc["id"], arc["id"] + '--%', )).fetchall() if not rows: print("no db entry:", arc["archive_path"]) print(" >>", arc["id"]) print() continue for row in rows: if not row["archive_path"]: db.cursor().execute( "UPDATE titles SET archive_path=?,slave_path=?,slave_version=? WHERE id=?;", (arc["archive_path"], arc["slave_path"], arc["slave_version"], row["id"])) print("archive added: " + arc["archive_path"] + " -> " + row["id"]) print() # list more missing stuff if args.verbose: for r in db.cursor().execute("SELECT * FROM titles"): if not util.is_file("data/img/" + r["id"] + ".iff"): print("missing image:", r["id"]) print() db.commit() db.close() return 0 # except Exception as err: except IOError as err: print("error - {}".format(err)) sys.exit(1)
def ags_create_entry(name,
                     entry,
                     path,
                     note,
                     rank,
                     only_script=False,
                     prefix=None):
    max_w = AGS_LIST_WIDTH

    # fix path name
    path_prefix = get_ags2_dir()
    if path != path_prefix:
        path_suffix = path.split(path_prefix + "/")[-1]
        path = path_prefix + "/" + "/".join(
            list(map(ags_fix_filename, path_suffix.split("/"))))

    # base name
    title = rank + ". " if rank else ""
    if prefix:
        title = prefix + " - " + title
    if note and note == "not_available":
        title += ags_fix_filename(name.replace("-", " "))
    elif entry and "title_short" in entry:
        if only_script:
            title = ags_fix_filename(entry["title_short"]).replace(" ", "_")
        else:
            title += ags_fix_filename(entry["title_short"])
            if len(title) > max_w:
                title = title.replace(", The", "")
    else:
        title += name

    # prevent name clash
    title = title.strip()
    if util.is_file(os.path.join(path, title) + ".run"):
        if entry["category"] == "Demo":
            title += " (" + entry["publisher"] + ")"
        else:
            title += " (" + entry["hardware"].replace("/ECS", "").replace(
                "AGA/CD32", "CD32").replace("OCS/CDTV", "CDTV").replace(
                    "/", "-") + ")"
    if only_script:
        title = title.replace(" ", "_")
    if len(title) > max_w:
        title = title[:max_w - 2].strip() + ".."
    if util.is_file(os.path.join(path, title) + ".run"):
        suffix = 1
        while suffix <= 10:
            title = title[:-1] + str(suffix)
            suffix += 1
            if not util.is_file(os.path.join(path, title) + ".run"):
                break

    base_path = os.path.join(path, title)
    util.make_dir(path)

    # runfile
    if entry_is_notwhdl(entry):
        shutil.copyfile(entry["archive_path"].replace(".lha", ".run"),
                        base_path + ".run")
    else:
        whd_entrypath = get_amiga_whd_dir(entry)
        runfile = None
        if whd_entrypath:
            whd_vmode = "NTSC" if entry["ntsc"] > 0 else "PAL"
            if g_args.ntsc:
                whd_vmode = "NTSC"
            whd_slave = get_whd_slavename(entry)
            whd_cargs = "BUTTONWAIT"
            if entry["slave_args"]:
                whd_cargs += " " + entry["slave_args"]
            runfile = "cd \"{}\"\n".format(whd_entrypath)
            runfile += "IF NOT EXISTS ENV:whdlspdly\n"
            runfile += " echo 200 >ENV:whdlspdly\n"
            runfile += "ENDIF\n"
            runfile += "IF NOT EXISTS ENV:whdlqtkey\n"
            runfile += " echo \"\" >ENV:whdlqtkey\n"
            runfile += "ENDIF\n"
            runfile += "IF EXISTS ENV:whdlvmode\n"
            runfile += " whdload >NIL: \"{}\" PRELOAD $whdlvmode {} SplashDelay=$whdlspdly $whdlqtkey\n".format(
                whd_slave, whd_cargs)
            runfile += "ELSE\n"
            runfile += " whdload >NIL: \"{}\" PRELOAD {} {} SplashDelay=$whdlspdly $whdlqtkey\n".format(
                whd_slave, whd_vmode, whd_cargs)
            runfile += "ENDIF\n"
        else:
            runfile = "echo \"Title not available.\"" + "\n" + "wait 2"
        if runfile:
            if util.is_file(base_path + ".run"):
                print(" > AGS2 clash:", entry["id"], "-", base_path + ".run")
            else:
                open(base_path + ".run", mode="w",
                     encoding="latin-1").write(runfile)

    if only_script:
        return

    # note
    if note and note == "not_available":
        note = "Title: " + name.replace("-", " ") + "\n\n"
        note += "WHDLoader not available"
        open(base_path + ".txt", mode="w", encoding="latin-1").write(note)
    elif entry:
        open(base_path + ".txt", mode="w",
             encoding="latin-1").write(ags_make_note(entry, note))

    # image
    if not g_args.no_img and entry and "id" in entry and util.is_file(
            os.path.join("data", "img", entry["id"] + ".iff")):
        shutil.copyfile(os.path.join("data", "img", entry["id"] + ".iff"),
                        base_path + ".iff")
    return