def create_vadjust_dats():
    util.make_dir(util.path(get_boot_dir(), "S", "vadjust_dat"))
    for i in range(VADJUST_MIN, VADJUST_MAX + 1):
        open(util.path(get_boot_dir(), "S", "vadjust_dat", "xd_{}".format(i)), mode="wb").write(make_vadjust(i))
        open(util.path(get_boot_dir(), "S", "vadjust_dat", "x5_{}".format(i)), mode="wb").write(make_vadjust(i, 5))
        open(util.path(get_boot_dir(), "S", "vadjust_dat", "x6_{}".format(i)), mode="wb").write(make_vadjust(i, 6))
def get_whd_dir(entry):
    if entry_is_notwhdl(entry):
        return util.path(g_clone_dir, "DH1", "WHD", "N")
    else:
        p = "0-9" if entry["slave_dir"][0].isnumeric() else entry["slave_dir"][0].upper()
        if entry["id"].startswith("demo--"):
            return util.path(g_clone_dir, "DH1", "WHD", "D", p)
        elif entry["id"].startswith("mags--"):
            return util.path(g_clone_dir, "DH1", "WHD", "M", p)
        else:
            return util.path(g_clone_dir, "DH1", "WHD", "G", p)
def verify():
    vars = ["AGSCONTENT", "AGSDEST", "AGSTEMP", "FSUAEBIN", "FSUAEROM"]
    for var in vars:
        if os.getenv(var) is None:
            raise IOError("missing {} environment variable - check .env!".format(var))
    if not util.is_dir(content()):
        raise IOError("AGSCONTENT is not a directory - check .env!")
    if not util.is_file(util.path(os.getenv("FSUAEBIN"))):
        raise IOError("FSUAEBIN is not a file - check .env!")
    if not util.is_file(util.path(os.getenv("FSUAEROM"))):
        raise IOError("FSUAEROM is not a file - check .env!")
    return True
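# For reference, verify() expects the five variables above to be defined in a
# .env file (or the environment) before a build runs. A minimal sketch, with
# hypothetical paths rather than values taken from this repository:
#
#   AGSCONTENT=/home/user/amiga/ags-content
#   AGSDEST=/home/user/amiga/out
#   AGSTEMP=/tmp/ags
#   FSUAEBIN=/usr/local/bin/fs-uae
#   FSUAEROM=/home/user/amiga/roms/kick.rom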
def index_whdload_archives(basedir):
    basedir += os.sep
    print("enumerating archives..", end="", flush=True)
    count = 0
    d = {}
    for r, _, f in os.walk(basedir):
        for file in f:
            if file.endswith(".lha"):
                count += 1
                if count % 100 == 0:
                    print(".", end="", flush=True)
                path = util.path(r, file)
                # path relative to the content dir; its first component is the category
                db_path = path.split(basedir)[1]
                slave_category = db_path.split(os.sep)[0]
                if slave_category in ["game", "demo", "mags"]:
                    arc = LhaFile(path)
                    for n in arc.namelist():
                        n = n.replace("\\", "/")
                        if n.lower().endswith(".slave"):
                            if len(n.split("/")) > 2:
                                pass  # skip slaves beneath root
                            else:
                                # id from category and slave path, minus the ".slave" extension
                                slave_id = slave_category + "--" + n[:-6].replace("/", "--").lower()
                                # version from the "_vX.Y" suffix of the archive name, if present
                                slave_ver = "v1.0"
                                try:
                                    verstr = file[:-4].split("_")[1]
                                    if verstr.startswith("v"):
                                        slave_ver = verstr
                                except Exception:
                                    pass
                                d[slave_id] = {
                                    "id": slave_id,
                                    "archive_path": db_path,
                                    "slave_path": n,
                                    "slave_version": slave_ver
                                }
                elif slave_category in ["game-notwhdl", "demo-notwhdl", "mags-notwhdl"]:
                    slave_id = slave_category + "--" + os.path.splitext(os.path.basename(path))[0].lower()
                    # non-WHDLoad archives are only indexed if a matching ".run" script exists
                    if util.is_file(path.replace(".lha", ".run")):
                        d[slave_id] = {
                            "id": slave_id,
                            "archive_path": db_path,
                            "slave_path": None,
                            "slave_version": None
                        }
    print("\n", flush=True)
    return d
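# Shape of the index returned by index_whdload_archives(), for orientation.
# The key and all values below are invented for illustration; real ids and
# paths depend on the archives actually present under the titles directory:
#
#   {
#       "game--turrican2--turrican2": {
#           "id": "game--turrican2--turrican2",
#           "archive_path": "game/T/Turrican2_v1.1.lha",
#           "slave_path": "Turrican2/Turrican2.slave",
#           "slave_version": "v1.1",
#       }
#   }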
def extract_whd(entry):
    arc_path = get_archive_path(entry)
    if not arc_path:
        print(" > WARNING: content archive not found:", entry["id"])
    else:
        dest = get_whd_dir(entry)
        util.lha_extract(arc_path, dest)
        if not entry_is_notwhdl(entry):
            # drop the Workbench icon (.info) shipped alongside the slave directory
            info_path = util.path(dest, entry["slave_dir"] + ".info")
            if util.is_file(info_path):
                os.remove(info_path)
def ags_create_entries(entries, path, note=None, ranked_list=False):
    global g_entries
    # make dir
    base_dir = get_ags2_dir()
    if path:
        for d in path:
            base_dir = util.path(base_dir, d[:26].strip() + ".ags")
    util.make_dir(base_dir)
    # make note
    if note:
        note = "\n".join([
            textwrap.fill(p, AGS_INFO_WIDTH)
            for p in note.replace("\\n", "\n").splitlines()
        ])
        open(base_dir[:-4] + ".txt", mode="w", encoding="latin-1").write(note)
    # collect titles
    pos = 0
    for name in entries:
        pos += 1
        n = name
        options = None
        # entries may be plain names or (name, options) tuples
        if isinstance(name, tuple) and len(name) == 2 and isinstance(name[1], dict):
            n = name[0]
            options = name[1]
        # use preferred (fuzzy) entry when the name isn't an exact id
        e, pe = get_entry(n)
        if "--" not in n and pe:
            e = pe
        if not e and not pe:
            if options is None or (options and not options.get("unavailable", False)):
                print(" > WARNING: invalid entry: {}".format(n))
        else:
            g_entries[e["id"]] = e
        rank = None
        if ranked_list:
            rank = str(pos).zfill(len(str(len(entries))))
        ags_create_entry(n, e, base_dir, rank=rank, options=options)
    return
def ags_create_entry(name, entry, path, rank=None, only_script=False, prefix=None, options=None):
    global g_entry_for_path
    max_w = AGS_LIST_WIDTH
    note = None

    # apply override options
    if isinstance(options, dict):
        if entry and isinstance(entry, dict):
            entry.update(options)
        elif "note" in options and isinstance(options["note"], str):
            note = options["note"]
    if isinstance(entry, dict) and isinstance(entry["note"], str):
        note = entry["note"]

    # skip if entry already added at path
    path_id = "{}{}".format(entry["id"] if (entry and entry["id"]) else name, path)
    if path_id in g_entry_for_path:
        return
    else:
        g_entry_for_path.add(path_id)

    # fix path name
    path_prefix = get_ags2_dir()
    if path != path_prefix:
        path_suffix = path.split(path_prefix + "/")[-1]
        path = path_prefix + "/" + "/".join(list(map(ags_fix_filename, path_suffix.split("/"))))

    # base name
    title = rank + ". " if rank else ""
    if prefix:
        title = prefix + " - " + title
    if options and options.get("unavailable", False):
        title += ags_fix_filename(name.replace("-", " "))
    elif entry and "title_short" in entry:
        if only_script:
            title = ags_fix_filename(entry["title_short"]).replace(" ", "_")
        else:
            title += ags_fix_filename(entry["title_short"])
            if len(title) > max_w:
                title = title.replace(", The", "")
    else:
        title += name

    # prevent name clash
    title = title.strip()
    if util.is_file(util.path(path, title) + ".run"):
        if entry.get("category", "").lower() == "demo":
            title += " (" + entry.get("publisher") + ")"
        else:
            title += " (" + entry.get("hardware", "").replace("/ECS", "").replace(
                "AGA/CD32", "CD32").replace("OCS/CDTV", "CDTV").replace("/", "-") + ")"
    if only_script:
        title = title.replace(" ", "_")
    if len(title) > max_w:
        title = title[:max_w - 2].strip() + ".."
    if util.is_file(util.path(path, title) + ".run"):
        suffix = 1
        while suffix <= 10:
            title = title[:-1] + str(suffix)
            suffix += 1
            if not util.is_file(util.path(path, title) + ".run"):
                break

    base_path = util.path(path, title)
    util.make_dir(path)

    # create runfile
    runfile = None
    if get_amiga_whd_dir(entry) is not None or entry_is_notwhdl(entry):
        # videomode
        whd_vmode = "NTSC" if util.parse_int(entry.get("ntsc", 0)) > 0 else "PAL"
        if g_args.ntsc:
            whd_vmode = "NTSC"
        # vadjust
        vadjust_scale = util.parse_int(entry.get("scale", 0))
        if not vadjust_scale:
            vadjust_scale = 0
        vadjust_vofs = util.parse_int(entry.get("v_offset", 0))
        if not vadjust_vofs:
            vadjust_vofs = 0
        vadjust_vofs = min(max(vadjust_vofs, VADJUST_MIN), VADJUST_MAX)

        if entry_is_notwhdl(entry):
            runfile_path = get_archive_path(entry).replace(".lha", ".run")
            if util.is_file(runfile_path):
                runfile = "set{}\n".format(whd_vmode.lower())
                runfile += "setvadjust {} {}\n".format(vadjust_vofs, vadjust_scale)
                with open(runfile_path, 'r') as f:
                    runfile += f.read()
                runfile += "setvmode $AGSVMode\n"
                runfile += "setvadjust\n"
        else:
            whd_entrypath = get_amiga_whd_dir(entry)
            if whd_entrypath:
                whd_slave = get_whd_slavename(entry)
                # extra arguments
                whd_cargs = "BUTTONWAIT"
                if entry.get("slave_args"):
                    whd_cargs += " " + entry["slave_args"]
                whd_qtkey = "" if "QuitKey=" in whd_cargs else "$whdlqtkey"
                runfile = "cd \"{}\"\n".format(whd_entrypath)
                runfile += "IF NOT EXISTS ENV:whdlspdly\n"
                runfile += " echo 200 >ENV:whdlspdly\n"
                runfile += "ENDIF\n"
                runfile += "IF NOT EXISTS ENV:whdlqtkey\n"
                runfile += " echo \"\" >ENV:whdlqtkey\n"
                runfile += "ENDIF\n"
                runfile += "IF EXISTS ENV:whdlvmode\n"
                runfile += " whdload >NIL: \"{}\" $whdlvmode {} SplashDelay=$whdlspdly {}\n".format(
                    whd_slave, whd_cargs, whd_qtkey)
                runfile += "ELSE\n"
                runfile += " setvadjust {} {}\n".format(vadjust_vofs, vadjust_scale)
                runfile += " whdload >NIL: \"{}\" {} {} SplashDelay=$whdlspdly {}\n".format(
                    whd_slave, whd_vmode, whd_cargs, whd_qtkey)
                runfile += " setvadjust\n"
                runfile += "ENDIF\n"
    else:
        runfile = "echo \"Title not available.\"" + "\n" + "wait 2"

    if runfile:
        if util.is_file(base_path + ".run"):
            print(" > AGS2 clash:", entry["id"], "-", base_path + ".run")
        else:
            open(base_path + ".run", mode="w", encoding="latin-1").write(runfile)
    if only_script:
        return

    # note
    if options and options.get("unavailable", False):
        note = "Title: " + name.replace("-", " ") + "\n\n"
        note += "Content is unavailable."
        open(base_path + ".txt", mode="w", encoding="latin-1").write(note)
    elif entry:
        open(base_path + ".txt", mode="w", encoding="latin-1").write(ags_make_note(entry, note))

    # image
    if not g_args.no_img and entry and "id" in entry and util.is_file(
            util.path("data", "img", entry["id"] + ".iff")):
        shutil.copyfile(util.path("data", "img", entry["id"] + ".iff"), base_path + ".iff")
    return
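# The WHDLoad branch of ags_create_entry() assembles an AmigaDOS run-script per
# title from the format strings above. A sketch of one generated script, under
# assumed values: a hypothetical install path and slave name, no extra slave
# arguments, PAL video mode, and default vadjust values (0 0):
#
#   cd "DH1:WHD/G/T/Turrican2"
#   IF NOT EXISTS ENV:whdlspdly
#    echo 200 >ENV:whdlspdly
#   ENDIF
#   IF NOT EXISTS ENV:whdlqtkey
#    echo "" >ENV:whdlqtkey
#   ENDIF
#   IF EXISTS ENV:whdlvmode
#    whdload >NIL: "Turrican2.slave" $whdlvmode BUTTONWAIT SplashDelay=$whdlspdly $whdlqtkey
#   ELSE
#    setvadjust 0 0
#    whdload >NIL: "Turrican2.slave" PAL BUTTONWAIT SplashDelay=$whdlspdly $whdlqtkey
#    setvadjust
#   ENDIF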
def tmp():
    return util.path(os.getenv("AGSTEMP"))


def titles():
    return util.path(content(), "titles")


def content():
    return util.path(os.getenv("AGSCONTENT"))
def main():
    global g_args, g_db, g_out_dir, g_clone_dir
    parser = argparse.ArgumentParser()
    parser.add_argument("-c", "--config", dest="config_file", required=True, metavar="FILE",
                        type=lambda x: util.argparse_is_file(parser, x), help="configuration file")
    parser.add_argument("-o", "--out_dir", dest="out_dir", metavar="FILE",
                        type=lambda x: util.argparse_is_dir(parser, x), help="output directory")
    parser.add_argument("-b", "--base_hdf", dest="base_hdf", metavar="FILE", help="base HDF image")
    parser.add_argument("-a", "--ags_dir", dest="ags_dir", metavar="FILE",
                        type=lambda x: util.argparse_is_dir(parser, x), help="AGS2 configuration directory")
    parser.add_argument("-d", "--add_dir", dest="add_dirs", action="append",
                        help="add dir to amiga filesystem (example 'DH1:Music::~/Amiga/Music')")
    parser.add_argument("--all_games", dest="all_games", action="store_true", default=False,
                        help="include all games in database")
    parser.add_argument("--all_demos", dest="all_demos", action="store_true", default=False,
                        help="include all demos in database")
    parser.add_argument("--all_versions", dest="all_versions", action="store_true", default=False,
                        help="include all non-redundant versions of titles (if --all_games)")
    parser.add_argument("--no_autolists", dest="no_autolists", action="store_true", default=False,
                        help="don't add any auto-lists")
    parser.add_argument("--no_img", dest="no_img", action="store_true", default=False,
                        help="don't copy screenshots")
    parser.add_argument("--ecs_versions", dest="ecs", action="store_true", default=False,
                        help="prefer OCS/ECS versions (if --all_games)")
    parser.add_argument("--force_ntsc", dest="ntsc", action="store_true", default=False,
                        help="force NTSC video mode")
    parser.add_argument("-v", "--verbose", dest="verbose", action="store_true", default=False,
                        help="verbose output")

    try:
        paths.verify()
        g_args = parser.parse_args()
        g_db = util.get_db(g_args.verbose)

        if g_args.out_dir:
            g_out_dir = g_args.out_dir
        g_clone_dir = util.path(g_out_dir, "tmp")
        if util.is_dir(g_clone_dir):
            shutil.rmtree(g_clone_dir)
        util.make_dir(util.path(g_clone_dir, "DH0"))

        config_base_name = os.path.splitext(os.path.basename(g_args.config_file))[0]
        data_dir = "data"
        if not util.is_dir(data_dir):
            raise IOError("data dir doesn't exist: " + data_dir)

        # extract base image
        base_hdf = g_args.base_hdf
        if not base_hdf:
            base_hdf = util.path(paths.content(), "base", "base.hdf")
        if not util.is_file(base_hdf):
            raise IOError("base HDF doesn't exist: " + base_hdf)
        if g_args.verbose:
            print("extracting base HDF image... ({})".format(base_hdf))
        extract_base_image(base_hdf, get_boot_dir())

        # parse menu
        menu = None
        if g_args.verbose:
            print("parsing menu...")
        menu = util.yaml_load(g_args.config_file)
        if not isinstance(menu, list):
            raise ValueError("config file not a list: " + g_args.config_file)

        # copy base AGS2 config, create database
        if g_args.verbose:
            print("building AGS2 database...")
        base_ags2 = g_args.ags_dir
        if not base_ags2:
            base_ags2 = util.path("data", "ags2")
        if not util.is_dir(base_ags2):
            raise IOError("AGS2 configuration directory doesn't exist: " + base_ags2)
        if g_args.verbose:
            print(" > using configuration: " + base_ags2)
        util.copytree(base_ags2, get_ags2_dir())

        if menu:
            ags_create_tree(menu)
        if g_args.all_games:
            ags_add_all("Game")
        if g_args.all_demos:
            ags_add_all("Demo")
            ags_add_all("Mags")
        if not g_args.no_autolists:
            ags_create_autoentries()
        create_vadjust_dats()

        # extract whdloaders
        if g_args.verbose:
            print("extracting {} content archives...".format(len(g_entries.items())))
        extract_entries(g_entries)

        # copy extra files
        config_extra_dir = util.path(os.path.dirname(g_args.config_file), config_base_name)
        if util.is_dir(config_extra_dir):
            if g_args.verbose:
                print("copying configuration extras...")
            util.copytree(config_extra_dir, g_clone_dir)

        # copy additional directories
        if g_args.add_dirs:
            if g_args.verbose:
                print("copying additional directories...")
            for s in g_args.add_dirs:
                d = s.split("::")
                if util.is_dir(d[0]):
                    dest = util.path(g_clone_dir, d[1].replace(":", "/"))
                    print(" > copying '" + d[0] + "' to '" + d[1] + "'")
                    util.copytree(d[0], dest)
                else:
                    print(" > WARNING: '" + d[1] + "' doesn't exist")

        # build PFS container
        build_pfs(config_base_name, g_args.verbose)

        # set up cloner environment
        cloner_adf = util.path("data", "cloner", "boot.adf")
        cloner_cfg = util.path("data", "cloner", "template.fs-uae")
        clone_script = util.path(os.path.dirname(g_args.config_file), config_base_name) + ".clonescript"
        if util.is_file(cloner_adf) and util.is_file(cloner_cfg) and util.is_file(clone_script):
            if g_args.verbose:
                print("copying cloner config...")
            shutil.copyfile(cloner_adf, util.path(g_clone_dir, "boot.adf"))
            # create config from template
            with open(cloner_cfg, 'r') as f:
                cfg = f.read()
            cfg = cfg.replace("<config_base_name>", config_base_name)
            cfg = cfg.replace("$AGSTEMP", paths.tmp())
            cfg = cfg.replace("$AGSDEST", util.path(os.getenv("AGSDEST")))
            cfg = cfg.replace("$FSUAEROM", util.path(os.getenv("FSUAEROM")))
            open(util.path(g_clone_dir, "cfg.fs-uae"), mode="w").write(cfg)
            # copy clone script and write fs-uae metadata
            shutil.copyfile(clone_script, util.path(g_clone_dir, "clone"))
            open(util.path(g_clone_dir, "clone.uaem"), mode="w").write("-s--rwed 2020-02-02 22:22:22.00")
        else:
            print("WARNING: cloner config files not found")

        # clean output directory
        for r, _, f in os.walk(g_clone_dir):
            for name in f:
                path = util.path(r, name)
                if name == ".DS_Store":
                    os.remove(path)
        return 0

    # except Exception as err:
    except IOError as err:
        print("error - {}".format(err))
        sys.exit(1)
def build_pfs(config_base_name, verbose):
    if verbose:
        print("building PFS container...")
    pfs3_bin = util.path("data", "pfs3", "pfs3.bin")
    if not util.is_file(pfs3_bin):
        raise IOError("PFS3 filesystem doesn't exist: " + pfs3_bin)

    if verbose:
        print(" > calculating partition sizes...")
    block_size = 512
    heads = 4
    sectors = 63
    cylinder_size = block_size * heads * sectors
    fs_overhead = 1.0718
    num_cyls_rdb = 1
    total_cyls = num_cyls_rdb

    partitions = []  # (partition name, cylinders)
    for f in sorted(os.listdir(g_clone_dir)):
        if util.is_dir(util.path(g_clone_dir, f)) and is_amiga_devicename(f):
            mb_free = 100 if f == "DH0" else 50
            cyls = int(fs_overhead * (util.get_dir_size(util.path(g_clone_dir, f), block_size)[2] +
                                      (mb_free * 1024 * 1024))) // cylinder_size
            partitions.append(("DH" + str(len(partitions)), cyls))
            total_cyls += cyls

    out_hdf = util.path(g_out_dir, config_base_name + ".hdf")
    if util.is_file(out_hdf):
        os.remove(out_hdf)

    if verbose:
        print(" > creating pfs container ({}MB)...".format((total_cyls * cylinder_size) // (1024 * 1024)))
    r = subprocess.run([
        "rdbtool", out_hdf, "create",
        "chs={},{},{}".format(total_cyls + 1, heads, sectors),
        "+", "init", "rdb_cyls={}".format(num_cyls_rdb)
    ])

    if verbose:
        print(" > adding filesystem...")
    r = subprocess.run(["rdbtool", out_hdf, "fsadd", pfs3_bin, "fs=PFS3"], stdout=subprocess.PIPE)

    if verbose:
        print(" > adding partitions...")
    # add boot partition
    part = partitions.pop(0)
    if verbose:
        print(" > " + part[0])
    r = subprocess.run([
        "rdbtool", out_hdf, "add",
        "name={}".format(part[0]),
        "start={}".format(num_cyls_rdb),
        "size={}".format(part[1]),
        "fs=PFS3", "block_size={}".format(block_size),
        "max_transfer=0x0001FE00", "mask=0x7FFFFFFE",
        "num_buffer=300", "bootable=True"
    ], stdout=subprocess.PIPE)

    # add subsequent partitions
    for part in partitions:
        if verbose:
            print(" > " + part[0])
        r = subprocess.run(["rdbtool", out_hdf, "free"], stdout=subprocess.PIPE, universal_newlines=True)
        free = make_tuple(r.stdout.splitlines()[0])
        free_start = int(free[0])
        free_end = int(free[1])
        part_start = free_start
        part_end = part_start + part[1]
        if part_end > free_end:
            part_end = free_end
        r = subprocess.run([
            "rdbtool", out_hdf, "add",
            "name={}".format(part[0]),
            "start={}".format(part_start),
            "end={}".format(part_end),
            "fs=PFS3", "block_size={}".format(block_size),
            "max_transfer=0x0001FE00", "mask=0x7FFFFFFE",
            "num_buffer=300"
        ], stdout=subprocess.PIPE)
    return
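# Rough geometry arithmetic behind build_pfs(), for orientation (the payload
# size below is an illustrative assumption, not output from this script):
# one cylinder is block_size * heads * sectors = 512 * 4 * 63 = 129024 bytes,
# so a non-boot partition holding roughly 500 MB of files plus the 50 MB of
# headroom added above comes out to
# int(1.0718 * 550 * 1024 * 1024) // 129024 ≈ 4790 cylinders.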
def get_archive_path(entry):
    if entry_valid(entry):
        arc_path = util.path(paths.titles(), entry["archive_path"])
        return arc_path if util.is_file(arc_path) else None
    else:
        return None
def ags_create_autoentries():
    path = get_ags2_dir()
    d_path = get_ags2_dir()
    if util.is_dir(util.path(path, "[ Demo Scene ].ags")):
        d_path = util.path(path, "[ Demo Scene ].ags")

    for entry in sorted(g_entries.values(), key=operator.itemgetter("title")):
        letter = entry.get("title_short", "z")[0].upper()
        if letter.isnumeric():
            letter = "0-9"
        year = entry["year"]
        if "x" in year.lower():
            year = "Unknown"

        # Games
        if entry.get("category", "").lower() == "game":
            ags_create_entry(None, entry, util.path(path, "[ All Games ].ags", letter + ".ags"))
            ags_create_entry(None, entry, util.path(path, "[ All Games, by year ].ags", year + ".ags"))

        # Demos / Music Disks / Disk Mags
        def add_demo(entry, sort_group, sort_country):
            if sort_group.startswith("The "):
                sort_group = sort_group[4:]
            sort_group = sort_group[:AGS_LIST_WIDTH]
            group_letter = sort_group[0].upper()
            if group_letter.isnumeric():
                group_letter = "0-9"
            if entry.get("subcategory", "").lower().startswith("disk mag"):
                ags_create_entry(None, entry, util.path(d_path, "[ Disk Magazines ].ags"))
            elif entry.get("subcategory", "").lower().startswith("music disk"):
                ags_create_entry(None, entry, util.path(d_path, "[ Music Disks by title ].ags", letter + ".ags"))
                ags_create_entry(None, entry, util.path(d_path, "[ Music Disks by year ].ags", year + ".ags"))
            else:
                if entry.get("subcategory", "").lower().startswith("crack"):
                    ags_create_entry(None, entry, util.path(d_path, "[ Demos, crack intros ].ags"),
                                     prefix=sort_group)
                if entry.get("subcategory", "").lower().startswith("intro"):
                    ags_create_entry(None, entry, util.path(d_path, "[ Demos, 1-64KB ].ags"))
                group_entry = dict(entry)
                group_entry["title_short"] = group_entry.get("title")
                ags_create_entry(None, entry, util.path(d_path, "[ Demos by title ].ags", letter + ".ags"))
                ags_create_entry(None, group_entry,
                                 util.path(d_path, "[ Demos by group ].ags", group_letter + ".ags"),
                                 prefix=sort_group)
                ags_create_entry(None, entry, util.path(d_path, "[ Demos by year ].ags", year + ".ags"))
                if sort_country:
                    ags_create_entry(None, entry,
                                     util.path(d_path, "[ Demos by country ].ags", sort_country + ".ags"))

        if g_args.all_demos and entry.get("category", "").lower() == "demo":
            groups = entry.get("publisher")
            if not groups:
                continue
            for sort_group in groups.split(", "):
                countries = entry.get("country")
                if not countries:
                    add_demo(entry, sort_group, None)
                else:
                    for sort_country in countries.split(", "):
                        add_demo(entry, sort_group, sort_country)

        # Run-scripts for randomizer
        if entry.get("category", "").lower() == "game" and not entry.get("issues"):
            ags_create_entry(None, entry, util.path(path, "Run"), only_script=True)

    # Notes for created directories
    if util.is_dir(util.path(path, "[ All Games ].ags")):
        open(util.path(path, "[ All Games ].txt"), mode="w",
             encoding="latin-1").write("Browse all games alphabetically.")
    if util.is_dir(util.path(path, "[ All Games, by year ].ags")):
        open(util.path(path, "[ All Games, by year ].txt"), mode="w",
             encoding="latin-1").write("Browse all games by release year.")
    if util.is_dir(util.path(d_path, "[ Demos by group ].ags")):
        open(util.path(d_path, "[ Demos by group ].txt"), mode="w",
             encoding="latin-1").write("Browse demos by release group.")
    if util.is_dir(util.path(d_path, "[ Demos by country ].ags")):
        open(util.path(d_path, "[ Demos by country ].txt"), mode="w",
             encoding="latin-1").write("Browse demos by country of origin.")
    if util.is_dir(util.path(d_path, "[ Demos by title ].ags")):
        open(util.path(d_path, "[ Demos by title ].txt"), mode="w",
             encoding="latin-1").write("Browse demos by title.")
    if util.is_dir(util.path(d_path, "[ Demos by year ].ags")):
        open(util.path(d_path, "[ Demos by year ].txt"), mode="w",
             encoding="latin-1").write("Browse demos by release year.")
    if util.is_dir(util.path(d_path, "[ Demos, 1-64KB ].ags")):
        open(util.path(d_path, "[ Demos, 1-64KB ].txt"), mode="w",
             encoding="latin-1").write("Browse demos in the 1/4/40/64KB categories.")
    if util.is_dir(util.path(d_path, "[ Demos, crack intros ].ags")):
        open(util.path(d_path, "[ Demos, crack intros ].txt"), mode="w",
             encoding="latin-1").write("A glimpse into the origins of the demo scene.")
    if util.is_dir(util.path(d_path, "[ Disk Magazines ].ags")):
        open(util.path(d_path, "[ Disk Magazines ].txt"), mode="w",
             encoding="latin-1").write("A selection of scene disk magazines.")
    if util.is_dir(util.path(d_path, "[ Music Disks by title ].ags")):
        open(util.path(d_path, "[ Music Disks by title ].txt"), mode="w",
             encoding="latin-1").write("Browse music disks by title.")
    if util.is_dir(util.path(d_path, "[ Music Disks by year ].ags")):
        open(util.path(d_path, "[ Music Disks by year ].txt"), mode="w",
             encoding="latin-1").write("Browse music disks by year.")
    if util.is_dir(util.path(path, "[ Issues ].ags")):
        open(util.path(path, "[ Issues ].txt"), mode="w", encoding="latin-1").write(
            "Titles with known issues on Minimig-AGA.\n(Please report any new or resolved issues!)")
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("--make-sqlite", dest="make_sqlite", action="store_true", default=False,
                        help="make sqlite db from csv, if none exists or if the csv is newer than the existing db")
    parser.add_argument("-v", "--verbose", dest="verbose", action="store_true", default=False,
                        help="verbose output")

    try:
        paths.verify()
        args = parser.parse_args()
        db = util.get_db(args.verbose)
        if args.make_sqlite:
            db.close()
            return 0

        titles_dir = paths.titles()
        if not util.is_dir(titles_dir):
            raise IOError("titles dir missing: " + titles_dir)

        # remove missing archive_paths from db
        for r in db.cursor().execute("SELECT * FROM titles"):
            if r["archive_path"] and not util.is_file(util.path(titles_dir, r["archive_path"])):
                print("archive removed:", r["id"])
                print(" >>", r["archive_path"])
                db.cursor().execute(
                    "UPDATE titles SET archive_path=NULL,slave_path=NULL,slave_version=NULL WHERE id=?;",
                    (r["id"],))
                print()

        # enumerate whdl archives, correlate with db
        for _, arc in index_whdload_archives(titles_dir).items():
            rows = db.cursor().execute(
                "SELECT * FROM titles WHERE (id = ?) OR (id LIKE ?);",
                (arc["id"], arc["id"] + '--%',)).fetchall()
            if not rows:
                print("no db entry:", arc["archive_path"])
                print(" >>", arc["id"])
                print()
                continue
            for row in rows:
                if not row["archive_path"]:
                    db.cursor().execute(
                        "UPDATE titles SET archive_path=?,slave_path=?,slave_version=? WHERE id=?;",
                        (arc["archive_path"], arc["slave_path"], arc["slave_version"], row["id"]))
                    print("archive added: " + arc["archive_path"] + " -> " + row["id"])
                    print()

        # list more missing stuff
        if args.verbose:
            for r in db.cursor().execute("SELECT * FROM titles"):
                if not util.is_file("data/img/" + r["id"] + ".iff"):
                    print("missing image:", r["id"])
                    print()

        db.commit()
        db.close()
        return 0

    # except Exception as err:
    except IOError as err:
        print("error - {}".format(err))
        sys.exit(1)
def get_boot_dir():
    return util.path(g_clone_dir, "DH0")


def get_games_dir():
    return util.path(g_clone_dir, "DH1")


def get_ags2_dir():
    return util.path(get_boot_dir(), "AGS2")