def verify():
    """Verify the runtime environment configured via .env.

    Checks that all required environment variables are set, that
    AGSCONTENT points at a directory, and that FSUAEBIN / FSUAEROM
    point at existing files.

    Returns:
        True if every check passes.

    Raises:
        IOError: if a variable is missing or points at the wrong kind
            of filesystem object (message tells the user to check .env).
    """
    # NOTE: renamed from `vars` to avoid shadowing the `vars()` builtin.
    required_vars = ["AGSCONTENT", "AGSDEST", "AGSTEMP", "FSUAEBIN", "FSUAEROM"]
    for var in required_vars:
        if os.getenv(var) is None:
            raise IOError(
                "missing {} environment variable - check .env!".format(var))
    # content() presumably resolves the AGSCONTENT directory -- defined elsewhere in this module.
    if not util.is_dir(content()):
        raise IOError("AGSCONTENT is not a directory - check .env!")
    if not util.is_file(util.path(os.getenv("FSUAEBIN"))):
        raise IOError("FSUAEBIN is not a file - check .env!")
    if not util.is_file(util.path(os.getenv("FSUAEROM"))):
        raise IOError("FSUAEROM is not a file - check .env!")
    return True
def main():
    """Build an AGS2 HDF image from a YAML menu configuration.

    Pipeline: parse CLI args, prepare a clean clone directory, extract the
    base HDF image, build the AGS2 menu database from the config, copy extra
    content, build the PFS partition container, and stage the FS-UAE cloner
    environment.

    Returns 0 on success; exits with status 1 on IOError.
    """
    # Module-level state shared with the other build helpers (e.g. build_pfs).
    global g_args, g_db, g_out_dir, g_clone_dir
    parser = argparse.ArgumentParser()
    parser.add_argument("-c", "--config", dest="config_file", required=True,
                        metavar="FILE",
                        type=lambda x: util.argparse_is_file(parser, x),
                        help="configuration file")
    parser.add_argument("-o", "--out_dir", dest="out_dir", metavar="FILE",
                        type=lambda x: util.argparse_is_dir(parser, x),
                        help="output directory")
    parser.add_argument("-b", "--base_hdf", dest="base_hdf", metavar="FILE",
                        help="base HDF image")
    parser.add_argument("-a", "--ags_dir", dest="ags_dir", metavar="FILE",
                        type=lambda x: util.argparse_is_dir(parser, x),
                        help="AGS2 configuration directory")
    parser.add_argument(
        "-d", "--add_dir", dest="add_dirs", action="append",
        help="add dir to amiga filesystem (example 'DH1:Music::~/Amiga/Music')"
    )
    parser.add_argument("--all_games", dest="all_games", action="store_true",
                        default=False, help="include all games in database")
    parser.add_argument("--all_demos", dest="all_demos", action="store_true",
                        default=False, help="include all demos in database")
    parser.add_argument(
        "--all_versions", dest="all_versions", action="store_true",
        default=False,
        help="include all non-redundant versions of titles (if --all_games)")
    parser.add_argument("--no_autolists", dest="no_autolists",
                        action="store_true", default=False,
                        help="don't add any auto-lists")
    parser.add_argument("--no_img", dest="no_img", action="store_true",
                        default=False, help="don't copy screenshots")
    parser.add_argument("--ecs_versions", dest="ecs", action="store_true",
                        default=False,
                        help="prefer OCS/ECS versions (if --all_games)")
    parser.add_argument("--force_ntsc", dest="ntsc", action="store_true",
                        default=False, help="force NTSC video mode")
    parser.add_argument("-v", "--verbose", dest="verbose",
                        action="store_true", default=False,
                        help="verbose output")
    try:
        paths.verify()
        g_args = parser.parse_args()
        g_db = util.get_db(g_args.verbose)
        if g_args.out_dir:
            g_out_dir = g_args.out_dir
        # The clone dir is rebuilt from scratch on every run.
        g_clone_dir = util.path(g_out_dir, "tmp")
        if util.is_dir(g_clone_dir):
            shutil.rmtree(g_clone_dir)
        util.make_dir(util.path(g_clone_dir, "DH0"))
        # Config file name (without extension) names the output HDF and
        # the optional "extras" directory / clone script next to it.
        config_base_name = os.path.splitext(
            os.path.basename(g_args.config_file))[0]
        data_dir = "data"
        if not util.is_dir(data_dir):
            raise IOError("data dir doesn't exist: " + data_dir)

        # extract base image
        base_hdf = g_args.base_hdf
        if not base_hdf:
            base_hdf = util.path(paths.content(), "base", "base.hdf")
        if not util.is_file(base_hdf):
            raise IOError("base HDF doesn't exist: " + base_hdf)
        if g_args.verbose:
            print("extracting base HDF image... ({})".format(base_hdf))
        extract_base_image(base_hdf, get_boot_dir())

        # parse menu
        menu = None
        if g_args.verbose:
            print("parsing menu...")
        menu = util.yaml_load(g_args.config_file)
        if not isinstance(menu, list):
            raise ValueError("config file not a list: " + g_args.config_file)

        # copy base AGS2 config, create database
        if g_args.verbose:
            print("building AGS2 database...")
        base_ags2 = g_args.ags_dir
        if not base_ags2:
            base_ags2 = util.path("data", "ags2")
        if not util.is_dir(base_ags2):
            raise IOError("AGS2 configuration directory doesn't exist: " +
                          base_ags2)
        if g_args.verbose:
            print(" > using configuration: " + base_ags2)
        util.copytree(base_ags2, get_ags2_dir())
        if menu:
            ags_create_tree(menu)
        if g_args.all_games:
            ags_add_all("Game")
        if g_args.all_demos:
            ags_add_all("Demo")
            ags_add_all("Mags")
        if not g_args.no_autolists:
            ags_create_autoentries()
        create_vadjust_dats()

        # extract whdloaders
        if g_args.verbose:
            print("extracting {} content archives...".format(
                len(g_entries.items())))
        extract_entries(g_entries)

        # copy extra files
        config_extra_dir = util.path(os.path.dirname(g_args.config_file),
                                     config_base_name)
        if util.is_dir(config_extra_dir):
            if g_args.verbose:
                print("copying configuration extras...")
            util.copytree(config_extra_dir, g_clone_dir)

        # copy additional directories
        if g_args.add_dirs:
            if g_args.verbose:
                print("copying additional directories...")
            for s in g_args.add_dirs:
                # Format: "<amiga dest>::<host source>", e.g.
                # 'DH1:Music::~/Amiga/Music'.
                d = s.split("::")
                if util.is_dir(d[0]):
                    # Map Amiga device/path notation onto the host tree.
                    dest = util.path(g_clone_dir, d[1].replace(":", "/"))
                    print(" > copying '" + d[0] + "' to '" + d[1] + "'")
                    util.copytree(d[0], dest)
                else:
                    print(" > WARNING: '" + d[1] + "' doesn't exist")

        # build PFS container
        build_pfs(config_base_name, g_args.verbose)

        # set up cloner environment
        cloner_adf = util.path("data", "cloner", "boot.adf")
        cloner_cfg = util.path("data", "cloner", "template.fs-uae")
        clone_script = util.path(os.path.dirname(g_args.config_file),
                                 config_base_name) + ".clonescript"
        if util.is_file(cloner_adf) and util.is_file(
                cloner_cfg) and util.is_file(clone_script):
            if g_args.verbose:
                print("copying cloner config...")
            shutil.copyfile(cloner_adf, util.path(g_clone_dir, "boot.adf"))
            # create config from template
            with open(cloner_cfg, 'r') as f:
                cfg = f.read()
            # Substitute placeholders with the resolved environment paths.
            cfg = cfg.replace("<config_base_name>", config_base_name)
            cfg = cfg.replace("$AGSTEMP", paths.tmp())
            cfg = cfg.replace("$AGSDEST", util.path(os.getenv("AGSDEST")))
            cfg = cfg.replace("$FSUAEROM", util.path(os.getenv("FSUAEROM")))
            # NOTE(review): file handle is not explicitly closed; relies on
            # CPython refcounting -- consider a `with` block.
            open(util.path(g_clone_dir, "cfg.fs-uae"), mode="w").write(cfg)
            # copy clone script and write fs-uae metadata
            shutil.copyfile(clone_script, util.path(g_clone_dir, "clone"))
            open(util.path(g_clone_dir, "clone.uaem"),
                 mode="w").write("-s--rwed 2020-02-02 22:22:22.00")
        else:
            print("WARNING: cloner config files not found")

        # clean output directory (strip macOS Finder metadata)
        for r, _, f in os.walk(g_clone_dir):
            for name in f:
                path = util.path(r, name)
                if name == ".DS_Store":
                    os.remove(path)
        return 0

    # except Exception as err:
    except IOError as err:
        print("error - {}".format(err))
        sys.exit(1)
def build_pfs(config_base_name, verbose):
    """Build a PFS3-formatted HDF container from the clone directory.

    Sizes one partition per Amiga device directory found in g_clone_dir,
    then drives the `rdbtool` CLI (from amitools) to create the image,
    add the PFS3 filesystem, and lay out the partitions.

    Args:
        config_base_name: base name for the output <name>.hdf in g_out_dir.
        verbose: print progress messages when True.

    Raises:
        IOError: if the bundled PFS3 filesystem binary is missing.
    """
    if verbose:
        print("building PFS container...")
    pfs3_bin = util.path("data", "pfs3", "pfs3.bin")
    if not util.is_file(pfs3_bin):
        raise IOError("PFS3 filesystem doesn't exist: " + pfs3_bin)
    if verbose:
        print(" > calculating partition sizes...")
    # Fixed CHS geometry; sizes below are computed in whole cylinders.
    block_size = 512
    heads = 4
    sectors = 63
    cylinder_size = block_size * heads * sectors
    # Empirical PFS3 filesystem overhead factor applied to raw content size.
    fs_overhead = 1.0718
    num_cyls_rdb = 1
    total_cyls = num_cyls_rdb
    partitions = []  # (partition name, cylinders)
    for f in sorted(os.listdir(g_clone_dir)):
        if util.is_dir(util.path(g_clone_dir, f)) and is_amiga_devicename(f):
            # Extra headroom: 100MB on the boot partition, 50MB elsewhere.
            mb_free = 100 if f == "DH0" else 50
            # get_dir_size(...)[2] is presumably the size rounded to
            # block_size -- confirm against util.get_dir_size.
            cyls = int(
                fs_overhead *
                (util.get_dir_size(util.path(g_clone_dir, f), block_size)[2] +
                 (mb_free * 1024 * 1024))) // cylinder_size
            # Partitions are renamed DH0..DHn in sorted directory order.
            partitions.append(("DH" + str(len(partitions)), cyls))
            total_cyls += cyls
    out_hdf = util.path(g_out_dir, config_base_name + ".hdf")
    if util.is_file(out_hdf):
        os.remove(out_hdf)
    if verbose:
        print(" > creating pfs container ({}MB)...".format(
            (total_cyls * cylinder_size) // (1024 * 1024)))
    # Create the image and initialize the Rigid Disk Block in one call.
    r = subprocess.run([
        "rdbtool", out_hdf, "create",
        "chs={},{},{}".format(total_cyls + 1, heads, sectors), "+", "init",
        "rdb_cyls={}".format(num_cyls_rdb)
    ])
    if verbose:
        print(" > adding filesystem...")
    r = subprocess.run(["rdbtool", out_hdf, "fsadd", pfs3_bin, "fs=PFS3"],
                       stdout=subprocess.PIPE)
    if verbose:
        print(" > adding partitions...")
    # add boot partition
    part = partitions.pop(0)
    if verbose:
        print(" > " + part[0])
    r = subprocess.run([
        "rdbtool", out_hdf, "add", "name={}".format(part[0]),
        "start={}".format(num_cyls_rdb), "size={}".format(part[1]),
        "fs=PFS3", "block_size={}".format(block_size),
        "max_transfer=0x0001FE00", "mask=0x7FFFFFFE", "num_buffer=300",
        "bootable=True"
    ], stdout=subprocess.PIPE)
    # add subsequent partitions
    for part in partitions:
        if verbose:
            print(" > " + part[0])
        # Query rdbtool for the remaining free cylinder range; the first
        # output line is a "(start, end)" tuple literal.
        r = subprocess.run(["rdbtool", out_hdf, "free"],
                           stdout=subprocess.PIPE,
                           universal_newlines=True)
        free = make_tuple(r.stdout.splitlines()[0])
        free_start = int(free[0])
        free_end = int(free[1])
        part_start = free_start
        part_end = part_start + part[1]
        # Clamp the last partition to the end of the free range.
        if part_end > free_end:
            part_end = free_end
        r = subprocess.run([
            "rdbtool", out_hdf, "add", "name={}".format(part[0]),
            "start={}".format(part_start), "end={}".format(part_end),
            "fs=PFS3", "block_size={}".format(block_size),
            "max_transfer=0x0001FE00", "mask=0x7FFFFFFE", "num_buffer=300"
        ], stdout=subprocess.PIPE)
    return
def ags_create_autoentries():
    """Create automatic AGS2 browse lists for all database entries.

    Builds alphabetical / by-year game lists, demo-scene lists (by title,
    group, year, country, size category, disk magazines, music disks), and
    randomizer run-scripts, then writes a descriptive .txt note next to
    every directory that was actually created.
    """
    path = get_ags2_dir()
    d_path = get_ags2_dir()
    # Demo-scene lists go under "[ Demo Scene ].ags" if that subtree exists.
    if util.is_dir(util.path(path, "[ Demo Scene ].ags")):
        d_path = util.path(path, "[ Demo Scene ].ags")
    for entry in sorted(g_entries.values(), key=operator.itemgetter("title")):
        # First letter bucket; entries without title_short sort under "Z".
        letter = entry.get("title_short", "z")[0].upper()
        if letter.isnumeric():
            letter = "0-9"
        year = entry["year"]
        # Years like "199x" are treated as unknown.
        if "x" in year.lower():
            year = "Unknown"

        # Games
        if entry.get("category", "").lower() == "game":
            ags_create_entry(
                None, entry,
                util.path(path, "[ All Games ].ags", letter + ".ags"))
            ags_create_entry(
                None, entry,
                util.path(path, "[ All Games, by year ].ags", year + ".ags"))

        # Demos / Music Disks / Disk Mags
        # Closure over letter/year/d_path; called once per (group, country)
        # combination for demo entries.
        def add_demo(entry, sort_group, sort_country):
            # Sort groups ignoring a leading "The ", clipped to list width.
            if sort_group.startswith("The "):
                sort_group = sort_group[4:]
            sort_group = sort_group[:AGS_LIST_WIDTH]
            group_letter = sort_group[0].upper()
            if group_letter.isnumeric():
                group_letter = "0-9"
            if entry.get("subcategory", "").lower().startswith("disk mag"):
                ags_create_entry(None, entry,
                                 util.path(d_path, "[ Disk Magazines ].ags"))
            elif entry.get("subcategory", "").lower().startswith("music disk"):
                ags_create_entry(
                    None, entry,
                    util.path(d_path, "[ Music Disks by title ].ags",
                              letter + ".ags"))
                ags_create_entry(
                    None, entry,
                    util.path(d_path, "[ Music Disks by year ].ags",
                              year + ".ags"))
            else:
                if entry.get("subcategory", "").lower().startswith("crack"):
                    ags_create_entry(None, entry,
                                     util.path(d_path,
                                               "[ Demos, crack intros ].ags"),
                                     prefix=sort_group)
                if entry.get("subcategory", "").lower().startswith("intro"):
                    ags_create_entry(
                        None, entry,
                        util.path(d_path, "[ Demos, 1-64KB ].ags"))
                # For the by-group list, show the full title instead of the
                # short title.
                group_entry = dict(entry)
                group_entry["title_short"] = group_entry.get("title")
                ags_create_entry(
                    None, entry,
                    util.path(d_path, "[ Demos by title ].ags",
                              letter + ".ags"))
                ags_create_entry(None, group_entry,
                                 util.path(d_path, "[ Demos by group ].ags",
                                           group_letter + ".ags"),
                                 prefix=sort_group)
                ags_create_entry(
                    None, entry,
                    util.path(d_path, "[ Demos by year ].ags", year + ".ags"))
                if sort_country:
                    ags_create_entry(
                        None, entry,
                        util.path(d_path, "[ Demos by country ].ags",
                                  sort_country + ".ags"))

        if g_args.all_demos and entry.get("category", "").lower() == "demo":
            # "publisher" holds a comma-separated release-group list.
            groups = entry.get("publisher")
            if not groups:
                continue
            for sort_group in groups.split(", "):
                countries = entry.get("country")
                if not countries:
                    add_demo(entry, sort_group, None)
                else:
                    for sort_country in countries.split(", "):
                        add_demo(entry, sort_group, sort_country)

        # Run-scripts for randomizer
        if entry.get("category", "").lower() == "game" and not entry.get("issues"):
            ags_create_entry(None, entry, util.path(path, "Run"),
                             only_script=True)

    # Notes for created directories
    if util.is_dir(util.path(path, "[ All Games ].ags")):
        open(util.path(path, "[ All Games ].txt"), mode="w",
             encoding="latin-1").write("Browse all games alphabetically.")
    if util.is_dir(util.path(path, "[ All Games, by year ].ags")):
        open(util.path(path, "[ All Games, by year ].txt"), mode="w",
             encoding="latin-1").write("Browse all games by release year.")
    if util.is_dir(util.path(d_path, "[ Demos by group ].ags")):
        open(util.path(d_path, "[ Demos by group ].txt"), mode="w",
             encoding="latin-1").write("Browse demos by release group.")
    if util.is_dir(util.path(d_path, "[ Demos by country ].ags")):
        open(util.path(d_path, "[ Demos by country ].txt"), mode="w",
             encoding="latin-1").write("Browse demos by country of origin.")
    if util.is_dir(util.path(d_path, "[ Demos by title ].ags")):
        open(util.path(d_path, "[ Demos by title ].txt"), mode="w",
             encoding="latin-1").write("Browse demos by title.")
    if util.is_dir(util.path(d_path, "[ Demos by year ].ags")):
        open(util.path(d_path, "[ Demos by year ].txt"), mode="w",
             encoding="latin-1").write("Browse demos by release year.")
    if util.is_dir(util.path(d_path, "[ Demos, 1-64KB ].ags")):
        open(util.path(d_path, "[ Demos, 1-64KB ].txt"), mode="w",
             encoding="latin-1").write(
                 "Browse demos in the 1/4/40/64KB categories.")
    if util.is_dir(util.path(d_path, "[ Demos, crack intros ].ags")):
        open(util.path(d_path, "[ Demos, crack intros ].txt"), mode="w",
             encoding="latin-1").write(
                 "A glimpse into the origins of the demo scene.")
    if util.is_dir(util.path(d_path, "[ Disk Magazines ].ags")):
        open(util.path(d_path, "[ Disk Magazines ].txt"), mode="w",
             encoding="latin-1").write("A selection of scene disk magazines.")
    if util.is_dir(util.path(d_path, "[ Music Disks by title ].ags")):
        open(util.path(d_path, "[ Music Disks by title ].txt"), mode="w",
             encoding="latin-1").write("Browse music disks by title.")
    if util.is_dir(util.path(d_path, "[ Music Disks by year ].ags")):
        open(util.path(d_path, "[ Music Disks by year ].txt"), mode="w",
             encoding="latin-1").write("Browse music disks by year.")
    if util.is_dir(util.path(path, "[ Issues ].ags")):
        open(
            util.path(path, "[ Issues ].txt"), mode="w", encoding="latin-1"
        ).write(
            "Titles with known issues on Minimig-AGA.\n(Please report any new or resolved issues!)"
        )
def main():
    """Maintain the titles database against the WHDLoad archive directory.

    With --make-sqlite, only (re)builds the sqlite db from CSV and exits.
    Otherwise: clears archive references that no longer exist on disk,
    correlates archives found under data/whdl with db rows (filling in
    archive/slave info for rows missing it), and optionally (-v) reports
    titles with no screenshot image.

    Returns 0 on success; exits with status 1 on IOError.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--make-sqlite",
        dest="make_sqlite",
        action="store_true",
        default=False,
        # Fixed typos: "cvs" -> "csv", duplicated "none none".
        help="make sqlite db from csv, if none exists or if csv is newer than existing"
    )
    parser.add_argument("-v", "--verbose", dest="verbose",
                        action="store_true", default=False,
                        help="verbose output")
    try:
        args = parser.parse_args()
        # get_db opens (and, when needed, rebuilds) the sqlite database.
        db = util.get_db(args.verbose)
        if args.make_sqlite:
            db.close()
            return 0
        arc_dir = os.path.join("data", "whdl")
        if not util.is_dir(arc_dir):
            # Single-argument message so the error prints as one string
            # (two args made IOError render as a tuple / errno pair).
            raise IOError("whdl archive dir missing: " + arc_dir)

        # remove missing archive_paths from db
        for r in db.cursor().execute("SELECT * FROM titles"):
            if r["archive_path"] and not util.is_file(r["archive_path"]):
                print("archive removed:", r["id"])
                print(" >>", r["archive_path"])
                db.cursor().execute(
                    "UPDATE titles SET archive_path=NULL,slave_path=NULL,slave_version=NULL WHERE id=?;",
                    (r["id"], ))
                print()

        # enumerate whdl archives, correlate with db
        for _, arc in index_whdload_archives(arc_dir).items():
            # Match the exact id plus any "--" suffixed variants.
            rows = db.cursor().execute(
                "SELECT * FROM titles WHERE (id = ?) OR (id LIKE ?);", (
                    arc["id"],
                    arc["id"] + '--%',
                )).fetchall()
            if not rows:
                print("no db entry:", arc["archive_path"])
                print(" >>", arc["id"])
                print()
                continue
            for row in rows:
                # Only fill in rows that don't already reference an archive.
                if not row["archive_path"]:
                    db.cursor().execute(
                        "UPDATE titles SET archive_path=?,slave_path=?,slave_version=? WHERE id=?;",
                        (arc["archive_path"], arc["slave_path"],
                         arc["slave_version"], row["id"]))
                    print("archive added: " + arc["archive_path"] + " -> " +
                          row["id"])
                    print()

        # list more missing stuff
        if args.verbose:
            for r in db.cursor().execute("SELECT * FROM titles"):
                if not util.is_file("data/img/" + r["id"] + ".iff"):
                    print("missing image:", r["id"])
            print()

        db.commit()
        db.close()
        return 0

    except IOError as err:
        print("error - {}".format(err))
        sys.exit(1)
def ags_create_autoentries():
    """Create automatic AGS2 browse lists for all database entries.

    Earlier variant of the auto-list builder: games by letter/year, demos
    by title/group/year plus crack-intro, 1-64KB and disk-magazine lists,
    and randomizer run-scripts; finishes by writing a descriptive .txt
    note next to each directory that was created.

    NOTE(review): unlike the other variant in this file, this one indexes
    entry["title_short"] / ["category"] / ["subcategory"] / ["publisher"]
    directly, so entries missing those keys would raise KeyError.
    """
    path = get_ags2_dir()
    d_path = get_ags2_dir()
    # Demo lists go under "[ Demo Scene ].ags" if that subtree exists.
    if util.is_dir(os.path.join(path, "[ Demo Scene ].ags")):
        d_path = os.path.join(path, "[ Demo Scene ].ags")
    for entry in sorted(g_entries.values(), key=operator.itemgetter("title")):
        letter = entry["title_short"][0].upper()
        if letter.isnumeric():
            letter = "0-9"
        year = entry["year"]
        # Years like "199x" are treated as unknown.
        if "x" in year.lower():
            year = "Unknown"
        # Games
        if entry["category"].lower() == "game":
            ags_create_entry(
                None, entry,
                os.path.join(path, "[ All Games ].ags", letter + ".ags"),
                None, None)
            ags_create_entry(
                None, entry,
                os.path.join(path, "[ All Games, by year ].ags",
                             year + ".ags"), None, None)
        # Demos / Disk Mags
        if g_args.all_demos and entry["category"].lower() == "demo":
            group = entry["publisher"]
            if not group:
                continue
            # Sort groups ignoring a leading "The ", clipped to list width.
            if group.startswith("The "):
                group = group[4:]
            group = group[:AGS_LIST_WIDTH]
            group_letter = group[0].upper()
            if group_letter.isnumeric():
                group_letter = "0-9"
            if entry["subcategory"].lower().startswith("disk mag"):
                ags_create_entry(
                    None, entry,
                    os.path.join(d_path, "[ Disk Magazines ].ags"), None,
                    None)
            else:
                if entry["subcategory"].lower().startswith("crack"):
                    ags_create_entry(None, entry,
                                     os.path.join(
                                         d_path,
                                         "[ Demos, crack intros ].ags"),
                                     None, None, prefix=group)
                if entry["subcategory"].lower().startswith("intro"):
                    ags_create_entry(
                        None, entry,
                        os.path.join(d_path, "[ Demos, 1-64KB ].ags"), None,
                        None)
                ags_create_entry(
                    None, entry,
                    os.path.join(d_path, "[ Demos by title ].ags",
                                 letter + ".ags"), None, None)
                ags_create_entry(None, entry,
                                 os.path.join(d_path, "[ Demos by group ].ags",
                                              group_letter + ".ags"),
                                 None, None, prefix=group)
                ags_create_entry(
                    None, entry,
                    os.path.join(d_path, "[ Demos by year ].ags",
                                 year + ".ags"), None, None)
        # Run-scripts for randomizer
        if entry["category"].lower() == "game" and not entry["issues"]:
            ags_create_entry(None, entry, os.path.join(path, "Run"), None,
                             None, only_script=True)
        #if value["issues"]:
        #    ags_create_entry(None, value, os.path.join(path, "[ Issues ].ags"), None, None)
    # Notes for created directories
    if util.is_dir(os.path.join(path, "[ All Games ].ags")):
        open(os.path.join(path, "[ All Games ].txt"), mode="w",
             encoding="latin-1").write("Browse all games alphabetically.")
    if util.is_dir(os.path.join(path, "[ All Games, by year ].ags")):
        open(os.path.join(path, "[ All Games, by year ].txt"), mode="w",
             encoding="latin-1").write("Browse all games by release year.")
    if util.is_dir(os.path.join(d_path, "[ Demos by title ].ags")):
        open(os.path.join(d_path, "[ Demos by title ].txt"), mode="w",
             encoding="latin-1").write("Browse demos by title.")
    if util.is_dir(os.path.join(d_path, "[ Demos by group ].ags")):
        open(os.path.join(d_path, "[ Demos by group ].txt"), mode="w",
             encoding="latin-1").write("Browse demos by release group.")
    if util.is_dir(os.path.join(d_path, "[ Demos by year ].ags")):
        open(os.path.join(d_path, "[ Demos by year ].txt"), mode="w",
             encoding="latin-1").write("Browse demos by release year.")
    if util.is_dir(os.path.join(d_path, "[ Demos, 1-64KB ].ags")):
        open(os.path.join(d_path, "[ Demos, 1-64KB ].txt"), mode="w",
             encoding="latin-1").write(
                 "Browse demos in the 1/4/40/64KB categories.")
    if util.is_dir(os.path.join(d_path, "[ Demos, crack intros ].ags")):
        open(os.path.join(d_path, "[ Demos, crack intros ].txt"), mode="w",
             encoding="latin-1").write(
                 "A glimpse into the origins of the demo scene.")
    if util.is_dir(os.path.join(d_path, "[ Disk Magazines ].ags")):
        open(os.path.join(d_path, "[ Disk Magazines ].txt"), mode="w",
             encoding="latin-1").write("A selection of scene disk magazines.")
    if util.is_dir(os.path.join(path, "[ Issues ].ags")):
        open(
            os.path.join(path, "[ Issues ].txt"), mode="w", encoding="latin-1"
        ).write(
            "Titles with known issues on Minimig-AGA.\n(Please report any new or resolved issues!)"
        )