def synchronize(self):
    if not is_locker_enabled():
        return
    if "locker" not in self.context.meta:
        # We haven't looked up synchronization information from the
        # server; that probably means we didn't want to synchronize with
        # the server now, so we just return.
        return
    if self.stop_check():
        return
    database = LockerDatabase.instance()
    sync_version = database.get_sync_version()
    if sync_version == self.context.meta["locker"]["sync"]:
        print("[SYNC] Locker data already up to date")
        return
    self.set_status(gettext("Fetching locker data..."))
    data = self.fetch_data("/api/locker-sync/1")
    assert len(data) % 20 == 0
    self.set_status(gettext("Updating locker data..."))
    database.clear()
    k = 0
    while k < len(data):
        sha1_bytes = data[k:k + 20]
        database.add_sha1_binary(sha1_bytes)
        k += 20
    database.set_sync_version(self.context.meta["locker"]["sync"])
    self.set_status(gettext("Committing locker data..."))
    self.update_file_database_timestamps()
    database.commit()

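# The locker sync payload fetched above is a flat byte string of
# concatenated 20-byte binary SHA-1 digests (hence the
# len(data) % 20 == 0 assertion). The sketch below is illustrative only:
# decode_locker_payload() is not part of this module and is not used by
# the synchronizer; it just shows how such a payload maps to hex digests.
def decode_locker_payload(data: bytes) -> list:
    assert len(data) % 20 == 0
    return [data[k:k + 20].hex() for k in range(0, len(data), 20)]
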
def synchronize(self):
    if not self.is_authenticated():
        print("ListsSynchronizer: not authenticated")
        return
    if self.stop_check():
        return
    if "lists" not in self.context.meta:
        # Haven't looked up synchronization information from the server.
        return
    self.set_status(gettext("Updating game lists..."))
    database = Database.instance()
    cursor = database.cursor()
    cursor.execute("SELECT uuid, name, sync FROM game_list")
    # existing_lists = {}
    existing_syncs = {}
    for row in cursor:
        uuid, name, sync = row
        # existing_lists[uuid] = {
        #     "name": name,
        #     "sync": sync,
        # }
        existing_syncs[uuid] = sync
    # existing_syncs.sort()
    for list_uuid, list_info in self.context.meta["lists"].items():
        if list_info["sync"] == existing_syncs.get(list_uuid, None):
            print("[SYNC] List {} already synced".format(list_uuid))
        else:
            self.set_status(
                gettext("Updating list '{0}'...".format(list_info["name"]))
            )
            self.synchronize_list(database, list_uuid, list_info)
    for existing_list_uuid in existing_syncs:
        for list_uuid in self.context.meta["lists"]:
            if list_uuid == existing_list_uuid:
                break
        else:
            # This old list should be removed.
            self.set_status(
                gettext("Removing list {0}".format(existing_list_uuid))
            )
            self.remove_list(database, existing_list_uuid)
    database.commit()

def init_changes(self):
    if self.change_handler is None:
        return
    print("LaunchHandler.init_changes")
    self.on_progress(gettext("Restoring changes..."))
    self.change_handler.init(
        self.get_state_dir(), ignore=["*.uss", "*.sdf"]
    )

def synchronize(self): if "database" not in self.context.meta: # we haven't looked up synchronization information from the server, # that probably means we didn't want to synchronize with the # server now, therefore we just return print("database not in self.context.meta") return self._synchronize() if self.stop_check(): self.client.database.rollback() else: print("committing data") self.set_status(gettext("Updating database"), gettext("Committing data...")) self.database.commit() print("done") if os.environ.get("FSGS_WRITE_DAT_FILES", "") == "1": self.write_dat_file()
def fetch_data(self, url):
    for i in range(10):
        try:
            return self.fetch_data_attempt(url)
        except Exception as e:
            print(e)
            sleep_time = 2.0 + i * 0.3
            self.set_status(
                gettext(
                    "Download failed (attempt {0}) - "
                    "retrying in {1} seconds"
                ).format(i + 1, int(sleep_time))
            )
            for _ in range(int(sleep_time) * 10):
                time.sleep(0.1)
                if self.stop_check():
                    return
            self.set_status(
                gettext("Retrying last operation (attempt {0})").format(
                    i + 1
                )
            )
    # Final attempt; let any exception propagate to the caller.
    return self.fetch_data_attempt(url)

def fetch_game_sync_data(self):
    last_id = self.database.get_last_game_id()
    if self.context.meta["games"][self.platform_id]["sync"] == last_id:
        print("[SYNC] Platform {} already synced".format(self.platform_id))
        return b""
    self.set_status(
        gettext("Fetching database entries ({0})").format(last_id + 1)
    )
    url = "{0}/api/sync/{1}/games?v=3&sync={2}".format(
        self.url_prefix(), self.platform_id, last_id
    )
    print(url)
    data = self.fetch_data(url)
    # self.downloaded_size += len(data)
    return data

def fetch_rating_entries(self):
    cursor = self.client.database.cursor()
    cursor.execute("SELECT max(updated) FROM rating")
    row = cursor.fetchone()
    last_time = row[0]
    if not last_time:
        last_time = "2012-01-01 00:00:00"
    self.set_status(
        gettext("Fetching game ratings ({0})").format(last_time)
    )
    url = "{0}/api/sync/{1}/ratings?from={2}".format(
        self.url_prefix(), self.platform_id, quote_plus(last_time)
    )
    print(url)
    # data, json_data = self.fetch_json(url)
    json_data = self.fetch_json(url)
    # self.downloaded_size += len(data)
    return json_data

def prepare_floppies(self):
    print("LaunchHandler.copy_floppies")
    current_task.set_progress(gettext("Preparing floppy images..."))
    # self.on_progress(gettext("Preparing floppy images..."))

    floppies = []
    for i in range(Amiga.MAX_FLOPPY_DRIVES):
        key = "floppy_drive_{0}".format(i)
        if self.config.get(key, ""):
            floppies.append(self.config[key])
        self.prepare_floppy(key)

    for i in range(Amiga.MAX_FLOPPY_IMAGES):
        key = "floppy_image_{0}".format(i)
        if self.config.get(key, ""):
            break
    else:
        print("floppy image list is empty")
        for j, floppy in enumerate(floppies):
            self.config["floppy_image_{0}".format(j)] = floppy

    max_image = -1
    for i in range(Amiga.MAX_FLOPPY_IMAGES):
        key = "floppy_image_{0}".format(i)
        self.prepare_floppy(key)
        if self.config.get(key, ""):
            max_image = i
    save_image = max_image + 1

    if self.config.get("save_disk", "") != "0":
        s = Resources("fsgamesys").stream("amiga/adf_save_disk.dat")
        data = s.read()
        data = zlib.decompress(data)
        save_disk_sha1 = hashlib.sha1(data).hexdigest()
        # save_disk = os.path.join(self.temp_dir, "Save Disk.adf")
        save_disk = os.path.join(
            self.temp_dir, save_disk_sha1[:8].upper() + ".adf"
        )
        with open(save_disk, "wb") as f:
            f.write(data)
        self.config[f"floppy_image_{save_image}"] = save_disk
        self.config[f"floppy_image_{save_image}_label"] = "Save Disk"

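# Sketch of the save disk naming used in prepare_floppies() above: the
# bundled amiga/adf_save_disk.dat resource is zlib-compressed ADF data,
# and the written file is named after the first 8 hex characters of the
# uncompressed data's SHA-1, upper-cased, plus ".adf".
# save_disk_file_name() is illustrative only and is not called anywhere;
# hashlib and zlib are already used by this module.
def save_disk_file_name(compressed: bytes) -> str:
    data = zlib.decompress(compressed)
    return hashlib.sha1(data).hexdigest()[:8].upper() + ".adf"
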
def prepare_hard_drive(self, index):
    key = "hard_drive_{}".format(index)
    src = self.config.get(key, "")
    dummy, ext = os.path.splitext(src)
    ext = ext.lower()

    if is_http_url(src):
        name = src.rsplit("/", 1)[-1]
        name = unquote(name)
        self.on_progress(gettext("Downloading {0}...".format(name)))
        dest = os.path.join(self.temp_dir, name)
        Downloader.install_file_from_url(src, dest)
        src = dest
    elif src.startswith("hd://game/"):
        self.unpack_game_hard_drive(index, src)
        self.disable_save_states()
        return
    elif src.startswith("file_list:"):
        self.unpack_game_hard_drive(index, src)
        self.disable_save_states()
        return
    elif src.startswith("hd://template/workbench/"):
        self.prepare_workbench_hard_drive(index, src)
        self.disable_save_states()
        return
    elif src.startswith("hd://template/empty/"):
        self.prepare_empty_hard_drive(index, src)
        self.disable_save_states()
        return

    if ext in Archive.extensions:
        print("zipped hard drive", src)
        self.unpack_hard_drive(index, src)
        self.disable_save_states()
    elif src.endswith("HardDrive"):
        print("XML-described hard drive", src)
        self.unpack_hard_drive(index, src)
        self.disable_save_states()
    else:
        src = Paths.expand_path(src)
        self.config[key] = src

def cleanup(self):
    print("LaunchHandler.cleanup")
    if os.environ.get("FSGS_CLEANUP", "") == "0":
        print("[DRIVER] NOTICE: keeping temp files around...")
        return
    self.on_progress(gettext("Cleaning up..."))
    # self.delete_tree(self.temp_dir)
    shutil.rmtree(self.temp_dir, ignore_errors=True)
    state_dir = self.get_state_dir()
    try:
        # This will only succeed if the directory is empty - we don't
        # want to leave unnecessary empty state directories.
        os.rmdir(state_dir)
        print("removed", repr(state_dir))
        # Also try to remove the parent (letter) dir.
        os.rmdir(os.path.dirname(state_dir))
        print("removed", repr(os.path.dirname(state_dir)))
    except OSError:
        # Could not delete directories - OK - they probably have content.
        pass

def update_changes(self):
    if self.change_handler is None:
        return
    print("LaunchHandler.update_changes")
    self.on_progress(gettext("Saving changes..."))
    self.change_handler.update(self.get_state_dir())

def prepare_hard_drives(self):
    print("LaunchHandler.prepare_hard_drives")
    current_task.set_progress(gettext("Preparing hard drives..."))
    # self.on_progress(gettext("Preparing hard drives..."))
    for i in range(0, Amiga.MAX_HARD_DRIVES):
        self.prepare_hard_drive(i)

def prepare_roms(self):
    print("LaunchHandler.prepare_roms")
    current_task.set_progress(gettext("Preparing kickstart ROMs..."))
    amiga_model = self.config.get("amiga_model", "A500")
    model_config = Amiga.get_model_config(amiga_model)

    roms = [("kickstart_file", model_config["kickstarts"])]
    if self.config["kickstart_ext_file"] or model_config["ext_roms"]:
        # Not all Amigas have extended ROMs.
        roms.append(("kickstart_ext_file", model_config["ext_roms"]))
    if amiga_model.lower() == "cd32/fmv":
        roms.append(("fmv_rom", [CD32_FMV_ROM]))
    if self.config["graphics_card"].lower().startswith("picasso-iv"):
        roms.append(("graphics_card_rom", [PICASSO_IV_74_ROM]))
    if self.config["accelerator"].lower() == "cyberstorm-ppc":
        roms.append(("accelerator_rom", ["cyberstormppc.rom"]))
    if self.config["freezer_cartridge"] == "action-replay-2":
        # Ideally, we would want to recognize ROMs based on zeroing the
        # first four bytes, but right now we simply recognize a common
        # additional version. freezer_cartridge_rom isn't a real option;
        # we just want to copy the ROM file and let FS-UAE find it.
        roms.append(
            (
                "[freezer_cartridge]",
                [
                    ACTION_REPLAY_MK_II_2_14_ROM.sha1,
                    ACTION_REPLAY_MK_II_2_14_MOD_ROM.sha1,
                ],
            )
        )
    elif self.config["freezer_cartridge"] == "action-replay-3":
        roms.append(
            (
                "[freezer_cartridge]",
                [
                    ACTION_REPLAY_MK_III_3_17_ROM.sha1,
                    ACTION_REPLAY_MK_III_3_17_MOD_ROM.sha1,
                ],
            )
        )

    use_temp_kickstarts_dir = False

    for config_key, default_roms in roms:
        print("[ROM]", config_key, default_roms)
        src = self.config[config_key]
        print("[ROM]", src)
        if not src:
            for sha1 in default_roms:
                print("[ROM] Trying", sha1)
                if is_sha1(sha1):
                    rom_src = self.fsgs.file.find_by_sha1(sha1)
                    if rom_src:
                        src = rom_src
                        print("[ROM] Found", rom_src)
                        break
                else:
                    # roms_dir = FSGSDirectories.get_kickstarts_dir()
                    # src = os.path.join(roms_dir, sha1)
                    # if os.path.exists(src):
                    #     break
                    # Look up the file in the ROMs dir instead.
                    src = sha1
        elif src == "internal":
            continue
        elif src:
            src = Paths.expand_path(src)
        if not src:
            raise TaskFailure(
                gettext(
                    "Did not find required Kickstart or "
                    "ROM for {}. Wanted one of these files: {}".format(
                        config_key, repr(default_roms)
                    )
                )
            )

        dest = os.path.join(self.temp_dir, os.path.basename(src))

        def lookup_rom_from_src(src):
            parts = src.split(":", 1)
            if len(parts) == 2 and len(parts[0]) > 1:
                # src has a scheme (not a Windows drive letter). Assume
                # we can find this file.
                return src
            archive = Archive(src)
            if archive.exists(src):
                return src
            dirs = [self.fsgs.amiga.get_kickstarts_dir()]
            for dir_ in dirs:
                path = os.path.join(dir_, src)
                print("[ROM] Checking", repr(path))
                archive = Archive(path)
                if archive.exists(path):
                    return path
            return None

        org_src = src
        src = lookup_rom_from_src(src)
        if not src and org_src == "cyberstormppc.rom":
            src = lookup_rom_from_src(
                "ralphschmidt-cyberstorm-ppc-4471.rom"
            )
            if not src:
                for (
                    dir_
                ) in FSGSDirectories.get_amiga_forever_directories():
                    path = os.path.join(
                        dir_,
                        "Shared",
                        "rom",
                        "ralphschmidt-cyberstorm-ppc-4471.rom",
                    )
                    if os.path.exists(path):
                        src = path
                        print("[ROM] Found", path)
                        break
                    else:
                        print("[ROM] Trying", path)

        stream = None
        # FIXME: prepare_roms should be rewritten; it's kind of crap.
        # ROM patching and decryption should be handled differently:
        # use file database filters, and only support decryption via
        # rom.key when using uncompressed files directly on disk.
        if not src or not os.path.exists(src):
            try:
                stream = self.fsgs.file.open(src)
                if stream is None:
                    raise FileNotFoundError(src)
            except FileNotFoundError:
                raise TaskFailure(
                    gettext(
                        "Cannot find required ROM "
                        "file: {name}".format(name=repr(org_src))
                    )
                )

        with open(dest, "wb") as f:
            if stream:
                print("[ROM] From stream => {}".format(dest))
                rom = {}
                rom["data"] = stream.read()
                rom["sha1"] = hashlib.sha1(rom["data"]).hexdigest()
                ROMManager.patch_rom(rom)
                f.write(rom["data"])
            else:
                archive = Archive(src)
                ROMManager.decrypt_archive_rom(archive, src, file=f)

        if use_temp_kickstarts_dir:
            self.config[config_key] = os.path.basename(src)
        else:
            self.config[config_key] = dest

    if use_temp_kickstarts_dir:
        self.config["kickstarts_dir"] = self.temp_dir

def _synchronize(self):
    if (self.context.meta["database"]["version"]
            != self.database.get_game_database_version()):
        self.set_status(gettext("Resetting game database..."))
        self.database.clear()
        self.database.set_game_database_version(
            self.context.meta["database"]["version"]
        )

    self.set_status(gettext("Synchronizing game database..."))
    while True:
        if self.stop_check():
            return
        data = self.fetch_game_sync_data()
        if self.stop_check():
            return
        if not data:
            print("no more changes")
            break
        t1 = time.time()
        k = 0
        while k < len(data):
            game_sync_id = bytes_to_int(data[k:k + 4])
            k += 4
            game_uuid = data[k:k + 16]
            k += 16
            game_data_size = bytes_to_int(data[k:k + 4])
            k += 4
            game_data = data[k:k + game_data_size]
            k += game_data_size
            # print(game_uuid, "game data len:", len(game_data))
            if len(game_data) > 0:
                self.database.add_game(game_sync_id, game_uuid, game_data)
            else:
                self.database.delete_game(game_sync_id, game_uuid)
        t2 = time.time()
        print("  {0:0.2f} seconds".format(t2 - t1))

    last_json_data = ""
    while True:
        if self.stop_check():
            return
        json_data = self.fetch_rating_entries()
        if self.stop_check():
            return
        if json_data == last_json_data:
            print("no more changes")
            break
        last_json_data = json_data
        num_changes = len(json_data["ratings"])
        print("  processing {0} entries".format(num_changes))
        t1 = time.time()
        for update in json_data["ratings"]:
            cursor = self.client.database.cursor()
            cursor.execute(
                "SELECT count(*) FROM rating WHERE game_uuid = "
                "? AND work_rating = ? AND like_rating = ? "
                "AND updated = ?",
                (
                    update["game"],
                    update["work"],
                    update["like"],
                    update["updated"],
                ),
            )
            if cursor.fetchone()[0] == 1:
                # We want to avoid needlessly creating update
                # transactions.
                continue
            cursor.execute(
                "DELETE FROM rating WHERE game_uuid = ?",
                (update["game"],),
            )
            cursor.execute(
                "INSERT INTO rating (game_uuid, work_rating, "
                "like_rating, updated) VALUES (?, ?, ?, ?)",
                (
                    update["game"],
                    update["work"],
                    update["like"],
                    update["updated"],
                ),
            )
        t2 = time.time()
        print("  {0:0.2f} seconds".format(t2 - t1))

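# Record layout parsed by the inner loop in _synchronize() above:
#   4 bytes   game sync id (integer, decoded with bytes_to_int)
#   16 bytes  game uuid (raw bytes)
#   4 bytes   payload length (integer)
#   N bytes   payload; an empty payload means the game should be deleted
# Illustrative generator only, not used by the synchronizer. Big-endian
# byte order is an assumption here; the actual order is whatever
# bytes_to_int() implements.
def iter_game_sync_records(data: bytes):
    k = 0
    while k < len(data):
        sync_id = int.from_bytes(data[k:k + 4], "big")
        k += 4
        game_uuid = data[k:k + 16]
        k += 16
        size = int.from_bytes(data[k:k + 4], "big")
        k += 4
        yield sync_id, game_uuid, data[k:k + size]
        k += size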