def check_dirs(self):
    """Do all the directories exist?"""
    for dir in [self._saves, self._chest, self._mods,
                self._images, self._models, self._workshop]:
        if not os.path.isdir(dir):
            tts.logger().warn("TTS Dir missing: {}".format(dir))
            return False
    return True
def download(self):
    if not self.download_sb.save:
        tts.logger().warn("Internal error: no save when attempting to download")
        messagebox.showinfo("TTS Manager", "Download failed (see log).")
        return
    if self.download_sb.save.download():
        messagebox.showinfo("TTS Manager", "Download done.")
    else:
        messagebox.showinfo("TTS Manager", "Download failed (see log).")
def get_filenames_in(self, search_path):
    if not os.path.isdir(search_path):
        tts.logger().warn("Tried to search non-existent path {}.".format(search_path))
        return []
    return [os.path.splitext(file)[0] for file in os.listdir(search_path)
            if os.path.splitext(file)[1].lower() == '.json']
def export(self, export_filename):
    log = tts.logger()
    log.info("About to export %s to %s" % (self.ident, export_filename))
    zfs = tts.filesystem.FileSystem(base_path="")
    zipComment = {
        "Ver": PAK_VER,
        "Id": self.ident,
        "Type": self.save_type.name
    }
    # TODO: error checking.
    with zipfile.ZipFile(export_filename, 'w') as zf:
        zf.comment = json.dumps(zipComment).encode('utf-8')
        log.debug("Writing {} (base {}) to {}".format(
            self.filename, os.path.basename(self.filename),
            zfs.get_path_by_type(os.path.basename(self.filename), self.save_type)))
        zf.write(self.filename,
                 zfs.get_path_by_type(os.path.basename(self.filename), self.save_type))
        if self.thumbnail:
            filepath = zfs.get_path_by_type(os.path.basename(self.thumbnail), self.save_type)
            arcname = os.path.join(os.path.dirname(filepath), 'Thumbnails',
                                   os.path.basename(filepath))
            zf.write(self.thumbnail, arcname=arcname)
            log.debug(f"Writing {self.thumbnail} to {arcname}")
        for url in self.models:
            log.debug("Writing {} to {}".format(
                url.location, zfs.get_model_path(os.path.basename(url.location))))
            zf.write(url.location, zfs.get_model_path(os.path.basename(url.location)))
        for url in self.images:
            log.debug("Writing {} to {}".format(
                url.location, zfs.get_image_path(os.path.basename(url.location))))
            zf.write(url.location, zfs.get_image_path(os.path.basename(url.location)))
    log.info("File exported.")
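# The zip comment written by export() above is plain JSON, so a pak header can be
# inspected with the standard library alone. This is an illustrative sketch, not
# part of the original module; the pak path in the usage comment is hypothetical.
import json
import zipfile

def read_pak_metadata(pak_path):
    """Return the {"Ver", "Id", "Type"} dict stored in a pak's zip comment, or None."""
    with zipfile.ZipFile(pak_path, 'r') as zf:
        if not zf.comment:
            return None
        return json.loads(zf.comment.decode('utf-8'))

# Example usage (hypothetical file):
# read_pak_metadata("123456.pak")  # -> {'Ver': ..., 'Id': '123456', 'Type': 'workshop'}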
def __init__(self, savedata, filename, ident, filesystem, save_type=SaveType.workshop):
    log = tts.logger()
    self.data = savedata
    self.ident = ident
    if self.data['SaveName']:
        self.save_name = self.data['SaveName']
    else:
        self.save_name = self.ident
    self.save_type = save_type
    self.filesystem = filesystem
    self.filename = filename
    # Known issue: this fails if filename doesn't contain an extsep.
    thumbnail = os.path.extsep.join(filename.split(os.path.extsep)[0:-1] + ['png'])
    if os.path.isfile(thumbnail):
        self.thumbnail = thumbnail
    else:
        self.thumbnail = None
    self.thumb = os.path.isfile(os.path.extsep.join([filename.split(os.path.extsep)[0], 'png']))
    # Strip the local part off.
    fileparts = self.filename.split(os.path.sep)
    while fileparts[0] != 'Saves' and fileparts[0] != 'Mods':
        fileparts = fileparts[1:]
    self.basename = os.path.join(*fileparts)
    log.debug("filename: {}, save_name: {}, basename: {}".format(
        self.filename, self.save_name, self.basename))
    self.urls = [Url(url, self.filesystem) for url in get_save_urls(savedata)]
    self.missing = [x for x in self.urls if not x.exists]
    self.images = [x for x in self.urls if x.exists and x.isImage]
    self.models = [x for x in self.urls if x.exists and not x.isImage]
    log.debug("Urls found {}: {} missing, {} models, {} images".format(
        len(self.urls), len(self.missing), len(self.models), len(self.images)))
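# Illustration (not part of the original class) of the basename-stripping above:
# everything before the 'Saves'/'Mods' component is dropped so the save can be
# re-rooted under another TTS install. The path used here is made up.
import os

parts = os.path.join("home", "user", "TTS", "Mods", "Workshop", "123456.json").split(os.path.sep)
while parts[0] != 'Saves' and parts[0] != 'Mods':
    parts = parts[1:]
print(os.path.join(*parts))  # -> Mods/Workshop/123456.json (Mods\Workshop\123456.json on Windows)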
def __init__(self, root):
    self.log = tts.logger()
    self.log.setLevel(logging.WARN)
    self.preferences = tts.preferences.Preferences()
    self.root = root
    if self.preferences.firstRun:
        messagebox.showinfo("TTS Manager",
                            "First run detected.\nOpening preferences pane.")
        self.showPreferences()
    if not self.preferences.validate():
        messagebox.showwarning("TTS Manager",
                               "Invalid preferences detected.\nOpening preferences pane.")
        self.showPreferences()
    self.reload_filesystem()
    mode_notebook = ttk.Notebook(root)
    list_frame = ttk.Frame(mode_notebook)
    self.populate_list_frame(list_frame)
    export_frame = ttk.Frame(mode_notebook)
    self.populate_export_frame(export_frame)
    import_frame = ttk.Frame(mode_notebook)
    self.populate_import_frame(import_frame)
    download_frame = ttk.Frame(mode_notebook)
    self.populate_download_frame(download_frame)
    mode_notebook.add(list_frame, text="List")
    mode_notebook.add(export_frame, text="Export")
    mode_notebook.add(import_frame, text="Import")
    mode_notebook.add(download_frame, text="Download")
    mode_notebook.pack(expand=1, fill="both")
    logger_frame = ttk.Frame(root)
    logger_frame.pack(fill=Tk.X, expand=Tk.Y)
    ttk.Label(logger_frame, text="Log:").pack(side=Tk.LEFT)
    self.loggerLevel = ttk.Combobox(
        logger_frame,
        state="readonly",
        values=['debug', 'information', 'warning', 'error'])
    self.loggerLevel.bind("<<ComboboxSelected>>", self.change_log_level)
    self.loggerLevel.current(2)
    self.loggerLevel.pack(side=Tk.LEFT)
    log_frame = ttk.Frame(root)
    log_frame.pack(fill=Tk.X, expand=Tk.Y)
    logger = ScrolledText.ScrolledText(log_frame, state=Tk.DISABLED, height=5)
    logger.pack(fill=Tk.BOTH, expand=Tk.Y, side=Tk.BOTTOM)
    tts.setLoggerConsole(logger)
    pref_frame = ttk.Frame(root)
    pref_frame.pack(fill=Tk.X, expand=Tk.Y)
    ttk.Button(pref_frame, text="Preferences",
               command=self.showPreferences).pack()
def download(self):
    log = tts.logger()
    if self.exists:
        return True
    url = self.url
    protocols = url.split('://')
    if len(protocols) == 1:
        log.warn("Missing protocol for {}. Assuming http://.".format(url))
        url = "http://" + url
    log.info("Downloading data for %s." % url)
    user_agent = 'Mozilla/4.0 (compatible; MSIE 5.5; Windows NT)'
    headers = {'User-Agent': user_agent}
    request = urllib.request.Request(url, headers=headers)
    try:
        response = urllib.request.urlopen(request)
    except urllib.error.URLError as e:
        log.error("Error downloading %s (%s)" % (url, e))
        return False
    try:
        data = response.read()
    except http.client.IncompleteRead as e:
        # Raised when the http server does not return the whole file.
        log.error("Error downloading %s (%s)" % (url, e))
        return False
    imagetype = imghdr.what('', data)
    filename = None
    if imagetype is None:
        filename = self.filesystem.get_model_path(self.stripped_url + '.obj')
        log.debug("File is OBJ")
    else:
        if imagetype == 'jpeg':
            imagetype = 'jpg'
        log.debug("File is %s" % imagetype)
        filename = self.filesystem.get_image_path(self.stripped_url + '.' + imagetype)
    try:
        with open(filename, 'wb') as fh:
            fh.write(data)
    except IOError as e:
        log.error("Error writing file %s (%s)" % (filename, e))
        return False
    self._looked_for_location = False
    return True
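# The image-versus-model decision above relies on imghdr sniffing the downloaded
# bytes: anything imghdr does not recognise is treated as an OBJ model. A
# standalone illustration (note that imghdr is deprecated as of Python 3.11):
import imghdr

png_header = b'\x89PNG\r\n\x1a\n' + b'\x00' * 16     # minimal PNG signature plus padding
print(imghdr.what('', png_header))       # -> 'png'; such data is written via get_image_path()
print(imghdr.what('', b'not an image'))  # -> None; such data is written as .obj via get_model_path()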
def get_save_urls(savedata):
    '''
    Iterate over all the values in the json save data, building a set of all the
    values whose key ends in "URL" (plus any other values that look like urls).
    '''
    log = tts.logger()

    def parse_list(data):
        urls = set()
        for item in data:
            urls |= get_save_urls(item)
        return urls

    def parse_dict(data):
        urls = set()
        if not data:
            return urls
        for key in data:
            if type(data[key]) is not str or key == 'PageURL' or key == 'Rules':
                # If it isn't a string, it can't be an url.
                # Also don't save tablet state / rulebooks.
                continue
            if key.endswith('URL') and data[key] != '':
                log.debug("Found {}:{}".format(key, data[key]))
                urls.add(data[key])
                continue
            protocols = data[key].split('://')
            if len(protocols) == 1:
                # Not an url.
                continue
            if protocols[0] in ['http', 'https', 'ftp']:
                # Belt + braces.
                urls.add(data[key])
                log.debug("Found {}:{}".format(key, data[key]))
                continue
        for item in data.values():
            urls |= get_save_urls(item)
        return urls

    if type(savedata) is list:
        return parse_list(savedata)
    if type(savedata) is dict:
        return parse_dict(savedata)
    return set()
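# A toy walk-through of get_save_urls() above. The data below is invented and only
# loosely shaped like a TTS save fragment; with the function in scope it would
# return the mesh and texture urls while skipping PageURL and plain strings.
sample = {
    "SaveName": "Demo",
    "ObjectStates": [
        {"MeshURL": "http://example.com/model.obj",
         "DiffuseURL": "http://example.com/texture.png",
         "PageURL": "http://example.com/tablet-page"},
        {"Nickname": "plain text", "LuaScript": ""},
    ],
}
# get_save_urls(sample)
# -> {'http://example.com/model.obj', 'http://example.com/texture.png'}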
def do_export(self, args):
    filename = None
    if args.output:
        if os.path.isdir(args.output):
            filename = os.path.join(args.output, args.id + ".pak")
        else:
            filename = args.output
    else:
        filename = args.id + ".pak"
    data = None
    json_filename = None
    if not args.save_type:
        args.save_type = self.filesystem.get_json_filename_type(args.id)
    if not args.save_type:
        return 1, "Unable to determine type of id %s" % args.id
    json_filename = self.filesystem.get_json_filename_for_type(args.id, args.save_type)
    if not json_filename:
        return 1, "Unable to find filename for id %s (wrong -s/-w/-c specified?)" % args.id
    data = tts.load_json_file(json_filename)
    if not data:
        return 1, "Unable to load data for file %s" % json_filename
    save = tts.Save(savedata=data,
                    filename=json_filename,
                    ident=args.id,
                    save_type=args.save_type,
                    filesystem=self.filesystem)
    if not save.isInstalled:
        if not args.download:
            return 1, "Unable to find all urls required by %s. Rerun with -d to try and download them or open it within TTS.\n%s" % (args.id, save)
        tts.logger().info("Downloading missing files...")
        if save.download():
            tts.logger().info("Files downloaded successfully.")
        else:
            return 1, "Some files failed to download"
    if os.path.isfile(filename) and not args.force:
        return 1, "%s already exists. Please specify another file or use '-f'" % filename
    tts.logger().info("Exporting json file %s to %s" % (args.id, filename))
    save.export(filename)
    # TODO: exception handling
    return 0, "Exported %s to %s" % (args.id, filename)
def download(self):
    log = tts.logger()
    log.warn("About to download files for %s" % self.save_name)
    if self.isInstalled:
        log.info("All files already downloaded.")
        return True
    successful = True
    url_counter = 1
    for url in self.missing:
        log.warn("Downloading file {} of {} for {}".format(
            url_counter, len(self.missing), self.save_name))
        if not url.download():
            successful = False
        url_counter += 1
    # TODO: remove successfully downloaded items from the missing list.
    if successful:
        log.info("All files downloaded.")
    return successful
def change_log_level(self, event):
    levels = [logging.DEBUG, logging.INFO, logging.WARN, logging.ERROR]
    tts.logger().info("Setting log level to %s" % levels[self.loggerLevel.current()])
    tts.logger().setLevel(levels[self.loggerLevel.current()])
def __init__(self):
    self.preferences = tts.preferences.Preferences()
    parser = argparse.ArgumentParser(
        description="Manipulate Tabletop Simulator files")
    parser.add_argument("-d", "--directory",
                        help="Override TTS cache directory")
    parser.add_argument("-l", "--loglevel",
                        help="Set logging level",
                        choices=['debug', 'info', 'warn', 'error'])
    subparsers = parser.add_subparsers(dest='parser',
                                       title='command',
                                       description='Valid commands.')
    subparsers.required = True

    # list command
    parser_list = subparsers.add_parser(
        'list',
        help="List installed mods.",
        description='''
        List installed mods.
        If no id is provided, this returns a list of all installed mods.
        If an id is provided, this lists the contents of that mod.
        ''')
    group_list = parser_list.add_mutually_exclusive_group()
    group_list.add_argument("-w", "--workshop",
                            action="store_const",
                            metavar='save_type',
                            dest='save_type',
                            const=tts.SaveType.workshop,
                            help="List workshop files (the default).")
    group_list.add_argument("-s", "--save",
                            action="store_const",
                            metavar='save_type',
                            dest='save_type',
                            const=tts.SaveType.save,
                            help="List saves.")
    group_list.add_argument("-c", "--chest",
                            action="store_const",
                            metavar='save_type',
                            dest='save_type',
                            const=tts.SaveType.chest,
                            help="List chest files.")
    parser_list.add_argument("id", nargs='?',
                             help="ID of specific mod to list details of.")
    parser_list.set_defaults(func=self.do_list)

    # export command
    parser_export = subparsers.add_parser(
        'export',
        help="Export a mod.",
        description='Export a mod in a format suitable for later import.')
    group_export = parser_export.add_mutually_exclusive_group()
    group_export.add_argument("-w", "--workshop",
                              action="store_const",
                              dest='save_type',
                              metavar='save_type',
                              const=tts.SaveType.workshop,
                              help="ID is of workshop file (the default).")
    group_export.add_argument("-s", "--save",
                              action="store_const",
                              dest='save_type',
                              metavar='save_type',
                              const=tts.SaveType.save,
                              help="ID is of savegame file.")
    group_export.add_argument("-c", "--chest",
                              action="store_const",
                              dest='save_type',
                              metavar='save_type',
                              const=tts.SaveType.chest,
                              help="ID is of chest file.")
    parser_export.add_argument("id",
                               help="ID of mod/name of savegame to export.")
    parser_export.add_argument("-o", "--output",
                               help="Location/file to export to.")
    parser_export.add_argument("-f", "--force",
                               action="store_true",
                               help="Force creation of export file.")
    parser_export.add_argument("-d", "--download",
                               action="store_true",
                               help="Attempt to download missing cache files. (EXPERIMENTAL)")
    parser_export.set_defaults(func=self.do_export)

    # import command
    parser_import = subparsers.add_parser(
        'import',
        help="Import a mod.",
        description="Import a previously exported mod.")
    parser_import.add_argument("file", help="Mod pak file to import.")
    parser_import.set_defaults(func=self.do_import)

    # download command
    parser_download = subparsers.add_parser(
        'download',
        help='Download mod files.',
        description='Attempt to download any missing files for an installed mod.')
    group_download = parser_download.add_mutually_exclusive_group()
    group_download.add_argument("-w", "--workshop",
                                action="store_const",
                                dest='save_type',
                                metavar='save_type',
                                const=tts.SaveType.workshop,
                                help="ID is of workshop file.")
    group_download.add_argument("-s", "--save",
                                action="store_const",
                                dest='save_type',
                                metavar='save_type',
                                const=tts.SaveType.save,
                                help="ID is of savegame file.")
    group_download.add_argument("-c", "--chest",
                                action="store_const",
                                dest='save_type',
                                metavar='save_type',
                                const=tts.SaveType.chest,
                                help="ID is of chest file.")
    group_download_target = parser_download.add_mutually_exclusive_group(required=True)
    group_download_target.add_argument("-a", "--all",
                                       action="store_true",
                                       help="Download all.")
    group_download_target.add_argument("id", nargs='?',
                                       help="ID of mod/name of savegame to download.")
    parser_download.set_defaults(func=self.do_download)

    # cache command
    parser_cache = subparsers.add_parser('cache', help='Work with the cache.')
    subparsers_cache = parser_cache.add_subparsers(dest='parser_cache',
                                                   title='cache_command',
                                                   description='Valid sub-commands.')
    subparsers_cache.required = True
    parser_cache_create = subparsers_cache.add_parser(
        'create', help='(re)create cache directory')
    parser_cache_create.set_defaults(func=self.do_cache_create)

    # config command
    parser_config = subparsers.add_parser('config', help='Configure tts manager.')
    subparsers_config = parser_config.add_subparsers(dest='parser_config',
                                                     title='config_command',
                                                     description='Valid sub-commands.')
    subparsers_config.required = True
    parser_config_list = subparsers_config.add_parser('list', help='List configuration.')
    parser_config_list.set_defaults(func=self.do_config_list)
    parser_config_validate = subparsers_config.add_parser('validate', help='Validate configuration.')
    parser_config_validate.set_defaults(func=self.do_config_validate)
    parser_config_reset = subparsers_config.add_parser('reset', help='Reset configuration.')
    parser_config_reset.set_defaults(func=self.do_config_reset)
    parser_config_set = subparsers_config.add_parser('set', help='Set configuration parameters.')
    parser_config_set.set_defaults(func=self.do_config_set)
    parser_config_set.add_argument("-m", "--mod_location",
                                   choices=['documents', 'gamedata'],
                                   help="Where mods are stored.")
    parser_config_set.add_argument("-t", "--tts_location",
                                   help="TTS Install directory")

    args = parser.parse_args()

    # Set logging.
    if args.loglevel:
        logmap = {
            'debug': logging.DEBUG,
            'info': logging.INFO,
            'warn': logging.WARN,
            'error': logging.ERROR
        }
        tts.logger().setLevel(logmap[args.loglevel])
    else:
        tts.logger().setLevel(logging.WARN)

    # Load filesystem values.
    if args.directory:
        self.filesystem = tts.filesystem.FileSystem(os.path.abspath(args.directory))
    else:
        self.filesystem = self.preferences.get_filesystem()

    if (args.parser == 'list' or args.parser == 'export') and not args.save_type:
        # Set the default save type.
        args.save_type = tts.SaveType.workshop
    if (args.parser == 'config' and args.parser_config == 'set'
            and not args.mod_location and not args.tts_location):
        parser_config_set.error("At least one of -m or -t is required.")

    rc, message = args.func(args)
    if message:
        print(message)
    sys.exit(rc)
def importPak(filesystem, filename):
    log = tts.logger()
    log.debug("About to import {} into {}.".format(filename, filesystem))
    if not os.path.isfile(filename):
        log.error("Unable to find mod pak {}".format(filename))
        return False
    if not zipfile.is_zipfile(filename):
        log.error("Mod pak {} format appears corrupt.".format(filename))
        return False
    try:
        with zipfile.ZipFile(filename, 'r') as zf:
            bad_file = zf.testzip()
            if bad_file:
                log.error("At least one corrupt file found in {} - {}".format(filename, bad_file))
                return False
            if not zf.comment:
                # TODO: allow override
                log.error("Missing pak header comment in {}. Aborting import.".format(filename))
                return False
            metadata = json.loads(zf.comment.decode('utf-8'))
            if not tts.validate_metadata(metadata, PAK_VER):
                log.error(f"Invalid pak header '{metadata}' in {filename}. Aborting import.")
                return False
            log.info(f"Extracting {metadata['Type']} pak for id {metadata['Id']} (pak version {metadata['Ver']})")
            # Select the thumbnail whose name matches the metadata id, falling back to any.
            names = zf.namelist()
            thumbnails = [name for name in names if '/Thumbnails/' in name]
            thumbnail = None
            for thumbnail in thumbnails:
                if metadata['Id'] in os.path.basename(thumbnail):
                    break
            for name in names:
                # Note that zips always use '/' as the separator.
                outname = None
                splitname = name.split('/')
                if len(splitname) > 2 and splitname[2] == 'Thumbnails':
                    if name == thumbnail:
                        # Strip "Thumbnails" from the path and rename to <Id>.png.
                        outname = '/'.join(splitname[0:2] + [os.path.extsep.join([metadata['Id'], 'png'])])
                    else:
                        continue
                if splitname[0] == 'Saves':
                    modpath = filesystem.basepath
                else:
                    modpath = filesystem.modpath
                log.debug(f"Extracting {name} to {modpath}")
                zf.extract(name, modpath)
                if outname:
                    log.debug(f"Renaming {name} to {outname}")
                    os.rename(os.path.join(modpath, name), os.path.join(modpath, outname))
                    outdir = os.path.dirname(os.path.join(modpath, name))
                    try:
                        os.rmdir(outdir)
                    except OSError:
                        log.debug(f"Can't remove dir {outdir}")
    except zipfile.BadZipFile as e:
        log.error("Mod pak {} format appears corrupt - {}.".format(filename, e))
        return False
    except zipfile.LargeZipFile as e:
        log.error("Mod pak {} requires large zip capability - {}.\nThis shouldn't happen - please raise a bug.".format(filename, e))
        return False
    log.info("Imported {} successfully.".format(filename))
    return True