def update_pack(in_path, pack_id, pack_version, pack_version_readable, pack_author):
    """Add a new version to an existing pack.

    :param in_path: directory containing the new pack contents
    :param pack_id: identifier of the pack to update
    :param pack_version: new version identifier
    :param pack_version_readable: human-readable version name
    :param pack_author: semicolon-separated author list for this version
    :return: True on success, False on any validation or copy failure
    """
    logger.info("Attempting update of pack: %s" % pack_id)
    config = configLoader.get_global_config()
    pack_path = opath.join(config.packDir, pack_id)
    # Validate BEFORE copying files: the original populated the shared file
    # store first and only then discovered the request was invalid.
    if not fileHelper.dir_exists(pack_path):
        logger.error(
            "Pack cannot be updated because it doesn't exist. Use the 'add_pack' function instead."
        )
        return False
    if fileHelper.dir_exists(opath.join(pack_path, str(pack_version))):
        logger.error("The version '%s' is already defined on the server" % str(pack_version))
        return False
    if not pack_manipulation.populate_pack_files(in_path):
        logger.error("Pack files could not be copied. Pack will not be added.")
        return False
    version_data = pack_manipulation.build_version_data(in_path)
    configLoader.save_configuration(
        opath.join(pack_path, str(pack_version), "files.json"), version_data)
    configLoader.save_configuration(
        opath.join(pack_path, str(pack_version), "version.json"), {
            "version": pack_version,
            "version_name": pack_version_readable,
            "authors": pack_author.split(";")
        })
    pack_data = configLoader.get_configuration(
        opath.join(pack_path, "pack.json"), update_config_file=False)
    pack_data["latest_version"] = pack_version
    # Merge this version's authors into the pack-level author list and
    # de-duplicate. The original only de-duplicated the existing list,
    # silently dropping any new authors supplied with this update.
    pack_data["authors"] = list(
        set(pack_data.get("authors", [])) | set(pack_author.split(";")))
    configLoader.save_configuration(opath.join(pack_path, "pack.json"), pack_data)
    logger.info("Pack updated successfully")
    return True
def build_version_data(directory):
    """
    :param directory: Directory of pack
    :return: Nested dictionary of directories and files, with file hashes
    """
    config = configLoader.get_global_config()
    if not fileHelper.dir_exists(directory):
        logger.error("Directory does not exist at r'%s'" % directory)
        return False
    logger.info("Scanning r'%s' for files" % directory)
    # Collect paths relative to the pack root, normalised to forward slashes.
    relative_paths = []
    for base, _dirs, names in os.walk(directory):
        for name in names:
            rel = os.path.relpath(os.path.join(base, name), directory)
            relative_paths.append(rel.replace("\\", "/"))
    logger.info("Checking files against patterns")
    compiled_patterns = [
        re_compile(p)
        for p in configLoader.get_list(
            config.patterns, "mods/+.? config/+.? scripts/+.?".split(" "))
    ]
    relative_paths = match_files_against_patterns(relative_paths, compiled_patterns)
    logger.info("Loading file:hash index")
    raw_index = configLoader.get_configuration(
        config.fileIndex, default=None,
        fmt=configLoader.PropertiesFormat, update_config_file=False)
    # The stored index maps hash -> basename; invert it so the tree build
    # below can look hashes up by basename.
    name_to_hash = {raw_index[key]: key for key in raw_index}
    logger.info("Building file object tree")
    root = Directory(name=os.path.split(directory)[-1])
    for rel_path in relative_paths:
        parts = Path(rel_path).parts
        logger.debug("Adding r'%s' to file tree" % rel_path)
        if parts[-1] not in name_to_hash:
            logger.error("No hash saved for r'%s'" % rel_path)
            continue
        file_hash = name_to_hash[parts[-1]]
        # Walk/create intermediate directory nodes, then attach the file.
        node = root
        for segment in parts[:-1]:
            node = node.add(Directory(name=segment))
        node.add(File(name=parts[-1], hash=file_hash))
    logger.info("Compiling object tree to json")
    return root.build()
def add_pack(in_path, pack_id, pack_name, pack_version, pack_version_readable, pack_author):
    """Create a brand-new pack and generate its first version.

    Writes the pack metadata file, then delegates file indexing and
    version creation to update_pack.

    :return: False if the pack already exists, otherwise the result of
        update_pack for the initial version.
    """
    logger.info("Attempting to add pack: %s" % pack_id)
    config = configLoader.get_global_config()
    pack_path = opath.join(config.packDir, pack_id)
    # Guard clause: an existing pack directory means this id is taken.
    if fileHelper.dir_exists(pack_path):
        logger.error(
            "Pack could not be added as it already exists. Use the 'update_pack' function instead."
        )
        return False
    pack_metadata = {
        "id": pack_id,
        "name": pack_name,
        "latest_version": pack_version,
        "authors": pack_author.split(";")
    }
    configLoader.save_configuration(opath.join(pack_path, "pack.json"), pack_metadata)
    logger.info(
        "Generated pack data files. Running update to generate file list for initial version"
    )
    return update_pack(in_path, pack_id, pack_version, pack_version_readable, pack_author)
def populate_pack_files(directory):
    """
    :param directory: Directory of pack
    :return: Boolean - whether the pack was indexed and copied to the files directory
    """
    config = configLoader.get_global_config()
    if not fileHelper.dir_exists(directory):
        logger.error("Directory does not exist at r'%s'" % directory)
        return False
    logger.info("Scanning r'%s' for files" % directory)
    # Gather forward-slash relative paths for every file under the pack root.
    found = []
    for base, _dirs, names in os.walk(directory):
        for name in names:
            rel = os.path.relpath(os.path.join(base, name), directory)
            found.append(rel.replace("\\", "/"))
    logger.info("Checking files against patterns")
    compiled_patterns = [
        re_compile(p)
        for p in configLoader.get_list(
            config.patterns, "mods/+.? config/+.? scripts/+.?".split(" "))
    ]
    found = match_files_against_patterns(found, compiled_patterns)
    file_index = configLoader.get_configuration(
        config.fileIndex, default=None,
        fmt=configLoader.PropertiesFormat, update_config_file=False)
    fileHelper.ensureDirectory(config.fileDir)
    logger.info("Indexing files...")
    for rel in found:
        source = os.path.join(directory, rel)
        try:
            with open(source, 'rb') as handle:
                digest = hashHelper.md5(handle)
        except IOError:
            logger.error("Could not calculate hash for r'%s'" % source)
            return False
        target = os.path.join(config.fileDir, digest)
        if fileHelper.file_exists(target):
            # Content-addressed store: identical hash means the bytes are
            # already present, so nothing to do.
            logger.debug("File r'%s' is already saved. Skipping file." % rel)
        else:
            logger.debug("Copying file r'%s' with hash %s" % (rel, digest))
            shutil.copy(source, target)
            file_index[digest] = os.path.split(rel)[-1]
    logger.info("Files indexed successfully")
    configLoader.save_configuration(config.fileIndex, file_index, configLoader.PropertiesFormat)
    return True
def get_update(pack_id, current_version, target_version):
    """Build an in-memory update ZIP moving a client between two pack versions.

    The archive contains every file to download (keyed by hash) plus an
    "update.json" manifest listing additions, deletions, and the target
    version/pack metadata.

    :param pack_id: pack to update
    :param current_version: version the client currently has
    :param target_version: version the client should end up on
    :return: ZIP archive bytes, or None on any failure
    """
    logger.info("Processing update from '%s' to '%s' for pack: %s" %
                (current_version, target_version, pack_id))
    config = configLoader.get_global_config()
    pack_path = opath.join(config.packDir, pack_id)
    if not fileHelper.dir_exists(pack_path):
        logger.error(
            "Pack '%s' is not saved on the server. Cannot update pack" % pack_id)
        return None
    try:
        ctree = fileHelper.readFile(
            opath.join(config.packDir, pack_id, str(current_version), "files.json"))
    except IOError:
        logger.error("No update matching version %s was found" % str(current_version))
        return None
    try:
        ttree = fileHelper.readFile(
            opath.join(config.packDir, pack_id, str(target_version), "files.json"))
    except IOError:
        logger.error("No update matching version %s was found" % str(target_version))
        return None
    toadd, toremove = fileTree.compare_trees(
        fileTree.build_tree_from_json(loads(ctree)),
        fileTree.build_tree_from_json(loads(ttree)))
    logger.info("Client has %i files to download, %i to remove" %
                (len(toadd), len(toremove)))
    zip_buffer = BytesIO()
    with zipfile.ZipFile(zip_buffer, "a", zipfile.ZIP_DEFLATED, False) as zip_file:
        for file in toadd:
            # Read from the configured file store; the original hard-coded
            # "files", which breaks whenever config.fileDir points elsewhere
            # (populate_pack_files writes to config.fileDir). Use `with` so
            # the handle is closed even on error (original leaked it).
            try:
                with open(opath.join(config.fileDir, file.hash), "rb") as fobj:
                    zip_file.writestr(file.hash, fobj.read())
            except IOError:
                logger.error("Error adding file to ZIP (r'%s')" % file.path)
                return None
        zip_file.writestr(
            "update.json",
            dumps(
                {
                    "additions": [{
                        "hash": i.hash,
                        "path": i.path
                    } for i in toadd],
                    "deletions": [i.path for i in toremove],
                    "version": configLoader.get_configuration(
                        opath.join(config.packDir, pack_id, str(target_version),
                                   "version.json")),
                    "pack": configLoader.get_configuration(
                        opath.join(config.packDir, pack_id, "pack.json"))
                },
                indent=4))
    zip_buffer.seek(0)
    output = zip_buffer.read()
    zip_buffer.close()
    logger.debug("ZIP file of %s created" % fileHelper.sizeof_fmt(len(output)))
    return output