def delete(self, profile_name):
    '''Deletes profile 'profile_name'

    Removes the profile's directory tree from disk, drops its entry from
    the in-memory index, and persists the index.

    Raises:
        Exception: if 'profile_name' is not in the index.
    '''
    if profile_name not in self.index:
        raise Exception(f"Profile {profile_name} does not exist")  # TODO: More appropriate exception
    # Delete the on-disk directory first, then drop the index entry and persist.
    FileTools.delete_and_clean(str(self.BASE_PATH.joinpath(*self.index[profile_name]["directory"])))
    del self.index[profile_name]
    self._flush_index()
def delete(self, version_id, version_type):
    '''Deletes version 'version_id' of type 'version_type'

    Removes the matching entry from the index, persists the index, and
    deletes the version's directory from disk.

    Raises:
        Exception: if the version does not exist.
    '''
    if not self.version_exists(version_id, version_type):
        raise Exception("Version does not exist")  # TODO: More appropriate exception
    # Resolve the on-disk directory before mutating the index, in case
    # get_paths() consults the index entry being removed.
    version_directory = str(self.get_paths(version_id, version_type)["directory"])
    # Find and remove the matching index entry (guaranteed present by the
    # version_exists() check above).
    for position, current_version in enumerate(self.index):
        if current_version["type"] == version_type and current_version["name"] == version_id:
            del self.index[position]
            break
    self._flush_index()
    FileTools.delete_and_clean(version_directory)
def rename(self, current_version_id, new_version_id):
    '''Renames custom version 'current_version_id' to 'new_version_id'

    Moves the version's jar and json files into the new location, removes
    the old directory, updates the index entry, and persists the index.

    Raises:
        Exception: if the source version is missing or the target name
            is already taken.
    '''
    if not self.version_exists(current_version_id, "custom"):
        raise Exception("Cannot rename: " + current_version_id + " does not exist")
    if self.version_exists(new_version_id, "custom"):
        raise Exception("Cannot rename: " + new_version_id + " already exists")
    source_paths = self.get_paths(current_version_id, "custom")
    target_paths = self.get_paths(new_version_id, "custom")
    # Make sure the destination hierarchy exists before moving anything.
    FileTools.add_missing_dirs(target_paths["jar"])
    FileTools.move(source_paths["jar"], target_paths["jar"])
    FileTools.move(source_paths["json"], target_paths["json"])
    FileTools.delete_and_clean(str(source_paths["directory"]))
    # Point the index entry at the new name and persist it.
    entry = self.index[self._get_version_index(current_version_id, "custom")]
    entry["name"] = new_version_id
    self._flush_index()
def _remove_unused_objects(self, asset_id_list):
    '''Removes assets from the 'objects' folder that are not present in the indexes 'asset_id_list'

    This does not include virtual assets
    '''
    # Collect every object hash referenced by any of the given asset indexes.
    # A set gives O(1) membership tests during the sweep below (the original
    # list scan was O(n) per file).
    used_hashes = set()
    for asset_id in asset_id_list:
        current_assets = FileTools.read_json(str(self.get_paths(asset_id)["index"]))["objects"]
        for resource_info in current_assets.values():
            used_hashes.add(resource_info["hash"])
    # The objects tree is laid out as objects/<prefix>/<hash-named file>;
    # delete every hash file that no index references.
    for prefix_hash_dir in self.BASE_PATH.joinpath("objects").iterdir():
        for hash_file in prefix_hash_dir.iterdir():
            if hash_file.name not in used_hashes:
                FileTools.delete_and_clean(str(hash_file))
def _download_library(self, library_metadata):
    '''Downloads the library described by 'library_metadata'

    Skips libraries that already exist on disk (for natives, only the
    missing platform variants are fetched). Each artifact is downloaded,
    verified against its published SHA-1, and retried up to 3 times;
    natives jars are extracted and then removed. On success the library
    is recorded in the index.

    Raises:
        Exception: if an artifact still fails verification after 3 tries.
    '''
    if self.download_exclusive and not library_metadata.current_system_supported():
        return
    if library_metadata.is_natives():
        if self.download_exclusive:
            all_extensions = [library_metadata.get_current_system_natives_extension()]
        else:
            all_extensions = library_metadata.get_all_natives_extensions()
        natives_list = all_extensions
    if self.is_library_existant(library_metadata):
        if library_metadata.is_natives():
            # Only fetch the natives variants that are still missing.
            natives_list = [current_extension for current_extension in all_extensions
                            if not self.is_natives_existant(library_metadata, current_extension)]
            if not natives_list:
                return  # Natives already exists
        else:
            return  # Library already exists
    if library_metadata.is_natives():
        download_list = library_metadata.get_download_list(natives_list)
    else:
        download_list = library_metadata.get_download_list()
    MAX_TRIES = 3
    for current_library in download_list:
        library_path = str(self.BASE_PATH.joinpath(current_library["path"]))
        for attempt in range(1, MAX_TRIES + 1):
            # The expected SHA-1 is published alongside the artifact.
            correct_hash = current_library["hash"].url_object().read().decode("UTF-8")
            FileTools.write_object(library_path, current_library["url"].url_object())
            hasher = hashlib.sha1()
            # Use a context manager so the file handle is not leaked.
            with open(library_path, mode="rb") as library_file:
                hasher.update(library_file.read())
            if hasher.hexdigest() == correct_hash:
                break
            if attempt == MAX_TRIES:
                # Note: the previous retry loop raised after only two failed
                # attempts (off-by-one); all three attempts now run.
                raise Exception("Failed to download library " + library_metadata.get_id())  # TODO: More appropriate exception
        if library_metadata.is_natives():
            # Extract the natives jar next to itself, then discard the jar.
            natives_directory = self.BASE_PATH.joinpath(current_library["path"].parent.joinpath(current_library["natives_extension"]))
            FileTools.extract_jar_files(FileTools.get_jar_object(library_path), str(natives_directory), library_metadata.get_natives_exclude())
            FileTools.delete_and_clean(library_path)
    # Record the library in the index; natives store the containing
    # directory, regular libraries store the jar path itself.
    self.index[library_metadata.get_id()] = dict()
    if library_metadata.is_natives():
        self.index[library_metadata.get_id()]["path"] = download_list[0]["path"].parent.parts
    else:
        self.index[library_metadata.get_id()]["path"] = download_list[0]["path"].parts
def delete(self, asset_id):
    '''Deletes assets 'asset_id' '''
    paths = self.get_paths(asset_id)
    index_data = FileTools.read_json(str(paths["index"]))
    # Virtual assets keep an extracted directory that must be removed too.
    if self._is_virtual(index_data):
        FileTools.delete_and_clean(str(paths["directory"]))
    # The index file itself goes away in either case.
    FileTools.delete_and_clean(str(paths["index"]))
    # Sweep object files that no remaining index references.
    self._remove_unused_objects(self._get_indexes())
def delete(self, library_id):
    '''Deletes library 'library_id'

    Removes the library's files from disk, drops its entry from the
    index, and persists the index.

    Raises:
        Exception: if 'library_id' is not in the index.
    '''
    if library_id not in self.index:
        raise Exception("Library is not existant")  # TODO: More appropriate exception
    # Delete the on-disk payload first, then drop the index entry and persist.
    FileTools.delete_and_clean(str(self.BASE_PATH.joinpath(self.get_library_path(library_id))))
    del self.index[library_id]
    self._flush_index()