def download_index(self, asset_id):
    '''Downloads the JSON asset index specified by 'asset_id' '''
    # The remote index and the local copy share the same relative path.
    index_relative = "indexes/" + asset_id + ".json"
    index_url_object = URL(index_relative, URL.DOWNLOAD).url_object()
    index_destination = str(self.BASE_PATH.joinpath(index_relative))
    FileTools.write_object(index_destination, index_url_object)
def download_missing(self, asset_id, progress_function=None):
    '''Downloads assets specified in 'asset_id' that are not already downloaded.

    Reads the local asset index for 'asset_id', collects the URLs of assets
    whose local files do not exist yet, then downloads each of them.

    progress_function, if given, is called as progress_function(message, fraction)
    where fraction is in [0, 1].
    '''
    asset_info = FileTools.read_json(str(self.get_paths(asset_id)["index"]))
    asset_list = asset_info["objects"]
    if progress_function is not None:
        progress_function("Finding missing assets", 0)
    # Maps each asset's URL object to the local path it should be written to.
    asset_dict = {}
    if self._is_virtual(asset_info):
        # Virtual assets are laid out by name under virtual/<asset_id>.
        # NOTE(review): existence is checked for the whole directory, not per
        # asset — if the directory exists, nothing is (re)downloaded.
        assets_base_path = self.BASE_PATH.joinpath("virtual/" + asset_id)
        if not assets_base_path.exists():
            for asset_name, asset in asset_list.items():
                asset_url = URL([asset["hash"][:2], asset["hash"]], URL.RESOURCES)
                asset_dict[asset_url] = str(assets_base_path.joinpath(asset_name))
    else:
        # Regular assets are stored by hash under objects/<hash prefix>/<hash>.
        for asset in asset_list.values():
            asset_relative_path = [asset["hash"][:2], asset["hash"]]
            asset_url = URL(asset_relative_path, URL.RESOURCES)
            asset_path = self.BASE_PATH.joinpath(*(["objects"] + asset_relative_path))
            if not asset_path.exists():
                asset_dict[asset_url] = str(asset_path)
    asset_total_count = len(asset_dict)
    for asset_count, (asset_url, asset_path) in enumerate(asset_dict.items(), start=1):
        FileTools.write_object(asset_path, asset_url.url_object())
        if progress_function is not None:
            progress_function("Downloading assets", asset_count/asset_total_count)
def download_official(self, version_id):
    '''Downloads the official jar and json for 'version_id' and records it in the index.'''
    if self.version_exists(version_id, "vanilla"):
        raise Exception("Version " + version_id + " already exists") # TODO: More appropriate exception
    paths_dict = self.get_paths(version_id, "vanilla")
    # Fetch both artifacts (jar first, then json) from the official download host.
    for extension in ("jar", "json"):
        remote_url = URL(["versions", version_id, version_id + "." + extension], URL.DOWNLOAD)
        FileTools.write_object(paths_dict[extension], remote_url.url_object())
    # Register the freshly downloaded version and persist the index.
    current_listing = {"type": "vanilla", "name": version_id}
    self.index.append(current_listing)
    self._flush_index()
def _download_library(self, library_metadata):
    '''Downloads the library described by 'library_metadata', verifying its SHA-1 hash.

    Skips libraries that already exist locally. For natives libraries, each
    required natives jar is downloaded, extracted into a sibling directory named
    after its natives extension, and the jar itself is then deleted. Finally the
    library's path is recorded in self.index.

    Raises Exception if a download's hash does not match after 3 attempts.
    '''
    if self.download_exclusive and not library_metadata.current_system_supported():
        return
    if library_metadata.is_natives():
        if self.download_exclusive:
            all_extensions = [library_metadata.get_current_system_natives_extension()]
        else:
            all_extensions = library_metadata.get_all_natives_extensions()
        natives_list = all_extensions
    if self.is_library_existant(library_metadata):
        if library_metadata.is_natives():
            # Only fetch the natives variants that are actually missing.
            natives_list = []
            for current_extension in all_extensions:
                if not self.is_natives_existant(library_metadata, current_extension):
                    natives_list.append(current_extension)
            if not natives_list:
                return # Natives already exists
        else:
            return # Library already exists
    if library_metadata.is_natives():
        download_list = library_metadata.get_download_list(natives_list)
    else:
        download_list = library_metadata.get_download_list()
    for current_library in download_list:
        library_path = str(self.BASE_PATH.joinpath(current_library["path"]))
        # BUGFIX: the original retry loop raised after only 2 attempts (it
        # checked current_tries == 3 after incrementing). for/else retries
        # exactly 3 times and raises only when all attempts fail.
        for _attempt in range(3):
            correct_hash = current_library["hash"].url_object().read().decode("UTF-8")
            FileTools.write_object(library_path, current_library["url"].url_object())
            hasher = hashlib.sha1()
            # BUGFIX: the original leaked the file handle opened for hashing.
            with open(library_path, mode="rb") as library_file:
                hasher.update(library_file.read())
            if hasher.hexdigest() == correct_hash:
                if library_metadata.is_natives():
                    # Extract the natives jar next to it, then remove the jar.
                    natives_directory = self.BASE_PATH.joinpath(current_library["path"].parent.joinpath(current_library["natives_extension"]))
                    FileTools.extract_jar_files(FileTools.get_jar_object(library_path), str(natives_directory), library_metadata.get_natives_exclude())
                    FileTools.delete_and_clean(library_path)
                break
        else:
            raise Exception("Failed to download library " + library_metadata.get_id()) # TODO: More appropriate exception
    self.index[library_metadata.get_id()] = dict()
    if library_metadata.is_natives():
        # For natives, index the containing directory rather than the jar itself.
        self.index[library_metadata.get_id()]["path"] = download_list[0]["path"].parent.parts
    else:
        self.index[library_metadata.get_id()]["path"] = download_list[0]["path"].parts