def download(self):
    """Download all queued files concurrently.

    Submits one task per (url, dest) pair to a thread pool, shows a tqdm
    progress bar (byte-based when sizes are known, file-count otherwise),
    and reaps futures as they complete.  Returns True iff no errors were
    recorded.
    """
    logger.debug("Downloading {} files.".format(self.total))
    # picomc.logging.debug is a flag: hide the bar in debug mode so it
    # does not interleave with log output.
    disable_progressbar = picomc.logging.debug
    if self.known_size:
        cm_progressbar = tqdm(
            total=self.total_size,
            disable=disable_progressbar,
            unit_divisor=1024,
            unit="iB",
            unit_scale=True,
        )
    else:
        cm_progressbar = tqdm(total=self.total, disable=disable_progressbar)
    with cm_progressbar as tq, ThreadPoolExecutor(max_workers=self.workers) as tpe:
        for i, (url, dest) in enumerate(self.queue, start=1):
            # Only feed byte counts to the bar when it is byte-based.
            cb = tq.update if self.known_size else (lambda x: None)
            fut = tpe.submit(self.download_file, i, url, dest, cb)
            self.fut_to_url[fut] = url
        try:
            for fut in concurrent.futures.as_completed(self.fut_to_url.keys()):
                self.reap_future(fut, tq)
        except KeyboardInterrupt as ex:
            self.cancel(tq, tpe)
            raise ex from None
    # Do this at the end in order to not break the progress bar.
    for error in self.errors:
        logger.error(error)
    return not self.errors
def download_assets(self, verify_hashes=False, force=False):
    """Downloads missing assets.

    Builds the set of required object hashes from the raw asset index,
    queues any object that is missing (or, with verify_hashes, fails its
    sha1 check; with force, unconditionally) and downloads them.  For
    "virtual" asset indexes, also materializes the virtual layout.
    """
    hashes = dict()
    for obj in self.raw_asset_index["objects"].values():
        hashes[obj["hash"]] = obj["size"]
    logger.info("Checking {} assets.".format(len(hashes)))
    is_virtual = self.raw_asset_index.get("virtual", False)
    # Snapshot of everything already on disk; one O(1) membership test
    # per asset instead of a stat() each.
    fileset = set(recur_files(self.assets_root))
    q = DownloadQueue()
    objpath = self.launcher.get_path(Directory.ASSET_OBJECTS)
    for sha in hashes:
        # Objects are stored sharded by the first two hex chars of the hash.
        abspath = objpath / sha[0:2] / sha
        ok = abspath in fileset  # file exists
        if verify_hashes:
            ok = ok and file_sha1(abspath) == sha
        if force or not ok:
            url = urllib.parse.urljoin(self.ASSETS_URL, posixpath.join(sha[0:2], sha))
            q.add(url, abspath, size=hashes[sha])
    if len(q) > 0:
        logger.info("Downloading {} assets.".format(len(q)))
        if not q.download():
            logger.warning("Some assets failed to download.")
    if is_virtual:
        logger.info("Copying virtual assets")
        where = self.get_virtual_asset_path()
        logger.debug("Virtual asset path: {}".format(where))
        self._populate_virtual_assets(self.raw_asset_index, where)
def prepare_assets_launch(self, gamedir):
    """Map assets into ``<gamedir>/resources`` when the index asks for it.

    Old versions use a "map_to_resources" asset index; for those the
    assets are copied into the game directory before launch.
    """
    launch_asset_index = self.get_raw_asset_index_nodl(self.vspec.assets)
    if not launch_asset_index.get("map_to_resources", False):
        return
    logger.info("Mapping resources")
    resources_path = gamedir / "resources"
    logger.debug("Resources path: {}".format(resources_path))
    self._populate_virtual_assets(launch_asset_index, resources_path)
def _mcs_auth(self, uhs, xsts_token):
    """Exchange the XSTS token for a Minecraft services access token."""
    payload = {"identityToken": f"XBL3.0 x={uhs};{xsts_token}"}
    response = requests.post(URL_MCS, json=payload)
    response.raise_for_status()
    body = response.json()
    logger.debug("Minecraft services auth successful")
    return body["access_token"]
def resolve_version_name(self, v):
    """Takes a metaversion and resolves to a version."""
    if v == "latest":
        resolved = self.manifest["latest"]["release"]
        logger.debug("Resolved latest -> {}".format(resolved))
        return resolved
    if v == "snapshot":
        resolved = self.manifest["latest"]["snapshot"]
        logger.debug("Resolved snapshot -> {}".format(resolved))
        return resolved
    # Anything else is already a concrete version name.
    return v
def __init__(self, exit_stack: ExitStack, root: "Path | None" = None, debug=False):
    """Create a Launcher instance reusing an existing ExitStack.

    exit_stack: stack that owns this launcher's cleanup callbacks.
    root: application directory; defaults to the platform-specific root.
    debug: enables debug behavior for this launcher instance.
    """
    self.exit_stack = exit_stack
    self.debug = debug
    if root is None:
        root = get_default_root()
    self.root = root
    logger.debug("Using application directory: {}".format(self.root))
    # Make sure the expected directory layout exists before any use.
    self.ensure_filesystem()
def load(self):
    """Load this Config from its JSON file.

    Returns True on success, False when the file does not exist yet.
    """
    logger.debug("Loading Config from {}".format(self.filepath))
    try:
        with open(self.filepath, "r") as fd:
            loaded = json.load(fd)
    except FileNotFoundError:
        return False
    # Replace current contents wholesale with the persisted state.
    self.clear()
    self.update(loaded)
    return True
def get_default_java():
    """Return a default java executable.

    Prefers the binary under $JAVA_HOME/bin if one exists, otherwise
    falls back to a bare "java" resolved via PATH.
    """
    java_home = os.getenv("JAVA_HOME")
    if java_home is None:
        return "java"
    for name in ("java", "java.exe"):
        candidate = os.path.join(java_home, "bin", name)
        if os.path.isfile(candidate):
            logger.debug("Detected JAVA_HOME, using as default")
            return candidate
    return "java"
def ensure_filesystem(self):
    """Create directory structure for the application."""
    for directory in DIRECTORY_MAP:
        dir_path = self.get_path(directory)
        try:
            dir_path.mkdir(parents=True)
        except FileExistsError:
            # Already present — nothing to do and nothing to log.
            pass
        else:
            logger.debug("Created dir: {}".format(dir_path))
    self.write_profiles_dummy()
def _ms_oauth(self):
    """Run the OAuth 2.0 device authorization grant flow interactively.

    Requests a device code, shows the user the verification URL and code
    (color-highlighted), then polls the token endpoint each time the
    user presses enter.  Returns (access_token, refresh_token).
    Raises AuthenticationError on a terminal OAuth error.
    """
    data = {"client_id": CLIENT_ID, "scope": SCOPE}
    resp = requests.post(URL_DEVICE_AUTH, data)
    resp.raise_for_status()
    j = resp.json()
    device_code = j["device_code"]
    msg = j["message"]
    user_code = j["user_code"]
    link = j["verification_uri"]
    # Highlight the code and the link inside the server-provided message.
    msg = msg.replace(
        user_code, colorama.Fore.RED + user_code + colorama.Fore.RESET).replace(
        link, colorama.Style.BRIGHT + link + colorama.Style.NORMAL)
    logger.info(msg)
    data = {
        "code": device_code,
        "grant_type": GRANT_TYPE,
        "client_id": CLIENT_ID
    }
    first = True
    while True:
        # Polling is user-paced (enter key) rather than timer-based.
        if first:
            input("Press enter to continue... ")
        else:
            input("Press enter to try again... ")
        first = False
        resp = requests.post(URL_TOKEN, data)
        if resp.status_code == 400:
            j = resp.json()
            logger.debug(j)
            if j["error"] == "authorization_pending":
                # User has not finished authorizing yet; re-show the
                # instructions and poll again.
                logger.warning(j["error_description"])
                logger.info(msg)
                continue
            else:
                raise AuthenticationError(j["error_description"])
        resp.raise_for_status()
        j = resp.json()
        break
    access_token = j["access_token"]
    refresh_token = j["refresh_token"]
    logger.debug("OAuth device code flow successful")
    return access_token, refresh_token
def copy_libraries(ctx):
    """Copy the forge library bundled in the installer into the local
    libraries directory, if the installer bundles one."""
    lib_path = ctx.install_profile["path"]
    if lib_path is None:
        # 1.17 forge jar is no longer packaged in the installer but it can
        # be downloaded like the rest
        logger.debug("Forge lib not bundled in installer, skipping copy")
        return
    relative_dir = Artifact.make(lib_path).path.parent
    source_dir = ctx.extract_dir / "maven" / relative_dir
    target_dir = ctx.libraries_dir / relative_dir
    target_dir.mkdir(parents=True, exist_ok=True)
    for entry in source_dir.iterdir():
        shutil.copy2(entry, target_dir)
def extract(self):
    """Extract every distinct natives archive into the natives directory."""
    seen = set()
    for library in self.natives:
        archive = library.get_abspath(self.libraries_root)
        if archive in seen:
            logger.debug("Skipping duplicate natives archive: "
                         "{}".format(archive))
            continue
        seen.add(archive)
        logger.debug("Extracting natives archive: {}".format(archive))
        with zipfile.ZipFile(archive) as zf:
            # TODO take exclude into account
            zf.extractall(path=self.ndir)
def _ms_oauth_refresh(self, refresh_token):
    """Redeem a refresh token for a new (access, refresh) token pair."""
    payload = {
        "refresh_token": refresh_token,
        "grant_type": "refresh_token",
        "client_id": CLIENT_ID,
    }
    response = requests.post(URL_TOKEN, payload)
    response.raise_for_status()
    tokens = response.json()
    logger.debug("OAuth code flow refresh successful")
    return tokens["access_token"], tokens["refresh_token"]
def _xsts_auth(self, xbl_token):
    """Exchange an XBL token for an XSTS token scoped to Minecraft services."""
    payload = {
        "Properties": {
            "SandboxId": "RETAIL",
            "UserTokens": [xbl_token]
        },
        "RelyingParty": "rp://api.minecraftservices.com/",
        "TokenType": "JWT",
    }
    response = requests.post(URL_XSTS, json=payload)
    response.raise_for_status()
    body = response.json()
    logger.debug("XSTS auth successful")
    return body["Token"]
def _xbl_auth(self, access_token):
    """Authenticate with Xbox Live.

    Returns the XBL token and the user hash (uhs) needed downstream.
    """
    payload = {
        "Properties": {
            "AuthMethod": "RPS",
            "SiteName": "user.auth.xboxlive.com",
            "RpsTicket": f"d={access_token}",
        },
        "RelyingParty": "http://auth.xboxlive.com",
        "TokenType": "JWT",
    }
    response = requests.post(URL_XBL, json=payload)
    response.raise_for_status()
    body = response.json()
    logger.debug("XBL auth successful")
    user_hash = body["DisplayClaims"]["xui"][0]["uhs"]
    return body["Token"], user_hash
def get_default_root():
    """Return the platform-appropriate default application root directory."""
    logger.debug("Resolving default application root")
    if sys.platform == "linux":
        return Path("~/.local/share/picomc").expanduser()
    if sys.platform == "win32":
        return get_appdata() / ".picomc"
    if sys.platform == "darwin":
        return Path("~/Library/Application Support/picomc").expanduser()
    # This is probably better than nothing and should be fine on most
    # widely-used platforms other than the supported ones. Too bad in
    # case of something exotic. Minecraft doesn't run on those anyway.
    return Path("~/.picomc").expanduser()
def refresh(self, force=False):
    """Refresh this account's access token if it is no longer valid.

    force: skip the validation check and refresh unconditionally.
        (Previously this parameter was accepted but ignored.)

    Returns True when a refresh happened, False when the token was
    still valid.  Raises RefreshError when the account is not
    authenticated or validation fails.
    """
    if not self.is_authenticated:
        raise RefreshError("Account is not authenticated, cannot refresh")
    if not force:
        try:
            valid = self._am.msapi.validate(self.access_token)
        except ValidationError as e:
            # Chain the cause so the original failure is preserved.
            raise RefreshError(e) from e
        if valid:
            logger.debug("msa: token still valid")
            return False
        logger.debug("msa: token not valid anymore, refreshing")
    self.access_token, self.refresh_token = self._am.msapi.refresh(
        self.refresh_token
    )
    self.save()
    return True
def get_raw_asset_index(self, asset_index_spec):
    """Return the parsed asset index described by asset_index_spec.

    Uses the cached file under ASSET_INDEXES when its sha1 matches the
    spec; otherwise downloads a fresh copy, caches the raw bytes, and
    parses them.  Dies on a connection error.
    """
    iid = asset_index_spec["id"]
    url = asset_index_spec["url"]
    sha1 = asset_index_spec["sha1"]
    fpath = self.launcher.get_path(Directory.ASSET_INDEXES, "{}.json".format(iid))
    if fpath.exists() and file_sha1(fpath) == sha1:
        logger.debug("Using cached asset index, hash matches vspec")
        with open(fpath) as fp:
            return json.load(fp)
    try:
        logger.debug("Downloading new asset index")
        raw = requests.get(url).content
        # Cache the raw bytes so the sha1 check above can hit next time.
        with open(fpath, "wb") as fp:
            fp.write(raw)
        return json.loads(raw)
    except requests.ConnectionError:
        die("Failed to retrieve asset index.")
def download_file(self, i, url, dest, sz_callback):
    """Download a single file to dest, reporting progress via sz_callback.

    i: 1-based index of this file (for log/error messages).
    Non-200 responses are recorded in self.errors rather than raised.
    The file is written to a temp file in the destination directory and
    atomically moved into place, so a partial download never replaces dest.
    """
    # In case the task could not be cancelled
    if self.stop_event.is_set():
        raise InterruptedError
    os.makedirs(os.path.dirname(dest), exist_ok=True)
    logger.debug("Downloading [{}/{}]: {}".format(i, self.total, url))
    resp = self.http_pool.request("GET", url, preload_content=False)
    try:
        if resp.status != 200:
            self.errors.append(
                "Failed to download ({}) [{}/{}]: {}".format(
                    resp.status, i, self.total, url
                )
            )
            return
        with DlTempFile(dir=os.path.dirname(dest), delete=False) as tempf:
            self.copyfileobj_prog(resp, tempf, sz_callback)
            tempf.close()
            # Atomic rename into place; overwrites any stale file.
            os.replace(tempf.name, dest)
    finally:
        # Always return the connection to the pool, even if the copy or
        # rename raised — previously an exception leaked the connection.
        resp.release_conn()
def get_raw_vspec(self):
    """Return the parsed version spec (json) for this version.

    Resolution order: a purely local ("custom") vspec when no manifest
    entry exists, then the cached file when its sha1 matches the
    manifest, and finally a fresh download which is cached to disk.
    Dies when the version is unknown or the download fails.
    """
    vspec_path = (self.versions_root / self.version_name /
                  "{}.json".format(self.version_name))
    if not self.version_manifest:
        # No manifest entry: only a user-provided local vspec can work.
        if vspec_path.exists():
            logger.debug("Found custom vspec ({})".format(
                self.version_name))
            with open(vspec_path) as fp:
                return json.load(fp)
        else:
            die("Specified version ({}) not available".format(
                self.version_name))
    url = self.version_manifest["url"]
    sha1 = self.version_manifest["sha1"]
    if vspec_path.exists() and file_sha1(vspec_path) == sha1:
        logger.debug(
            "Using cached vspec files, hash matches manifest ({})".format(
                self.version_name))
        with open(vspec_path) as fp:
            return json.load(fp)
    try:
        logger.debug("Downloading vspec file")
        raw = requests.get(url).content
        vspec_path.parent.mkdir(parents=True, exist_ok=True)
        # Cache raw bytes so the sha1 check above can hit next time.
        with open(vspec_path, "wb") as fp:
            fp.write(raw)
        j = json.loads(raw)
        return j
    except requests.ConnectionError:
        die("Failed to retrieve version json file. Check your internet connection."
            )
def install_113(ctx: ForgeInstallContext):
    """Install forge for Minecraft 1.13+ using the PicoForgeWrapper.

    Builds a vspec that launches through the wrapper, patches jvm args
    for modern forge (37+, i.e. MC 1.17+), registers the installer's
    libraries as presence-only, and copies bundled artifacts into place.
    """
    vspec = make_base_vspec(ctx)
    # Find out if the installer is of new format by checking if InstallV1 class exists
    is_wrapper_new = (ctx.extract_dir / INSTALLV1_CLASS).exists()
    wrapper = FORGE_WRAPPER_NEW if is_wrapper_new else FORGE_WRAPPER
    original_main_class = vspec["mainClass"]
    # The wrapper becomes the entry point and chain-loads the real main class.
    vspec["libraries"] = [wrapper["library"]] + vspec["libraries"]
    vspec["mainClass"] = wrapper["mainClass"]
    if is_wrapper_new:
        logger.debug("Using new PicoForgeWrapper")
        if "jvm" not in vspec["arguments"]:
            vspec["arguments"]["jvm"] = list()
        vspec["arguments"]["jvm"] += [
            f"-Dpicomc.mainClass={original_main_class}"
        ]
        if _version_as_tuple(ctx.forge_version) >= (37, 0, 0):
            found = None
            for i, arg in enumerate(vspec["arguments"]["jvm"]):
                if arg.startswith("-DignoreList"):
                    found = i
                    break
            if found is not None:
                logger.debug("Found -DignoreList, extending.")
                # Index with `found`, not the leaked loop variable `i`.
                vspec["arguments"]["jvm"][found] += r",${jar_name}.jar"
            else:
                # logger.warn is a deprecated alias of logger.warning.
                logger.warning(
                    "Could not locate -DignoreList arg, something is probably wrong. The game may not work."
                )
            logger.debug("Adding export to jvm args.")
            vspec["arguments"]["jvm"] += [
                "--add-exports",
                "cpw.mods.bootstraplauncher/cpw.mods.bootstraplauncher=ALL-UNNAMED",
            ]
    for install_lib in ctx.install_profile["libraries"]:
        # Installer libraries must exist on disk but are not put on the
        # game classpath.
        install_lib["presenceOnly"] = True
        vspec["libraries"].append(install_lib)
    save_vspec(ctx, vspec)
    copy_libraries(ctx)
    installer_descriptor = f"net.minecraftforge:forge:{ctx.version}:installer"
    installer_libpath = ctx.libraries_dir / Artifact.make(
        installer_descriptor).path
    os.makedirs(installer_libpath.parent, exist_ok=True)
    shutil.copy(ctx.installer_file, installer_libpath)
def get_jarfile_dl(self, verify_hashes=False, force=False):
    """Checks existence and hash of cached jar.

    Returns None if ok, otherwise returns download (url, size).
    Dies when the jar is missing and no download spec exists.
    """
    logger.debug("Attempting to use jarfile: {}".format(self.jarfile))
    dlspec = self.vspec.downloads.get("client", None)
    if dlspec is None:
        # Typo fixed: "availble" -> "available".
        logger.debug("jarfile dlspec not available, skipping hash check.")
        if not self.jarfile.exists():
            die("jarfile does not exist and can not be downloaded.")
        return
    logger.debug("Checking jarfile.")
    if (force or not self.jarfile.exists()
            # The fabric-installer places an empty jarfile here, due to some
            # quirk of an old (git blame 2 years) version of the vanilla launcher.
            # https://github.com/FabricMC/fabric-installer/blob/master/src/main/java/net/fabricmc/installer/client/ClientInstaller.java#L49
            or os.path.getsize(self.jarfile) == 0
            or (verify_hashes and file_sha1(self.jarfile) != dlspec["sha1"])):
        logger.info(
            "Jar file ({}) will be downloaded with libraries.".format(
                self.jarname))
        return dlspec["url"], dlspec.get("size", None)
def __exit__(self, ext_type, exc_value, traceback):
    """Remove the extracted natives directory when the context exits."""
    logger.debug("Cleaning up natives.")
    natives_dir = self.ndir
    shutil.rmtree(natives_dir)
def commit_all_dirty(self):
    """Persist every managed config that has unsaved changes."""
    logger.debug("Commiting all dirty configs")
    for config in self.configs.values():
        config.save_if_dirty()
def save(self):
    """Serialize this Config to its JSON file, creating parent dirs."""
    logger.debug("Writing Config to {}".format(self.filepath))
    parent_dir = os.path.dirname(self.filepath)
    os.makedirs(parent_dir, exist_ok=True)
    with open(self.filepath, "w") as fd:
        json.dump(self, fd, indent=4)
def install_from_zip(zipfileobj, launcher, instance_manager, instance_name=None):
    """Install a CurseForge modpack zip as a new picomc instance.

    Reads manifest.json from the archive, installs the required forge
    version, creates the instance, downloads all listed mods (batched
    metadata lookup first, then per-mod fallback requests), and copies
    the pack's override files into the instance's minecraft directory.
    Dies when the instance already exists.
    """
    with ZipFile(zipfileobj) as pack_zf:
        # Locate manifest.json at the archive root (or one directory deep).
        for fileinfo in pack_zf.infolist():
            fpath = PurePath(fileinfo.filename)
            if fpath.parts[-1] == "manifest.json" and len(fpath.parts) <= 2:
                manifest_zipinfo = fileinfo
                archive_prefix = fpath.parent
                break
        else:
            raise ValueError("Zip file does not contain manifest")
        with pack_zf.open(manifest_zipinfo) as fd:
            manifest = json.load(fd)
        assert manifest["manifestType"] == "minecraftModpack"
        assert manifest["manifestVersion"] == 1
        assert len(manifest["minecraft"]["modLoaders"]) == 1
        forge_ver = manifest["minecraft"]["modLoaders"][0]["id"]
        assert forge_ver.startswith(FORGE_PREFIX)
        forge_ver = forge_ver[len(FORGE_PREFIX):]
        packname = manifest["name"]
        packver = manifest["version"]
        if instance_name is None:
            instance_name = "{}-{}".format(
                sanitize_name(packname), sanitize_name(packver))
            logger.info(f"Installing {packname} version {packver}")
        else:
            logger.info(
                f"Installing {packname} version {packver} as instance {instance_name}"
            )
        if instance_manager.exists(instance_name):
            die("Instace {} already exists".format(instance_name))
        try:
            forge.install(
                versions_root=launcher.get_path(Directory.VERSIONS),
                libraries_root=launcher.get_path(Directory.LIBRARIES),
                forge_version=forge_ver,
            )
        except forge.AlreadyInstalledError:
            pass
        # Trusting the game version from the manifest may be a bad idea
        inst = instance_manager.create(
            instance_name,
            "{}-forge-{}".format(manifest["minecraft"]["version"], forge_ver),
        )
        # This is a random guess, but better than the vanilla 1G
        inst.config["java.memory.max"] = "4G"
        # projectID -> fileID for every mod the pack requires.
        project_files = {
            mod["projectID"]: mod["fileID"]
            for mod in manifest["files"]
        }
        headers = {"User-Agent": "curl"}
        dq = DownloadQueue()
        logger.info("Retrieving mod metadata from curse")
        modcount = len(project_files)
        mcdir: Path = inst.get_minecraft_dir()
        moddir = mcdir / "mods"
        with tqdm(total=modcount) as tq:
            # Try to get as many file_infos as we can in one request
            # This endpoint only provides a few "latest" files for each project,
            # so it's not guaranteed that the response will contain the fileID
            # we are looking for. It's a gamble, but usually worth it in terms
            # of request count. The time benefit is not that great, as the endpoint
            # is slow.
            resp = requests.post(
                ADDON_URL, json=list(project_files.keys()), headers=headers)
            resp.raise_for_status()
            projects_meta = resp.json()
            for proj in projects_meta:
                proj_id = proj["id"]
                want_file = project_files[proj_id]
                for file_info in proj["latestFiles"]:
                    if want_file == file_info["id"]:
                        dq.add(
                            file_info["downloadUrl"],
                            moddir / file_info["fileName"],
                            size=file_info["fileLength"],
                        )
                        # Satisfied by the batch; drop from the remainder.
                        del project_files[proj_id]
            batch_recvd = modcount - len(project_files)
            logger.debug("Got {} batched".format(batch_recvd))
            tq.update(batch_recvd)
            with ThreadPoolExecutor(max_workers=16) as tpe:
                def dl(pid, fid):
                    # Per-mod metadata lookup for files the batch missed.
                    resp = requests.get(
                        GETINFO_URL.format(pid, fid), headers=headers)
                    resp.raise_for_status()
                    file_info = resp.json()
                    assert file_info["id"] == fid
                    dq.add(
                        file_info["downloadUrl"],
                        moddir / file_info["fileName"],
                        size=file_info["fileLength"],
                    )
                # Get remaining individually
                futmap = {}
                for pid, fid in project_files.items():
                    fut = tpe.submit(dl, pid, fid)
                    futmap[fut] = (pid, fid)
                for fut in concurrent.futures.as_completed(futmap.keys()):
                    try:
                        fut.result()
                    except Exception as ex:
                        pid, fid = futmap[fut]
                        logger.error(
                            "Could not get metadata for {}/{}: {}".format(
                                pid, fid, ex))
                    else:
                        tq.update(1)
        logger.info("Downloading mod jars")
        dq.download()
        logger.info("Copying overrides")
        overrides = archive_prefix / manifest["overrides"]
        for fileinfo in pack_zf.infolist():
            if fileinfo.is_dir():
                continue
            fname = fileinfo.filename
            try:
                # Only archive members under the overrides prefix are copied.
                outpath = mcdir / PurePath(fname).relative_to(overrides)
            except ValueError:
                continue
            if not outpath.parent.exists():
                outpath.parent.mkdir(parents=True, exist_ok=True)
            with pack_zf.open(fileinfo) as infile, open(outpath, "wb") as outfile:
                shutil.copyfileobj(infile, outfile)
    logger.info("Done installing {}".format(instance_name))
def _exec_mc(self, account, v, java, java_info, gamedir, libraries, natives,
             verify_hashes):
    """Build the full java command line and launch the game.

    Assembles the classpath, substitutes launcher/account values into
    the version's jvm and game argument templates, adds memory and
    user-configured jvm args, then runs the process in gamedir.
    """
    libs = [lib.get_abspath(self.libraries_root) for lib in libraries]
    libs.append(v.jarfile)
    classpath = join_classpath(*libs)
    version_type, user_type = (("picomc", "mojang") if account.online else
                               ("picomc/offline", "offline"))
    mc = v.vspec.mainClass
    if hasattr(v.vspec, "minecraftArguments"):
        # Legacy (pre-1.13) format: a single argument string, jvm args fixed.
        mcargs = shlex.split(v.vspec.minecraftArguments)
        sjvmargs = [
            "-Djava.library.path={}".format(natives), "-cp", classpath
        ]
    elif hasattr(v.vspec, "arguments"):
        # Modern format: structured argument lists with template variables.
        mcargs, jvmargs = process_arguments(v.vspec.arguments, java_info)
        sjvmargs = []
        for a in jvmargs:
            tmpl = Template(a)
            res = tmpl.substitute(
                natives_directory=natives,
                launcher_name="picomc",
                launcher_version=picomc.__version__,
                classpath=classpath,
            )
            sjvmargs.append(res)
    try:
        account.refresh()
    except requests.exceptions.ConnectionError:
        logger.warning(
            "Failed to refresh account due to a connectivity error. Continuing."
        )
    smcargs = []
    for a in mcargs:
        tmpl = Template(a)
        res = tmpl.substitute(
            auth_player_name=account.gname,
            auth_uuid=account.uuid,
            auth_access_token=account.access_token,
            # Only used in old versions.
            auth_session="token:{}:{}".format(account.access_token,
                                              account.uuid),
            user_type=user_type,
            user_properties={},
            version_type=version_type,
            version_name=v.version_name,
            game_directory=gamedir,
            assets_root=self.assets_root,
            assets_index_name=v.vspec.assets,
            game_assets=v.get_virtual_asset_path(),
        )
        smcargs.append(res)
    my_jvm_args = [
        "-Xms{}".format(self.config["java.memory.min"]),
        "-Xmx{}".format(self.config["java.memory.max"]),
    ]
    if verify_hashes:
        my_jvm_args.append("-Dpicomc.verify=true")
    my_jvm_args += shlex.split(self.config["java.jvmargs"])
    fargs = [java] + sjvmargs + my_jvm_args + [mc] + smcargs
    # Fix: `logging.debug` named the stdlib logging function, which is
    # always truthy, so the full command line was logged unconditionally
    # and "Launching the game" was unreachable.  Use the picomc debug
    # flag, as the download code does.
    if picomc.logging.debug:
        logger.debug("Launching: " + shlex.join(fargs))
    else:
        logger.info("Launching the game")
    subprocess.run(fargs, cwd=gamedir)