import os
import re
import sys
import logging

import gevent

# make_requests_session, sanitizerelpath, ensure_dir, fmt_size, sha1_hash,
# tqdm/fake_tqdm and SteamError come from the surrounding project modules
LOG = logging.getLogger(__name__)


def download_via_url(args, pubfile):
    sess = make_requests_session()
    fstream = sess.get(pubfile['file_url'], stream=True)
    total_size = int(pubfile['file_size'])

    relpath = sanitizerelpath(pubfile['filename'])

    if args.no_directories:
        relpath = os.path.basename(relpath)

    relpath = os.path.join(args.output, relpath)
    filepath = os.path.abspath(relpath)
    ensure_dir(filepath)

    with open(filepath, 'wb') as fp:
        if not args.no_progress and sys.stderr.isatty():
            pbar = tqdm(total=total_size, unit='B', unit_scale=True)
            gevent.spawn(pbar.gevent_refresh_loop)
        else:
            pbar = fake_tqdm()

        LOG.info('Downloading to {} ({})'.format(
            relpath,
            fmt_size(total_size),
            ))

        for chunk in iter(lambda: fstream.raw.read(1024**2), b''):
            fp.write(chunk)
            pbar.update(len(chunk))

        pbar.close()

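
# ---------------------------------------------------------------------------
# The download routines in this file lean on a few small utilities
# (sanitizerelpath, ensure_dir, fmt_size, sha1_hash, fake_tqdm) defined in the
# surrounding modules. The definitions below are only a minimal sketch of
# plausible implementations so the snippets read self-contained; they are
# assumptions, not the project's actual code.

import hashlib
import math


def sanitizerelpath(path):
    # assumed behaviour: normalize separators and strip any leading root or
    # '..' components so the result stays a safe relative path
    path = path.replace('\\', '/').lstrip('/')
    parts = [p for p in path.split('/') if p not in ('', '.', '..')]
    return os.path.join(*parts) if parts else ''


def ensure_dir(filepath):
    # create the parent directory of filepath if it doesn't exist yet
    os.makedirs(os.path.dirname(filepath) or '.', exist_ok=True)


def fmt_size(size):
    # human-readable byte size (e.g. 1536 -> '1.5 KB')
    units = ['B', 'KB', 'MB', 'GB', 'TB']
    i = min(int(math.log(size, 1024)) if size else 0, len(units) - 1)
    return '{:.1f} {}'.format(size / 1024.0**i, units[i])


def sha1_hash(data):
    # raw SHA-1 digest used for chunk verification
    return hashlib.sha1(data).digest()


class fake_tqdm(object):
    # no-op progress bar used when stderr is not a TTY or --no-progress is set
    def update(self, n=1):
        pass

    def close(self):
        pass
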
def download_to(self, target, no_make_dirs=False, pbar=None):
    relpath = sanitizerelpath(self.filename)

    if no_make_dirs:
        relpath = os.path.basename(relpath)

    relpath = os.path.join(target, relpath)
    filepath = os.path.abspath(relpath)
    ensure_dir(filepath)

    checksum = self.file_mapping.sha_content.hex()

    with open(filepath, 'wb') as fp:
        self._LOG.info('Downloading to {} ({}, {})'.format(
            relpath, fmt_size(self.size), checksum))

        for chunk in self.chunks:
            data = self.manifest.cdn_client.get_chunk(
                self.manifest.app_id,
                self.manifest.depot_id,
                chunk.sha.hex(),
                )

            fp.write(data)

            if pbar:
                pbar.update(len(data))

def download_to(self, target, no_make_dirs=False, pbar=None, verify=True):
    relpath = sanitizerelpath(self.filename)

    if no_make_dirs:
        relpath = os.path.basename(relpath)

    relpath = os.path.join(target, relpath)
    filepath = os.path.abspath(relpath)
    ensure_dir(filepath)

    checksum = self.file_mapping.sha_content.hex()

    # don't bother verifying if file doesn't already exist
    if not os.path.exists(filepath):
        verify = False

    with open(filepath, 'r+b' if verify else 'wb') as fp:
        fp.seek(0, 2)

        # pre-allocate space
        if fp.tell() != self.size:
            newsize = fp.truncate(self.size)

            if newsize != self.size:
                raise SteamError(
                    "Failed allocating space for {}".format(filepath))

        # self._LOG.info('{} {} ({}, sha1:{})'.format(
        #     'Verifying' if verify else 'Downloading',
        #     relpath,
        #     fmt_size(self.size),
        #     checksum,
        #     ))

        fp.seek(0)

        for chunk in self.chunks:
            # verify chunk sha hash
            if verify:
                cur_data = fp.read(chunk.cb_original)

                if sha1_hash(cur_data) == chunk.sha:
                    if pbar:
                        pbar.update(chunk.cb_original)
                    continue

                fp.seek(chunk.offset)  # rewind before write

            # download and write chunk
            data = self.manifest.cdn_client.get_chunk(
                self.manifest.app_id,
                self.manifest.depot_id,
                chunk.sha.hex(),
                )

            fp.write(data)

            if pbar:
                pbar.update(chunk.cb_original)

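
# Hedged usage sketch for download_to(): the loop below assumes a `manifest`
# object that exposes iter_files() yielding files with the method above, plus
# a pre-computed total size for the progress bar. The variable names and the
# way the manifest is obtained are assumptions for illustration only.

def download_manifest_files(manifest, target, total_size, show_progress=True):
    pbar = tqdm(total=total_size, unit='B', unit_scale=True) if show_progress else fake_tqdm()

    try:
        for depot_file in manifest.iter_files():
            # directories have no chunks; skip anything that isn't a regular file
            if not depot_file.is_file:
                continue

            depot_file.download_to(target, pbar=pbar, verify=True)
    finally:
        pbar.close()
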
def vpkfile_download_to(vpk_path, vpkfile, target, no_make_dirs, pbar):
    relpath = sanitizerelpath(vpkfile.filepath)

    if no_make_dirs:
        relpath = os.path.basename(relpath)

    relpath = os.path.join(target,         # output directory
                           vpk_path[:-4],  # vpk path without the .vpk extension (e.g. pak01_dir)
                           relpath)        # vpk relative path

    filepath = os.path.abspath(relpath)
    ensure_dir(filepath)

    LOG.info("Downloading VPK file to {} ({}, crc32:{})".format(
        relpath,
        fmt_size(vpkfile.file_length),
        vpkfile.crc32,
        ))

    with open(filepath, 'wb') as fp:
        for chunk in iter(lambda: vpkfile.read(16384), b''):
            fp.write(chunk)

            if pbar:
                pbar.update(len(chunk))

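
# Hedged usage sketch for vpkfile_download_to(), assuming the standalone `vpk`
# package and a VPK that is readable from the local filesystem; the code above
# is normally driven from a remotely opened VPK, so treat this purely as an
# illustration of the call shape.

import vpk


def extract_vpk(vpk_path, target):
    pak = vpk.open(vpk_path)

    for filename in pak:
        vpkfile_download_to(vpk_path, pak[filename], target,
                            no_make_dirs=False, pbar=None)
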
def download_file(args, sess, file, pbar_size, pbar_files):
    fstream = sess.get(file.url, stream=True)
    filename = file.filename

    if fstream.status_code != 200:
        LOG.error("Failed to download: {}".format(filename))
        return

    relpath = sanitizerelpath(filename)

    # ensure there is a / after %vars%, and replace % with _
    relpath = re.sub(r'^%([A-Za-z0-9]+)%', r'_\1_/', relpath)

    relpath = os.path.join(args.output, relpath)
    filepath = os.path.abspath(relpath)
    ensure_dir(filepath)

    with open(filepath, 'wb') as fp:
        for chunk in iter(lambda: fstream.raw.read(8388608), b''):
            fp.write(chunk)
            pbar_size.update(len(chunk))

    pbar_files.update(1)

def download_via_url(args, url, filename):
    sess = make_requests_session()
    fstream = sess.get(url, stream=True)
    total_size = int(fstream.headers.get('Content-Length', 0))

    relpath = sanitizerelpath(filename)

    if args.no_directories:
        relpath = os.path.basename(relpath)

    relpath = os.path.join(args.output, relpath)
    filepath = os.path.abspath(relpath)
    ensure_dir(filepath)

    with open(filepath, 'wb') as fp:
        if not args.no_progress and sys.stderr.isatty():
            # huge miniters keeps update() from redrawing the bar itself;
            # redraws are driven by the gevent refresh loop instead
            pbar = tqdm(total=total_size, mininterval=0.5, maxinterval=1,
                        miniters=1024**3 * 10, unit='B', unit_scale=True)
            gevent.spawn(pbar.gevent_refresh_loop)
        else:
            pbar = fake_tqdm()

        # LOG.info('Downloading to {} ({})'.format(
        #     relpath,
        #     fmt_size(total_size) if total_size else 'Unknown size',
        #     ))

        for chunk in iter(lambda: fstream.raw.read(8388608), b''):
            fp.write(chunk)
            pbar.update(len(chunk))

        pbar.close()
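
# Hedged usage sketch: this download_via_url() only touches args.output,
# args.no_directories and args.no_progress, so a bare namespace is enough to
# drive it outside the CLI. The URL and filename below are placeholders.

if __name__ == '__main__':
    from argparse import Namespace

    args = Namespace(output='downloads', no_directories=False, no_progress=True)
    download_via_url(args, 'https://example.com/some/file.bin', 'some/file.bin')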