def get_progress_info():
    """Return a function callback to update the progressbar."""
    progressinfo = struct("ProgressInfo", ["callback", "finish"])

    if progressbar:
        bar = progressbar.ProgressBar(widgets=[
            progressbar.Percentage(), ' ',
            progressbar.Bar(), ' ',
            progressbar.FileTransferSpeed(), ' ',
            progressbar.DataSize(), '/', progressbar.DataSize('max_value'), ' ',
            progressbar.Timer(), ' ',
            progressbar.AdaptiveETA(),
        ])

        def _callback(total_size, completed):
            if not hasattr(bar, "next_update"):
                if hasattr(bar, "maxval"):
                    bar.maxval = total_size
                else:
                    bar.max_value = total_size
                bar.start()
            bar.update(completed)

        def _finish():
            if hasattr(bar, "next_update"):
                return bar.finish()

        return progressinfo(callback=_callback, finish=_finish)
    else:
        return progressinfo(callback=None, finish=lambda: True)
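# Hedged usage sketch (not from the original project): assuming the module-level
# `struct` helper and `progressbar` import used above, the returned
# callback/finish pair could be driven from a simulated upload loop like this.
def _example_drive_progress(total_size=1_000_000, chunk=64 * 1024):
    progress = get_progress_info()
    sent = 0
    while sent < total_size:
        sent = min(sent + chunk, total_size)
        if progress.callback:
            progress.callback(total_size, sent)
    progress.finish()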
def initialize_progress_bar(max_value):
    if max_value is None:
        max_value = progressbar.UnknownLength
    widgets = [
        progressbar.Percentage(),
        ' (', progressbar.DataSize(), ' of ', progressbar.DataSize('max_value'), ') ',
        progressbar.AdaptiveTransferSpeed(),
        progressbar.Bar(marker='█'),
        progressbar.Timer(), ' ',
        progressbar.AdaptiveETA(),
    ]
    return progressbar.DataTransferBar(max_value=max_value, widgets=widgets)
async def write_streaming_binary(data: AsyncIterator[bytes], target: str,
                                 chunk_size: int = 8192,
                                 full_size: Optional[int] = None):
    if not isinstance(chunk_size, int) or chunk_size <= 0:
        raise ValueError("The chunk size must be a positive integer")
    mkdirs(dirname(target))

    speed = progressbar.AdaptiveTransferSpeed(samples=datetime.timedelta(seconds=5))
    speed.INTERVAL = datetime.timedelta(milliseconds=500)

    if full_size is not None and isinstance(full_size, int) and full_size > 0:
        eta = progressbar.AdaptiveETA(samples=datetime.timedelta(seconds=5))
        eta.INTERVAL = datetime.timedelta(milliseconds=500)
        widgets = [
            ' ', progressbar.Timer(format='%(elapsed)s'),
            ' ', progressbar.Bar(left='[', right=']'),
            ' ', progressbar.Percentage(),
            ' - ', progressbar.DataSize(), '/', progressbar.DataSize('max_value'),
            ' @ ', speed,
            ' (', eta, ') ',
        ]
        bar = progressbar.ProgressBar(max_value=full_size, widgets=widgets,
                                      redirect_stdout=True)
    else:
        widgets = [
            ' ', progressbar.Timer(format='%(elapsed)s'),
            ' - ', progressbar.DataSize(),
            ' @ ', speed,
            ' - ', progressbar.AnimatedMarker(),
        ]
        bar = progressbar.ProgressBar(widgets=widgets, redirect_stdout=True,
                                      redirect_stderr=True)

    async with aopen(target, "wb") as f:
        with bar:
            async for chunk in data:
                await f.write(chunk)
                bar += len(chunk)
def prog_bar2(response, size, fh):
    """
    Using the ProgressBar2 library, generate a progress bar to help determine
    the download speed, remaining time, etc.

    Not very useful for VCF files, as they come down almost instantly, but
    helpful for BAM files to get an idea of how long the download will take.
    For DNA BAM files we don't know the size, so we just output the amount
    downloaded, the speed, etc. For RNA BAM files we can show the whole set
    of data.
    """
    wrote = 0
    if size is None:
        # We have a DNA BAM and don't know the actual size.
        widgets = [
            'Downloaded: ', progressbar.DataSize(),
            '; (', progressbar.FileTransferSpeed(), ' | ', progressbar.Timer(), ')',
        ]
        pbar = progressbar.ProgressBar(
            widgets=widgets, maxval=progressbar.UnknownLength).start()
    else:
        widgets = [
            progressbar.Percentage(),
            progressbar.Bar(marker="=", left=" [", right="] "),
            progressbar.DataSize(), ' of ',
            progressbar.DataSize(variable='max_value'),
            " (", progressbar.FileTransferSpeed(), ", ", progressbar.ETA(), " )",
        ]
        size = int(size)
        pbar = progressbar.ProgressBar(widgets=widgets, maxval=size,
                                       term_width=100).start()

    for buf in response.iter_content(1024):
        if buf:
            fh.write(buf)
            wrote += len(buf)
            pbar.update(wrote)
    pbar.finish()
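# Hedged usage sketch: the URL and output path are placeholders. The size is
# taken from the Content-Length header when the server provides one and left
# as None otherwise, matching the two branches described in the docstring.
resp = requests.get("https://example.com/sample.bam", stream=True)  # placeholder URL
size = resp.headers.get("content-length")
with open("sample.bam", "wb") as fh:
    prog_bar2(resp, size, fh)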
def downloadFile(start, end, count, partSize):
    print(threading.currentThread())
    global downloadUrl
    widgets = [
        p.FileTransferSpeed(prefixes=('Ki', 'Mi')),
        p.Bar(left=' |', marker='>', fill='-', right='|'),
        p.DataSize(prefixes=('Ki', 'Mi')),
        ' (', p.Percentage(), ')| ',
        p.ETA(),
    ]
    # The HTTP Range unit is lower-case "bytes".
    requestHeaders = {'Accept-Encoding': '*', 'Range': f'bytes={start}-{end}'}
    tempFile = requests.get(downloadUrl, headers=requestHeaders, stream=True)
    tempFileName = f'E:\\Users\\acer\\Desktop\\pydl.pdf.{count}'
    chunk = 1024  # bytes per chunk
    print(
        f'Downloading {tempFileName} from {downloadUrl}\n'
        f'File size: {partSize / 1024 / 1024} MiB.'
    )
    with open(tempFileName, 'wb') as f:
        for data in p.progressbar(
                tempFile.iter_content(chunk_size=chunk),
                max_value=partSize, widgets=widgets):
            f.write(data)
def download_to_file(model, hexdigest):
    filename = f"data/{model}.model"
    if os.path.exists(filename):
        return filename
    os.makedirs("data", exist_ok=True)
    response = urllib.request.urlopen(f"{DATA_URL}/{model}.model.bz2")
    widgets = [
        progressbar.Percentage(),
        progressbar.Bar(marker="■", fill="·"),
        progressbar.DataSize(),
        " ",
        progressbar.ETA(),
    ]
    bunzip = bz2.BZ2Decompressor()
    output = open(f"{DATA_FOLDER}/{model}.model", "wb")
    hasher = hashlib.new("md5")
    with progressbar.ProgressBar(max_value=response.length, widgets=widgets) as bar:
        for i in range((response.length // DATA_CHUNK) + 1):
            chunk = response.read(DATA_CHUNK)
            data = bunzip.decompress(chunk)
            bar.update(i * DATA_CHUNK)
            hasher.update(data)
            output.write(data)
    output.close()
    assert hasher.hexdigest() == hexdigest, "WARNING: Data has unexpected MD5 checksum."
    return filename
def test_all_widgets_max_width(max_width, term_width):
    widgets = [
        progressbar.Timer(max_width=max_width),
        progressbar.ETA(max_width=max_width),
        progressbar.AdaptiveETA(max_width=max_width),
        progressbar.AbsoluteETA(max_width=max_width),
        progressbar.DataSize(max_width=max_width),
        progressbar.FileTransferSpeed(max_width=max_width),
        progressbar.AdaptiveTransferSpeed(max_width=max_width),
        progressbar.AnimatedMarker(max_width=max_width),
        progressbar.Counter(max_width=max_width),
        progressbar.Percentage(max_width=max_width),
        progressbar.FormatLabel('%(value)d', max_width=max_width),
        progressbar.SimpleProgress(max_width=max_width),
        progressbar.Bar(max_width=max_width),
        progressbar.ReverseBar(max_width=max_width),
        progressbar.BouncingBar(max_width=max_width),
        progressbar.FormatCustomText('Custom %(text)s', dict(text='text'),
                                     max_width=max_width),
        progressbar.DynamicMessage('custom', max_width=max_width),
        progressbar.CurrentTime(max_width=max_width),
    ]
    p = progressbar.ProgressBar(widgets=widgets, term_width=term_width)
    p.update(0)
    p.update()
    for widget in p._format_widgets():
        if max_width and max_width < term_width:
            assert widget == ''
        else:
            assert widget != ''
def display_handlers(self):
    """sleep(1)
    print('\n\n')
    buff = self.done
    pbar = tqdm(total=self.size, unit_scale=1, unit='B', initial=buff,
                unit_divisor=1024)
    while (not self.completed) or (self.stoped):
        done = self.done
        pbar.update(done - buff)
        buff = done
    pbar.close()
    sleep(0.1)"""
    print('\n\n')
    _initial = self.done
    widgets = [
        progressbar.Bar(marker="#", left="[", right="]"),
        progressbar.Percentage(), " | ",
        progressbar.FileTransferSpeed(), " | ",
        progressbar.DataSize(), " | ",
        progressbar.ETA(),
    ]
    bar = progressbar.ProgressBar(
        widgets=widgets, maxval=self.size, initial_value=_initial).start()
    while (not self.completed) or (self.stoped):
        bar.update(self.done)
        sleep(0.0167)
    bar.finish()
def test_all_widgets_large_values(max_value):
    widgets = [
        progressbar.Timer(),
        progressbar.ETA(),
        progressbar.AdaptiveETA(),
        progressbar.AbsoluteETA(),
        progressbar.DataSize(),
        progressbar.FileTransferSpeed(),
        progressbar.AdaptiveTransferSpeed(),
        progressbar.AnimatedMarker(),
        progressbar.Counter(),
        progressbar.Percentage(),
        progressbar.FormatLabel('%(value)d/%(max_value)d'),
        progressbar.SimpleProgress(),
        progressbar.Bar(fill=lambda progress, data, width: '#'),
        progressbar.ReverseBar(),
        progressbar.BouncingBar(),
        progressbar.FormatCustomText('Custom %(text)s', dict(text='text')),
    ]
    p = progressbar.ProgressBar(widgets=widgets, max_value=max_value)
    p.update()
    time.sleep(1)
    p.update()
    for i in range(0, 10**6, 10**4):
        time.sleep(1)
        p.update(i)
def test_all_widgets_small_values(max_value):
    widgets = [
        progressbar.Timer(),
        progressbar.ETA(),
        progressbar.AdaptiveETA(),
        progressbar.AbsoluteETA(),
        progressbar.DataSize(),
        progressbar.FileTransferSpeed(),
        progressbar.AdaptiveTransferSpeed(),
        progressbar.AnimatedMarker(),
        progressbar.Counter(),
        progressbar.Percentage(),
        progressbar.FormatLabel('%(value)d'),
        progressbar.SimpleProgress(),
        progressbar.Bar(),
        progressbar.ReverseBar(),
        progressbar.BouncingBar(),
        progressbar.CurrentTime(),
        progressbar.CurrentTime(microseconds=False),
        progressbar.CurrentTime(microseconds=True),
    ]
    p = progressbar.ProgressBar(widgets=widgets, max_value=max_value)
    for i in range(10):
        time.sleep(1)
        p.update(i + 1)
    p.finish()
def show_progress(block_num, block_size, total_size):
    global pbar
    if pbar is None:
        if total_size > 0:
            prefixes = ('', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi', 'Yi')
            power = min(int(math.log(total_size, 2) / 10), len(prefixes) - 1)
            scaled = float(total_size) / (2 ** (10 * power))
            total_size_str = '{:.1f} {}B'.format(scaled, prefixes[power])
            try:
                marker = '█'
            except UnicodeEncodeError:
                marker = '*'
            widgets = [
                progressbar.Percentage(), ' ',
                progressbar.DataSize(), ' / ', total_size_str, ' ',
                progressbar.Bar(marker=marker), ' ',
                progressbar.ETA(), ' ',
                progressbar.AdaptiveTransferSpeed(),
            ]
            pbar = progressbar.ProgressBar(widgets=widgets, max_value=total_size)
        else:
            widgets = [
                progressbar.DataSize(), ' ',
                progressbar.Bar(marker=progressbar.RotatingMarker()), ' ',
                progressbar.Timer(), ' ',
                progressbar.AdaptiveTransferSpeed(),
            ]
            pbar = progressbar.ProgressBar(widgets=widgets,
                                           max_value=progressbar.UnknownLength)

    downloaded = block_num * block_size
    if downloaded < total_size:
        pbar.update(downloaded)
    else:
        pbar.finish()
        pbar = None
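# Hedged usage sketch: show_progress() matches the reporthook signature of
# urllib.request.urlretrieve() (block_num, block_size, total_size) and relies
# on a module-level `pbar = None`. The URL and file name are placeholders.
import urllib.request

pbar = None
urllib.request.urlretrieve(
    "https://example.com/archive.tar.gz",  # placeholder URL
    "archive.tar.gz",
    reporthook=show_progress,
)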
def extractFile(self, filename):
    def on_progress(filename, position, total_size, pb):
        pass

    def get_file_progress_file_object_class(on_progress, pb):
        class FileProgressFileObject(tarfile.ExFileObject):
            def read(self, size, *args):
                on_progress(self.name, self.position, self.size, pb)
                return tarfile.ExFileObject.read(self, size, *args)
        return FileProgressFileObject

    class ProgressFileObject(io.FileIO):
        def __init__(self, path, pb, *args, **kwargs):
            self.pb = pb
            self._total_size = os.path.getsize(path)
            io.FileIO.__init__(self, path, *args, **kwargs)

        def read(self, size):
            self.pb.update(self.tell())
            return io.FileIO.read(self, size)

    widgets = [
        progressbar.FormatCustomText(
            "Extracting : {:25.25}".format(os.path.basename(filename))),
        " ",
        progressbar.Percentage(),
        " ",
        progressbar.Bar(fill=chr(9617), marker=chr(9608), left="[", right="]"),
        " ",
        progressbar.DataSize(), "/", progressbar.DataSize(variable="max_value"),
    ]
    pbar = progressbar.ProgressBar(widgets=widgets,
                                   maxval=os.path.getsize(filename))
    pbar.start()
    tarfile.TarFile.fileobject = get_file_progress_file_object_class(on_progress, pbar)
    tar = tarfile.open(fileobj=ProgressFileObject(filename, pbar), mode="r:*")
    outputPath = os.path.commonprefix(tar.getnames())
    if os.path.isfile(outputPath):
        # The archive's common prefix already exists as a file; nothing to extract.
        tar.close()
        pbar.finish()
        return outputPath
    else:
        tar.extractall()
        tar.close()
        pbar.finish()
        return outputPath
def _progressbar_init(max_value: Union[int, progressbar.UnknownLength]):
    """
    Initialize the progressbar object with the max_value passed as parameter.

    :param max_value: the maximum value, or progressbar.UnknownLength
    :return: ProgressBar
    """
    widgets = [
        progressbar.Bar(left="[", right="]"),
        progressbar.Percentage(), " | ",
        progressbar.FileTransferSpeed(), " | ",
        progressbar.DataSize(), " / ",
        progressbar.DataSize(variable="max_value"), " | ",
        progressbar.ETA(),
    ]
    bar = progressbar.ProgressBar(max_value=max_value, widgets=widgets)
    return bar
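# Hedged usage sketch: _progressbar_init() accepts either a byte count or
# progressbar.UnknownLength; the 10 MiB total and 1 MiB steps are made up.
total = 10 * 1024 * 1024
bar = _progressbar_init(total)
for done in range(0, total + 1, 1024 * 1024):
    bar.update(done)
bar.finish()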
def make_progress_bar(name, size):
    widgets = [
        '%s: ' % name,
        progressbar.Percentage(), ' ',
        progressbar.Bar(), ' ',
        progressbar.ETA(), ' ',
        progressbar.DataSize(),
    ]
    return progressbar.ProgressBar(widgets=widgets, max_value=size)
def generate_hash(filename, log):
    ed2k_block = 9500 * 1024
    ed2k_hash = b''
    file_size = None
    with open(filename, 'rb') as f:
        file_size = os.fstat(f.fileno()).st_size
        bar = 0
        currentblock = 0
        prefixes = ('', 'K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y')
        if type(log.handlers[0]) == logging.StreamHandler:
            widgets = [
                progressbar.Percentage(), " | ",
                progressbar.DataSize(prefixes=prefixes), "/",
                progressbar.DataSize(variable="max_value", prefixes=prefixes), " | ",
                progressbar.Bar(marker="#", left="[", right="]"),
                progressbar.ETA(),
            ]
            progressbar.streams.wrap_stderr()
            bar = progressbar.ProgressBar(max_value=file_size, widgets=widgets)
        while True:
            block = f.read(ed2k_block)
            # Count actual bytes read; sys.getsizeof() would include Python
            # object overhead and overshoot the bar's max_value.
            currentblock += len(block)
            if not block:
                break
            ed2k_hash += md4(block).digest()
            if bar:
                try:
                    bar.update(currentblock)
                except ValueError:
                    bar.finish()
                    # Yes, this is expected: sizes and bytes are black magic.
                    # Maybe someday someone will have a better idea.
        # Per the ed2k algorithm, a file whose size is an exact multiple of the
        # block size gets one extra empty-block digest appended at the end.
        if file_size % ed2k_block == 0:
            ed2k_hash += md4(b'').digest()
    ed2k_hash = md4(ed2k_hash).hexdigest()
    log.debug("Size: {}, hash: {}".format(file_size, ed2k_hash))
    return {"size": file_size, "hash": ed2k_hash}
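# Hedged usage sketch: generate_hash() inspects log.handlers[0] to decide
# whether to draw a bar, so the logger needs at least one StreamHandler
# attached. The file name is a placeholder and md4() is assumed to come from
# the original module.
import logging

log = logging.getLogger("ed2k-example")
log.addHandler(logging.StreamHandler())
log.setLevel(logging.DEBUG)
result = generate_hash("example.bin", log)  # placeholder file
print(result["size"], result["hash"])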
def __init__(self, total: int = None):
    widgets = [
        " [", progressbar.Timer(), "] ",
        progressbar.DataSize(),
        progressbar.Bar(),
        progressbar.AdaptiveTransferSpeed(),
        " (", progressbar.ETA(), ") ",
    ]
    self.bar = progressbar.ProgressBar(max_value=total, widgets=widgets)
    self.seen_so_far = 0
def make_progress_bar(name, size):
    widgets = [
        '%s: ' % name[:8],
        progressbar.Percentage(), ' ',
        progressbar.Bar(), ' ',
        progressbar.AdaptiveETA(), ' ',
        progressbar.DataSize(), ' ',
        progressbar.AdaptiveTransferSpeed(),
    ]
    return progressbar.ProgressBar(widgets=widgets, max_value=size)
def download_progressbar(total_size):
    """ Create a progress bar to show in real-time a download status """
    # Compute DownloadProgressBar max value
    if total_size <= 0:
        max_val = progressbar.UnknownLength
    else:
        max_val = int(total_size / CHUNK_SIZE)

    # DownloadProgressBar settings
    MARKER = '█'
    PREFIXES = ('', 'K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y')[1:]
    POLL_INTERVAL = 0.8

    # DownloadProgressBar spacing
    LEFT_SPACE = 4
    PERCENTAGE_SPACE = 4
    PRE_BAR_SPACE = 1
    BAR_SPACE = 35
    POST_BAR_SPACE = 1
    DATA_SIZE_SPACE = 8
    PRE_SPEED_SPACE = 1
    SPEED_SPACE = 8

    # Compute right spacing, and ensure that it is not negative
    try:
        right_space = int(get_terminal_size()[0]) - \
            LEFT_SPACE - PERCENTAGE_SPACE - PRE_BAR_SPACE - BAR_SPACE - \
            POST_BAR_SPACE - DATA_SIZE_SPACE - PRE_SPEED_SPACE - SPEED_SPACE
        if right_space < 0:
            right_space = 0
    except (ValueError, TypeError, ArithmeticError):
        right_space = 0

    # Define DownloadProgressBar skin
    bar_skin = ([
        LEFT_SPACE * ' ',
        progressbar.Percentage(),
        PRE_BAR_SPACE * ' ',
        progressbar.Bar(marker=MARKER),
        POST_BAR_SPACE * ' ',
        progressbar.DataSize(prefixes=PREFIXES),
        PRE_SPEED_SPACE * ' ',
        progressbar.AdaptiveTransferSpeed(prefixes=PREFIXES),
        right_space * ' '
    ])

    # Generate DownloadProgressBar
    return progressbar.ProgressBar(max_value=max_val,
                                   widgets=bar_skin,
                                   poll_interval=POLL_INTERVAL)
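# Hedged usage sketch: max_val above is measured in CHUNK_SIZE chunks rather
# than bytes, so the bar is updated with a chunk counter. CHUNK_SIZE and the
# 1 MiB total below are placeholder values.
CHUNK_SIZE = 8192
total_size = 1024 * 1024
bar = download_progressbar(total_size)
for i in range(1, total_size // CHUNK_SIZE + 1):
    bar.update(i)
bar.finish()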
def prepare_file_operation_bar(filesize: int) -> progressbar.ProgressBar:
    return progressbar.ProgressBar(
        maxval=filesize,
        widgets=[
            progressbar.Percentage(),
            " ",
            progressbar.Bar(),
            " ",
            progressbar.FileTransferSpeed(),
            " | ",
            progressbar.DataSize(),
            " | ",
            progressbar.ETA(),
        ],
    )
def __init__(self, sc64: SC64) -> None:
    self.__sc64 = sc64
    self.__widgets = [
        "[ ",
        progressbar.FormatLabel("{variables.label}", new_style=True),
        " | ",
        progressbar.Bar(left="", right=""),
        " ",
        progressbar.Percentage(),
        " | ",
        progressbar.DataSize(prefixes=(" ", "Ki", "Mi")),
        " | ",
        progressbar.AdaptiveTransferSpeed(),
        " ]"
    ]
    self.__variables = {"label": ""}
    self.__bar = None
def download_to_file(model, hexdigest):
    filename = f'data/{model}.model'
    if os.path.exists(filename):
        return filename
    response = urllib.request.urlopen(f'{DATA_URL}/{model}.model.bz2')
    widgets = [
        progressbar.Percentage(),
        progressbar.Bar(marker='■', fill='·'),
        progressbar.DataSize(),
        ' ',
        progressbar.ETA(),
    ]
    bunzip = bz2.BZ2Decompressor()
    output = open(f'{DATA_FOLDER}/{model}.model', 'wb')
    hasher = hashlib.new('md5')
    with progressbar.ProgressBar(max_value=response.length, widgets=widgets) as bar:
        for i in range((response.length // DATA_CHUNK) + 1):
            chunk = response.read(DATA_CHUNK)
            data = bunzip.decompress(chunk)
            bar.update(i * DATA_CHUNK)
            hasher.update(data)
            output.write(data)
    output.close()
    assert hasher.hexdigest() == hexdigest, 'WARNING: Data has unexpected MD5 checksum.'
    return filename
def download_mod_files(save_location, mod_files: List[ModFile]):
    session = requests.Session()
    total = 0
    total_obtained = 0
    for mod_file in mod_files:
        total += mod_file.file_length

    widgets = [
        progressbar.widgets.Percentage(), ' of ',
        progressbar.DataSize('max_value'), ' @ ',
        progressbar.AdaptiveTransferSpeed(), ' ',
        progressbar.Bar(), ' ',
        progressbar.Timer(), ' ',
        progressbar.AdaptiveETA(),
    ]

    # Preallocate space.
    for mod_file in mod_files:
        logger.debug(f'Preallocating space for {mod_file.file_name}')
        with open(Path(save_location).joinpath(mod_file.file_name), 'wb') as fd:
            fd.truncate(mod_file.file_length)

    with progressbar.ProgressBar(max_value=total, widgets=widgets).start() as bar:
        for mod_file in mod_files:
            logger.info(f'Downloading: {mod_file.file_name}')
            bar.update(total_obtained)
            r = session.get(mod_file.download_url, stream=True)
            # content_length = int(r.headers['content-length'])
            r.raise_for_status()
            with open(Path(save_location).joinpath(mod_file.file_name), 'wb') as fd:
                for chunk in r.iter_content(chunk_size=128):
                    total_obtained = total_obtained + len(chunk)
                    bar.update(total_obtained)
                    fd.write(chunk)
def download(build, output, extension=INSTALLER_EXTENSION):
    """Downloads a specific version of the Papertrail installation package."""
    url = (S3_BUCKET + "/public/nightly/build/Papertrail_%s.%s") % (build, extension)
    print(url)
    response = requests.get(url, stream=True)
    size = response.headers['content-length']
    with open(output, 'wb') as f:
        progress = progressbar.ProgressBar(
            max_value=int(size),
            widgets=[
                progressbar.DataSize(),
                progressbar.Bar(), ' ',
                progressbar.FileTransferSpeed(), ' | ',
                progressbar.Timer(), ', ',
                progressbar.ETA(),
            ])
        nbytes = 0
        for chunk in response.iter_content(4096):
            if chunk:
                nbytes += len(chunk)
                progress.update(nbytes)
                f.write(chunk)
    response.close()
def upload(self, file_name, name='', password='', secure=False, lifetime=None):
    """Upload a file.

    :param file_name: the path to the file that should be uploaded
    :param name: the name of the upload
    :param password: a password for the upload
    :param secure: if True, use a secure URL (a longer shortcut)
    :param lifetime: a positive int that determines the lifetime of an
        uploaded file/paste in seconds.
    """
    if file_name == '-':
        try:
            stdin = sys.stdin.buffer
        except AttributeError:
            stdin = sys.stdin
        content = stdin.read()
        file_reader = io.BytesIO(content)
    elif os.path.isfile(file_name):
        file_reader = open(file_name, 'rb')
    else:
        LOGGER.error('Invalid input: ' + file_name)
        sys.exit(1)

    if name:
        upload_file = (name, file_reader)
    else:
        upload_file = (file_name, file_reader)

    if lifetime:
        try:
            self.upload_lifetime = str(int(lifetime))
        except ValueError:
            LOGGER.error('Invalid lifetime value: ' + lifetime)
            sys.exit(1)

    files = {'uploaded_file': upload_file}
    data = {'lifetime': str(self.upload_lifetime),
            'secure_shortcut': str(secure).lower()}
    if password:
        data['password'] = password

    def callback(monitor):
        progress.update(monitor.bytes_read)

    fields_dict = data.copy()
    fields_dict.update(files)
    enc = requests_toolbelt.multipart.encoder.MultipartEncoder(fields=fields_dict)
    widgets = [
        progressbar.DataSize(), ' /', progressbar.DataSize('max_value'),
        ' (', progressbar.Percentage(), ') |',
        progressbar.FileTransferSpeed(), ' ',
        progressbar.Bar(), ' ',
        progressbar.Timer(),
    ]
    progress = progressbar.ProgressBar(max_value=enc.len,
                                       filled_char='=',
                                       widgets=widgets).start()
    m = requests_toolbelt.multipart.encoder.MultipartEncoderMonitor(enc, callback)
    self.headers['Content-Type'] = m.content_type
    resp = requests.post(self.files_url, headers=self.headers, data=m, stream=True)
    progress.finish()

    if resp.status_code == 201:
        upload_url = self.url + '/' + resp.json()['shortcut']
        if password:
            upload_url = upload_url + '?password=' + password
        return upload_url
    else:
        LOGGER.error('Failed to upload file (' + str(resp.status_code) + ')')
        sys.exit(1)
# bar = p.ProgressBar(max_value=p.UnknownLength, widgets=w)
# for i in range(51):
#     time.sleep(0.1)
#     bar.update(i)
# *[>---------------------------------------------------------------] 10.0 B/s #456

# read: Shortcut mode preferred
import time

import progressbar as p

fileSize = 100000
w = [
    p.FileTransferSpeed(),
    p.Bar(left=" |", marker=">", fill="-", right="|"),
    p.DataSize(),
    f"/ {fileSize} KiB (",
    p.Percentage(),
    ')| ',
    p.ETA(),
]

for i in p.progressbar(range(1), widgets=w):
    time.sleep(0.05)
for j in p.progressbar(range(50), widgets=w):
    time.sleep(0.01)
# 19.9 B/s |>>>>>>>>>>>>>>>>>>>>>>>>>| 51.0 B/ 100000 KiB (100%)| Time: 0:00:02
def get_volumes(token, volume_ids, host, port, concat=False):
    """
    Returns volumes from the Data API as a raw zip stream.

    Parameters:
    :token: An OAuth2 token for the app.
    :volume_ids: A list of volume_ids
    :concat: If True, return a single file per volume. If False, return a
        single file per page (default).
    :host: Data API host
    :port: Data API port
    """
    if not volume_ids:
        raise ValueError("volume_ids is empty.")

    url = htrc.config.get_dataapi_epr() + "volumes"

    for id in volume_ids:
        if "." not in id:
            print("Invalid volume id " + id +
                  ". Please correct this volume id and try again.")

    data = {
        'volumeIDs': '|'.join(
            [id.replace('+', ':').replace('=', '/') for id in volume_ids])
    }
    if concat:
        data['concat'] = 'true'

    # Authorization
    headers = {
        "Authorization": "Bearer " + token,
        "Content-type": "application/x-www-form-urlencoded"
    }

    # Create SSL lookup
    # TODO: Fix SSL cert verification
    ctx = ssl.create_default_context()
    ctx.check_hostname = False
    ctx.verify_mode = ssl.CERT_NONE

    # Retrieve the volumes
    httpsConnection = http.client.HTTPSConnection(host, port, context=ctx)
    httpsConnection.request("POST", url, urlencode(data), headers)
    response = httpsConnection.getresponse()

    if response.status == 200:
        body = True
        data = BytesIO()
        bytes_downloaded = 0
        bar = progressbar.ProgressBar(
            max_value=progressbar.UnknownLength,
            widgets=[
                progressbar.AnimatedMarker(), ' ',
                progressbar.DataSize(),
                ' (', progressbar.FileTransferSpeed(), ')',
            ])

        while body:
            body = response.read(128)
            data.write(body)
            bytes_downloaded += len(body)
            bar.update(bytes_downloaded)

        data = data.getvalue()
    else:
        logging.debug("Unable to get volumes")
        logging.debug("Response Code: {}".format(response.status))
        logging.debug("Response: {}".format(response.reason))
        raise EnvironmentError("Unable to get volumes.")

    if httpsConnection is not None:
        httpsConnection.close()

    return data
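# Hedged usage sketch: the token, volume id, and endpoint below are
# placeholders (the real host/port normally come from htrc.config); the
# function returns the raw zip stream as bytes.
zip_bytes = get_volumes(
    token="REPLACE_WITH_OAUTH2_TOKEN",
    volume_ids=["mdp.39015012345678"],  # placeholder HathiTrust-style id
    host="data-api.example.org",        # placeholder endpoint
    port=443,
    concat=True,
)
with open("volumes.zip", "wb") as f:
    f.write(zip_bytes)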
def download_file(self, url=None, outputFileName=None, outputPath=None, bytes=False):
    def fmt_size(num, suffix="B"):
        for unit in ["", "Ki", "Mi", "Gi", "Ti", "Pi", "Ei", "Zi"]:
            if abs(num) < 1024.0:
                return "%3.1f%s%s" % (num, unit, suffix)
            num /= 1024.0
        return "%.1f%s%s" % (num, "Yi", suffix)

    if not url:
        raise Exception("No URL specified.")

    if outputPath is None:  # Default to current dir.
        outputPath = os.getcwd()
    else:
        if not os.path.isdir(outputPath):
            raise Exception('Specified path "{0}" does not exist'.format(outputPath))

    fileName = os.path.basename(url)  # Get URL filename
    userAgent = ("Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:57.0) "
                 "Gecko/20100101 Firefox/57.0")

    if 'sourceforge.net' in url.lower():
        userAgent = 'wget/1.18'  # sourceforge <3 wget

    if url.lower().startswith("ftp://"):
        self.log("Requesting : {0}".format(url))
        if outputFileName is not None:
            fileName = outputFileName
        fullOutputPath = os.path.join(outputPath, fileName)
        urllib.request.urlretrieve(url, fullOutputPath)
        return fullOutputPath

    req = requests.get(url, stream=True, headers={"User-Agent": userAgent})
    if req.status_code != 200:
        req.raise_for_status()

    if "content-disposition" in req.headers:
        reSponse = re.findall("filename=(.+)", req.headers["content-disposition"])
        if not reSponse:
            fileName = os.path.basename(url)
        else:
            fileName = reSponse[0]

    size = None
    compressed = False
    if "Content-Length" in req.headers:
        size = int(req.headers["Content-Length"])
    if "Content-Encoding" in req.headers:
        if req.headers["Content-Encoding"] == "gzip":
            compressed = True

    self.log("Requesting : {0} - {1}".format(
        url, fmt_size(size) if size is not None else "?"))

    widgetsNoSize = [
        progressbar.FormatCustomText("Downloading: {:25.25}".format(
            os.path.basename(fileName))), " ",
        progressbar.AnimatedMarker(markers='|/-\\'), " ",
        progressbar.DataSize(),
    ]
    widgets = [
        progressbar.FormatCustomText("Downloading: {:25.25}".format(
            os.path.basename(fileName))), " ",
        progressbar.Percentage(), " ",
        progressbar.Bar(fill=chr(9617), marker=chr(9608), left="[", right="]"), " ",
        progressbar.DataSize(), "/", progressbar.DataSize(variable="max_value"), " |",
        progressbar.AdaptiveTransferSpeed(), " | ",
        progressbar.ETA(),
    ]

    if size is None:
        pbar = progressbar.ProgressBar(widgets=widgetsNoSize,
                                       maxval=progressbar.UnknownLength)
    else:
        pbar = progressbar.ProgressBar(widgets=widgets, maxval=size)

    if outputFileName is not None:
        fileName = outputFileName
    fullOutputPath = os.path.join(outputPath, fileName)

    updateSize = 0
    if isinstance(pbar.max_value, int):
        updateSize = pbar.max_value if pbar.max_value < 1024 else 1024

    if bytes is True:
        output = b""
        bytesrecv = 0
        pbar.start()
        for buffer in req.iter_content(chunk_size=1024):
            if buffer:
                output += buffer
                if compressed:
                    pbar.update(updateSize)
                else:
                    pbar.update(bytesrecv)
                bytesrecv += len(buffer)
        pbar.finish()
        return output
    else:
        with open(fullOutputPath, "wb") as file:
            bytesrecv = 0
            pbar.start()
            for buffer in req.iter_content(chunk_size=1024):
                if buffer:
                    file.write(buffer)
                    file.flush()
                    if compressed:
                        pbar.update(updateSize)
                    else:
                        pbar.update(bytesrecv)
                    bytesrecv += len(buffer)
            pbar.finish()
            return fullOutputPath
# Assumed imports for the code below: os for os.environ, and packaging for
# version.parse(); the originals are not shown in this fragment.
import os

import requests
import click
import progressbar
from packaging import version

__version__ = '0.5.3'

PYWEEK_URL = 'https://pyweek.org'
CLI_PYPI_URL = 'https://pypi.org/pypi/pyweek/json'

PROGRESSBAR_WIDGETS = [
    progressbar.Percentage(),
    ' ', progressbar.Bar(marker='\u2588'),
    ' ', progressbar.ETA(),
    ' ', progressbar.DataSize(),
    ' ', progressbar.FileTransferSpeed(),
]

sess = requests.Session()


def version_check():
    """Check that this CLI is up-to-date."""
    if os.environ.get('PYWEEK_SKIP_VERSION_CHECK') is not None:
        return
    resp = sess.get(CLI_PYPI_URL)
    resp.raise_for_status()
    pkginfo = resp.json()
    v = version.parse(pkginfo['info']['version'])
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("url", help="URL to download", type=str, default="", nargs="?")
    parser.add_argument(
        "--plugin-config",
        help="Configuration for config, can be used multiple times. "
             "Syntax example: 'input.http={\"some\":\"json\"}'",
        type=str,
        action="append")
    parser.add_argument("--verbose",
                        help="Increase output verbosity",
                        action="store_true",
                        dest="verbose")
    # parser.add_argument("--serve", help="Run a service to serve files",
    #                     dest="serve", choices=OutputBase.get_all_plugins())
    args = parser.parse_args()

    if args.verbose:
        logging.basicConfig(
            level=logging.DEBUG if args.verbose else logging.ERROR,
            format='%(asctime)-15s:%(levelname)s:%(name)s:%(message)s')

    plugin_configs = {}
    if args.plugin_config:
        for plugin_config in args.plugin_config:
            plugin_name, cfg = plugin_config.split('=', 1)
            plugin_configs[plugin_name] = json.loads(cfg)

    # if args.serve:
    #     plugin_cls = OutputBase.find_plugin(args.serve)
    #     print('Starting to serve from %r' % (plugin_cls, ))
    #     plugin_cls_args = plugin_configs.get('output.%s' % plugin_cls.name, {})
    #     plugin_cls_args['plugin_configs'] = plugin_configs
    #     plugin = plugin_cls(**plugin_cls_args)
    #     plugin.start()
    # elif args.url:
    if args.url:
        parsed_url = urlsplit(args.url)
        plugin_cls = InputBase.find_plugin(parsed_url.scheme)
        if not plugin_cls:
            sys.stderr.write('Unknown scheme %s\n' % (parsed_url.scheme, ))
            quit(1)

        # TODO: Fix up to work with new stuff and not just a fast http downloader
        # Also fix Item instead of None
        plugin = plugin_cls(
            None, args.url,
            **plugin_configs.get('input.%s' % plugin_cls.plugin_name, {}))

        file_modes = 'wb'
        current_byte = 0
        if os.path.isfile(plugin.filename):
            size = os.path.getsize(plugin.filename)
            if size > plugin.size:
                print('File %s already exists and is bigger than the file '
                      'you are trying to download' % (plugin.filename, ))
                if not query_yes_no('Do you want to overwrite your local file?', 'no'):
                    quit()
                os.remove(plugin.filename)
            else:
                print('File %s already exists and is smaller than the file '
                      'you are trying to download' % (plugin.filename, ))
                if query_yes_no('Do you want to resume downloading to your local file?',
                                'yes'):
                    file_modes = 'ab'
                    plugin.seek(size)
                    current_byte = size

        print('Started downloading %s' % (plugin.filename, ))
        widgets = [
            ' ', progressbar.Percentage(),
            ' ', progressbar.DataSize(),
            ' of ', progressbar.DataSize(variable='max_value'),
            ' ', progressbar.Bar(),
            ' ', progressbar.ETA(),
            ' ', progressbar.FileTransferSpeed(),
        ]
        bar = progressbar.ProgressBar(max_value=plugin.size, widgets=widgets)
        bar.update(1)
        with open(plugin.filename, file_modes) as f:
            while True:
                d = plugin.read()
                if not d:
                    break
                f.write(d)
                current_byte += len(d)
                bar.update(current_byte)
        bar.finish()
    else:
        parser.print_help()
        tmpfile = open(tmpfilepath, "wb")
        r = sess.get(videourl, stream=True)
        totallen = int(r.headers['Content-Length'])
        receivedlen = 0
        print("Downloading the video for %s" % pageurl)
        if debug and debug_no_download:
            continue
        prgbar = progressbar.ProgressBar(
            max_value=totallen,
            widgets=[
                "%-20s" % (filename),
                progressbar.Bar(),
                " ", progressbar.Percentage(),
                " ", progressbar.ETA(),
                " ", progressbar.FileTransferSpeed(),
                " ", progressbar.DataSize("value"), "/",
                progressbar.DataSize("max_value"),
            ])
        for chunk in r:
            receivedlen += len(chunk)
            tmpfile.write(chunk)
            prgbar.update(receivedlen)
        tmpfile.close()
        tmpfile = None
        prgbar.finish()
        prgbar = None
        os.rename(tmpfilepath, filepath)
    except Exception as e:
        # print("[Error] Something went wrong when downloading the file")
        traceback.print_exc()
        if tmpfile is not None: