def get_progressbar(self, label=None, maxval=None):
    # Build a progress reporter matching the configured output mode.
    #
    # label:  optional text prepended to the bar's widgets.
    # maxval: total number of units; None selects an indeterminate bar.
    #
    # Returns one of:
    #   * _MachineReadableCounter when 'show_porcelain' is set,
    #   * a determinate progressbar.ProgressBar when maxval is known,
    #   * an _IndeterminateProgressBar when maxval is None,
    #   * _EveryMethodObject (an accept-anything stand-in) otherwise.
    #
    # NOTE: uses sys.maxint, so this block targets Python 2.
    if self.args.get('show_porcelain'):
        return _MachineReadableCounter(label=label, maxval=maxval)
    elif 'progressbar' in sys.modules and self.args.get(
            'show_progress', False):
        widgets = []
        if label is not None:
            widgets += [label, ' ']
        if maxval is not None:
            # Known total: percentage / bar / size / speed widgets.
            widgets += [
                progressbar.Percentage(), ' ',
                progressbar.Bar(marker='='), ' ',
                _FileSize(), ' ',
                progressbar.FileTransferSpeed(), ' '
            ]
            # Prefer AdaptiveETA when this progressbar version provides it.
            if 'AdaptiveETA' in dir(progressbar):
                widgets.append(progressbar.AdaptiveETA())
            else:
                widgets.append(progressbar.ETA())
            pbar = progressbar.ProgressBar(widgets=widgets,
                                           maxval=(maxval or sys.maxint),
                                           poll=0.05)
            #
            # The ProgressBar class initializer installs a signal handler
            # for SIGWINCH to resize the progress bar. Sometimes this can
            # interrupt long running system calls which can cause an
            # IOError exception to be raised. The call to siginterrupt
            # below will retrieve the currently installed signal handler
            # for SIGWINCH and set the SA_RESTART flag. This will cause
            # system calls to be restarted after the handler has been
            # executed instead of raising an exception.
            #
            signal.siginterrupt(signal.SIGWINCH, False)
            return pbar
        else:
            # Unknown total: bouncing bar plus size/speed/elapsed widgets.
            widgets += [
                _IndeterminateBouncingBar(marker='='), ' ',
                _FileSize(), ' ',
                progressbar.FileTransferSpeed(), ' ',
                progressbar.Timer(format='Time: %s')
            ]
            pbar = _IndeterminateProgressBar(widgets=widgets,
                                             maxval=(maxval or sys.maxint),
                                             poll=0.05)
            # See comment above
            signal.siginterrupt(signal.SIGWINCH, False)
            return pbar
    else:
        return _EveryMethodObject()
def prog_bar2(response, size, fh):
    """
    Stream an HTTP response body into *fh* while rendering a ProgressBar2
    progress bar showing download speed, time, etc.

    Not very useful for VCF files as they come down almost instantly, but
    helpful for BAM files to estimate how long the transfer will take.
    For DNA BAM files the size is unknown, so only the amount downloaded,
    speed and elapsed time are shown; for RNA BAM a full widget set is used.
    """
    if size is None:
        # Unknown total (e.g. DNA BAM): amount / speed / elapsed only.
        bar = progressbar.ProgressBar(
            widgets=[
                'Downloaded: ', progressbar.DataSize(),
                '; (', progressbar.FileTransferSpeed(),
                ' | ', progressbar.Timer(), ')',
            ],
            maxval=progressbar.UnknownLength).start()
    else:
        # Known total: full percentage / ETA display.
        bar = progressbar.ProgressBar(
            widgets=[
                progressbar.Percentage(),
                progressbar.Bar(marker="=", left=" [", right="] "),
                progressbar.DataSize(), ' of ',
                progressbar.DataSize(variable='max_value'),
                " (", progressbar.FileTransferSpeed(), ", ",
                progressbar.ETA(), " )",
            ],
            maxval=int(size), term_width=100).start()

    bytes_written = 0
    for chunk in response.iter_content(1024):
        if chunk:
            fh.write(chunk)
            bytes_written += len(chunk)
            bar.update(bytes_written)
    bar.finish()
def test_all_widgets_max_width(max_width, term_width):
    """Widgets constrained by max_width must render empty when the terminal
    is wider than their allowance; otherwise they must render non-empty."""
    kwargs = dict(max_width=max_width)
    widgets = [
        progressbar.Timer(**kwargs),
        progressbar.ETA(**kwargs),
        progressbar.AdaptiveETA(**kwargs),
        progressbar.AbsoluteETA(**kwargs),
        progressbar.DataSize(**kwargs),
        progressbar.FileTransferSpeed(**kwargs),
        progressbar.AdaptiveTransferSpeed(**kwargs),
        progressbar.AnimatedMarker(**kwargs),
        progressbar.Counter(**kwargs),
        progressbar.Percentage(**kwargs),
        progressbar.FormatLabel('%(value)d', **kwargs),
        progressbar.SimpleProgress(**kwargs),
        progressbar.Bar(**kwargs),
        progressbar.ReverseBar(**kwargs),
        progressbar.BouncingBar(**kwargs),
        progressbar.FormatCustomText('Custom %(text)s', dict(text='text'),
                                     **kwargs),
        progressbar.DynamicMessage('custom', **kwargs),
        progressbar.CurrentTime(**kwargs),
    ]
    bar = progressbar.ProgressBar(widgets=widgets, term_width=term_width)
    bar.update(0)
    bar.update()

    should_be_empty = bool(max_width) and max_width < term_width
    for rendered in bar._format_widgets():
        if should_be_empty:
            assert rendered == ''
        else:
            assert rendered != ''
def load_from_url(url: str, save_path: str):
    """Download *url* to *save_path*, showing a console progress bar.

    The directory of *save_path* is created if missing.  On any download
    error a message is printed instead of raising (best-effort behaviour).

    :param url: source URL.
    :param save_path: local file path to write to.
    """
    # Create progressbar
    widgets = [
        'Downloaded: ', pb.Percentage(), ' ',
        pb.Bar(marker=pb.RotatingMarker()), ' ',
        pb.ETA(), ' ', pb.FileTransferSpeed()
    ]
    pbar = pb.ProgressBar(widgets=widgets)

    def dl_progress(count, blockSize, totalSize):
        # First callback: the total size becomes known, so start the bar.
        if pbar.max_value is None:
            pbar.max_value = totalSize
            pbar.start()
        pbar.update(min(count * blockSize, totalSize))

    # Create the save path if not exists
    os.makedirs(os.path.dirname(save_path), exist_ok=True)

    # Download the file
    print(f'Downloading file from {url}.')
    try:
        urllib.request.urlretrieve(url, save_path, reporthook=dl_progress)
    except Exception:
        # Was a bare "except:", which also swallowed KeyboardInterrupt and
        # SystemExit; narrowed to Exception while keeping best-effort output.
        print('Unable to download file. Please download ',
              'manually from the URL above and place it in ',
              f'the following path {save_path}.')
    else:
        # Only finish the bar on success; after a failed download the bar
        # may never have been started.
        pbar.finish()
def eta_types_demonstration():
    """Drive one bar with every ETA / transfer-speed widget side by side."""
    widgets = [
        progressbar.Percentage(),
        ' ETA: ', progressbar.ETA(),
        ' Adaptive ETA: ', progressbar.AdaptiveETA(),
        ' Absolute ETA: ', progressbar.AbsoluteETA(),
        ' Transfer Speed: ', progressbar.FileTransferSpeed(),
        ' Adaptive Transfer Speed: ', progressbar.AdaptiveTransferSpeed(),
        ' ', progressbar.Bar(),
    ]
    bar = progressbar.ProgressBar(widgets=widgets, max_value=500)
    bar.start()
    for step in range(500):
        # Vary the pace so the adaptive widgets diverge from the plain ones.
        if step < 100:
            delay = 0.02
        elif step > 400:
            delay = 0.1
        else:
            delay = 0.01
        time.sleep(delay)
        bar.update(step + 1)
    bar.finish()
def get_progress_info():
    """Return a function callback to update the progressbar."""
    progressinfo = struct("ProgressInfo", ["callback", "finish"])

    if not progressbar:
        # progressbar library unavailable: hand back no-op hooks.
        return progressinfo(callback=None, finish=lambda: True)

    bar = progressbar.ProgressBar(widgets=[
        progressbar.Percentage(), ' ',
        progressbar.Bar(), ' ',
        progressbar.ETA(), ' ',
        progressbar.FileTransferSpeed(),
    ])

    def _callback(total_size, completed):
        # First call: set the total (the attribute name differs between
        # progressbar versions) and start the bar.
        if not hasattr(bar, "next_update"):
            if hasattr(bar, "maxval"):
                bar.maxval = total_size
            else:
                bar.max_value = total_size
            bar.start()
        bar.update(completed)

    def _finish():
        # "next_update" only exists once start() has run.
        if hasattr(bar, "next_update"):
            return bar.finish()

    return progressinfo(callback=_callback, finish=_finish)
def upload_binary(self, stream):
    # Put the device in receive mode ('z') and push `stream` over XMODEM
    # with a progress bar.  Returns False on a failed transfer.
    # Python 2 only (print statements).
    self._port.write('z')
    widgets = [
        'Uploading safe mode ',
        progressbar.Percentage(), ' ',
        progressbar.Bar(marker='#', left='[', right=']'), ' ',
        progressbar.ETA(), ' ',
        progressbar.FileTransferSpeed(),
    ]
    # NOTE(review): `f` is not defined in this scope -- presumably this
    # should be stream.fileno() or a file object set up by the caller;
    # confirm against the original module.
    file_size = os.fstat(f.fileno()).st_size
    with progressbar.ProgressBar(widgets=widgets, max_value=file_size) as bar:
        modem = xmodem.XMODEM(getc=self._xmodem_getc, putc=self._xmodem_putc)
        r = modem.send(stream, quiet=True,
                       callback=self._xmodem_report_progress(bar, file_size))
        if not r:
            print 'Upload failed!'
            return False
    print 'Binary uploaded'
    # Wait for the device to acknowledge before declaring success.
    self._wait_for('Done!')
    print 'Upload finished'
def _addresses_to_check_with_caching(self, show_progress=True):
    """Yield unique candidate addresses, deduplicating identical blocks.

    Blocks are keyed by their raw bytes: the first address seen for a given
    byte sequence becomes the representative (stored in ``self._cache`` as a
    key), and later duplicates are added to that representative's set instead
    of being yielded again.  Blocks that are too large, fail to lift, have
    constant jump targets, or are IP-relative are skipped.
    """
    num_addrs = len(list(self._addresses_to_check()))
    widgets = [
        'ROP: ', progressbar.Percentage(), ' ',
        progressbar.Bar(marker=progressbar.RotatingMarker()), ' ',
        progressbar.ETA(), ' ',
        progressbar.FileTransferSpeed()
    ]
    progress = progressbar.ProgressBar(widgets=widgets, maxval=num_addrs)
    if show_progress:
        progress.start()
    self._cache = dict()
    seen = dict()  # raw block bytes -> representative address
    for i, a in enumerate(self._addresses_to_check()):
        if show_progress:
            progress.update(i)
        try:
            bl = self.project.factory.block(a)
            if bl.size > self._max_block_size:
                continue
            block_data = bl.bytes
        except (SimEngineError, SimMemoryError):
            # Unliftable address: ignore it.
            continue
        if block_data in seen:
            # Duplicate block: record the alias, do not yield again.
            self._cache[seen[block_data]].add(a)
            continue
        else:
            # Only yield blocks with no constant jump targets and no
            # IP-relative accesses.
            if len(bl.vex.constant_jump_targets) == 0 and \
                    not self._block_has_ip_relative(a, bl):
                seen[block_data] = a
                self._cache[a] = set()
                yield a
    if show_progress:
        progress.finish()
def get_progress_info():
    """Return a function callback to update the progressbar."""
    build = collections.namedtuple("ProgressInfo", ["callback", "finish"])

    if not progressbar:
        # No progressbar library: return inert hooks.
        return build(callback=None, finish=lambda: True)

    bar = progressbar.ProgressBar(widgets=[
        progressbar.Percentage(), ' ',
        progressbar.Bar(), ' ',
        progressbar.ETA(), ' ',
        progressbar.FileTransferSpeed(),
    ])

    def _callback(total_size, completed):
        # Start lazily on the first call; "next_update" only appears
        # after start() has run.
        if not hasattr(bar, "next_update"):
            bar.maxval = total_size
            bar.start()
        bar.update(completed)

    return build(callback=_callback, finish=bar.finish)
def dump(self):
    """Read the whole flash of an ATmega328P page-by-page into dump.hex."""
    memtype = "flash"
    mem = MEM_PARTS_328P[memtype]
    # Sanity-check the memory map before touching the device.
    assert (mem["pagesize"] != 0)
    # flash the device
    assert (mem["pagesize"] * mem["pagecount"] == mem["size"])
    progress = 0  # NOTE(review): never used; the bar iterator tracks progress
    bar = progressbar.ProgressBar(widgets=[
        'Dump: ', progressbar.Bar(), ' ',
        progressbar.Counter(format='%(value)02d/%(max_value)d'), ' ',
        progressbar.FileTransferSpeed()
    ])
    # One iteration per flash page.
    r = range(0, mem["size"], mem["pagesize"])
    # NOTE(review): the file is opened binary ("wb") but str hex text is
    # written below -- fine on Python 2, TypeError on Python 3; confirm the
    # target interpreter version.
    with open("dump.hex", "wb") as fd:
        for addr in bar(r, ):
            # Buffer pre-filled with ASCII '0'; read_page overwrites it
            # in place.
            data = bytearray(b"0" * 128)
            self.load_addr(addr)
            self.read_page(memtype, data)
            # One hex-encoded page per output line.
            fddata = "".join(["{:02x}".format(x) for x in data])
            fd.write(fddata)
            fd.write("\n")
            debug_print("[STK500] DATA {}".format(fddata))
    print("Leaving programming mode")
    self.leave_progmode()
    print("All done, go into dump.hex")
def example25():
    """
    Display a progress bar using the progressbar library.

    (The previous docstring said "tqdm", but this function uses
    progressbar widgets throughout.)

    >>> example25()
    True
    """
    widgets = [
        "Test: ", progressbar.Percentage(),
        " ", progressbar.Bar(marker=progressbar.RotatingMarker()),
        " ", progressbar.ETA(),
        " ", progressbar.FileTransferSpeed(),
    ]
    pbar = progressbar.ProgressBar(widgets=widgets, max_value=1000,
                                   redirect_stdout=True).start()
    # 100 steps of 10 reach exactly max_value (1000).
    for i in range(100):
        # do something
        pbar += 10
    pbar.finish()
    return True
def get_progress_bar(total_size, action):
    """Get progressbar.ProgressBar instance for file transfer.

    Args:
        total_size: int
        action: str that will be prepended to the progressbar.
            i.e "Uploading: " or "Downloading: "

    Returns:
        progressbar.ProgressBar instance
    """
    widgets = [
        action,
        progressbar.Percentage(),
        " ",
        progressbar.Bar(marker="#", left="[", right="]"),
        " ",
        progressbar.ETA(),
        " ",
        progressbar.Timer(),
        " ",
        progressbar.FileTransferSpeed(),
    ]
    return progressbar.ProgressBar(max_value=total_size,
                                   widgets=widgets,
                                   redirect_stdout=True)
def main():
    """Download all not-yet-fetched videos for the ahegao hashtags and mark
    them as downloaded in the local sqlite database."""
    hashtags_for_download = list(get_ahegao_hashtags().keys())
    conn = sqlite3.connect('hashtags.db')
    conn.row_factory = sqlite3.Row
    # One "?" placeholder per tag id: parameterized instead of string-built
    # SQL (the previous f-string interpolation was SQL-injection-prone).
    placeholders = ', '.join('?' for _ in hashtags_for_download)
    videos_info = conn.execute(
        f'select * from videos where tag_id in ({placeholders}) and downloaded = 0',
        hashtags_for_download).fetchall()
    widgets = [progressbar.Percentage(), ' ', progressbar.Counter(), ' ',
               progressbar.Bar(), ' ', progressbar.FileTransferSpeed()]
    pbar = progressbar.ProgressBar(widgets=widgets,
                                   max_value=len(videos_info)).start()
    for i, video_info in enumerate(videos_info):
        pbar.update(i)
        video_info = dict(video_info)
        share_id = video_info['share_id']
        file_name = f'videos/{share_id}.mp4'
        if osp.exists(file_name):
            # Already on disk: just record the fact.
            with conn:
                conn.execute(
                    'UPDATE videos SET downloaded = 1 where share_id = ?',
                    (share_id,))
            continue
        download_link = video_info['download_url'].replace('watermark=1',
                                                           'watermark=0')
        # must have here headers, otherwise it behaves as api and does not serve the video
        response = requests.get(download_link, allow_redirects=True, headers={
            'User-Agent': 'Mozilla/5.0',
            'X-Requested-With': 'XMLHttpRequest',
            'Referer': 'https://permit.pcta.org/application/'
        })
        request.urlretrieve(response.url, file_name)
        with conn:
            conn.execute(
                'UPDATE videos SET downloaded = 1 where share_id = ?',
                (share_id,))
    pbar.finish()
    print('done')
def test_file_transfer_speed(total_seconds_elapsed, value, expected):
    """A FileTransferSpeed widget must format the given elapsed/value pair
    exactly as *expected*."""
    data = dict(
        total_seconds_elapsed=total_seconds_elapsed,
        value=value,
    )
    speed_widget = progressbar.FileTransferSpeed()
    assert speed_widget(None, data) == expected
def write_to_csv(self):
    # Convert the JSON-lines temp file into the final CSV, showing a
    # progress bar, then delete the temp file.  Python 2 only (`unicode`).
    if self.num_results > 0:
        # Recount: the temp file is the authoritative number of rows.
        self.num_results = sum(1 for line in open(self.tmp_file, 'r'))
        if self.num_results > 0:
            output_file = open(self.opts.output_file, 'a')
            csv_writer = csv.DictWriter(output_file,
                                        fieldnames=self.csv_headers,
                                        delimiter=self.opts.delimiter,
                                        quoting=csv.QUOTE_ALL)
            csv_writer.writeheader()
            timer = 0  # running count of rows written, drives the bar
            widgets = ['Write to csv ',
                       progressbar.Bar(left='[', marker='#', right=']'),
                       progressbar.FormatLabel(' [%(value)i/%(max)i] ['),
                       progressbar.Percentage(),
                       progressbar.FormatLabel('] [%(elapsed)s] ['),
                       progressbar.ETA(), '] [',
                       progressbar.FileTransferSpeed(unit='lines'), ']'
                       ]
            bar = progressbar.ProgressBar(widgets=widgets,
                                          maxval=self.num_results).start()
            for line in open(self.tmp_file, 'r'):
                timer += 1
                bar.update(timer)
                line_as_dict = json.loads(line)
                # Encode unicode values to UTF-8 bytes for Python 2's csv
                # module, which cannot write unicode directly.
                line_dict_utf8 = {k: v.encode('utf8')
                                  if isinstance(v, unicode) else v
                                  for k, v in line_as_dict.items()}
                csv_writer.writerow(line_dict_utf8)
            output_file.close()
            bar.finish()
        else:
            print('There is no docs with selected field(s): %s.'
                  % ','.join(self.opts.fields))
    # Temp file is removed even when nothing was written.
    os.remove(self.tmp_file)
def predict(self):
    """Restore the trained model and write per-example predictions.

    Loads the test split, restores weights from ``self.args.model``, runs
    the graph batch-by-batch and writes one prediction per line to the
    file named by ``self.args.predict``.  Python 2 only (print statement,
    xrange).
    """
    # not yet here
    self.load_data(train=False)
    with tf.Graph().as_default():
        var = self.add_model()
        saver = tf.train.Saver()
        config = tf.ConfigProto(allow_soft_placement=True)
        # Grow GPU memory on demand instead of grabbing it all up front.
        config.gpu_options.allow_growth = True
        sess = tf.Session(config = config)
        saver.restore(sess, self.args.model)
        print >> sys.stderr, "restore model from ", self.args.model
        total_batch = int(np.ceil(len(self.test_data) / float(self.args.batch)))
        p = open(self.args.predict, "w")
        pbar = pb.ProgressBar(widgets=["[TEST] ",
                                       pb.FileTransferSpeed(unit="batchs"),
                                       pb.Percentage(), pb.Bar(), pb.Timer(),
                                       " ", pb.ETA()],
                              maxval=total_batch).start()
        prob_l = []  # NOTE(review): never used -- leftover from an older version?
        for i in xrange(total_batch):
            batch = self.next_batch(self.args.batch, dtype='test')
            # Column 0 of the last axis is dropped before feeding the model;
            # presumably it holds an id/label -- confirm against next_batch.
            pred = sess.run(var['pred'], feed_dict={var['x']:batch[:,:,1:]})
            # print prob.shape
            # Flatten the batch of predictions to one value per line.
            out_l = [str(xx) for x in pred.tolist() for xx in x]
            out = "\n".join(out_l)+'\n'
            p.write(out)
            pbar.update(i)
        pbar.finish()
def test_all_widgets_small_values(max_value):
    """Tick every widget once per second up to a small max_value."""
    bar = progressbar.ProgressBar(max_value=max_value, widgets=[
        progressbar.Timer(),
        progressbar.ETA(),
        progressbar.AdaptiveETA(),
        progressbar.AbsoluteETA(),
        progressbar.DataSize(),
        progressbar.FileTransferSpeed(),
        progressbar.AdaptiveTransferSpeed(),
        progressbar.AnimatedMarker(),
        progressbar.Counter(),
        progressbar.Percentage(),
        progressbar.FormatLabel('%(value)d'),
        progressbar.SimpleProgress(),
        progressbar.Bar(),
        progressbar.ReverseBar(),
        progressbar.BouncingBar(),
        progressbar.CurrentTime(),
        progressbar.CurrentTime(microseconds=False),
        progressbar.CurrentTime(microseconds=True),
    ])
    for step in range(1, 11):
        time.sleep(1)
        bar.update(step)
    bar.finish()
def program():
    """Upload args.image to the device over XMODEM with a progress bar."""
    port.timeout = None
    widgets = [
        'Uploading ',
        progressbar.Percentage(), ' ',
        progressbar.Bar(marker='#', left='[', right=']'), ' ',
        progressbar.ETA(), ' ',
        progressbar.FileTransferSpeed(),
    ]
    with open(args.image, 'rb') as image_file:
        image_size = os.fstat(image_file.fileno()).st_size
        with progressbar.ProgressBar(widgets=widgets,
                                     max_value=image_size) as bar:

            def _report_progress(_, success_count, error_count):
                # XMODEM sends 128-byte packets; clamp to the real file size
                # so the final (padded) packet does not overshoot.
                bar.update(min([image_size, 128 * success_count]))

            modem = xmodem.XMODEM(getc=_xmodem_getc, putc=_xmodem_putc)
            modem.send(image_file, quiet=False, callback=_report_progress)
def test_all_widgets_large_values(max_value):
    """Exercise every widget with large step sizes (10**4 per tick)."""
    bar = progressbar.ProgressBar(widgets=[
        progressbar.Timer(),
        progressbar.ETA(),
        progressbar.AdaptiveETA(),
        progressbar.AbsoluteETA(),
        progressbar.DataSize(),
        progressbar.FileTransferSpeed(),
        progressbar.AdaptiveTransferSpeed(),
        progressbar.AnimatedMarker(),
        progressbar.Counter(),
        progressbar.Percentage(),
        progressbar.FormatLabel('%(value)d/%(max_value)d'),
        progressbar.SimpleProgress(),
        progressbar.Bar(fill=lambda progress, data, width: '#'),
        progressbar.ReverseBar(),
        progressbar.BouncingBar(),
        progressbar.FormatCustomText('Custom %(text)s', dict(text='text')),
    ], max_value=max_value)
    # Two value-less updates before the stepped run.
    bar.update()
    time.sleep(1)
    bar.update()
    for value in range(0, 10 ** 6, 10 ** 4):
        time.sleep(1)
        bar.update(value)
def main():
    """Crawl tiktok hashtag ids sequentially and store id->name mappings
    in a local sqlite database, skipping ids already recorded."""
    conn = sqlite3.connect('hashtags.db')
    conn.execute(
        "CREATE TABLE IF NOT EXISTS hashtags (tag_id varchar NOT NULL, tag_name varchar NOT NULL);"
    )
    conn.execute(
        "CREATE INDEX IF NOT EXISTS hashtags_tag_id_idx ON hashtags (tag_id);")
    # the hashtag id is auto-incremental
    # format is https://m.tiktok.com/h5/share/tag/[hashtag id].html so I can crawl it
    min_tag_id = 1120  # empirically found, lower numbers are empty
    max_tag_id = 10000000  # actually there is much more tags, but I assume those tags are not relevant or empty
    widgets = [
        progressbar.Percentage(), ' ',
        progressbar.Counter(), ' ',
        progressbar.Bar(), ' ',
        progressbar.FileTransferSpeed()
    ]
    pbar = progressbar.ProgressBar(widgets=widgets,
                                   max_value=max_tag_id - min_tag_id).start()
    for i in range(min_tag_id, max_tag_id):
        pbar.update(i - min_tag_id)
        # Parameterized query instead of f-string interpolation: SQL-safe
        # and lets sqlite reuse the prepared statement across iterations.
        if conn.execute(
                'select exists(select 1 from hashtags where tag_id = ?)',
                (str(i),)).fetchone()[0]:
            continue
        tag = tag_id_to_name(i)
        with conn:
            conn.execute(
                'INSERT INTO hashtags (tag_id, tag_name) VALUES (?, ?)',
                (str(i), tag))
    pbar.finish()
def __init__(self, obj: object):
    super().__init__(obj)
    # Transfer display: bar | percent | speed | count | ETA.
    self.widgets = [
        progressbar.Bar(marker="#", left="[", right="]"),
        progressbar.Percentage(), " | ",
        progressbar.FileTransferSpeed(), " | ",
        progressbar.SimpleProgress(), " | ",
        progressbar.ETA(),
    ]
    self.bar = None
    self.data = None
    # All numeric transfer bookkeeping starts at zero.
    (self.block_size, self.packets_sent, self.pages_sent,
     self.packet_count, self.page_count, self.flash_size,
     self.current_page, self.data_start, self.data_end,
     self.page_address, self.crc, self.start_time,
     self.end_time, self.duration) = (0,) * 14
def _maybe_download_file(s3client, filelist, targetfolder):
    # Download each S3 object in `filelist` into `targetfolder` (created if
    # missing) with a per-file progress bar, then extract it.  403 errors
    # skip the file; other client errors are reported.
    target_path = path.abspath(targetfolder)
    if not path.exists(target_path):
        # NOTE(review): typo "diretory" in the user-facing message below.
        print("Creating diretory {}".format(target_path))
        makedirs(target_path)
    for fileentry in filelist:
        filename = fileentry["Key"].split('/')[-1]
        filepath = path.join(target_path, filename)
        try:
            widgets = [
                'Download {}: '.format(filename),
                progressbar.Percentage(), ' ',
                progressbar.Bar(), ' ',
                progressbar.ETA(), ' Rate: ',
                progressbar.FileTransferSpeed()
            ]
            # NOTE(review): .start(max_value=..., init=True) and
            # bar.previous_value are not part of every progressbar release --
            # confirm the pinned progressbar version supports them.
            bar = progressbar.ProgressBar(widgets=widgets).start(
                max_value=fileentry["Size"], init=True)
            # The S3 callback reports bytes-per-chunk, so accumulate onto
            # the bar's previous value.
            s3client.download_file(
                BUCKET_NAME, fileentry["Key"], filepath,
                Callback=lambda d: bar.update(bar.previous_value + d))
            bar.finish()
        except botocore.exceptions.ClientError as e:
            if e.response['Error']['Code'] == "404":
                # NOTE(review): no `continue` here, so a 404 still falls
                # through to _extract_file below -- confirm this is intended.
                print("The object does not exist.")
            elif e.response['Error']['Code'] == "403":
                continue
            else:
                print("Error during download. Abort.")
                continue
        _extract_file(filepath, targetfolder)
def download_file_to(link, path):
    """Stream *link* to *path* with a progress bar.

    Returns True on success, False when the server does not answer 200.
    """
    resp = requests.get(link, stream=True)
    if resp.status_code != 200:
        print(resp.status_code, link)
        return False

    total = int(resp.headers['Content-length'])
    print("Downloading file %s (%d bytes)" % (path, total))
    bar = progressbar.ProgressBar(
        widgets=[
            '%s: ' % os.path.basename(path),
            progressbar.Percentage(), ' ',
            progressbar.Bar(marker=progressbar.AnimatedMarker(fill='#')), ' ',
            progressbar.Counter('%(value)d'),
            '/' + str(total) + ' bytes downloaded', ' ',
            progressbar.ETA(), ' ',
            progressbar.FileTransferSpeed(),
        ],
        max_value=total,
        redirect_stdout=True).start()

    with open(path, "wb") as out:
        for chunk in resp.iter_content(8192):
            bar += len(chunk)
            out.write(chunk)
    bar.finish()
    return True
def azure_send(self, container, path, mtime):
    # Upload `path` as a block blob into `container` with a progress bar,
    # then stamp the blob with an "mtime" metadata value for later sync
    # comparisons.  Python 2 only (`except Exception, e`).
    return_code = 0
    if not os.path.exists(path):
        self.logger.error("%s - Error sync file to azure. Details: No such file or directory" % (path))
        return_code = 1
    else:
        try:
            file_size = os.path.getsize(path)
            # Zero-byte files would make maxval=0; use 1 so the bar's
            # percentage math cannot divide by zero.
            if file_size == 0:
                file_size = 1
            self.pbar = progressbar.ProgressBar(
                widgets=["Sending data to Azure",
                         progressbar.Percentage(), ' ',
                         progressbar.Bar(marker=progressbar.RotatingMarker()),
                         ' ', progressbar.ETA(), ' ',
                         progressbar.FileTransferSpeed()],
                maxval=file_size).start()
            # self.upload_progress drives self.pbar during the transfer.
            self.blob_service.put_block_blob_from_path(
                container, path, path,
                progress_callback=self.upload_progress)
            self.pbar.finish()
            self.blob_service.set_blob_metadata(
                container, path,
                x_ms_meta_name_values={"mtime": str(mtime)})
        except (ValueError, ZeroDivisionError):
            # Deliberately ignored: presumably progress-bar arithmetic
            # glitches -- confirm.
            pass
        except Exception, e:
            self.logger.error("%s - Error sync file to azure. Details: %s" % (path, str(e)))
            return_code = 1
            pass
    # NOTE(review): return_code is computed but never returned in the code
    # visible here -- confirm whether a `return return_code` was lost.
def predict(self):
    """Score every (x_ID, y_ID) pair in ``self.cluster_df`` and dump a CSV.

    Writes columns ID and Ans to ``self.args.predict`` and logs the mean
    prediction ("1 rate") to stderr.  Python 2 only (print statement,
    itertools.izip).
    """
    df = self.cluster_df
    ans = []
    x_ids = df['x_ID'].as_matrix()
    y_ids = df['y_ID'].as_matrix()
    pbar = pb.ProgressBar(widgets=[
        "predict:", pb.FileTransferSpeed(unit="pairs"),
        pb.Percentage(), pb.Bar(), pb.Timer(), " ",
        pb.ETA()
    ], maxval=len(df)).start()
    # Score pairs in lockstep; the bar advances one tick per pair.
    for i, (x, y) in enumerate(izip(x_ids, y_ids)):
        pbar.update(i)
        p = self.score(x, y)
        ans.append(p)
    pbar.finish()
    print >> sys.stderr, "1 rate", np.mean(ans)
    df['Ans'] = ans
    # Keep only the submission columns.
    df = df[['ID', 'Ans']]
    df.to_csv(self.args.predict, index=False)
def display_handlers(self):
    """Render a console progress bar until the transfer finishes.

    Polls ``self.done`` against ``self.size`` roughly 60 times per second
    (sleep of 1/60 s).  An equivalent tqdm-based implementation previously
    sat disabled inside this docstring.
    """
    print('\n\n')
    # Resume from whatever has already been transferred.
    _initial = self.done
    widgets = [progressbar.Bar(marker="#", left="[", right="]"),
               progressbar.Percentage(), " | ",
               progressbar.FileTransferSpeed(), " | ",
               progressbar.DataSize(), " | ",
               progressbar.ETA()]
    bar = progressbar.ProgressBar(
        widgets=widgets, maxval=self.size, initial_value=_initial).start()
    # NOTE(review): "(not completed) or stoped" keeps looping while `stoped`
    # is truthy even after completion -- confirm the intended condition.
    while (not self.completed) or (self.stoped):
        bar.update(self.done)
        sleep(0.0167)
    bar.finish()
def download_elevation(self):
    # Fetch elevation for every waypoint on the route, showing a progress
    # bar, then persist the result via write_csv().  Python 2 only
    # (dict.iteritems).
    # init progress bar
    maxval = len(self.route_waypoints)
    widgets = [
        "Downloading %s elevation" % self.name,
        progressbar.Percentage(), ' ',
        progressbar.Bar(marker='=', left='[', right=']'), ' ',
        progressbar.ETA(), ' ',
        progressbar.FileTransferSpeed()
    ]
    progress_bar = progressbar.ProgressBar(widgets=widgets, maxval=maxval)
    progress_bar.start()
    # TODO check if elevation has already been downloaded
    # download data
    i = 0
    # route_waypoints maps mileage -> waypoint; only the waypoint is used.
    for mileage, waypoint in self.route_waypoints.iteritems():
        waypoint.add_elevation()
        progress_bar.update(i)
        i += 1
    progress_bar.finish()
    self.write_csv()
def upload_file(filename, bucket, prefix=None):
    # Upload `filename` to the S3 `bucket` (under `prefix/` when given),
    # make it public-read, and drive the module-global `pbar` from the
    # boto progress callback.  Returns 0 on failure.  Python 2 only
    # (print statement, `unicode`, `except IOError, e`).
    global pbar
    key = Key(bucket)
    if prefix:
        key.key = '%s/%s' % (prefix, filename)
    else:
        key.key = '%s' % (filename)
    size = os.stat(filename).st_size
    if size == 0:
        print 'Bad filesize for "%s"' % (filename)
        return 0
    widgets = [
        # Filename label, UTF-8 encoded with undecodable bytes dropped.
        unicode(filename, errors='ignore').encode('utf-8'), ' ',
        progressbar.FileTransferSpeed(), ' <<<',
        progressbar.Bar(), '>>> ',
        progressbar.Percentage(), ' ',
        progressbar.ETA()
    ]
    # Global so progress_callback (defined elsewhere) can update it.
    pbar = progressbar.ProgressBar(widgets=widgets, maxval=size)
    pbar.start()
    try:
        key.set_contents_from_filename(
            filename,
            cb=progress_callback,
            num_cb=100,
        )
        key.set_acl('public-read')
    except IOError, e:
        print e
        return 0
def write_to_csv(self):
    """Convert the JSON-lines temp file into the final CSV (with a progress
    bar), then remove the temp file."""
    if self.num_results > 0:
        # Recount: the temp file is the authoritative number of rows.
        self.num_results = sum(1 for line in open(self.tmp_file, 'r'))
        if self.num_results > 0:
            self.csv_headers.sort()
            output_file = open(self.opts.output_file, 'a')
            writer = csv.DictWriter(output_file,
                                    fieldnames=self.csv_headers,
                                    delimiter=self.opts.delimiter)
            writer.writeheader()
            widgets = ['Write to csv ',
                       progressbar.Bar(left='[', marker='#', right=']'),
                       progressbar.FormatLabel(' [%(value)i/%(max)i] ['),
                       progressbar.Percentage(),
                       progressbar.FormatLabel('] [%(elapsed)s] ['),
                       progressbar.ETA(), '] [',
                       progressbar.FileTransferSpeed(), ']']
            bar = progressbar.ProgressBar(widgets=widgets,
                                          maxval=self.num_results).start()
            for row_number, raw_line in enumerate(open(self.tmp_file, 'r'),
                                                  start=1):
                bar.update(row_number)
                writer.writerow(json.loads(raw_line))
            output_file.close()
            bar.finish()
        else:
            print('There is no docs with selected field(s): %s.'
                  % ','.join(self.opts.fields))
    os.remove(self.tmp_file)
def wget(self, url: str, filename: str, chunk_size: int = 4096) -> None:
    """
    Download content of given url to filename.

    :param str url: URI
    :param str filename: local file path name
    :param int chunk_size: number of bytes fetched per iteration
    """
    response = requests.request('GET', url, stream=True, data=None,
                                headers=None)
    widgets = [
        'Loading: ', progressbar.Percentage(), ' ',
        progressbar.Bar(marker='#', left='[', right=']'), ' ',
        progressbar.ETA(), ' ',
        progressbar.FileTransferSpeed()
    ]
    # Previously int(headers.get('Content-Length') or '') raised ValueError
    # whenever the header was missing; fall back to an indeterminate bar.
    content_length = response.headers.get('Content-Length')
    if content_length:
        progress = progressbar.ProgressBar(maxval=int(content_length),
                                           widgets=widgets).start()
    else:
        progress = progressbar.ProgressBar(maxval=progressbar.UnknownLength,
                                           widgets=widgets).start()
    processed = 0
    with open(filename, 'wb') as data:
        for chunk in response.iter_content(chunk_size=chunk_size):
            if chunk:
                data.write(chunk)
                data.flush()
                processed += len(chunk)
                progress.update(processed)
    progress.finish()