def __init__(self, maxval, alternate=False):
    """Create and start a progress bar in one of two widget layouts.

    alternate -- when True, use a line-per-update label layout with a
    throttled redraw interval; otherwise the standard bar layout.
    """
    self.done = 0
    if alternate:
        layout = [
            FormatLabel(
                'Processed: %(value)d/%(max)d items (in: %(elapsed)s)'),
            ' -=- ', Percentage(),
            ' -=- ', ETA(),
        ]
        self.pbar = ProgressBar(widgets=layout, maxval=maxval,
                                endline_character="\n")
        # Throttle redraws so line-per-update output stays readable.
        self.pbar.update_interval = maxval / 50
    else:
        layout = [
            SimpleProgress(), ' ', Percentage(), ' ', Bar(), ' ',
            Timer(), ' ', AdaptiveETA(),
        ]
        self.pbar = ProgressBar(widgets=layout, maxval=maxval)
    self.pbar.start()
def run(symbol_files, output_file, error_file=stderr):
    """Download historical records for every parsed symbol into a CSV file.

    Symbols that fail to download are logged to *error_file* and skipped;
    the run continues with the remaining symbols.
    """
    symbols = parse_symbol_files(symbol_files)
    # NOTE(review): mkdir_p receives the output *file* path, not its
    # directory -- presumably mkdir_p tolerates this; verify its contract.
    mkdir_p(output_file)
    maxval = len(symbols)
    plabel = ProgressLabel()
    widgets = [Percentage(), Bar(), ' ', plabel, ' ', AdaptiveETA()]
    progress = ProgressBar(widgets=widgets, maxval=maxval).start()
    with open(error_file, 'w+') as feh:
        with open(output_file, 'w+') as fh:
            writer = csv.writer(fh)
            writer.writerow(HistoricalStockRecord.header())
            for i, s in enumerate(symbols):
                plabel.update_label(s)
                progress.update(i)
                sym = Symbol(s)
                try:
                    records = sym.get_historical()
                # Narrowed from a bare `except:`; still best-effort per
                # symbol, but no longer swallows KeyboardInterrupt/SystemExit.
                except Exception:
                    print >>feh, "Failed to download:\t%s" % s
                    continue
                for record in records:
                    writer.writerow(HistoricalStockRecord.value(record))
    progress.update(maxval)
    progress.finish()
def progress_bar_widgets(unit='items'):
    """Return the standard widget layout for transfer-style progress bars."""
    speed = FileTransferSpeed(unit=unit)
    return [SimpleProgress(sep='/'), ' ', Bar(), ' ', speed, ', ',
            AdaptiveETA()]
def migrate(apps, schema_editor, direction): pbar_widgets = [ SimpleProgress(), ' ', Percentage(), ' ', Bar(), ' ', Timer(), ' ', AdaptiveETA() ] for model in ['Comment', 'Newsitem']: queryset = globals()[model].objects.filter(text__isnull=True) if queryset.count() == 0: continue print "Processing " + model pbar = ProgressBar(widgets=pbar_widgets, maxval=queryset.count()) done = 0 pbar.start() tokenizer = RegexpTokenizer(r'\w+') for item in queryset_iterator(queryset, chunksize=20): if item.content: if direction == "forward": item.text = Text.objects.create( text=item.content, wordcount=len(tokenizer.tokenize(item.content))) elif direction == "backwards": item.content = item.text.text item.save() done += 1 pbar.update(done) pbar.finish()
def download_image(img_path, img_url):
    """Download *img_url* to *img_path* with a progress bar.

    Returns the HTTP status code of the response.
    """
    # Fetch URL
    url = urllib2.urlopen(img_url)
    meta = url.info()
    file_size = int(meta.getheaders("Content-Length")[0])
    print_verbose("Downloading image %s (%s)" %
                  (url.geturl(), size(file_size, system=alternative)), 0)
    # Set progress bar
    widgets = ['Progress: ', Percentage(), ' ', Bar(), ' ',
               AdaptiveETA(), ' ', AdaptiveTransferSpeed()]
    pbar = ProgressBar(widgets=widgets, maxval=file_size).start()
    # Download in 8 KiB chunks; the context manager guarantees the target
    # file is closed even if the transfer raises part-way (the original
    # leaked the handle on error).
    file_size_dl = 0
    block_sz = 1024 * 8
    with open(img_path, 'wb') as f:
        while True:
            buff = url.read(block_sz)
            if not buff:
                break
            file_size_dl += len(buff)
            f.write(buff)
            pbar.update(file_size_dl)
    pbar.finish()
    return url.getcode()
def save_img_h5(data, h5_file, cache_size=30, img_size=None):
    """Preprocess images in batches of *cache_size* and store them, together
    with filenames and labels, in an HDF5 file.

    data -- iterable of (filename, label) pairs.
    """
    f = h5py.File(h5_file, "w")
    files = [name for (name, l) in data]
    labels = np.array([l for (name, l) in data])
    n_samples = len(data)
    files_dset = f.create_dataset("files", data=files)
    lab_dset = f.create_dataset("labels", data=labels, compression="gzip")
    img_dset = f.create_dataset("imgs", (n_samples, 3, 256, 256),
                                dtype='float32', compression="gzip")
    # Ceiling division. The original tested `n_step % cache_size` instead of
    # `len(data) % cache_size`, which could drop the final partial batch,
    # and used `/` (a float on Python 3).
    n_step = -(-n_samples // cache_size)
    ############# Progressbar ################
    widgets = [Percentage(), ' ', Bar(), ' ', ETA(), ' ', AdaptiveETA()]
    pbar = ProgressBar(widgets=widgets, maxval=len(data))
    pbar.start()
    #########################################
    for j in range(n_step):
        X = preprocess_image_batch(files[j * cache_size:(j + 1) * cache_size],
                                   img_size=img_size)
        img_dset[j * cache_size:(j + 1) * cache_size] = X
        pbar.update(j * cache_size)
    pbar.finish()
    f.close()
def download(self, info):
    """Download the product described by *info* next to self.filename.

    Raises if the target file already exists; returns the target path.
    """
    extension = self.check_compatibility(info)
    basename, _ = os.path.splitext(self.filename)
    target = '%s.%s' % (basename, extension)
    if os.path.exists(target):
        # fixed typo: "aready" -> "already"
        raise Exception('target file already exists... %s' % (target))
    url = ProductHelper.get_adh_url(info)
    # uses progressbar2
    prog = ProgressBar(maxval=100, widgets=[
        'Downloading "%s"... ' % (os.path.basename(target)),
        Percentage(), ' ', Bar(), ' ', AdaptiveETA()
    ])

    def report_callback(count, block_size, total_size):
        # Guard: servers without a Content-Length report total_size <= 0,
        # which previously caused a ZeroDivisionError (or nonsense percent).
        if total_size > 0:
            percent = ((count * block_size) / total_size) * 100
            prog.update(min(percent, 100))

    prog.start()
    opener = urllib.request.build_opener()
    opener.addheaders = [
        ('User-Agent', 'Audible ADM 6.6.0.19;Windows Vista Build 9200'),
    ]
    urllib.request.install_opener(opener)
    urllib.request.urlretrieve(url, filename=target,
                               reporthook=report_callback)
    prog.finish()
    return target
def cleanup(displayCnt=True):
    """Delete every stored Uniprot key in batches of 50, showing progress.

    displayCnt -- when True, size the bar via the module-level __len__();
                  otherwise report the length as unknown.
    """
    cnt = 0
    _ = []  # pending batch of keys awaiting deletion
    widgets = ['Deleting ', Counter(), ' ', Percentage(),\
               ' ', Bar(),\
               ' ', Timer(),\
               ' ', AdaptiveETA()]
    with ProgressBar(widgets=widgets, maxval= __len__()\
                     if displayCnt else UnknownLength\
                     , redirect_stdout=True) as bar:
        for key in listUniprotKey():
            cnt += 1
            _.append(key)
            if cnt % 50 == 0:
                # Flush a full batch; remove() raises KeyError when some
                # key is already gone -- treated as a warning, not fatal.
                try:
                    remove(_)
                except KeyError:
                    print("Missing keys to delete")
                _ = []
            bar.update(cnt)
        if _:
            # Flush the final partial batch (fewer than 50 keys).
            try:
                remove(_)
            except KeyError:
                print("Missing keys to delete")
        bar.update(cnt)
def parallel_distances(cache_name, elems, query_img_index, worker_fn,
                       ncpu=4, st='test', kwargs=None):
    """Compute (or load from a memmap cache) distances from one query image
    to every element of *elems*, fanning work out over *ncpu* processes.

    Returns a plain ndarray copy of the float32 distance vector.
    """
    # Fixed mutable-default pitfall: `kwargs={}` was shared across calls.
    if kwargs is None:
        kwargs = {}
    lock = threading.Lock()
    end_works = 0

    def increment_finished(x):
        # Callback from the pool: count completed tasks under a lock.
        nonlocal end_works
        with lock:
            end_works += 1

    cache_dir = os.path.join('./dist_cache', cache_name)
    if not os.path.exists(cache_dir):
        os.makedirs(cache_dir)
    filename = os.path.join(cache_dir, 'd_{}.npy'.format(query_img_index))
    n = len(elems)
    if os.path.isfile(filename):
        print(
            'Graph distances file existing for image {}, cache {}! Loading...'.
            format(query_img_index, cache_name))
        distances = np.memmap(filename, dtype=np.float32, shape=(n, ),
                              mode='r+')
    else:
        distances = np.memmap(filename, dtype=np.float32, shape=(n, ),
                              mode='w+')
        distances[:] = -1  # sentinel: not yet computed
        print('Computing {} distances for image {}, cache {}...'.format(
            n, query_img_index, cache_name))
        pbar = ProgressBar(widgets=[Percentage(), Bar(), AdaptiveETA()],
                           maxval=n).start()
        pool = multiprocessing.Pool(processes=ncpu,
                                    initializer=parallel_worker_init,
                                    initargs=(distances, elems))
        for idx in range(n):
            pool.apply_async(parallel_worker,
                             args=(query_img_index, idx, worker_fn, kwargs),
                             callback=increment_finished)
        # Poll until every async task has reported back.
        while end_works != n:
            pbar.update(end_works)
            time.sleep(1)
        pool.close()
        pool.join()
        distances.flush()
    distances_copy = np.copy(distances)
    del distances  # release the memmap handle before returning
    return distances_copy
def example18():
    """Drive a 500-step bar with varying step durations (AdaptiveETA demo)."""
    bar = ProgressBar(
        widgets=[Percentage(), ' ', Bar(), ' ', ETA(), ' ', AdaptiveETA()],
        maxval=500)
    bar.start()
    for step in range(500):
        delay = 0.01
        if step < 100:
            delay += 0.01
        if step > 400:
            delay += 0.9
        time.sleep(delay)
        bar.update(step + 1)
    bar.finish()
def _initialize_progress_bar(self):
    """Create and start the download progress bar sized to the image count."""
    layout = ['Download: ', Percentage(), ' ', Bar(), ' ',
              AdaptiveETA(), ' ', FileTransferSpeed()]
    bar = ProgressBar(widgets=layout, max_value=self._imageCount)
    self._downloadProgressBar = bar.start()
def myProgressBar(maxval):
    """Build (but do not start) a default progress bar with given maxval.

    Default widgets: Percentage, Bar, AdaptiveETA and Timer.
    """
    layout = [Percentage(), ' ', Bar(), " ", AdaptiveETA(), ' ', Timer()]
    return ProgressBar(widgets=layout, maxval=maxval)
def example27():
    """AdaptiveETA/AdaptiveTransferSpeed when the value never advances."""
    bar = ProgressBar(widgets=[AdaptiveETA(), AdaptiveTransferSpeed()],
                      max_value=2, poll=0.0001)
    bar.start()
    bar.update(1)
    time.sleep(0.001)
    bar.update(1)  # same value on purpose: ETA must cope with zero progress
    bar.finish()
def console_scan_loop(scanners, scan_titles, verbose):
    """ Scan all the AsyncScanner object printing status to console.

    Inputs:
     - scanners -- List of AsyncScanner objects to scan.
     - scan_titles -- List of string with the names of the world/regionsets
       in the same order as in scanners.
     - verbose -- Boolean, if true it will print a line per scanned region
       file instead of updating a progress bar.
    """
    try:
        for scanner, title in zip(scanners, scan_titles):
            print("\n{0:-^60}".format(title))
            if not len(scanner):
                print("Info: No files to scan.")
            else:
                total = len(scanner)
                # In quiet mode a progress bar replaces per-file lines.
                if not verbose:
                    pbar = ProgressBar(
                        widgets=[SimpleProgress(), Bar(), AdaptiveETA()],
                        maxval=total).start()
                try:
                    scanner.scan()
                    counter = 0
                    # Poll the scanner until it reports completion,
                    # draining one result per iteration.
                    while not scanner.finished:
                        scanner.sleep()
                        result = scanner.get_last_result()
                        if result:
                            logging.debug(
                                "\nNew result: {0}\n\nOneliner: {1}\n".format(
                                    result, result.oneliner_status))
                            counter += 1
                            if not verbose:
                                pbar.update(counter)
                            else:
                                status = "(" + result.oneliner_status + ")"
                                fn = result.filename
                                fol = result.folder
                                print(
                                    "Scanned {0: <12} {1:.<43} {2}/{3}".format(
                                        join(fol, fn), status, counter, total))
                    if not verbose:
                        pbar.finish()
                except KeyboardInterrupt as e:
                    # If not, dead processes will accumulate in windows
                    scanner.terminate()
                    raise e
    except ChildProcessException as e:
        # print "\n\nSomething went really wrong scanning a file."
        # print ("This is probably a bug! If you have the time, please report "
        #        "it to the region-fixer github or in the region fixer post "
        #        "in minecraft forums")
        # print e.printable_traceback
        raise e
def __init__(self, cursor, conn):
    """SAX content handler that streams parsed records into the database."""
    xml.sax.handler.ContentHandler.__init__(self)
    self._db_cursor = cursor
    self._db_conn = conn
    self._count = 0
    progress_widgets = [Bar(), SimpleProgress(), AdaptiveETA()]
    self._pbar = ProgressBar(widgets=progress_widgets, maxval=UnknownLength)
    self.reset()
def __init__(self, generations):
    """Progress tracker counting generations up to *generations*."""
    layout = ['Generation ', SimpleProgress(), ' (', Percentage(), ') ',
              Bar(), ' ', AdaptiveETA(), ' ',
              FileTransferSpeed(unit='Gens')]
    self.pbar = ProgressBar(widgets=layout, maxval=generations)
    self.current = 1
def get_progress_bar(n):
    """Return a started progress bar for *n* IMU epochs."""
    layout = ['IMU Epochs : ', Bar(marker='#'), 'epoch=', Counter(), ' ',
              Percentage(), ' (', Timer(), ') ', AdaptiveETA(), ' ']
    return ProgressBar(widgets=layout, maxval=n).start()
def get_progress_bar(max_value):
    """Build and start a progress bar labelled with the processed count."""
    layout = ['Processed: ', Counter(), '/{} ['.format(max_value),
              Percentage(), '] ', Bar(), ' ', Timer(), ' ', AdaptiveETA()]
    bar = ProgressBar(widgets=layout, maxval=max_value)
    bar.start()
    return bar
def setup_progressbar(msg, size, format_label=None, simple_progress=False):
    """Return an unstarted progress bar prefixed with *msg*.

    simple_progress -- use a SimpleProgress counter instead of Percentage.
    format_label -- optional FormatLabel template appended to the widgets.
    """
    if not msg.endswith(': '):
        msg += ': '
    counter = SimpleProgress() if simple_progress else Percentage()
    widgets = [msg, counter, ' ', Bar(), ' ', Timer(), ' ', AdaptiveETA()]
    if format_label is not None:
        widgets.append(FormatLabel(format_label))
    return ProgressBar(widgets=widgets, maxval=size)
def load_vectors(self, embeddings_folder, logger):
    """Load this model's word-embedding file into self.vectors.

    Reads a plain-text (optionally gzipped) "word v1 v2 ..." file,
    skipping a probable header line and inferring the dimensionality
    from the first full vector.
    """
    logger.info('Loading "{}" word embeddings into memory...'.format(
        self.alias))
    embeddings_path = os.path.join(embeddings_folder,
                                   self.embeddings_filename)
    self.vectors = OrderedDict()
    widgets = [
        Percentage(), ' of {}K'.format(int(self.file_n_lines / 1000)), ' ',
        Bar(), ' ', Timer(), ' ', AdaptiveETA()
    ]
    bar = ProgressBar(widgets=widgets, max_value=self.file_n_lines).start()
    if self.embeddings_filename.endswith('.gz'):
        f = gzip.open(embeddings_path, 'rt')
    else:
        f = io.open(embeddings_path, 'rt', encoding='utf-8')
    try:
        for i, line in enumerate(f):
            if i % 100 == 0:
                # Grow the bar if the line-count estimate was too small.
                if i > bar.max_value:
                    bar.max_value = i
                bar.update(i)
            line = line.strip()
            if line:
                word, vec = line.split(' ', 1)
                try:
                    # NOTE(review): np.fromstring is deprecated in modern
                    # NumPy; consider np.fromiter on upgrade.
                    np_vec = np.fromstring(vec, sep=' ')
                    if self.embeddings_dimensionality is None:
                        if len(np_vec) < 10:
                            # A short first row is assumed to be a header.
                            logger.debug("Skipping header")
                            continue
                        else:
                            self.embeddings_dimensionality = len(np_vec)
                    if len(np_vec) == self.embeddings_dimensionality:
                        self.vectors[word] = np_vec
                # Narrowed from a bare `except:` so KeyboardInterrupt and
                # SystemExit are no longer swallowed while parsing.
                except Exception:
                    if logger is not None:
                        logger.debug(
                            "Embeddings reader: Could not convert line: {}"
                            .format(line))
    finally:
        f.close()
    bar.finish()
    logger.info('Done!')
def transform(directory_path, function, repeat=None):
    """Apply *function* to every image in *directory_path*, saving results
    into an `output` subdirectory.

    repeat -- if given, transform each image that many times (useful for
    randomized transforms).
    """
    index = 0
    # next() instead of .next() works on both Python 2 and 3.
    path, dirs, files = next(os.walk(directory_path))
    file_count = len(files)
    print('File count:\n' + str(file_count))
    print('\nRemember to update configs with transform factor!\n\n')
    output_directory = os.path.normpath(directory_path + '/output')
    if not os.path.isdir(output_directory):
        os.mkdir(output_directory)
    if repeat is not None:
        # Make the progress bar accurate
        file_count = file_count * repeat
    widgets = [AdaptiveETA(), ' Completed: ', Percentage(),
               ' (', Counter(), ')']
    pbar = ProgressBar(widgets=widgets, max_value=file_count).start()
    for filename in os.listdir(directory_path):
        if not filename.endswith(('.jpeg', '.jpg', '.JPG', '.png')):
            continue
        # BUG FIX: the original used `elif repeat > 1`, which silently
        # skipped every file when repeat == 1.
        passes = 1 if repeat is None else repeat
        for _ in range(passes):
            image = function(directory_path + '/' + filename)
            transform_path = (output_directory + '/' + function.__name__ +
                              '-' + str(index) + '.jpg')
            imsave(transform_path, image)
            index += 1
            pbar.update(index)
    pbar.finish()
def progressbar(count):
    """Return a started progress bar spanning *count* items.

    Exits with an explanatory message if the progressbar library is absent.
    """
    try:
        from progressbar import Bar, AdaptiveETA, Percentage, ProgressBar
    except ImportError:
        print(
            'Please download the progressbar library from https://github.com/niltonvolpato/python-progressbar or '
            'run without the --progressbar parameter')
        sys.exit(1)
    bar = ProgressBar(widgets=[Percentage(), ' ', Bar(), ' ', AdaptiveETA()],
                      maxval=count)
    bar.start()
    return bar
def example26():
    """Compare ETA, AdaptiveETA and AdaptiveTransferSpeed over 500 steps."""
    layout = [Percentage(), ' ', Bar(), ' ', ETA(), ' ',
              AdaptiveETA(), ' ', AdaptiveTransferSpeed()]
    bar = ProgressBar(widgets=layout, max_value=500)
    bar.start()
    for step in range(500):
        delay = 0.001
        if step < 100:
            delay += 0.0001
        if step > 400:
            delay += 0.009
        time.sleep(delay)
        bar.update(step + 1)
    bar.finish()
def make_df():
    """Query Google Maps for drive distance/duration from each valid grid
    point to the office address and pickle the resulting DataFrame."""
    gmaps = googlemaps.Client(key=os.environ['GMAPS_API_KEY'])
    points = get_valid_points()
    bar = ProgressBar(widgets=[Percentage(), Bar(), AdaptiveETA()])
    work = '1100 McCaslin Blvd #100, Superior, CO 80027, USA'
    rows = []
    for point in bar(points):
        lat, lng = point
        resp = gmaps.distance_matrix(point, work)
        element = resp['rows'][0]['elements'][0]
        rows.append({'lat': lat, 'lng': lng,
                     'distance': element['distance']['value'],
                     'duration': element['duration']['value']})
    # Build the frame in one shot: per-row DataFrame.append was O(n^2)
    # and has been removed in pandas 2.0.
    df = pd.DataFrame(rows)
    df.to_pickle('data/df.pickle')
def alloc(N, m, max_size):
    """Simulate allocating N elements into m buckets as random lists padded
    to power-of-two sizes, using two-choice placement into 'meta buckets'.

    m is supposed to be a power of 2 and strictly greater than max_size.
    Returns the array of per-bucket element counts.
    """
    assert m > max_size
    # Only the bucket sizes matter, so an array of integers is enough.
    buckets = numpy.zeros(m, numpy.int64)
    remaining_elements = N
    widgets = ['Processed: ', Counter(), ' elements (', Percentage(), ')',
               Bar(), Timer(), AdaptiveETA()]
    pbar = ProgressBar(widgets=widgets, maxval=N)
    pbar.start()
    while remaining_elements != 0:
        # generate a random list length, capped by what is left to allocate
        l = random.randint(1, min(max_size, remaining_elements))
        # lists are padded up to the next power of two, so the 'meta
        # buckets' also have power-of-two sizes
        n_i = power_of_two(l)
        # BUG FIX: `/` produced a float on Python 3, breaking randint below.
        meta_buckets_counts = m // n_i
        B_1 = random.randint(0, meta_buckets_counts - 1)
        B_2 = random.randint(0, meta_buckets_counts - 1)
        count_B_1 = sum(buckets[n_i * B_1: n_i * (B_1 + 1)])
        count_B_2 = sum(buckets[n_i * B_2: n_i * (B_2 + 1)])
        # two-choice placement: use the emptier meta bucket
        chosen_B = B_2 if count_B_1 > count_B_2 else B_1
        buckets[n_i * chosen_B: n_i * chosen_B + n_i] += 1
        remaining_elements -= l
        pbar.update(N - remaining_elements)
    return buckets
def example20():
    """Widgets that behave differently when length is unknown"""
    layout = ['[When length is unknown at first]',
              ' Progress: ', SimpleProgress(),
              ', Percent: ', Percentage(),
              ' ', ETA(), ' ', AdaptiveETA()]
    bar = ProgressBar(widgets=layout, maxval=UnknownLength)
    bar.start()
    for step in range(20):
        time.sleep(0.5)
        if step == 10:
            # The total becomes known halfway through.
            bar.maxval = 20
        bar.update(step + 1)
    bar.finish()
def create_progbar_keras(max_value, stateful_metrics=None):
    """Build a Keras-style progress bar.

    stateful_metrics -- optional iterable of metric names; each becomes a
    DynamicMessage widget appended after the ETA.
    """
    widgets = [
        SimpleProgress(format='{value}/{max_value}', new_style=True), ' ',
        Bar(marker=marker, left='[', right=']', fill='.'),
        AdaptiveETA(format_not_started='- ETA: --',
                    format_finished=FinishedETAFormat(),
                    format=ETAFormat(),
                    format_zero=' - ETA: 0s',
                    format_NA='- ETA: N/A',
                    new_style=True)
    ]
    # Fixed mutable-default pitfall: `stateful_metrics=[]` was shared
    # across calls.
    for metric in (stateful_metrics or []):
        widgets.append(DynamicMessage(name=metric, format=ValueFormat()))
    return ProgressBar(max_value=max_value, widgets=widgets,
                       redirect_stdout=True)
def extractEvent(file_events, fctn, state, particle_list):
    """Run *fctn* over every event in *file_events* and collect the
    extracted quantity for the given *state* and *particle_list*.

    Returns the list of extracted values for one file.
    """
    extracted = []
    # progress bar to show event processing progress
    pbar = ProgressBar(widgets=['(', SimpleProgress(), ') ', Percentage(),
                                ' ', Bar(), ' ', AdaptiveETA()],
                       maxval=len(file_events)).start()
    for event in pbar(file_events):
        event_data = cuts.getEventData(event)  # usable form of the raw event
        if fctn == cuts.getAngle:
            # angle plots use only the first two particles
            extracted.append(fctn(event_data, state,
                                  particle_list[0], particle_list[1]))
        else:
            extracted.append(fctn(event_data, state, particle_list))
    return extracted
def progressbar(folder, daysbeforedeleted):
    """Return a started progress bar sized to the number of items in
    *folder* that were received on or before *daysbeforedeleted*.

    Exits with an explanatory message if the progressbar library is absent.
    """
    try:
        from progressbar import Bar, AdaptiveETA, Percentage, ProgressBar
    except ImportError:
        print(
            '''Please download the progressbar library from https://github.com/niltonvolpato/python-progressbar or run without the --progressbar parameter''')
        sys.exit(1)
    progressmax = sum(1 for item in folder.items()
                      if item.received <= daysbeforedeleted)
    bar = ProgressBar(widgets=[Percentage(), ' ', Bar(), ' ', AdaptiveETA()],
                      maxval=progressmax)
    bar.start()
    return bar
def attack(self, urls, forms):
    """Brute-force hidden files and directories on the target.

    Seeds the scan from already-discovered *urls*: paths are split into
    known dirs and pages, each known dir is probed with the chosen
    wordlist, and every newly discovered dir is explored recursively.
    """
    #Load ProgressBar
    widgets = [
        Percentage(), ' ', Bar(), ' ', ETA(), ' ',
        AdaptiveETA(), ' ', AdaptiveTransferSpeed(),
    ]
    # Load config file...
    user_input = raw_input("[*] Use [E]lite, [G]reat or [N]ormal buster payload?\n")
    if user_input == "E":
        self.CONFIG_FILE = "busterElitePayloads.txt"
        # maxval matches the payload count of the chosen wordlist
        self.pbar = ProgressBar(widgets=widgets, maxval=1292)
    elif user_input == "G":
        self.CONFIG_FILE = "busterGreatPayloads.txt"
        self.pbar = ProgressBar(widgets=widgets, maxval=7372)
    else:
        self.CONFIG_FILE = "busterPayloads.txt"
        self.pbar = ProgressBar(widgets=widgets, maxval=45524)
    self.payloads = self.loadPayloads(os.path.join(self.CONFIG_DIR, self.CONFIG_FILE))
    # First we make a list of uniq webdirs and webpages without parameters
    for res in urls:
        path = res.path
        if path.endswith("/"):
            if path not in self.known_dirs:
                self.known_dirs.append(path)
        else:
            if path not in self.known_pages:
                self.known_pages.append(path)
    # Then for each known webdirs we look for unknown webpages inside
    for current_dir in self.known_dirs:
        self.test_directory(current_dir)
    # Finally, for each discovered webdirs we look for more webpages
    while self.new_resources:
        current_res = self.new_resources.pop(0)
        if current_res.endswith("/"):
            # Mark as known then explore
            self.known_dirs.append(current_res)
            self.test_directory(current_res)
        else:
            self.known_pages.append(current_res)
    self.pbar.finish()