def example23():
    widgets = [BouncingBar(marker=RotatingMarker())]
    with ProgressBar(widgets=widgets, max_value=20, term_width=10) as progress:
        for i in range(20):
            progress.update(i)

    widgets = [BouncingBar(marker=RotatingMarker(), fill_left=False)]
    with ProgressBar(widgets=widgets, max_value=20, term_width=10) as progress:
        for i in range(20):
            progress.update(i)
def print_status_stream(title, stream):
    widgets = [title, FormatLabel(''), ' ', Percentage(), ' ', Bar(), ' ',
               RotatingMarker()]
    bar = None
    if sys.stderr.isatty():
        bar = progressbar.ProgressBar(widgets=widgets, max_value=255)

    def print_error(status):
        print(status['error'])

    def print_status(status):
        progress = status.get('progressDetail')
        if progress:
            widgets[1] = FormatLabel("%12s" % (status['status']))
            prog = int(round(255 * (progress['current'] / progress['total'])))
            if bar is not None:
                bar.update(prog)

    def print_unknown(status):
        print(status)

    for line in stream:
        try:
            status = json.loads(line.decode('utf8'))
        except json.JSONDecodeError:
            continue
        if 'error' in status:
            print_error(status)
        elif 'status' in status:
            print_status(status)
        else:
            print_unknown(status)
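# A hedged usage sketch for print_status_stream(): feeding it a docker-py
# low-level pull stream, which yields the byte-encoded JSON lines the function
# expects. The image name and client setup are illustrative assumptions, not
# part of the source above.
import docker

client = docker.APIClient()
stream = client.pull('alpine', tag='latest', stream=True)
print_status_stream('Pulling alpine ', stream)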
def download(number=-1, name="", save_dir='./'):
    """Download a pre-trained word vector.

    :param number: integer, default ``-1``
    :param name: str, default ``""``
    :param save_dir: str, default ``'./'``
    :return: file path of the downloaded file
    """
    df = load_datasets()

    if number > -1:
        row = df.iloc[[number]]
    elif name:
        row = df.loc[df["Name"] == name]

    url = ''.join(row.URL)
    if not url:
        print('The word vector you specified was not found. '
              'Please specify a correct name.')
        return None

    widgets = ['Test: ', Percentage(), ' ', Bar(marker=RotatingMarker()),
               ' ', ETA(), ' ', FileTransferSpeed()]
    pbar = ProgressBar(widgets=widgets)

    def dlProgress(count, blockSize, totalSize):
        if pbar.max_value is None:
            pbar.max_value = totalSize
            pbar.start()
        pbar.update(min(count * blockSize, totalSize))

    file_name = url.split('/')[-1]
    if not os.path.exists(save_dir):
        os.makedirs(save_dir)
    save_path = os.path.join(save_dir, file_name)
    path, _ = urlretrieve(url, save_path, reporthook=dlProgress)
    pbar.finish()
    return path
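# A minimal usage sketch for download() above; both calls are hypothetical,
# and the dataset name depends on what load_datasets() actually lists.
path = download(number=0)                 # first entry in the index
path = download(name='some-vector-name')  # hypothetical dataset name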
def main():
    ''' Controls general flow of operations '''
    # If it exists, use the cached data of hosts and items
    if os.path.isfile(move_items_file):
        with open(move_items_file) as infile:
            hosts = json.load(infile)
        logger.info('Cache loaded from file (%s)' % move_items_file)
    else:
        hosts = getItems()
        with open(move_items_file, 'w') as outfile:
            json.dump(hosts, outfile)
        logger.info('Cache written to file (%s)' % move_items_file)

    for host in hosts:
        logger.info('Getting trends data of host: %s' % host['name'])
        host['trends'] = list()
        host['trends_uint'] = list()
        if len(host['itens']) > 0:
            bar = ProgressBar(maxval=len(host['itens']),
                              widgets=[Percentage(), ReverseBar(), ETA(),
                                       RotatingMarker(), Timer()]).start()
            i = 0
            for item in host['itens']:
                temp = getTrends(hostname=host['name'], item=item)
                i += 1
                if args.loglevel.upper() != 'DEBUG':
                    bar.update(i)
                if temp['table'] == 'trends':
                    for value in temp['values']:
                        host['trends'].append('(%d, %d, %d, %d, %d, %d)' % (
                            int(item['itemid']), int(value[1]), int(value[2]),
                            int(value[3]), int(value[4]), int(value[5])))
                elif temp['table'] == 'trends_uint':
                    for value in temp['values']:
                        host['trends_uint'].append('(%d, %d, %d, %d, %d, %d)' % (
                            int(item['itemid']), int(value[1]), int(value[2]),
                            int(value[3]), int(value[4]), int(value[5])))
                else:
                    logger.warning('Unknown value type: %s' % temp['table'])
            bar.finish()
        '''
        Now, we send in blocks of up to ~1M values to generate the SQL files
        '''
        if len(host['trends']) > 0:
            createSQL(table='trends', values=host['trends'], name=host['name'])
        elif len(host['trends_uint']) > 0:
            createSQL(table='trends_uint', values=host['trends_uint'],
                      name=host['name'])
        else:
            logger.warning('No data from %s found to be sent.' % host['name'])
def example17():
    widgets = [FormatLabel('Animated Bouncer: value %(value)d - '),
               BouncingBar(marker=RotatingMarker())]
    pbar = ProgressBar(widgets=widgets)
    for i in pbar((i for i in range(180))):
        time.sleep(0.05)
def start(self, total, widgets=None):
    if widgets is None:
        widgets = [self.title, RotatingMarker(), ' ', Percentage(), ' ',
                   Bar(marker='='), ' ', ETA()]
    self.pbar = ProgressBar(widgets=widgets, maxval=total)
    self.pbar.start()
    self.i = 0
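# A hedged sketch of the update()/finish() counterparts this start() method
# implies; only start() appears in the source, so these bodies are assumptions
# based on the self.i counter it initializes.
def update(self, n=1):
    self.i += n
    self.pbar.update(self.i)

def finish(self):
    self.pbar.finish()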
def start_work(self):
    widgets = ['Step: ', Percentage(), ' ', Bar(marker=RotatingMarker()),
               ' ', ETA(), ' ', FileTransferSpeed()]
    progress_bar = ProgressBar(widgets=widgets, maxval=6)
    progress_bar.start()
    if self.server_name == 'huabao':
        self.__read_parameter_file(
            MULTIFACTOR_PARAMETER_FILE_PATH_TEMPLATE % self.date_str2,
            'StkIntraDayStrategy')
        self.__read_parameter_file(LEADLAG_PARAMETER_FILE_PATH,
                                   'StkIntraDayLeadLagStrategy')
        self.__read_basketfile()
    progress_bar.update(1)
    self.__download_parameter_file()
    progress_bar.update(2)
    self.__divide_tickers()
    progress_bar.update(3)
    self.__modify_cfg_local()
    progress_bar.update(4)
    self.__upload_tradeplat_file()
    progress_bar.update(5)
    if self.server_name == 'huabao':
        self.__modify_database()
        self.__save_strategy_intraday_parameter()
    self.__backup_files()
    self.__send_email()
    progress_bar.update(6)
    progress_bar.finish()
def convert_packagelistingacl(pkg1_sess, pkg2_sess):
    ''' Convert the PackageListingAcl from pkgdb1 to pkgdb2. '''
    cnt = 0
    total = pkg1_sess.query(P1PersonPackagelistingAcl).count()
    done = set()
    widgets = ['ACLs: ', Percentage(), ' ', Bar(marker=RotatingMarker()),
               ' ', ETA()]
    pbar = ProgressBar(widgets=widgets, maxval=total).start()

    for pkg in pkg1_sess.query(P1PersonPackagelistingAcl).all():
        if pkg.acl in ('build', 'checkout'):
            continue
        person = pkg1_sess.query(P1PersonPackagelisting).filter(
            P1PersonPackagelisting.id == pkg.personpackagelistingid).one()
        new_pkglistacl = model.PackageListingAcl(
            fas_name=person.username,
            packagelisting_id=person.packagelistingid,
            acl=pkg.acl,
            status=STATUS[pkg.statuscode])
        try:
            pkg2_sess.add(new_pkglistacl)
            pkg2_sess.commit()
        except sqlalchemy.exc.IntegrityError, err:  # Python 2 except syntax
            # print err
            pkg2_sess.rollback()
        cnt += 1
        pbar.update(cnt)
def proxy_passive_to_active():
    """
    proxy status 5 = active
    proxy status 6 = passive
    """
    logger.info('Change all proxies to active')
    proxys = zapi.proxy.get(output=['shorten', 'host'], filter={'status': 6})
    if len(proxys) == 0:
        logger.info('Done')
        return
    bar = ProgressBar(maxval=len(proxys),
                      widgets=[Percentage(), ReverseBar(), ETA(),
                               RotatingMarker(), Timer()]).start()
    i = 0
    for x in proxys:
        i += 1
        proxyid = x['proxyid']
        result = zapi.proxy.update(proxyid=proxyid, status=5)
        logger.echo = False
        logger.debug('Changed from passive to active proxy: %s' % (x['host']))
        bar.update(i)
    bar.finish()
    logger.echo = True
    logger.info('Done')
    return
def hosts_disable_all():
    """
    host status 0 = enabled
    host status 1 = disabled
    """
    logger.info('Disabling all hosts, in blocks of 1000')
    hosts = zapi.host.get(output=['hostid'], search={'status': 0})
    # number of 1000-host blocks to process
    maxval = int(ceil(len(hosts) / 1000.0))
    bar = ProgressBar(maxval=maxval,
                      widgets=[Percentage(), ReverseBar(), ETA(),
                               RotatingMarker(), Timer()]).start()
    for i in xrange(maxval):
        block = hosts[:1000]
        del hosts[:1000]
        result = zapi.host.massupdate(hosts=[x for x in block], status=1)
        bar.update(i + 1)
    bar.finish()
    logger.info('Done')
    return
def fit(self, X, y):
    """Fit."""
    X = X[(self.mdlNr*5 % self.subsample_data)::self.subsample_data]
    y = y[(self.mdlNr*5 % self.subsample_data)::self.subsample_data]
    if self.applyPreds:
        if self.jump is not None:
            X = delay_preds(X, delay=self.delay/self.subsample_data,
                            skip=self.skip/self.subsample_data,
                            jump=self.jump/self.subsample_data)
        else:
            X = delay_preds(X, delay=self.delay/self.subsample_data,
                            skip=self.skip/self.subsample_data)
    self.clf = []
    widgets = ['Training : ', Percentage(), ' ',
               Bar(marker=RotatingMarker()), ' ', ETA(), ' ']
    pbar = ProgressBar(widgets=widgets, maxval=6)
    pbar.start()

    # training separate models for each event
    for col in range(6):
        self.clf.append(xgb.XGBClassifier(n_estimators=self.n_estimators,
                                          max_depth=self.max_depth,
                                          subsample=self.subsample,
                                          nthread=self.nthread))
        self.clf[col].fit(X, y[:, col])
        pbar.update(col + 1)
    pbar.finish()
def dir_walk(target_dir=None, quiet=None):
    '''recursively walk a directory containing cti and return the stats'''
    files = find_files('*.xml', resolve_path(target_dir))
    if not quiet:
        widgets = ['Directory Walk: ', Percentage(), ' ',
                   Bar(marker=RotatingMarker()), ' ', ETA()]
        progress = ProgressBar(widgets=widgets, maxval=len(files)).start()
    cooked_stix_objs = {'campaigns': set(), 'courses_of_action': set(),
                        'exploit_targets': set(), 'incidents': set(),
                        'indicators': set(), 'threat_actors': set(),
                        'ttps': set()}
    cooked_cybox_objs = dict()
    for i, file_ in enumerate(files):
        try:
            stix_package = file_to_stix(file_)
            (raw_stix_objs, raw_cybox_objs) = process_stix_pkg(stix_package)
            for k in raw_stix_objs.keys():
                cooked_stix_objs[k].update(raw_stix_objs[k])
            for k in raw_cybox_objs.keys():
                if k not in cooked_cybox_objs.keys():
                    cooked_cybox_objs[k] = set()
                cooked_cybox_objs[k].update(raw_cybox_objs[k])
            if not quiet:
                progress.update(i)
        except Exception:
            continue
    if not quiet:
        progress.finish()
    return (cooked_stix_objs, cooked_cybox_objs)
def copy_bar(src, home):
    logging.info('Copying %s to %s', src, home)
    size = os.path.getsize(src)
    t = threading.Thread(target=copy, args=(src, home))
    t.setDaemon(True)
    t.start()
    name = os.path.basename(src)
    # wait (up to ~2s) for the destination file to appear
    tm = 0
    while True:
        if os.path.isfile(os.path.join(home, name)):
            break
        tm += 1
        time.sleep(0.2)
        if tm == 10:
            break
    widgets = [name + ' ', Percentage(), ' ', Bar(marker=RotatingMarker()),
               ' ', ETA(), ' ', FileTransferSpeed()]
    pbar = ProgressBar(widgets=widgets, maxval=size).start()
    while True:
        cur_size = os.path.getsize(os.path.join(home, name))
        pbar.update(cur_size)
        if cur_size == size:
            break
    pbar.finish()
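# Usage sketch for copy_bar(); the paths are hypothetical, and copy() (the
# thread target above) is assumed to behave like shutil.copy.
copy_bar('backup.tar.gz', '/home/user')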
def fit(self, X, y):
    """Fit."""
    self.best_params = None
    if self.mean != 'simple':
        if self.verbose:
            widgets = ['Training : ', Percentage(), ' ',
                       Bar(marker=RotatingMarker()), ' ', ETA(), ' ']
            self.pbar = ProgressBar(widgets=widgets,
                                    maxval=(self.max_evals *
                                            len(self.param_space)))
            self.pbar.start()
        objective = lambda w: -np.mean(
            [roc_auc_score(y[:, col],
                           self.calcMean(X[:, col::6], w, training=True))
             for col in range(6)])
        self.best_params = fmin(objective, self.param_space,
                                algo=tpe.suggest, max_evals=self.max_evals)
        if self.verbose:
            print(self.best_params)
    else:
        self.best_params = None
def deleteHostsByHostgroup(groupname):
    hostgroup = zapi.hostgroup.get(output=['groupid'],
                                   filter={'name': groupname})
    if len(hostgroup) != 1:
        logger.error('Hostgroup not found: %s\n\tFound this: %s' %
                     (groupname, hostgroup))
        return
    groupid = int(hostgroup[0]['groupid'])
    hosts = zapi.host.get(output=['name', 'hostid'], groupids=groupid)
    total = len(hosts)
    logger.info('Hosts found: %d' % (total))
    if (args.run):
        x = 0
        bar = ProgressBar(maxval=total,
                          widgets=[Percentage(), ReverseBar(), ETA(),
                                   RotatingMarker(), Timer()]).start()
        logger.echo = False
        for host in hosts:
            x = x + 1
            bar.update(x)
            logger.debug('(%d/%d) >> Removing >> %s' % (x, total, host))
            out = zapi.globo.deleteMonitors(host['name'])
        bar.finish()
        logger.echo = True
    else:
        logger.info('No host removed due to --no-run arg. Full list of hosts:')
        for host in hosts:
            logger.info('%s' % host['name'])
    return
def denominator(self, batchsize):
    acth = cp.dev_tensor_float_cm([self.weight.shape[1], batchsize])
    actv = cp.dev_tensor_float_cm([self.weight.shape[0], batchsize])
    row = cp.dev_tensor_float([batchsize])
    cp.fill(acth, 0.0)
    cp.fill(actv, 0.0)
    cp.fill(row, 0.0)
    n = acth.shape[0]
    nmax = 2**n
    if nmax % batchsize != 0:
        print "Error: 2**n=%d must be divisible by batchsize=%d!" % (
            nmax, batchsize)
        sys.exit(1)
    L = []
    widgets = ["Denominator: ", Percentage(), ' ',
               Bar(marker=RotatingMarker()), ' ', ETA()]
    pbar = ProgressBar(widgets=widgets, maxval=nmax)
    for i in xrange(0, nmax, acth.shape[1]):
        cp.set_binary_sequence(acth, i)
        L.append(self.partialsum(acth, actv, row))
        if (i / acth.shape[1]) % 100 == 0:
            pbar.update(i)
    pbar.finish()
    for m in [actv, acth, row]:
        m.dealloc()
    return math.fsum(L)
def main(show_progress, *args, **kwargs):
    # Set up progress bar.
    if show_progress:
        progress_bar = ProgressBar(maxval=len(PACKAGES), widgets=[
            'Progress: ', Percentage(), ' ', Bar(marker=RotatingMarker()),
            ' ', ETA(), ' Fetched posts for ', Counter(),
            ' / ' + str(len(PACKAGES)) + ' packages.'])
        progress_bar.start()

    # Fetch statistics for posts related to each tag
    for package_count, package in enumerate(PACKAGES, start=1):
        records = (Tag.select()
                   .join(PostTag, on=(Tag.id == PostTag.tag_id))
                   .join(Post, on=(Post.id == PostTag.post_id))
                   .where(Tag.tag_name == package)
                   .select(Tag.tag_name, Post.title, Post.creation_date,
                           Post.answer_count, Post.comment_count,
                           Post.favorite_count, Post.score, Post.view_count)
                   .dicts())
        yield records
        if show_progress:
            progress_bar.update(package_count)

    if show_progress:
        progress_bar.finish()
    # returning ends the generator; raising StopIteration inside a generator
    # body is an error under PEP 479 (Python 3.7+)
    return
def fit(self, X, y, Xtest=None, ytest=None):
    """Fit."""
    input_dim = X.shape[1]

    # set different data preparation schemes based on what kind of NN it is
    layers = [list(i.keys())[0] for i in self.architecture]
    self.isCNN = 'Conv' in layers
    self.isRecurrent = 'GRU' in layers or 'LSTM' in layers
    if self.isCNN:
        self.addDelay = delay_preds
        self.training_params['num_strides'] = self.delay // self.skip
    elif self.isRecurrent:
        self.addDelay = delay_preds_2d
    else:
        input_dim *= self.delay / self.skip
        input_dim = int(input_dim)
        self.addDelay = delay_preds

    # create the model
    self.model = buildNN(self.architecture, self.training_params, input_dim)

    widgets = ['Training : ', Percentage(), ' ',
               Bar(marker=RotatingMarker()), ' ', ETA(), ' ']
    pbar = ProgressBar(widgets=widgets, maxval=self.majorEpochs)
    pbar.start()

    # train the model on a portion of training data;
    # that portion is changed each majorEpoch
    for majorEpoch in range(self.majorEpochs):
        startingPoint = majorEpoch % self.partsTrain or self.mdlNr % self.partsTrain
        if self.jump is not None:
            trainData = self.addDelay(X, delay=self.delay, skip=self.skip,
                                      subsample=self.partsTrain,
                                      start=startingPoint, jump=self.jump)
        else:
            trainData = self.addDelay(X, delay=self.delay, skip=self.skip,
                                      subsample=self.partsTrain,
                                      start=startingPoint)
        if self.isCNN:
            trainData = trainData.reshape((trainData.shape[0], 1,
                                           trainData.shape[1], 1))
        targets = y[startingPoint::self.partsTrain]
        trainData = trainData[::self.subsample]
        targets = targets[::self.subsample]

        self.model.fit(trainData, targets, nb_epoch=self.smallEpochs,
                       batch_size=512, verbose=0, show_accuracy=True)
        trainData = None
        pbar.update(majorEpoch)

        if self.verbose and majorEpoch % self.checkEveryEpochs == 0:
            print("Total epochs: %d" % (self.smallEpochs * (majorEpoch + 1)))
            if Xtest is not None and ytest is not None:
                pred = self._predict_proba_train(Xtest)
                score = np.mean(roc_auc_score(ytest[0::self.partsTest], pred))
                print("Test AUC : %.5f" % (score))
                pred = None

    if self.verbose:
        print('Training finished after %d epochs' %
              (self.smallEpochs * (majorEpoch + 1)))
def progress_bar(self, file_path, iterations):
    widgets = ['fuzzing ' + file_path + ' ', Percentage(), ' ',
               Bar(marker=RotatingMarker()), ' ', ETA(), ' ']
    return ProgressBar(widgets=widgets, maxval=iterations)
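# A minimal sketch of driving the bar returned by progress_bar() above;
# fuzz_once() is a hypothetical stand-in for the real per-iteration work.
def fuzz_with_progress(fuzzer, file_path, iterations):
    bar = fuzzer.progress_bar(file_path, iterations).start()
    for i in range(iterations):
        fuzz_once(file_path)
        bar.update(i + 1)
    bar.finish()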
def __init__(self):
    self.stop_running = threading.Event()
    self.progress_thread = threading.Thread(target=self.init_progress)
    self.progress_thread.daemon = True
    spinner = RotatingMarker()
    spinner.INTERVAL = datetime.timedelta(milliseconds=100)
    self.widgets = [spinner, ' ', Percentage(), ' ',
                    FormatLabel('Calculating patch requirements'), ' ',
                    Bar(), ' ', FormatLabel('')]
    self.progress = ProgressBar(redirect_stdout=True, widgets=self.widgets,
                                max_value=100)
    self.progress.update(0)
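# A hedged sketch of the init_progress() thread target referenced above; its
# real body is not in the source. This version just forces periodic redraws
# (keeping the spinner rotating) until stop_running is set.
def init_progress(self):
    while not self.stop_running.is_set():
        self.progress.update(force=True)
        time.sleep(0.1)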
def example1():
    widgets = ['Test: ', Percentage(), ' ', Bar(marker=RotatingMarker()),
               ' ', ETA(), ' ', FileTransferSpeed()]
    pbar = ProgressBar(widgets=widgets, maxval=10000000).start()
    for i in range(1000000):
        # do something
        pbar.update(10*i+1)
    pbar.finish()
def index_with_dupes(values_list, unique_together=2, model_number_i=0,
                     serial_number_i=1, verbosity=1):
    '''Create dict from values_list with first N values as a compound key.

    Default N (number of columns assumed to be "unique_together") is 2.

    >>> index_with_dupes([(1,2,3), (5,6,7), (5,6,8), (2,1,3)]) == ({(1, 2): (1, 2, 3), (2, 1): (2, 1, 3), (5, 6): (5, 6, 7)}, {(5, 6): [(5, 6, 7), (5, 6, 8)]})
    True
    '''
    try:
        N = values_list.count()
    except:
        N = len(values_list)
    if verbosity:
        print 'Indexing %d values_lists in a queryset or a sequence of Django model instances (database table rows).' % N
    index, dupes = {}, {}
    pbar = None
    if verbosity and N > min(1000000, max(0, 100000**(1. / verbosity))):
        widgets = ['%d rows: ' % N, Percentage(), ' ', RotatingMarker(),
                   ' ', Bar(), ' ', ETA()]
        pbar = ProgressBar(widgets=widgets, maxval=len(values_list)).start()
    rownum = 0
    for row in values_list:
        normalized_key = [str(row[model_number_i]).strip(),
                          str(row[serial_number_i]).strip()]
        normalized_key += [i for i in range(unique_together)
                           if i not in (serial_number_i, model_number_i)]
        normalized_key = tuple(normalized_key)
        if normalized_key in index:
            # need to add the first nondupe before we add the dupes to the list
            if normalized_key not in dupes:
                dupes[normalized_key] = [index[normalized_key]]
            dupes[normalized_key] = dupes[normalized_key] + [row]
            if verbosity > 2:
                print 'Duplicate model-serial number found. Here are all the rows that match this key:'
                print dupes[normalized_key]
        else:
            index[normalized_key] = row
        if pbar:
            pbar.update(rownum)
        rownum += 1
    if pbar:
        pbar.finish()
    if verbosity:
        print 'Found %d duplicate model-serial pairs in the %d records or %g%%' % (
            len(dupes), len(index), len(dupes) * 100. / (len(index) or 1.))
    return index, dupes
def pBar(size, name=""):
    # print(Back.CYAN + Fore.BLACK + self.name + Fore.RESET + Back.RESET + Style.RESET_ALL)
    widgets = [Back.CYAN + Fore.BLACK, " %s :" % name, Percentage(), ' ',
               Bar(marker=RotatingMarker()), ' ', ETA(), ' ',
               FileTransferSpeed(), Back.RESET + Style.RESET_ALL]
    if size == 1:
        size += 1  # avoid maxval=0 below
    pbar = ProgressBar(widgets=widgets, maxval=size - 1).start()
    return pbar
def p_bar(self):
    widgets = ['Download: ', Percentage(), ' ', Bar(marker=RotatingMarker())]
    pbar = ProgressBar(widgets=widgets, maxval=100).start()
    return pbar
def run(self):
    widgets = ['Downloading: ', Percentage(), ' ',
               Bar(marker=RotatingMarker())]
    p_bar = ProgressBar(widgets=widgets, maxval=self.max_len).start()
    # `count` is a progress counter shared with the downloader threads
    # (defined outside this snippet); poll it until the download completes
    while self.max_len - count:
        self.lock.acquire()
        p_bar.update(count)
        self.lock.release()
    p_bar.finish()
def __init__(self):
    self.widgets = ['Generating new seeds with QuickFuzz ', Percentage(),
                    ' ', Bar(marker=RotatingMarker()), ' ', ETA(), ' ']
    self.progress_bar = ProgressBar(widgets=self.widgets)
    self.outpath = None
def get_progress_bar(num_reads):
    bar_format = [RotatingMarker(), " ", SimpleProgress(), Bar(),
                  Percentage(), " ", ETA()]
    return ProgressBar(maxval=num_reads, widgets=bar_format).start()
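# Usage sketch for get_progress_bar(); `reads` and the per-read work are
# hypothetical stand-ins for the caller's data.
reads = range(1000)
bar = get_progress_bar(len(reads))
for i, read in enumerate(reads, start=1):
    # process(read)  # hypothetical per-read work
    bar.update(i)
bar.finish()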
def example25():
    widgets = ['Test: ', Percentage(), ' ', Bar(marker=RotatingMarker()),
               ' ', ETA(), ' ', FileTransferSpeed()]
    pbar = ProgressBar(widgets=widgets, max_value=1000,
                       redirect_stdout=True).start()
    for i in range(100):
        # do something
        pbar += 10
    pbar.finish()
def example17():
    widgets = [FormatLabel('Animated Bouncer: value %(value)d - '),
               BouncingBar(marker=RotatingMarker())]
    pbar = ProgressBar(widgets=widgets)
    for i in range(24):
        time.sleep(0.01)
        pbar.update(i + 1)
    pbar.finish()
def run_simumation(N0, dim, tau, beta, sigma, nu, rho, total_population,
                   simulation_end_time, initialInfectedCommunity):
    # Steps
    steps = int(simulation_end_time * (1.0 / tau))

    # Compute the initial population distribution
    N = compute_population_at_equilibrium(N0, dim, sigma, nu, rho,
                                          total_population)
    print 'average population per cellid: ', np.sum(N, axis=0)

    # init the progress bar
    widgets = ['Simulation: ', Percentage(), ' ',
               Bar(marker=RotatingMarker()), ' ', ETA()]
    pbar = ProgressBar(widgets=widgets, maxval=steps).start()

    # Initial population in each state
    S = N.copy()
    I = np.zeros((dim, dim))
    R = np.zeros((dim, dim))

    # Infect some nodes
    initital_infection = 100.0
    S[initialInfectedCommunity, initialInfectedCommunity] = \
        S[initialInfectedCommunity, initialInfectedCommunity] - initital_infection
    I[initialInfectedCommunity, initialInfectedCommunity] = initital_infection

    # Stack the different S.I.R. variables in one vector
    Y = S.reshape(dim * dim).tolist()
    Y = np.append(Y, I.reshape(dim * dim).tolist())
    Y = np.append(Y, R.reshape(dim * dim).tolist())

    Sr = []
    Ir = []
    Rr = []
    InfectionMatrix = np.zeros((steps, 255))

    for step in xrange(steps):
        # gamma is expected to come from the enclosing module scope
        Ytemp = stoc_eqs(Y, tau, beta, gamma, sigma, nu, rho, dim)
        Ytemp = Ytemp.reshape((3, dim * dim))
        Stemp = Ytemp[0].reshape((dim, dim))
        Itemp = Ytemp[1].reshape((dim, dim))
        Rtemp = Ytemp[2].reshape((dim, dim))
        Sr.append(Stemp.sum())
        Ir.append(Itemp.sum())
        Rr.append(Rtemp.sum())
        InfectionMatrix[step, :] = Itemp.sum(axis=0)
        Y = Ytemp
        pbar.update(step)
    pbar.finish()
    return Sr, Ir, Rr, InfectionMatrix