def startUpload(extracted, compressed, pathname):
    print("")
    for file in extracted:
        filesize = path.getsize("extracted/" + pathname + "/" + file)
        bar = ProgressBar(widgets=[
            'Uploading ' + file + ' to gameserver: ', Percentage(), ' ',
            Bar("█"), ' ', ETA(), ' | ', Timer(), ' | Speed: ',
            FileTransferSpeed()
        ], maxval=filesize)
        gameserver.put("extracted/" + pathname + "/" + file,
                       GAMESERVER[3] + file,
                       callback=lambda x, y: bar.update(x))
        bar.finish()
    for file1 in compressed:
        filesize = path.getsize(file1[0])
        bar = ProgressBar(widgets=[
            'Uploading ' + file1[1] + ' to webserver: ', Percentage(), ' ',
            Bar("█"), ' ', ETA(), ' | ', Timer(), ' | Speed: ',
            FileTransferSpeed()
        ], maxval=filesize)
        webserver.put(file1[0], WEBSERVER[3] + file1[1],
                      callback=lambda x, y: bar.update(x))
        bar.finish()
def progress_bar(name, maxval):
    """Manages a progress bar for a download.

    Parameters
    ----------
    name : str
        Name of the downloaded file.
    maxval : int
        Total size of the download, in bytes.
    """
    if maxval is not UnknownLength:
        widgets = [
            '{}: '.format(name), Percentage(), ' ',
            Bar(marker='=', left='[', right=']'), ' ',
            ETA(), ' ', FileTransferSpeed()
        ]
    else:
        widgets = ['{}: '.format(name), ' ', Timer(), ' ', FileTransferSpeed()]
    bar = ProgressBar(widgets=widgets, maxval=maxval, fd=sys.stdout).start()
    try:
        yield bar
    finally:
        bar.update(maxval)
        bar.finish()
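# Usage sketch (an assumption, not part of the original source): the yield
# inside try/finally suggests progress_bar() is meant to be driven through
# contextlib.contextmanager from a download loop. The response object, output
# path, and chunk size below are hypothetical.
from contextlib import contextmanager

managed_progress_bar = contextmanager(progress_bar)

def save_response(response, out_path, total_size):
    # total_size is the expected byte count reported by the server
    with managed_progress_bar(out_path, total_size) as bar, \
            open(out_path, 'wb') as out:
        done = 0
        for chunk in response.iter_content(chunk_size=8192):
            out.write(chunk)
            done += len(chunk)
            bar.update(min(done, total_size))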
def call_steam_update(cmd, dir=None,
                      progress_reg=r"Update state \(0x61\) downloading",
                      progressbar=True):
    if dir is None:
        dir = getScriptPath()
    events = {}
    if progressbar:
        events = {progress_reg: call_progressbar}
    ret = pexpect.run(dir + "/" + cmd, events=events, extra_args=[
        False,
        [
            'Download Update: ', Percentage(), ' ', Bar(), ' ', ETA(), ' ',
            FileTransferSpeed()
        ],
        None
    ], withexitstatus=True)
    print(ret)
    return True
def start_work(self):
    widgets = ['Step: ', Percentage(), ' ', Bar(marker=RotatingMarker()),
               ' ', ETA(), ' ', FileTransferSpeed()]
    progress_bar = ProgressBar(widgets=widgets, maxval=6)
    progress_bar.start()
    if self.server_name == 'huabao':
        self.__read_parameter_file(
            MULTIFACTOR_PARAMETER_FILE_PATH_TEMPLATE % self.date_str2,
            'StkIntraDayStrategy')
        self.__read_parameter_file(LEADLAG_PARAMETER_FILE_PATH,
                                   'StkIntraDayLeadLagStrategy')
        self.__read_basketfile()
    progress_bar.update(1)
    self.__download_parameter_file()
    progress_bar.update(2)
    self.__divide_tickers()
    progress_bar.update(3)
    self.__modify_cfg_local()
    progress_bar.update(4)
    self.__upload_tradeplat_file()
    progress_bar.update(5)
    if self.server_name == 'huabao':
        self.__modify_database()
        self.__save_strategy_intraday_parameter()
        self.__backup_files()
        self.__send_email()
    progress_bar.update(6)
    progress_bar.finish()
def upload_file(filename):
    global pbar
    repo = Repository(testsettings.FEDORA_ROOT_NONSSL, testsettings.FEDORA_USER,
                      testsettings.FEDORA_PASSWORD)
    filesize = os.path.getsize(filename)
    widgets = [
        'Upload: ', Percentage(), ' ', Bar(), ' ', ETA(), ' ',
        FileTransferSpeed()
    ]
    # set initial progressbar size based on file; will be slightly larger
    # because of multipart boundary content
    pbar = ProgressBar(widgets=widgets, maxval=filesize).start()

    def upload_callback(monitor):
        # update the progressbar to actual maxval (content + boundary)
        pbar.maxval = len(monitor)
        # update current status
        pbar.update(monitor.bytes_read)

    with open(filename, 'rb') as f:
        upload_id = repo.api.upload(f, callback=upload_callback)
    pbar.finish()
    print(upload_id)
def Copy_Try(self):
    pwd = listdir('.')
    Files = [i for i in pwd if path.isfile(i)]
    Directories = [i for i in pwd if path.isdir(i)]
    widgets = [
        'Checking: ', Percentage(), ' ',
        Bar(marker='0', left='[', right=']'), ' ', ETA(), ' ',
        FileTransferSpeed()
    ]
    bar = ProgressBar(widgets=widgets, maxval=len(pwd))
    x = 1
    bar.start()
    for dirname in Directories:
        try:
            copytree(dirname, self.dest + str(self.usbNum) + sep + dirname)
            bar.update(x)
            x += 1
        except:
            bar.update(x)
            x += 1
    for file in Files:
        try:
            copyfile(file, self.dest + str(self.usbNum) + sep + file)
            bar.update(x)
            x += 1
        except:
            bar.update(x)
            x += 1
    bar.finish()
def download(url, fileName, progress=True):
    '''
    Download a file from http either with or without a progress bar

    url = url to download
    fileName = filename to save to
    progress = display progress bar (True/False)
    '''
    if progress is True:
        widgets = [
            'Test: ', Percentage(), ' ', Bar(), ' ', ETA(), ' ',
            FileTransferSpeed()
        ]
        pbar = ProgressBar(widgets=widgets)

        def dlProgress(count, blockSize, totalSize):
            if pbar.maxval is None:
                pbar.maxval = totalSize
                pbar.start()
            pbar.update(min(count * blockSize, totalSize))

        urllib.urlretrieve(url, fileName, reporthook=dlProgress)
        pbar.finish()
    else:
        urllib.urlretrieve(url, fileName)
def download(self, out_dir='', filename=None, show_progress=True):
    if filename is None:
        filename = self.make_filename(base=out_dir)
    if show_progress:
        widgets = [
            Percentage(), ' ', Bar(), ' ', ETA(), ' ', FileTransferSpeed()
        ]
        self.pbar = ProgressBar(widgets=widgets)
        try:
            self.pbar.maxval = int(self.meta.get('fileSize'))
        except:
            pass
        reporthook = self._on_progress
        self.pbar.start()
    else:
        reporthook = noop
    try:
        makedirs(path.dirname(filename))
    except OSError as err:
        if err.errno != 17:  # File already exists
            print('{}: {}'.format(err.strerror, err.filename))
            sys.exit(err.errno)
    urlretrieve(self.location, filename=filename, reporthook=reporthook)
    if show_progress:
        self.pbar.finish()
def __init__(self):
    self.progress_bar = ProgressBar(widgets=[
        Percentage(),
        Bar(marker='=', left='[', right=']'), ' ',
        FileTransferSpeed(), ' ',
        Timer(format='%s')
    ])
def _post_file(self, endpoint, params, data, pb=True):
    # Register the streaming http handlers with urllib2
    register_openers()
    widgets = [
        'Transferring Model: ', Bar(), Percentage(), ' ', ETA(), ' ',
        FileTransferSpeed()
    ]
    pbar = ProgressBar(widgets=widgets).start()

    def progress(param, current, total):
        if not param:
            return
        pbar.maxval = total
        pbar.update(current)

    # headers contains the necessary Content-Type and Content-Length
    # datagen is a generator object that yields the encoded parameters
    f = tempfile.NamedTemporaryFile(mode='wb', prefix='tmp_yhat_', delete=False)
    model_name = data['modelname'] + ".yhat"
    try:
        data = json.dumps(data)
    except UnicodeDecodeError as e:
        raise Exception(
            "Could not serialize into JSON. String is not utf-8 encoded `%s`"
            % str(e.args[1]))
def save_video(url, directory, filename, extension):
    path = os.path.join(directory, "%s.%s" % (filename, extension))
    temp = "%s.tmp" % path
    # Temp file already exists, so we'll bin it and try again
    if os.path.exists(temp):
        os.remove(temp)
        os.remove(path)
    if not os.path.exists(path):
        response = session.get(url, stream=True)
        with open(temp, 'w') as f:
            f.write('\n')
        with open(path, 'wb') as f:
            total_length = int(response.headers.get('Content-Length'))
            widgets = [
                'Saving %s: ' % path, Percentage(), ' ', Bar(), ' ', ETA(),
                ' ', FileTransferSpeed()
            ]
            pbar = ProgressBar(widgets=widgets, maxval=total_length).start()
            bytes_downloaded = 0
            for chunk in response.iter_content(chunk_size=1024):
                if chunk:  # filter out keep-alive new chunks
                    f.write(chunk)
                    f.flush()
                    bytes_downloaded += len(chunk)
                    pbar.update(bytes_downloaded)
            pbar.finish()
        # Delete temp file
        os.remove(temp)
def download(url, build_name, sha256_digest):
    logger.debug("Downloading file from %s" % url)
    r = requests.get(url, stream=True)
    r.raise_for_status()
    size = int(r.headers['Content-Length'].strip())
    total_bytes = 0
    widgets = [
        build_name, ": ",
        Bar(marker="|", left="[", right=" "),
        Percentage(), " ", FileTransferSpeed(),
        "] of {0}MB".format(str(round(size / 1024 / 1024, 2))[:4])
    ]
    pbar = ProgressBar(widgets=widgets, maxval=size).start()
    m = hashlib.sha256()
    file_data = []
    for buf in r.iter_content(1024):
        if buf:
            file_data.append(buf)
            m.update(buf)
            total_bytes += len(buf)
            pbar.update(total_bytes)
    pbar.finish()
    if m.hexdigest() != sha256_digest:
        raise Exception("Digest mismatch for url %s" % url)
    return b''.join(file_data)
def __init__(self, remote_addr, mem_size, progressbar=False,
             recv_size=1048576, sock_timeout=1):
    """
    :type remote_addr: str
    :param remote_addr: hostname or ip address of target server
    :type mem_size: int
    :param mem_size: target server memory size in bytes
    :type progressbar: bool
    :param progressbar: ncurses progress bar toggle
    :type recv_size: int
    :param recv_size: transfer socket max receive size
    :type sock_timeout: int
    :param sock_timeout: transfer socket receive timeout
    """
    self.mem_size = mem_size
    self.progressbar = progressbar
    self.recv_size = recv_size
    self.sock_timeout = sock_timeout
    self.padding_percentage = 0.03
    self.max_size = self.max_size(mem_size, self.padding_percentage)
    self.update_interval = 5
    self.update_threshold = recv_size * self.update_interval
    self.remote_addr = remote_addr
    self.transfered = 0
    self.progress = 0
    self.widgets = [' {0} '.format(remote_addr), Percentage(), ' ', Bar(),
                    ' ', ETA(), ' ', FileTransferSpeed()]
    self.sock = None
    self.outfile = None
    self.bar = None
def simulate(self, scheme, n=5):
    """
    This is the function that executes all functions necessary to run the
    SPH simulation after initialisation

    :param scheme: str
        This is a time-stepping scheme - can either be forward euler or
        predictor corrector method
    :param n: int
        Set the interval at which to save frames to an animation
    :return: int
    """
    t = self.t0
    time_array = [t]
    self.allocate_to_grid()
    cnt = 0
    filename = 'datafile_3.pkl'
    file = open(filename, 'wb')
    # generate a progressbar
    widgets = ['Progress: ', Percentage(), ' ', Bar('$'), ' ', Timer(), ' ',
               ETA(), ' ', FileTransferSpeed()]
    pbar = ProgressBar(widgets=widgets,
                       maxval=int(self.t_max / self.dt) + 1).start()
    i = 0
    count = 0
    # Measure max speed for printing results
    max_speed = 0.0
    while t < self.t_max:
        cnt = cnt + 1
        smooth = False
        # Smooth after some time steps
        if cnt % 10 == 0:
            smooth = True
        scheme(self.particle_list, smooth=smooth)
        # Calculate max speed for printing results
        cur_max_speed = np.amax(np.array(
            [np.linalg.norm(part.v) for part in self.particle_list]))
        if cur_max_speed > max_speed:
            max_speed = cur_max_speed
        print('Time', t)
        t = t + self.dt
        # save file every n dt
        if cnt % n == 0:
            pickle.dump(self.particle_list, file, -1)
            pickle.dump(t, file)
            count += 2
            with open('countnum.txt', 'w') as f:
                f.write(str(count))
        time_array.append(t)
        i += 1
        pbar.update(i)
    file.close()
    pbar.finish()
    print("")
    print("RESULTS OF SIMULATION")
    print("==================================================")
    # From the last particle_list, find out how many particles have leaked
    if len(self.particle_list) < self.initial_particle_size:
        print("WARNING: You have some particle leakage")
        print("Number of particles leaked: %d"
              % (self.initial_particle_size - len(self.particle_list)))
    elif len(self.particle_list) == self.initial_particle_size:
        print("+ You had no particle leakage")
    print("+ Maximum speed of particles: %.2f m/s" % max_speed)
    print("")
def copy_bar(src, home):
    logging.info('Copying %s to %s', src, home)
    size = os.path.getsize(src)
    t = threading.Thread(target=copy, args=(src, home))
    t.setDaemon(True)
    t.start()
    tm = 0
    while True:
        if os.path.isfile(os.path.join(home, src)):
            break
        tm += 1
        time.sleep(0.2)
        if tm == 10:
            break
    name = os.path.basename(src)
    widgets = [
        name + ' ', Percentage(), ' ', Bar(marker=RotatingMarker()), ' ',
        ETA(), ' ', FileTransferSpeed()
    ]
    pbar = ProgressBar(widgets=widgets, maxval=size).start()
    while True:
        cur_size = os.path.getsize(os.path.join(home, name))
        pbar.update(cur_size)
        if cur_size == size:
            break
    pbar.finish()
def do_histogram(dist, shape, mu=1):
    print('reading %s' % dist._v_name)
    print(shape)
    X1 = np.zeros(shape, dtype=np.double)
    X2 = np.zeros(shape, dtype=np.double)
    X3 = np.zeros(shape, dtype=np.double)
    X4 = np.zeros(shape, dtype=np.double)
    print('computing ... ')
    widgets = [FileTransferSpeed(), ' ', Bar(), ' ', Percentage(), ' ', ETA()]
    pbar = ProgressBar(widgets=widgets)
    lmu = np.log(mu)
    for row in pbar(dist):
        n, x, W = row.fetch_all_fields()
        W = np.exp(np.log(W) - n * lmu)
        X1[n] += W * x
        X2[n] += W * x**2
        X3[n] += W * x**3
        X4[n] += W * x**4  # fourth moment; the original accumulated x**3 here, which duplicated X3
    return np.column_stack([X1, X2, X3, X4])
def main():
    uri, outfile, dataset = get_arguments()
    fd = tempfile.NamedTemporaryFile()
    progress = ProgressBar(widgets=[
        Percentage(), ' ', Bar(), ' ', ETA(), ' ', FileTransferSpeed()
    ])

    def update(count, blockSize, totalSize):
        if progress.maxval is None:
            progress.maxval = totalSize
            progress.start()
        progress.update(min(count * blockSize, totalSize))

    urllib.request.urlretrieve(uri, fd.name, reporthook=update)
    if dataset == 'zinc12':
        df = pandas.read_csv(fd.name, delimiter='\t')
        df = df.rename(columns={'SMILES': 'structure'})
        df.to_hdf(outfile, 'table', format='table', data_columns=True)
    elif dataset == 'chembl22':
        df = pandas.read_table(fd.name, compression='gzip')
        df = df.rename(columns={'canonical_smiles': 'structure'})
        df.to_hdf(outfile, 'table', format='table', data_columns=True)
    else:
        df = pandas.read_csv(fd.name, delimiter='\t')
        df.to_hdf(outfile, 'table', format='table', data_columns=True)
def progress_bar_widgets(unit='items'):
    return [
        SimpleProgress(sep='/'), ' ',
        Bar(), ' ',
        FileTransferSpeed(unit=unit), ', ',
        AdaptiveETA()
    ]
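# Usage sketch (an assumption, not part of the original source): the widget
# list returned above can be handed to a ProgressBar that counts processed
# items rather than bytes. The items list and processing step are hypothetical.
from progressbar import ProgressBar

def process_all(items):
    bar = ProgressBar(widgets=progress_bar_widgets(unit='files'),
                      maxval=len(items)).start()
    for done, item in enumerate(items, start=1):
        # ... process item ...
        bar.update(done)
    bar.finish()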
def __init__(self, chrome_num=1, chrome_size=4, population=20, generation=10,
             top_n=5, cross_rate=0.8, elitism_rate=0.1, Range=[None, ],
             tol=1e-4, verbose=0):
    super(Adaptive_Genetic_Algorithm, self).__init__(chrome_num=chrome_num,
                                                     chrome_size=chrome_size,
                                                     population=population,
                                                     generation=generation,
                                                     top_n=top_n,
                                                     Range=Range,
                                                     tol=tol,
                                                     verbose=verbose)
    self.elitism_rate = elitism_rate
    self.cross_rate = cross_rate
    self.alg_name = 'Adaptive_GA'
    widgets = [
        self.alg_name + ': ', Percentage(), ' ', Bar('#'), ' ', Timer(), ' ',
        ETA(), ' ', FileTransferSpeed()
    ]
    self.pbar = ProgressBar(widgets=widgets, maxval=10 * generation)
def example1():
    widgets = ['Test: ', Percentage(), ' ', Bar(marker=RotatingMarker()),
               ' ', ETA(), ' ', FileTransferSpeed()]
    pbar = ProgressBar(widgets=widgets, maxval=10000000).start()
    for i in range(1000000):
        # do something
        pbar.update(10 * i + 1)
    pbar.finish()
def setup_progressbar(self):
    from progressbar import ProgressBar, FileTransferSpeed, Bar, Percentage, ETA
    return ProgressBar(widgets=[
        FileTransferSpeed(), ' <<<', Bar(), '>>> ', Percentage(), ' ', ETA()
    ])
def init_widgets(counter, t):
    return [
        'Progress: ',
        SimpleProgress('/') if counter else Percentage(), ' ',
        Bar(marker='>'), ' ',
        ETA(), ' ',
        FileTransferSpeed() if t is None else EventSpeed(t)
    ]
def _initialize_progress_bar(self):
    """Initializes the progress bar"""
    widgets = ['Download: ', Percentage(), ' ', Bar(), ' ', AdaptiveETA(),
               ' ', FileTransferSpeed()]
    self._downloadProgressBar = ProgressBar(
        widgets=widgets, max_value=self._imageCount).start()
def __init__(self, size):
    self.size = int(size.split(' ')[-1])
    p_format = '%(percentage)3d%% '
    self.pbar = ProgressBar(
        widgets=[Percentage(format=p_format), ETA(), FileTransferSpeed()],
        max_value=self.size)
    self.counter = 0
def main():
    # ---------------------------------- Training ------------------------------------ #
    print('\n//------------------- Training ------------------\\\\')
    # team data
    new_weights = {}
    for team_str in teams_str:
        team = getTeamGames(nba, team_str)
        team = addDefensiveStats(team)
        team = addStatAverages(team)
        print('\n', team_str)
        training_inputs = numpy.array(team[input_categories].values)
        training_outputs = numpy.array([team['W/L'].values])
        # translate wins and losses into 1's and 0's
        training_outputs[training_outputs == 'W'] = 1
        training_outputs[training_outputs == 'L'] = 0
        training_outputs = numpy.array(training_outputs).T
        numpy.random.seed(1)
        random_weight = numpy.random.random((len(training_inputs[0]), 1))
        syn_weights = 2 * random_weight - 1
        widgets = [
            Bar(marker='=', left='[', right=']'), ' ', Percentage(), ' ',
            ETA(), ' ', FileTransferSpeed()
        ]
        range_val = MILLION
        team_weights = numpy.array(
            [weights[weights['team'] == team_str].iloc[0].values[1:]]).T
        new_team_weights = trainTeam(training_inputs, training_outputs,
                                     syn_weights, acceptable_accuracy=80)
        new_weights[team_str] = new_team_weights

    new_weights_str = 'team,PTS Avg.,FGM Avg.,FGA Avg.,FG% Avg.,3PM Avg.,3PA Avg.,3P% Avg.,FTM Avg.,FTA Avg.,FT% Avg.,OREB Avg.,DREB Avg.,REB Avg.,AST Avg.,STL Avg.,BLK Avg.,TOV Avg.,PF Avg.,+/- Avg.,Opp. PTS Avg.,Opp. FGM Avg.,Opp. FGA Avg.,Opp. FG% Avg.,Opp. 3PM Avg.,Opp. 3PA Avg.,Opp. 3P% Avg.,Opp. FTM Avg.,Opp. FTA Avg.,Opp. FT% Avg.,Opp. OREB Avg.,Opp. DREB Avg.,Opp. REB Avg.,Opp. AST Avg.,Opp. STL Avg.,Opp. BLK Avg.,Opp. TOV Avg.,Opp. PF Avg.,Opp. +/- Avg.\n'
    for team_str in teams_str:
        new_weights_str = new_weights_str + (team_str + ',' + ','.join(
            map(str, numpy.array(new_weights[team_str]).T[0]))) + '\n'

    # -------------------------------- Training Output -------------------------------- #
    # print('\n', new_weights, '\n\n')
    if '\r\n' in new_weights_str:
        new_weights_str = new_weights_str.replace('\r\n', '\n')
    if '\n\n' in new_weights_str:
        new_weights_str = new_weights_str.replace('\n\n', '\n')
    if not os.path.exists('Weights/'):
        os.makedirs('Weights/')
    # writes to csv
    with open('Weights/weights_' + season_years + '.csv', 'w') as filetowrite:
        filetowrite.write(new_weights_str)
        filetowrite.close()
def __init__(self, filename, size):
    widgets = [
        os.path.basename(filename), ": ",
        Bar(marker=">", left="[", right="]"), ' ',
        Timer(), ' ',
        FileTransferSpeed(), " ",
        "{0}MB".format(round(size / 1024 / 1024, 2))
    ]
    super(DownloadProgressBar, self).__init__(widgets=widgets, maxval=size)
def __init__(self, ncpu, func, pairs, name: str = None):
    self.name = name
    self.func = func
    self.pairs = pairs
    self.parse_ncpu(ncpu)
    self.widgets = ['Progress: ', Percentage(), ' ', Bar('#'), ' ', Timer(),
                    ' ', ETA(), ' ', FileTransferSpeed()]
    self.verbose = True
    self.updated_indeces = []
def do_experiments(exp_params_list, nb_trials, root_path,
                   exp_script="experiment.py", job_queue=None):
    if job_queue is None:
        job_queue = AvakasJobQueue(exp_script)
    pbar = ProgressBar(widgets=[
        'Create jobs :', Percentage(), ' ', Bar(), ETA(), '|',
        FileTransferSpeed()
    ], maxval=nb_trials * len(exp_params_list)).start()
    for i, exp_params in enumerate(exp_params_list):
        launch_experiment(exp_params, root_path, job_queue,
                          nb_trials=nb_trials, pbar=pbar)
    pbar.finish()
    pbar = ProgressBar(
        widgets=['Running jobs :', Percentage(), ' ', Bar(), Timer()],
        maxval=nb_trials * len(exp_params_list)).start()
    all_finished = False
    i = 0
    while not all_finished:
        i += 1
        all_finished = True
        total_nb_finished = 0
        for exp_params in exp_params_list:
            nb_finished = get_progress(exp_params, root_path, job_queue,
                                       nb_trials=nb_trials)
            all_finished &= (nb_finished == nb_trials)
            total_nb_finished += nb_finished
        pbar.update(total_nb_finished)
        if not all_finished:
            time.sleep(job_queue.advise_sleep)
    pbar.finish()
    results_list = []
    for exp_params in exp_params_list:
        results_list.append(
            get_results(exp_params, root_path, job_queue, nb_trials=nb_trials))
    return results_list
def example25():
    widgets = ['Test: ', Percentage(), ' ', Bar(marker=RotatingMarker()),
               ' ', ETA(), ' ', FileTransferSpeed()]
    pbar = ProgressBar(widgets=widgets, max_value=1000,
                       redirect_stdout=True).start()
    for i in range(100):
        # do something
        pbar += 10
    pbar.finish()
def __init__(self, totalsize, params):
    widgets = [
        params, Percentage(), ' ',
        Bar(marker='=', left='[', right=']'), ' ',
        ETA(), ' ', FileTransferSpeed()
    ]
    self.pbar = ProgressBar(widgets=widgets, maxval=totalsize).start()