def save_response_content(response, destination):
    """Stream an HTTP response body to *destination* in binary chunks.

    A Spinner frame is advanced for every chunk written so the user
    sees download progress.
    """
    chunk_size = 32768  # bytes read per iteration
    with open(destination, "wb") as out_file:
        spinner = Spinner("Downloading ... ")
        for block in response.iter_content(chunk_size):
            # Keep-alive chunks arrive as empty bytes; skip them.
            if block:
                out_file.write(block)
                spinner.next()
def display_pick(pick):
    """Play a short cosmetic "searching" animation, then announce the pick.

    NOTE: the spinner does nothing real; comment the loop out if you do
    not want to wait ~3 seconds for the loading animation.
    """
    finder = PixelSpinner('Finding a movie! ')
    for _ in range(10):
        time.sleep(.3)
        finder.next()
    print(f'\nGet ready to watch! {pick[1]}')
def run(self, program):
    """Run a Grbl 'program'.

    A 'program' can be:
        - Plain text GCode file.
        - GCode commands in a list.
        - Python GCode object.
        - Any class with a 'buffer' property where 'buffer' is a list
          of GCode commands.

    Returns the elapsed wall-clock time of the run in seconds.
    """
    # Normalize every accepted program form into a list of command strings.
    if isinstance(program, list):
        pass
    elif isinstance(program, str):
        program = program.splitlines()
    elif hasattr(program, "buffer"):
        program = program.buffer
    else:
        raise Exception(f"Unknown Program {type(program)}\n{program}")
    # Strip whitespace and force letters to capital.
    program = [line.strip().upper() for line in program]
    # Strip out all whitespace
    program = [line.replace(" ", "") for line in program]
    t1 = time.time()
    self.serial.flushInput()
    # Create list to store the number of bytes we think are in memory.
    buffer_bytes = list()
    try:
        # Send the program one line at a time, waiting for an ack per line.
        with PixelSpinner("Sending...") as bar:
            for program_line in tqdm(program):
                bar.next()
                bytes_written = self.write(program_line)
                # NOTE(review): extend() requires an iterable; if
                # self.write() returns an int byte count this raises
                # TypeError — confirm self.write()'s return type.
                buffer_bytes.extend(bytes_written)
                results = self.read(multiline=True, timeout=0.1)
                # While we wait on grbl to respond with an ok.
                while len(results) == 0:
                    # Wait
                    time.sleep(0.25)
                    # Try again
                    results = self.read(multiline=True, timeout=0.1)
        time.sleep(0.5)
        # While the command is running, poll the machine status.
        with PixelSpinner("Running...") as bar:
            while "Run" in self.status:
                time.sleep(0.1)
                bar.next()
    except KeyboardInterrupt:
        # Halt the machine on a keyboard interrupt.
        self.cmd("!")
        print("^C")
    return time.time() - t1
class Animate:
    """Couples a progress Bar with a PixelSpinner so both advance together."""

    def __init__(self, num):
        # 'num' is the maximum iteration count the bar represents.
        self.bar = Bar(max=num)
        self.spinner = PixelSpinner()

    def next(self):
        """Advance both indicators to the next step in the cycle."""
        for indicator in (self.bar, self.spinner):
            indicator.next()

    def done(self):
        """Complete the bar (the spinner has no completion state)."""
        self.bar.finish()
def watch(watch_dir, output_dir):
    """Watch a directory for new Premiere Pro project files.

    :param watch_dir: Directory to watch for new Premiere Pro project files.
    :param output_dir: Directory to place downgraded Premiere Pro project files.
    :return: None
    """
    # Instantiate the event handler class from the beginning of the file.
    event_handler = MyEventHandler(watch_dir, output_dir)
    observer = Observer()
    # Set the observer parameters and start watching the directory.
    observer.schedule(event_handler, path=watch_dir, recursive=False)
    observer.start()
    try:
        # Spin forever; the observer thread does the actual work.
        with PixelSpinner('...Watching target directory...') as bar:
            while True:
                time.sleep(0.06)
                bar.next()
        # BUG FIX: a second `while True: time.sleep(2)` loop used to follow
        # here; it was unreachable because the spinner loop never exits.
    except SystemExit:
        print('Quiting. Feel free to try again!')
        # BUG FIX: stop the observer on this path too, otherwise the
        # join() below blocks forever on the still-running thread.
        observer.stop()
    except KeyboardInterrupt:
        observer.stop()
    observer.join()
def converter(name, ext, path, form):
    """Convert an MP3 file to *form* and write it under <path>/export/.

    A cosmetic ~3 second spinner runs before the conversion starts.
    """
    with PixelSpinner(f'{name} Processing... ') as bar:
        for _ in range(100):
            sleep(0.03)
            bar.next()
    sound = AudioSegment.from_mp3(name + ext)
    sound.export(f"{path}/export/{name}.{form}", format=form)
def training(self):
    """Kicks off the training suite.

    To begin training, the model needs to be in the 'ready' state.
    Training continues until the model reaches the 'available' state.

    Args:
        -None

    Returns:
        - a completion of the training acknowledgement
    """
    if self.customization_id is None:
        raise ValueError("No customization id provided!")
    if self.customization_id:
        # Poll the service until the model is ready to train.
        with PixelSpinner(
                "Allocating resources to begin training ") as bar:
            while self.model_status() != 'ready':
                sleep(0.1)
                bar.next()
        response = requests.post(
            f'{self.url}/v1/customizations/{self.customization_id}/train',
            auth=('apikey', self.API_KEY))
        if response.status_code != 200:
            raise Exception(response.text)
        print("Training Beginning")
        # Poll until training completes.
        with PixelSpinner(f"Training {self.name} ") as bar:
            while self.model_status() != 'available':
                sleep(0.1)
                bar.next()
        print("Training has finished")
        return json.loads(response.text)
def mergeHashtags(self):
    """Merge the raw_hashtags table from the leaderboard into osmstats.

    Reads every (changeset_id, hashtag) pair from the leaderboard
    database and upserts it into the Galaxy 'changesets' table,
    appending the hashtag to any existing array for that changeset.
    """
    log = PixelSpinner(
        "Merging leaderboard hashtags table into Galaxy osmstats database. this may take a will..."
    )
    self.timer.start()
    inquery = "SELECT changeset_id,hashtag FROM raw_changesets_hashtags INNER JOIN raw_hashtags ON (raw_changesets_hashtags.hashtag_id = id);"
    self.indb.dbcursor.execute(inquery)
    self.timer.stop()
    for entry in self.indb.dbcursor.fetchall():
        log.next()
        # BUG FIX: a single quote inside a SQL string literal must be
        # doubled (''); the old code replaced ' with ' — a no-op that
        # broke the query for any hashtag/user name containing a quote.
        fixed = entry[1].replace("'", "''")
        # NOTE(review): this query is built by string concatenation; a
        # parameterized execute() would be safer if the driver allows it.
        outquery = "INSERT INTO changesets(id,hashtags) VALUES({id}, ARRAY['{hashtags}']) ON CONFLICT(id) DO UPDATE SET id="
        outquery += str(
            entry[0]
        ) + ", hashtags=ARRAY_APPEND(changesets.hashtags, '" + fixed + "')"
        self.outdb.dbcursor.execute(
            outquery.format(id=int(entry[0]), hashtags=fixed))
def __init__(self, indb=None, outdb=None, host=None):
    """Load a small bounding box for each country using the modified
    raw_countries table.

    The osm-stats-workers source includes a GeoJson file with the
    boundaries used to display country outlines. As those were only
    used by the front end, they are not in the database; the modified
    raw_countries table is the same data with a geometry column added.
    """
    # Input defaults to the leaderboard database.
    self.indb = Pydb(indb if indb is not None else "leaderboard", host)
    self.countries = dict()
    geoquery = "SELECT id,St_AsText(ST_Envelope(ST_Buffer(ST_Centroid(boundary), 1, 4))) FROM raw_countries;"
    spinner = PixelSpinner("Loading Country boundaries...")
    self.indb.dbcursor.execute(geoquery)
    for country_id, bbox_wkt in self.indb.dbcursor.fetchall():
        spinner.next()
        self.countries[country_id] = bbox_wkt
    self.timer = Timer()
    # Output defaults to the galaxy database.
    self.outdb = Pydb(outdb if outdb is not None else "galaxy", host)
def test2():
    """Demo: a PixelSpinner followed by two clint 'mill' progress runs."""
    with PixelSpinner('Communicating with server...') as bar:
        for _ in range(100):
            sleep(0.06)
            bar.next()
    print('Processing server data...')
    for _ in progress.mill(range(100)):
        sleep(0.02)
    print('Resolving dependencies...')
    for _ in progress.mill(range(100)):
        sleep(0.02)
    print("action is complete.")
def delete_model(url: str = None,
                 api_key: str = None,
                 customization_id: str = None) -> bool:
    """
    Deletes the model with the passed customization id.

    The function accepts the url and apikey of the instance along with
    the customization id of the model that needs to be deleted.

    Args:
        url: base url of the service instance
        api_key: apikey of the instance
        customization_id: a unique identifier for a custom stt model

    Returns:
        True when the model was successfully deleted, False otherwise.
    """
    try:
        response = requests.delete(
            f'{url}/v1/customizations/{customization_id}',
            auth=('apikey', api_key))
        if response.status_code != 200:
            raise Exception(response.text)
        print()
        # Poll until the service confirms the model is gone.
        with PixelSpinner(
                f"Deleting model with id: {customization_id} ") as bar:
            while not WatsonSTT.model_deletion_checker(
                    url, api_key, customization_id):
                sleep(0.01)
                # BUG FIX: update() only redraws the current frame;
                # next() is needed to actually advance the spinner.
                bar.next()
        print(f"Model {customization_id} Succesfully Deleted")
        print()
        return True
    except Exception as e:
        print(e)
        # BUG FIX: honour the documented boolean contract instead of
        # implicitly returning None on failure.
        return False
def __init__(self, num):
    """Create the paired indicators: a bar capped at *num* and a spinner."""
    self.bar, self.spinner = Bar(max=num), PixelSpinner()
from clint.textui import progress

# --- 'progress' package demos: Bar, PixelBar, PixelSpinner ---------------
click.secho('Progress - BAR', bold=True)
with Bar('Processing...') as bar:
    for i in range(100):
        time.sleep(0.02)
        bar.next()
click.secho('Progress - PixelBar', bold=True)
with PixelBar('Processing...') as bar:
    for i in range(100):
        time.sleep(0.02)
        bar.next()
click.secho('Progress - PixelSpinner', bold=True)
with PixelSpinner('Processing...') as bar:
    for i in range(100):
        time.sleep(0.02)
        bar.next()

# --- progressbar2 demo ---------------------------------------------------
click.secho('\nProgressbar2', bold=True)
for i in progressbar(range(100), redirect_stdout=True):
    time.sleep(0.02)

# --- tqdm demos ----------------------------------------------------------
click.secho('\nTQDM', bold=True)
for i in tqdm(range(100)):
    time.sleep(0.02)
click.secho('TQDM - With description', bold=True)
pbar = tqdm(list(string.ascii_lowercase))
# NOTE(review): the body of this loop lies outside this chunk.
for letter in pbar:
def progress(threadName, delay):
    """Spin forever while the listener waits for multicast traffic.

    'threadName' is unused; 'delay' is the pause (seconds) between
    spinner frames.
    """
    message = 'Waiting for multicast message on port %s ' % (PORT)
    with PixelSpinner(message) as bar:
        while True:
            time.sleep(delay)
            bar.next()
def mergeStatistics(self, timestamp):
    """Merge the raw_changesets table from the leaderboard into osmstats.

    Walks every changeset row (joined with its country), builds HSTORE
    expressions for the added/modified statistics, and upserts each row
    into the Galaxy 'changesets' table.

    NOTE(review): 'timestamp' is accepted but never used — confirm
    whether callers expect filtering by it.
    """
    log = PixelSpinner(
        "Merging leaderboard statistics into Galaxy osmstats database")
    log.next()
    self.timer.start()
    # Column order of the SELECT below:
    #  0 id                 1 road_km_added         2 road_km_modified
    #  3 waterway_km_added  4 waterway_km_modified  5 roads_added
    #  6 roads_modified     7 waterways_added       8 waterways_modified
    #  9 buildings_added   10 buildings_modified   11 pois_added
    # 12 pois_modified     13 editor               14 user_id
    # 15 created_at        16 closed_at            17 updated_at
    # 18 country_id
    query = "SELECT id, road_km_added, road_km_modified, waterway_km_added, waterway_km_modified, roads_added, roads_modified, waterways_added, waterways_modified, buildings_added, buildings_modified, pois_added, pois_modified, editor, user_id, created_at, closed_at, updated_at,country_id FROM raw_changesets INNER JOIN raw_changesets_countries ON (raw_changesets_countries.changeset_id = id);"
    self.indb.dbcursor.execute(query)
    self.timer.stop()
    result = self.indb.dbcursor.fetchone()
    while result is not None:
        stats = dict()
        added = dict()
        modified = dict()
        # Non-statistics fields.
        stats['change_id'] = result[0]
        # BUG FIX: editor is column 13; the old code read column 11
        # (pois_added).
        stats['editor'] = result[13]
        stats['user_id'] = result[14]
        stats['created_at'] = result[15]
        stats['closed_at'] = result[16]
        if stats['created_at'] is None and stats['closed_at'] is None:
            result = self.indb.dbcursor.fetchone()
            continue
        # Backfill whichever timestamp is missing from the other one.
        if stats['created_at'] is None:
            stats['created_at'] = stats['closed_at']
        if stats['closed_at'] is None:
            stats['closed_at'] = stats['created_at']
        stats['updated_at'] = result[17]
        stats['country_id'] = result[18]
        if self.getBbox(result[18]) is None:
            logging.warning(
                "Country ID %s is not in the geoboundaries table" %
                result[18])
            result = self.indb.dbcursor.fetchone()
            continue
        # BUG FIX: keep the bbox expression fully parenthesized so it is
        # valid in both the VALUES list and the ON CONFLICT SET clause;
        # the old code left ST_Multi( unclosed in the SET clause.
        stats['bbox'] = ("ST_Multi(ST_GeomFromText('" +
                         self.getBbox(result[18]) + "'))")
        # Added fields.
        added['highway_km'] = result[1]
        added['waterway_km'] = result[3]
        # BUG FIX: roads_added is column 5; the old code read column 4
        # (waterway_km_modified).
        added['highways'] = result[5]
        added['waterways'] = result[7]
        added['buildings'] = result[9]
        added['pois'] = result[11]
        # Modified fields.
        modified['highway_km'] = result[2]
        modified['waterway_km'] = result[4]
        modified['highways'] = result[6]
        modified['waterways'] = result[8]
        modified['buildings'] = result[10]
        modified['pois'] = result[12]
        # Get the next row, since we're done with this one.
        result = self.indb.dbcursor.fetchone()
        # Build the hstores for the added and modified statistics.
        hadd = self._buildHstore(added)
        hmod = self._buildHstore(modified)
        query = "INSERT INTO changesets(id, editor, user_id, created_at, closed_at, updated_at, added, modified, bbox)"
        query += " VALUES({id}, '{editor}', {user_id}, '{created_at}', '{closed_at}', '{updated_at}', {add}, {mod}, {bbox}) ON CONFLICT(id) DO UPDATE SET editor='{editor}', user_id={user_id}, created_at='{created_at}', closed_at='{closed_at}', updated_at='{updated_at}', added={add}, modified={mod}, bbox={bbox};"
        outquery = query.format(id=stats['change_id'],
                                editor=stats['editor'],
                                user_id=stats['user_id'],
                                created_at=stats['created_at'],
                                closed_at=stats['closed_at'],
                                updated_at=stats['updated_at'],
                                bbox=stats['bbox'],
                                add=hadd,
                                mod=hmod)
        #print(outquery)
        self.outdb.dbcursor.execute(outquery)

def _buildHstore(self, values):
    """Render a dict as a SQL HSTORE(ARRAY[...]) expression."""
    pairs = ",".join("ARRAY['" + key + "','" + str(value) + "']"
                     for key, value in values.items())
    return "HSTORE(ARRAY[" + pairs + "])"
def showProgress():
    """Show a ~6 second decryption spinner (cosmetic only)."""
    with PixelSpinner('Entschlüsselung läuft...') as bar:
        for _ in range(100):
            sleep(0.06)
            bar.next()
def trainer():
    """Interactive training entry point.

    Loads the selected sample files, fits the model on the genuine-user
    samples, reports its quality against impostor data, and optionally
    pickles the model under model/.
    """
    existing_files = get_file_list()
    if not existing_files:
        print("No samples files found. Come back when you have samples.")
        exit()
    # Ask the user which sample files to train on.
    target_filenames = get_existing_filename(existing_files, True)
    if len(target_filenames) == 0:
        print("No files selected, quitting...")
        exit()
    sequence = []
    for target_filename in target_filenames:
        samples = get_sequence_from_file(target_filename)
        sequence.extend(samples)
    timings_sequences = []
    compared_size = None
    print("")
    for raw_sample in sequence:
        parser = SampleParser(raw_sample)
        timings_sequences.append(parser.timings)
        # All samples must share the same size; the first sample sets the
        # reference and later mismatches are dropped.
        if compared_size is None:
            compared_size = parser.timings[-1]
        else:
            if parser.timings[-1] != compared_size:
                print(
                    "Error, one sample has a different size ({}), removing it".
                    format(parser.timings[-1]))
                del timings_sequences[-1]
    model = Model()
    print("{} samples".format(len(timings_sequences)))
    # Build the data.
    # NOTE(review): presumably timings[-2] is a 1/0 "genuine user" label
    # and timings[-1] the sample length — confirm against SampleParser.
    trueData = [smp[:smp[-1]] for smp in timings_sequences if smp[-2] == 1]
    fakeData = [smp[:smp[-1]] for smp in timings_sequences if smp[-2] == 0]
    # Split for training/optimization and final evaluation.
    train, test = train_test_split(trueData, train_size=0.8, test_size=None)
    print("{} samples from user".format(len(trueData)))
    print(" {:3d} samples for training".format(len(train)))
    print(" {:3d} samples for testing".format(len(test)))
    print("{} samples from impostor\n".format(len(fakeData)))
    spinner = PixelSpinner("Fitting data to the model... ", )
    spinner.start()
    # Create a thread for the spinner so it animates while fitting runs.
    t = Thread(target=spinner_loop, args=(spinner, ))
    t.do_run = True
    t.start()
    # Train and optimize.
    params = Model.findParameters(model, train)
    t.do_run = False
    t.join()
    print("")
    # Print a report on the training/optimization phase
    # evaluate = Model.evaluate(params["model"], train, test)
    # Print a final evaluation of the model against impostors data.
    report = Model.report(params["model"], train, test, fakeData)
    print_report(report)
    save_model = get_binary_validation("Do you want to keep this model ?",
                                       True)
    if save_model:
        filename = get_custom_filename(target_filenames)
        os.makedirs("model", exist_ok=True)
        with open("model/" + filename, 'wb') as file:
            pickle.dump(params["model"], file, pickle.HIGHEST_PROTOCOL)
        print("Model saved in model/" + filename)
def cli_login():
    """Interactive TEC Digital exporter.

    Logs in (credentials from TEC_USERNAME/TEC_PASSWORD or interactive
    prompts), lists the loaded courses, then downloads and extracts every
    course archive into per-semester folders. Set AUTO_DOWNLOAD to skip
    the download confirmation prompt.
    """
    global session, globalError, semestres_final
    art = """ _____ __ __ __ ___ ___ __ __ ________ __ ||__ / ` | \|/ _`|| /\ | |__ \_/|__)/ \|__)||__ |__) ||___\__, |__/|\__>||/~~\|___ |___/ \| \__/| \||___| \ """
    print(art)
    print("Exportador de archivos del TEC Digital")
    print("Creado por Joseph Vargas - https://twitter.com/JosephTico\n\n")
    print("Ingrese sus credenciales del TEC Digital y presione Enter.")
    # Credentials: environment first, interactive prompt second.
    if "TEC_USERNAME" in environ:
        username = environ.get('TEC_USERNAME')
    else:
        username = input("Usuario: ").strip()
    if "TEC_PASSWORD" in environ:
        password = environ.get('TEC_PASSWORD')
    else:
        password = getpass.getpass("Contraseña: ")
    # Log in on a worker thread, spinning while it runs.
    spinner = PixelSpinner('Iniciando sesión... ')
    thread = threading.Thread(target=td_login, args=(username, password))
    thread.start()
    while thread.is_alive() and not globalError:
        spinner.next()
    thread.join()
    if globalError:
        return
    print("\n")
    print('Obteniendo cursos... ')
    thread = threading.Thread(target=obtener_cursos)
    thread.start()
    thread.join()
    if globalError:
        return
    print("\n")
    print("Se han cargado satisfactoriamente los siguientes cursos:")
    for semestre in semestres_final:
        print("# " + semestre["titulo"])
        for curso in semestre["cursos"]:
            print("-- " + curso["titulo"])
    print("\n")
    if "AUTO_DOWNLOAD" not in environ and not query_yes_no(
            "¿Desea iniciar la descarga de todos los archivos en la carpeta actual?"):
        return
    for semestre in semestres_final:
        print("Descargando cursos de " + semestre["titulo"] + "...")
        if not os.path.exists(semestre["titulo"]):
            os.makedirs(semestre["titulo"])
        for curso in semestre["cursos"]:
            # Up to five attempts per course archive.
            for attempt in range(5):
                try:
                    print("Descargando archivos de " + curso["titulo"] + "...")
                    url = curso["url"] + "/download-archive?object_id=" + curso["folder_id"]
                    response = session.get(url, stream=True)
                    total_size_in_bytes = int(response.headers.get('content-length', 0))
                    block_size = 1024  # 1 Kibibyte
                    progress_bar = tqdm(total=total_size_in_bytes, unit='iB', unit_scale=True)
                    filename = os.path.join(dirname, semestre["titulo"],
                                            curso["titulo"] + ".zip")
                    with open(filename, 'wb') as file:
                        for data in response.iter_content(block_size):
                            progress_bar.update(len(data))
                            file.write(data)
                    # A short read means the download was truncated.
                    if total_size_in_bytes != 0 and progress_bar.n != total_size_in_bytes:
                        raise Exception('Error al descargar el archivo.')
                    try:
                        with zipfile.ZipFile(filename, "r") as zip_ref:
                            zip_ref.extractall(os.path.join(dirname, semestre["titulo"]))
                    except:
                        print("\nHa ocurrido un error al descomprimir los contenidos de este curso. Se mantendrá el archivo comprimido")
                    else:
                        # BUG FIX: the zip used to be removed twice; the
                        # second, unconditional os.remove() raised
                        # FileNotFoundError, which the broad except below
                        # treated as a download failure (re-downloading the
                        # course), and it also deleted archives that the
                        # extract-failure branch promised to keep.
                        os.remove(filename)
                    progress_bar.close()
                except KeyboardInterrupt:
                    sys.exit()
                except:
                    print("\n\nERROR: Error al descargar el archivo. \nReintentando...")
                else:
                    break
            else:
                print("Ha ocurrido un error al descargar el curso " + curso["titulo"] + ".\nSaltando...")
    print("\n")
    print("Proceso finalizado.")
# Exit reached from the preceding option-parsing error path.
quit()

# Parse the remaining command-line options.
for (opt, val) in opts:
    if opt == '--help' or opt == '-h':
        usage()
    elif opt == "--database" or opt == '-d':
        # NOTE(review): split(',') yields a list, but the connect string
        # below concatenates options['database'] as if it were a str,
        # which raises TypeError when -d is given — confirm intent.
        options['database'] = val.split(',')

connect = "dbname='" + options['database'] + "'"
dbshell = psycopg2.connect(connect)
dbshell.autocommit = True
dbcursor = dbshell.cursor()
osm = OsmFile("example.osm")
bar = PixelSpinner('Processing... ' + options["database"])

# Get all the highways
# query = """SELECT tags->'name',nodes,tags,ST_AsEWKT(linestring) FROM ways WHERE tags->'highway' is not NULL AND tags->'highway'!='path' LIMIT 5;"""
query = """SELECT id,tags->'name',nodes,tags FROM ways WHERE tags->'highway' is not NULL AND tags->'highway'!='path';"""
dbcursor.execute(query)
all = dbcursor.fetchall()
for line in all:
    bar.next()
    # Repackage each row as a dict keyed by column meaning.
    result = dict()
    result['id'] = line[0]
    result['name'] = line[1]
    result['nodes'] = line[2]
    result['tags'] = line[3]
    # print("WAY: %s" % result['tags'])
def __init__(self, accuracy):
    """Build the indicator pair: a bar sized to *accuracy* plus a spinner."""
    self.bar, self.spinner = Bar(max=accuracy), PixelSpinner()
'-filter_complex',
'[0:v] fps=12,scale=w=480:h=-1,split [a][b];[a] palettegen=stats_mode=single [p];['
'b][p] paletteuse=new=1',
filename_generator % (input_file, current_time)
],
stderr=subprocess.STDOUT,
universal_newlines=True).strip()
# NOTE(review): the call these arguments belong to begins outside this
# chunk — the surrounding function is not visible here.
bar.goto(current_time)
bar.finish()

progress_start = time.time()
video_files = []
video_files_populated = False
spinner = PixelSpinner('Please wait while generating video files list ')
# Walk the tree once, collecting every file whose MIME type is video/*.
while not video_files_populated:
    for file in glob.iglob('**/*.*', recursive=True):
        split_path_name = file.split("/")
        folder = save_to + split_path_name[0]
        if not os.path.isdir(file):
            # NOTE(review): 'folder' is computed but never used here —
            # confirm whether it was meant for the gif output path.
            if magic.from_file(file, mime=True)[:5] == 'video':
                video_files.append(file)
        spinner.next()
    video_files_populated = True
for video in range(len(video_files)):
    generate_gif(video_files[video])
progress_end = time.time()