def _iteration_parameters(image_rows, image_cols, row_block_size, col_block_size, y_overlap=0, x_overlap=0, bands=1):
    """Count the processing blocks for a tiled image sweep and start a progress bar.

    The image is walked in tiles of ``row_block_size`` x ``col_block_size``
    with the given overlaps; when ``bands > 1`` each tile yields one block
    per band.

    Returns:
        tuple: ``(1, progress_bar)`` -- a started ProgressBar sized to the
        total block count (the leading 1 is presumably the initial block
        counter used by the caller -- TODO confirm).
    """
    row_step = row_block_size - y_overlap
    col_step = col_block_size - x_overlap
    # len(range(...)) gives the exact number of loop origins in O(1),
    # replacing the original O(rows * cols * bands) counting loops; the
    # original inner band loop only incremented a counter `bands` times.
    n_row_origins = len(range(0, image_rows, row_step))
    n_col_origins = len(range(0, image_cols, col_step))
    blocks_per_tile = bands if bands > 1 else 1
    maximum_blocks = n_row_origins * n_col_origins * blocks_per_tile

    progress_widgets = [
        ' Percent: ', widgets.Percentage(), ' ',
        widgets.Bar(marker='*', left='[', right=']'), ' ',
        widgets.ETA(), ' ', widgets.FileTransferSpeed()
    ]
    progress_bar = ProgressBar(widgets=progress_widgets, maxval=maximum_blocks)
    progress_bar.start()
    return 1, progress_bar
def _iteration_parameters_values(value1, value2):
    """Build and start a progress bar sized to ``value1 * value2``.

    Returns:
        tuple: ``(1, progress_bar)`` -- the started bar plus an initial
        counter value for the caller.
    """
    bar_widgets = [
        ' Perc: ', widgets.Percentage(), ' ',
        widgets.Bar(marker='*', left='[', right=']'), ' ',
        widgets.ETA(), ' ', widgets.FileTransferSpeed(),
    ]
    bar = ProgressBar(widgets=bar_widgets, maxval=value1 * value2)
    bar.start()
    return 1, bar
def pakt_init(name: typing.Optional[str], boot: bool = True) -> None:
    """Validate arguments and display a fake boot progress sequence.

    Args:
        name: Optional system name. ``None`` is allowed (per the annotation);
            any other non-string raises.
        boot: Must be a boolean (currently unused beyond validation --
            TODO confirm intended use).

    Raises:
        TypeError: If ``name`` is neither ``None`` nor a string, or ``boot``
            is not a boolean.

    NOTE(review): this function deliberately crashes the interpreter at the
    end via ``ctypes.string_at(0)`` (a null-pointer read) -- preserved as
    original behavior, but it should almost certainly be removed.
    """
    # `name` is Optional[str]: accept None, reject only non-None non-strings.
    # (The old check raised TypeError for None even though the annotation
    # explicitly allows it.)
    if name is not None and not isinstance(name, str):
        raise TypeError("`Name` has to be a string.")
    if not isinstance(boot, bool):
        raise TypeError("`Boot` has to be a boolean.")

    os.system("clear")  # clear the terminal (POSIX `clear`; no-op elsewhere)
    print("Initializing system")

    pakt_init_progress_bar = ProgressBar(maxval=100)
    pakt_init_progress_bar.start()
    for i in range(0, 100):
        # Dropped the per-iteration isinstance(i, int) check: range() can
        # only yield ints, so that branch was dead code.
        pakt_init_progress_bar.update(i + 1)
        sleep(0.025)
    # ~75% of runs pause an extra 2s; placeholder for real init work.
    if random.randint(0, 100) < 75:
        sleep(2)
    pakt_init_progress_bar.finish()

    print("Pakt Init is done initializing the system.")
    # NOTE(review): intentional hard crash (reads address 0) left in place
    # to preserve original behavior -- remove before any real use.
    ctypes.string_at(0)
def __init__(self, jobs, global_params, num_cpu=1, quiet=False, worker_class=Worker, debug=False):
    """Set up the work/result queues, enqueue every job, spawn the worker
    pool, and initialize progress-bar bookkeeping."""
    self.jobs = jobs
    self.global_params = global_params
    self.num_cpu = num_cpu
    self.quiet = quiet
    self.debug = debug
    self.worker_class = worker_class

    # One queue feeds work to the pool, one collects results, and one
    # carries per-worker status updates.
    self.work_queue = multiprocessing.Queue()
    self.result_queue = multiprocessing.Queue()
    self.current_queue = multiprocessing.Queue()

    # Tag each job with a sequential id while enqueueing it.
    job_count = 0
    for job in self.jobs:
        job['id'] = job_count
        self.work_queue.put(job)
        job_count += 1
    self.num_jobs = job_count

    self.results = []
    self.workers = []
    self.ongoing_work = {}
    self.init_workers()
    self.done = False

    # Console progress bar plus the string fields it renders into.
    self.progress = ProgressBar('green', width=80, block='█', empty='░')
    self.progress_time = ''
    self.progress_counts = ''
    self.progress_workers = ''
    self.progress_message = ''
    self.progress_premessage = ''

    # Start-of-run timestamps used for elapsed/ETA estimates.
    self.abs_iTime = time.time()
    self.iTime = datetime.fromtimestamp(self.abs_iTime)
def download_flyvec_data(outfile=None, force=False):
    """Download the zipped flyvec model from the cloud to a local file.

    If `outfile` is not provided, use (the OS's) TEMPDIR / 'flyvec-data.zip'.
    An existing file is reused unless ``force`` is truthy.

    Returns:
        Path: the path to the (possibly pre-existing) zip file.
    """
    import os  # local import: only needed for the credential env overrides

    tmp_file = Path(outfile) if outfile is not None else Path(
        tempfile.gettempdir()) / "flyvec-data.zip"
    if tmp_file.exists() and not force:
        print(f"Found existing {tmp_file}, reusing")
        return tmp_file

    # SECURITY(review): credentials were hard-coded in the source. They are
    # kept as fallbacks for backward compatibility, but can now be overridden
    # via environment variables; the embedded keys should be rotated and the
    # env vars preferred.
    access_key = os.getenv("FLYVEC_ACCESS_KEY",
                           "07598db5c9364ad29002fe8e22daddd3")
    secret_key = os.getenv("FLYVEC_SECRET_KEY",
                           "a7bec64c8840439576380beb238b161117f2aeb3e7f993f0")
    service_endpoint = 'https://s3.ap.cloud-object-storage.appdomain.cloud'

    session = boto3.Session(aws_access_key_id=access_key,
                            aws_secret_access_key=secret_key,
                            region_name="ap-geo")
    s3 = session.resource("s3", endpoint_url=service_endpoint)
    bucket = s3.Bucket("hoo-flyvec")
    obj = bucket.Object("data.zip")

    down_progress = ProgressBar(obj.content_length)
    print("Downloading flyvec data:")
    down_progress.start()

    def download_progress(chunk):
        # boto3 invokes the callback with the byte count of each chunk.
        down_progress.update(down_progress.currval + chunk)

    with open(str(tmp_file), 'wb') as fd:
        obj.download_fileobj(fd, Callback=download_progress)
    down_progress.finish()
    return tmp_file
def data2hdf5(class_dct, emb_data, p2img, p2save, group_name):
    """Pack per-class images and embeddings into an HDF5 file.

    Args:
        class_dct: mapping of class name -> list of item ids.
        emb_data: mapping with an "embeddings" entry keyed by item id
            (presumably an open HDF5 file or dict -- TODO confirm).
        p2img: directory containing ``<item>.jpg`` images.
        p2save: output HDF5 file path (opened with mode "w", overwriting).
        group_name: name of the root group created in the output file.
    """
    bar = ProgressBar()
    # Context manager guarantees the HDF5 handle is closed even if an image
    # read or embedding lookup raises (the original leaked it on error).
    with h5.File(p2save, "w") as f:
        group = f.create_group(name=group_name)
        for class_name, items_list in bar(class_dct.items()):
            class_group = group.create_group(name=class_name)
            for item in items_list:
                # Image.open as a context manager releases the file
                # descriptor promptly instead of waiting for GC.
                with Image.open(os.path.join(p2img, item + ".jpg")) as img_src:
                    img = np.asarray(img_src, dtype="uint8")
                emb = emb_data["embeddings"][item][:]
                item_group = class_group.create_group(name=item)
                item_group.create_dataset(name="image", data=img)
                item_group.create_dataset(name="embeddings", data=emb)
def insertion_json_file(table_name, db_path, json_file):
    """
    This function is used to unserialize a json file and to insert the
    content into a table.
    It needs the table name, the database path and the json path.
    """
    db = DB(db_path)
    progress = ProgressBar()
    s = Seri()

    # Map each supported table to its (deserializer, inserter) pair; an
    # unknown table name falls through to the disconnect, as before.
    handlers = {
        "Installation": (s.unserialize_json_installation, db.insertion_installation),
        "Equipment": (s.unserialize_json_equipment, db.insertion_equipment),
        "Activity": (s.unserialize_json_activity, db.insertion_activity),
    }
    if table_name in handlers:
        unserialize, insert = handlers[table_name]
        unserialize(json_file)
        for item in progress(s.collection):
            insert(item)
    db.disconnect(True)
class Controller:
    """Coordinates a pool of multiprocessing workers over a queue of jobs.

    Jobs are dicts; each is tagged with a sequential 'id' and pushed onto a
    shared work queue consumed by `worker_class` instances. Results are
    drained from a result queue while a console progress bar renders counts
    and time estimates.

    NOTE(review): `Worker`, `ProgressBar`, `relativedelta`, and `Queue.Empty`
    come from imports outside this view; the bare-`Queue` spelling suggests
    Python 2-era code -- confirm before porting to Python 3.
    """

    def __init__(self, jobs, global_params, num_cpu=1, quiet=False,
                 worker_class=Worker, debug=False):
        # jobs: iterable of dicts, mutated in place (each gains an 'id' key).
        self.jobs = jobs
        self.global_params = global_params
        self.num_cpu = num_cpu
        self.quiet = quiet
        self.worker_class = worker_class
        # work_queue feeds jobs to workers; result_queue collects outputs;
        # current_queue carries per-worker status updates.
        self.work_queue = multiprocessing.Queue()
        self.debug = debug
        self.result_queue = multiprocessing.Queue()
        self.current_queue = multiprocessing.Queue()
        self.num_jobs = 0
        # Tag and enqueue every job, counting them as we go.
        for job in self.jobs:
            job['id'] = self.num_jobs
            self.work_queue.put(job)
            self.num_jobs += 1
        self.results = []
        self.workers = []
        self.ongoing_work = {}
        self.init_workers()
        self.done = False
        # Console progress bar plus the string fields it renders into
        # (presumably the `progress` package's bar with render() -- confirm).
        self.progress = ProgressBar('green', width=80, block='█', empty='░')
        self.progress_time = ''
        self.progress_counts = ''
        self.progress_workers = ''
        self.progress_message = ''
        self.progress_premessage = ''
        # Start-of-run timestamps used for elapsed/remaining-time estimates.
        self.abs_iTime = time.time()
        self.iTime = datetime.fromtimestamp(self.abs_iTime)

    def init_workers(self):
        """Create `num_cpu` workers and append one sentinel (None) per worker
        to the work queue so each can detect end-of-work."""
        for i in range(self.num_cpu):
            worker = self.worker_class(
                self.work_queue,
                self.result_queue,
                self.current_queue,
                self.global_params,
            )
            self.workers.append(worker)
            self.work_queue.put(None)  # one termination sentinel per worker
            self.ongoing_work[worker.name] = {}

    def update_progress(self, one_time=True, daemon=False):
        """Render the progress bar once and, in daemon mode, re-arm a timer
        to keep rendering every 0.2s until `self.done` is set."""
        if one_time:
            percent = int((len(self.results) / float(self.num_jobs)) * 100)
            self.progress.render(percent, self.update_progress_message(),
                                 self.update_progress_premessage())
        if daemon and not self.done:
            # Self-rescheduling timer: each firing renders once and re-arms.
            threading.Timer(
                interval=.2,
                function=self.update_progress,
                kwargs={'daemon': daemon, 'one_time': True}
            ).start()

    def update_progress_premessage(self):
        """Return the (externally settable) pre-message line."""
        return self.progress_premessage

    def update_progress_message(self):
        """Rebuild and return the multi-line progress message (counts + time;
        the per-worker table is currently disabled)."""
        self.progress_message = '\n'.join([
            self.update_progress_counts(), '\n',
            #self.progress_workers, '\n',
            self.update_progress_time(), '\n'
        ])
        return self.progress_message

    def update_progress_counts(self):
        """Return 'done/total' as a string, caching it on the instance."""
        self.progress_counts = '/'.join(map(str, [
            len(self.results), self.num_jobs
        ]))
        return self.progress_counts

    def update_progress_workers(self):
        """Format a per-worker status table (worker / job id / time) with
        columns sized to their widest value."""
        rows = [[' ' * 20 + 'Worker', 'Job ID',
                 'Time'], [''] * 3]
        for w, s in self.ongoing_work.items():
            if s and s['job']:
                rows.append([s['worker'], s['job']['id'], s['time']])
            else:
                rows.append([w, '---', '---'])
        cols = zip(*rows)
        col_widths = [max(len(str(value)) for value in col) for col in cols]
        # Build a '%Ns' format per column, joined by double tabs.
        formatt = '\t\t'.join(['%%%ds' % width for width in col_widths])
        self.progress_workers = '\n'.join([
            formatt % tuple(row) for row in rows
        ])
        return self.progress_workers

    def update_progress_time(self):
        """Return an 'Elapsed time ... / Estimated remaining time ...' line.

        Remaining time is extrapolated from the average per-result speed so
        far; `speed` defaults to 1 to avoid division by zero before the
        first result arrives.
        """
        time_tick = time.time()
        tick = datetime.fromtimestamp(time_tick)
        rd = relativedelta(tick, self.iTime)
        edays = '%d days, ' % (rd.days) if rd.days else ''
        ehours = '%d hours, ' % (rd.hours) if rd.hours else ''
        eminutes = '%d minutes, ' % (rd.minutes) if rd.minutes else ''
        eseconds = '%d seconds' % (rd.seconds)
        len_res = len(self.results)
        speed = 1
        if len_res != 0:
            speed = len_res / (time_tick - self.abs_iTime)
        remaining_time = (self.num_jobs - len(self.results)) / speed
        end_time = datetime.fromtimestamp(time_tick + remaining_time)
        rt = relativedelta(end_time, tick)
        rdays = '%d days, ' % (rt.days) if rt.days else ''
        rhours = '%d hours, ' % (rt.hours) if rt.hours else ''
        rminutes = '%d minutes, ' % (rt.minutes) if rt.minutes else ''
        rseconds = '%d seconds' % (rt.seconds)
        self.progress_elapsed = ''.join([
            ' ' * 3,
            'Elapsed time : ',
            edays, ehours, eminutes, eseconds,
            '\t\t',
            'Estimated remaining time : ',
            rdays, rhours, rminutes, rseconds
        ])
        return self.progress_elapsed

    def cleanup(self):
        """Kill every worker process unconditionally."""
        for worker in self.workers:
            worker.kill()

    def finish(self):
        """Hook for subclasses to handle collected results; must override."""
        raise NotImplementedError

    def start(self):
        """Start all workers and poll the result queue until every job has
        produced a result, then finish (and clean up unless debugging)."""
        try:
            for worker in self.workers:
                worker.start()
            if not self.quiet:
                # Kick off the self-rescheduling render timer.
                self.update_progress(one_time=True, daemon=True)
            #while self.done_workers < len(self.workers):
            while True:
                #try:
                    #state = self.current_queue.get_nowait()
                    #if state['time'] is None:
                        #self.done_workers += 1
                    #self.ongoing_work[state['worker']] = state
                    #self.update_progress_workers()
                #except Queue.Empty:
                    #pass
                self.done = len(self.results) >= self.num_jobs
                if self.done:
                    break
                try:
                    self.results.append(self.result_queue.get_nowait())
                except Queue.Empty:
                    time.sleep(0.05)  # back off briefly when no result is ready
                #self.update_progress_workers()
            if not self.quiet:
                # Final one-shot render after the loop exits.
                self.update_progress(one_time=True, daemon=False)
        except KeyboardInterrupt:
            sys.exit(-1)
        except Exception:
            if self.debug:
                traceback.print_exc()
        finally:
            time.sleep(1)
            self.finish()
            if not self.debug:
                self.cleanup()
def main(): print "Loading configuration" with open("config.cfg", "r") as f: config = yaml.load(f) qa_contacts = config["qa_contacts"] flags = config["flags"] username = config.get("username", None) password = config.get("password", None) certificate_name = config["certificate_name"] username = os.getenv("BZ_USER", username) if not username: print( "Username was neither available at the config file or env vars (var name: BZ_USER)\n" "please input user name manually") username = raw_input("Username: "******"BZ_PASSWORD", password) if not password: print( "Password was neither available at the config file or env vars (BZ_USER)\n" "please input password manually") password = getpass.getpass() print "Collecting issues from Bugzilla" report_builder = report_gen("https://bugzilla.redhat.com", username=username, password=password) report_builder.parallelizer.start_parallelizer() for flag in flags: report_builder.get_issues_for_qa_contact(qa_contacts, flag) print "" print "*********************" print "" print "*** Total ***********" print "{count} issues found".format(count=len(report_builder.all_bugs)) print "***By User **********" issues_by_user = {} for issue in report_builder.all_bugs: contact = issue[1] issues_by_user[contact] = issues_by_user.get(contact, 0) + 1 for contact, count in issues_by_user.items(): print "{contact}: {count}".format(contact=contact, count=count) print "*********************" print "" pbar = ProgressBar().start() pbar.start() while report_builder.parallelizer.has_tasks_on_pipeline: pbar.update(report_builder.parallelizer.get_done_percentage) time.sleep(0.2) pbar.finish() print "Waiting for all the workers to finish" report_builder.parallelizer.stop_parallelizer() print "Saving report to G cloud" report_builder.save_to_google_drive_full_report(qa_contacts, flags, certificate_name)