def test_progress_max_refresh() -> None:
    """Test max_refresh argument."""
    time = 0.0

    def get_time() -> float:
        nonlocal time
        try:
            return time
        finally:
            time = time + 1.0

    console = Console(
        color_system=None, width=80, legacy_windows=False, force_terminal=True
    )
    column = TextColumn("{task.description}")
    column.max_refresh = 3
    progress = Progress(
        column,
        get_time=get_time,
        auto_refresh=False,
        console=console,
    )
    console.begin_capture()
    with progress:
        task_id = progress.add_task("start")
        for tick in range(6):
            progress.update(task_id, description=f"tick {tick}")
            progress.refresh()
    result = console.end_capture()
    print(repr(result))
    assert (
        result
        == "\x1b[?25l\r\x1b[2Kstart\r\x1b[2Kstart\r\x1b[2Ktick 1\r\x1b[2Ktick 1\r\x1b[2Ktick 3\r\x1b[2Ktick 3\r\x1b[2Ktick 5\r\x1b[2Ktick 5\n\x1b[?25h"
    )
class DefaultProgressHandler(BaseProgressHandler):
    def __init__(self):
        self.progress = Progress(
            TextColumn("[bold blue]{task.fields[title]}", justify="right"),
            BarColumn(bar_width=None),
            "[progress.percentage]{task.percentage:>3.1f}%",
            "•",
            DownloadColumn(),
            "•",
            TransferSpeedColumn(),
            "•",
            TimeRemainingColumn(),
        )

    def initialize(self, *args, **kwargs):
        super().initialize(*args)
        self.download_task = self.progress.add_task(
            self.track_title, title=self.track_title, total=self.total_size
        )
        self.progress.start()

    def update(self, *args, **kwargs):
        super().update(**kwargs)
        self.progress.update(self.download_task, advance=self.current_chunk_size)

    def close(self, *args, **kwargs):
        self.progress.stop()
class ProgressBar:
    OPTIONS = [
        "[progress.description]{task.description}",
        BarColumn(),
        "[progress.percentage]{task.completed:>6}/{task.total}",
        TimeElapsedColumn(),
    ]

    def __init__(self, description, total):
        self.description = description
        self.total = total

    def __enter__(self):
        self.progress = Progress(*self.OPTIONS)
        self.progress.start()
        self.task = self.progress.add_task(self.description, total=self.total)
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.progress.stop()

    def print(self, message):
        self.progress.console.print(message)

    def advance(self, advance=1):
        self.progress.update(self.task, advance=advance)
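# Usage sketch for the ProgressBar wrapper above (a minimal, hypothetical example;
# the loop body and message text are illustrative, not taken from the original code).
with ProgressBar("Processing items", total=100) as bar:
    for i in range(100):
        bar.advance()             # advance the single task by one step
        if i == 50:
            bar.print("halfway")  # print above the live bar via the Progress console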
def predict(trainData, trainLabel, testData, K=27):
    '''
    Predict labels for the test set
    ===========
    Arguments
    ---------
    - `trainData`  training set samples
    - `trainLabel` training set labels
    - `testData`   test set samples
    - `K`          number of nearest neighbours

    Returns
    -------
    - `predictLabel` predicted labels
    '''
    predictLabel = []
    progress = Progress(
        "[progress.description]{task.description}",
        BarColumn(bar_width=None),
        "[progress.percentage]{task.completed}/{task.total}",
        "•",
        TimeRemainingColumn(),
    )  # rich progress bar
    progress.start()
    testTask = progress.add_task("[cyan]predicting...", total=len(testData))
    for x in testData:
        predictLabel.append(NearestNeighbor(trainData, trainLabel, x, K))  # predict the label for this sample
        progress.update(testTask, advance=1)
    progress.stop()
    return predictLabel
def download(self, batch, articles, folder):
    """Download a pdf batch"""
    log_file = os.path.join(folder, 'missing.log')
    logger.remove()
    logger.add(log_file, format="{time} {level} {message}", mode='w', level="INFO")
    assert len(articles) > 0, 'no article.'
    progress = Progress(
        TextColumn("[progress.description]{task.description}", table_column=Column(ratio=1)),
        TimeElapsedColumn(table_column=Column(ratio=1)),
        BarColumn(table_column=Column(ratio=2)),
        "| {task.completed} of {task.total:>2.0f}",
        expand=False,
    )
    missing = 0
    with progress:
        task = progress.add_task(f" {batch} |", total=len(articles))
        for article in articles:
            done = self.get_article(
                article['article_url'], os.path.join(folder, article['file_name'])
            )
            if done:
                progress.update(task, advance=1)
            else:
                missing += 1
                logger.info("NOT_FOUND_IN_SCI-HUB | " + article['warning_str'])
    return missing, log_file
class _Progress:
    def __init__(self, show_limit: int = 50):
        from rich.progress import Progress, BarColumn, TimeRemainingColumn
        self._progress = Progress(
            "[progress.description]{task.description}",
            BarColumn(style="bar.back", complete_style="bar.complete",
                      finished_style="bar.complete"),
            "[progress.percentage]{task.percentage:>3.0f}%",
            TimeRemainingColumn(),
            auto_refresh=False)
        self._last_update = _datetime.now()
        self._show_limit = show_limit
        self._completed = {}
        self._have_entered = False
        self._enter_args = None

    def __enter__(self, *args, **kwargs):
        self._enter_args = (args, kwargs)
        return self

    def __exit__(self, *args, **kwargs):
        if self._have_entered:
            self._progress.__exit__(*args, **kwargs)
        return False

    def add_task(self, description: str, total: int):
        if total > self._show_limit:
            if not self._have_entered:
                args, kwargs = self._enter_args
                self._progress.__enter__(*args, **kwargs)
                self._have_entered = True
            task_id = self._progress.add_task(description=description, total=total)
            self._completed[task_id] = 0
            return task_id
        else:
            return None

    def update(self, task_id: int, completed: float = None,
               advance: float = None, force_update: bool = False):
        if task_id is None:
            return
        elif completed is not None:
            self._completed[task_id] = completed
        elif advance is not None:
            self._completed[task_id] += advance
        else:
            return
        now = _datetime.now()
        if force_update or (now - self._last_update).total_seconds() > 0.1:
            self._progress.update(task_id=task_id, completed=self._completed[task_id])
            self._progress.refresh()
            self._last_update = now
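# Usage sketch for the _Progress wrapper above (hypothetical item count and sleep;
# not from the original code). A bar is only rendered when total exceeds show_limit,
# and redraws are throttled to roughly one refresh per 0.1 s unless force_update is set.
import time

with _Progress(show_limit=50) as progress:
    task = progress.add_task("Crunching", total=200)   # shown, since 200 > 50
    for _ in range(200):
        time.sleep(0.01)
        progress.update(task, advance=1)
    progress.update(task, completed=200, force_update=True)  # flush the final state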
async def fetch_taxid_call(name: str, progress: Progress, q: asyncio.queues.Queue, task: int):
    """
    Handles calling asynchronous taxon ID retrievals and updating task progress.
    Puts results in an asyncio Queue.

    Parameters:
        name (str): name of an OTU
        progress (Progress): Progress object for current tasks that can be updated
        q (asyncio.queues.Queue): Queue containing current results of tasks
        task (int): ID for a given progress task
    """
    taxid = await asyncio.get_event_loop().run_in_executor(None, fetch_taxid, name)

    if taxid is None:
        description = f"[red]Could not retrieve taxon ID for {name}"
        result = "[red]:x: Not found"
    else:
        description = f"[green]Retrieved taxon ID for {name}"
        result = f"[green]:heavy_check_mark: {taxid}"

    progress.print(description, result)
    progress.update(task, advance=1)

    await q.put((name, taxid))
def make_progress() -> Progress:
    _time = 0.0

    def fake_time():
        nonlocal _time
        try:
            return _time
        finally:
            _time += 1

    console = Console(
        file=io.StringIO(),
        force_terminal=True,
        color_system="truecolor",
        width=80,
        legacy_windows=False,
    )
    progress = Progress(console=console, get_time=fake_time, auto_refresh=False)
    task1 = progress.add_task("foo")
    task2 = progress.add_task("bar", total=30)
    progress.advance(task2, 16)
    task3 = progress.add_task("baz", visible=False)
    task4 = progress.add_task("egg")
    progress.remove_task(task4)
    task4 = progress.add_task("foo2", completed=50, start=False)
    progress.stop_task(task4)
    progress.start_task(task4)
    progress.update(
        task4, total=200, advance=50, completed=200, visible=True, refresh=True
    )
    progress.stop_task(task4)
    return progress
class ProgressBarCallback(tf.keras.callbacks.Callback):
    def __init__(self, steps: int):
        super().__init__()
        self.steps = steps
        self.task = None
        self.bar = Progress(
            TextColumn("Iteration {task.fields[iteration]}/{task.fields[cumul]}"),
            BarColumn(),
            TextColumn("[progress.percentage]{task.percentage:>3.0f}%"),
            TextColumn("Loss = {task.fields[loss]:.5f} | ⏳ "),
            TimeRemainingColumn(),
            TextColumn("lr = {task.fields[lr]:.6f}"),
        )

    def on_train_begin(self, logs=None):
        self.task = self.bar.add_task(
            description="Optimizing...",
            total=self.steps,
            iteration=self.model._tot_batches,
            loss=self.model._loss,
            cumul=self.steps + self.model._tot_batches,
            lr=float(tf.keras.backend.get_value(self.model.optimizer.lr)),
        )

    def on_train_batch_end(self, batch, logs=None):
        self.model._tot_batches += 1
        self.bar.update(
            self.task,
            advance=1,
            refresh=True,
            iteration=self.model._tot_batches,
            loss=self.model._loss,
            lr=float(tf.keras.backend.get_value(self.model.optimizer.lr)),
        )
def train(self, input_path, train_datasets):
    """
    Train the model.

    :param input_path:
    :param train_datasets:
    :return:
    """
    # Define the optimizer
    optimizer = tf.keras.optimizers.Adam(learning_rate=self.learning_rate)

    # Build model graph
    self.build_model(input_path=input_path, train_datasets=train_datasets, mode="train")

    # Training loop
    progress_bar = Progress(
        "[bold blue]{task.description}",
        "[bold cyan]Step: {task.fields[step]}, Loss: {task.fields[loss]}",
        BarColumn(bar_width=None),
    )
    training_progress = progress_bar.add_task(
        self.model_name, total=self.num_steps, step=0, loss=1
    )

    with progress_bar:
        for step in range(self.num_steps):
            x, y = self.data_iter.get_next()
            with tf.GradientTape() as tape:
                self.logits = self.model(x, training=True)
                loss = self.compute_loss(self.logits, y)
            grads = tape.gradient(loss, self.model.trainable_weights)
            optimizer.apply_gradients(zip(grads, self.model.trainable_weights))

            progress_bar.update(training_progress, advance=1, step=step, loss=f"{loss:.4f}")

            # Collect garbage every 100 steps - otherwise runs out of memory
            if step % 100 == 0:
                gc.collect()

    # Save the trained model
    self.model.save(self.model_dir)
    pd.DataFrame(self.labels).to_csv(
        os.path.join(self.model_dir, "celltypes.txt"), sep="\t"
    )
    pd.DataFrame(self.sig_genes).to_csv(
        os.path.join(self.model_dir, "genes.txt"), sep="\t"
    )
class ProgressHandler(BaseProgressHandler):
    def __init__(self):
        self.tracks = {}
        self.progress = Progress(
            TextColumn("[bold blue]{task.fields[title]}", justify="left"),
            BarColumn(bar_width=None),
            "[progress.percentage]{task.percentage:>3.2f}%",
            "•",
            DownloadColumn(),
            "•",
            TransferSpeedColumn(),
            "•",
            TimeRemainingColumn(),
            transient=True)

    def initialize(self, iterable, track_title, track_quality, total_size,
                   chunk_size, **kwargs):
        track_id = kwargs["track_id"]
        task = self.progress.add_task(track_title, title=track_title, total=total_size)
        self.progress.console.print(
            f"[bold red]{track_title}[/] has started downloading.")
        self.tracks[track_id] = {
            "id": track_id,
            "iterable": iterable,
            "title": track_title,
            "quality": track_quality,
            "total_size": total_size,
            "chunk_size": chunk_size,
            "task": task,
            "size_downloaded": 0,
            "current_chunk_size": 0,
        }
        self.progress.start()

    def update(self, *args, **kwargs):
        track = self.tracks[kwargs["track_id"]]
        track["current_chunk_size"] = kwargs["current_chunk_size"]
        track["size_downloaded"] += track["current_chunk_size"]
        self.progress.update(track["task"], advance=track["current_chunk_size"])

    def close(self, *args, **kwargs):
        track = self.tracks[kwargs["track_id"]]
        track_title = track["title"]
        self.progress.print(f"[bold red]{track_title}[/] is done downloading.")

    def close_progress(self):
        self.progress.refresh()
        self.progress.stop()
async def add_progress_bar(self, progress_task: ProgressTask, progress: Progress) -> None:
    """creates a new progress task and adds a ProgressTask instance to the internal registry"""
    task_id = progress.add_task(**progress_task.kwargs)
    if progress_task.progress:
        progress.update(task_id, advance=progress_task.progress)
    task = [t for t in progress.tasks if t.id == task_id][0]
    progress_task.task = task
    self.tasks[int(progress_task.sim_id)] = progress_task
def update(progress: Progress, overall: int, items: dict[str, tuple[int, str]],
           stats: dict[str, Any]) -> None:
    progress.update(
        overall,
        description=f"{stats['complete']} Gifts Obtained",
        completed=stats["progress"],
    )
    for item in stats["items"]:
        progress.update(
            items[item[1]],
            completed=item[0],
        )
class Log():
    def __init__(self):
        self.__progress = Progress(
            SpinnerColumn(),
            TextColumn(
                "[progress.description]{task.description} {task.fields[current]}"
            ),
            "•",
            BarColumn(),
            "•",
            TransferSpeedColumn(),
            TimeRemainingColumn(),
            console=Console(),
            transient=True,
            expand=True)
        self.__console = self.__progress.console

    def success(self, msg: str, **kwargs):
        self.__console.print(f"[bold green]{msg}[/bold green]", **kwargs)

    def error(self, msg: str, **kwargs):
        self.__console.print(f"[bold red]{msg}[/bold red]", **kwargs)

    def warning(self, msg: str, **kwargs):
        self.__console.print(f"[bold yellow]{msg}[/bold yellow]", **kwargs)

    # Get all the details available about one or more objects passed to it
    def details(self, *args):
        for arg in args:
            inspect(arg, methods=True)

    # A simple and quick variadic print with optional styling
    def debug(self, *args, **kwargs):
        for arg in args:
            self.__console.print(arg, **kwargs)

    # Print documentation either as a plain string or as markdown
    def documentation(self, title: str, doc: str, markdown: bool = False):
        self.__console.rule(f"[bold yellow]{title}", align="center")
        if markdown:
            md = Markdown(doc)
            self.__console.print(md, justify="center")
        else:
            self.__console.print(f"[bold yellow]{doc}", justify="left")

    # Construct and return a (progress, updater) tuple for a new progress bar
    def progress_bar_builder(self, label: str, total: int, current: str = ""):
        # `total` must be passed by keyword; positionally it would be taken as `start`
        task_id = self.__progress.add_task(label, total=total, current=current)

        def update_task(increment: int, current: str):
            self.__progress.update(task_id, current=current, advance=increment)

        return (self.__progress, update_task)
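# Usage sketch for Log.progress_bar_builder above (hypothetical file names; the
# returned Progress still has to be started, e.g. by entering it as a context manager).
log = Log()
progress, update_task = log.progress_bar_builder("Downloading", total=3, current="")
with progress:
    for name in ("a.bin", "b.bin", "c.bin"):
        update_task(1, current=name)   # advance by one and show the current file
log.success("All files downloaded")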
async def download_image(session: aiohttp.ClientSession, url: str, file_path: str,
                         progress: Progress, task):
    try:
        async with session.get(url) as response:
            assert response.status == 200
            async with aiofiles.open(file_path, 'wb') as f:
                await f.write(await response.read())
        progress.console.print(f'Downloaded {url} to {file_path}')
    except Exception as e:
        progress.console.print(f'Download for {url} failed: {str(e)}')
    progress.update(task, advance=1)
def benchmark(name: str, func: Callable, num_repeats: int,
              progress_bar: Progress) -> List[float]:
    """Return a list of run times for func, repeated num_repeats times."""
    run_times = []
    text_color = '[blue]'
    task = progress_bar.add_task(f"{text_color}{name}", total=num_repeats)
    for i in range(num_repeats):
        progress_bar.tasks[task].description = f"{text_color}{name}: run {i + 1}/{num_repeats}"
        run_times.append(timeit(func, number=1))
        progress_bar.update(task, advance=1)
    return run_times
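# Usage sketch for benchmark() above (hypothetical workload; the Progress instance
# is supplied by the caller and must be running while the benchmark executes).
from rich.progress import Progress

with Progress() as progress_bar:
    times = benchmark("sum-1M", lambda: sum(range(1_000_000)),
                      num_repeats=5, progress_bar=progress_bar)
print(f"best of 5: {min(times):.4f}s")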
async def download_image(
    progress: Progress, task_id: TaskID, image_url: str, destination: Path
) -> None:
    async with httpx.AsyncClient() as client:
        response = await client.get(image_url)
        if response.status_code >= 400:
            progress.console.print(f'image [blue]{image_url}[/] could not be downloaded')
            progress.update(task_id, advance=1)
            return
        path = destination / image_url.split('/')[-1]
        path.write_bytes(response.content)
        progress.update(task_id, advance=1)
def generate_progress(
        self, progress: Progress, task: TaskID) -> Union[ConsoleRenderable, RichCast, str]:
    """Returns the updated progress bar"""
    total = 0
    completed = 0
    for w in self.workers:
        if w.__meta__.name == "CSVReader":
            total += w.__meta__.items_processed
        elif w.__meta__.name == "DomainResolver":
            completed += w.__meta__.items_processed
    progress.update(task, total=total, completed=completed)
    return progress.get_renderable()
def track(self, tasks, label="Processing"):
    progress = Progress(
        "[progress.description][yellow]{task.description} {task.fields[key]}",
        BarColumn(),
        "[progress.percentage]{task.percentage:>3.0f}%",
        TimeRemainingColumn(),
        console=self)
    with progress:
        task_bar = progress.add_task(label, total=len(tasks), key="")
        for idx, task in enumerate(tasks):
            progress.update(task_bar, completed=idx, key=task)
            yield task
        progress.update(task_bar, advance=1, key="finished")
class ProgressManager():
    progress = None
    task = None
    current_pos = 0
    total = 0

    def __init__(self, total, current_pos):
        self.total = total
        self.progress = Progress(
            # SpinnerColumn(),
            "{task.description}",
            BarColumn(),
            "{task.completed}",
            "of",
            "{task.total}",
            "documents.",
        )
        self.task = self.progress.add_task("Progress:", total=self.total)
        # Current position adjusted to be non-zero
        self.current_pos = current_pos + 1
        print('Document %s of %s' % (self.current_pos, self.total))
        # First document
        self.add_progress(self.current_pos)

    def __rich__(self) -> Panel:
        try:
            return Panel(self.progress)
        except Exception as e:
            print(e)

    def add_progress(self, advance):
        self.progress.update(self.task, advance=advance)

    def next(self):
        if self.current_pos == self.total:
            self.current_pos = 1
            self.progress.update(self.task, advance=-self.total + 1)
        else:
            self.progress.update(self.task, advance=1)
            self.current_pos += 1

    def prev(self):
        if self.current_pos == 1:
            self.current_pos = self.total
            self.progress.update(self.task, advance=self.total - 1)
        else:
            self.progress.update(self.task, advance=-1)
            self.current_pos -= 1
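# Usage sketch for ProgressManager above (hypothetical document count). Because the
# class implements __rich__ and returns a Panel, it can be rendered inside a rich
# Live display while next()/prev() cycle through the documents.
from rich.live import Live
import time

manager = ProgressManager(total=5, current_pos=0)
with Live(manager, refresh_per_second=4):
    for _ in range(4):
        time.sleep(0.5)
        manager.next()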
def predict(trainData, trainLabel, testData, kernel='Gaussian', C=200,
            epsilon=0.0001, sigma=10, p=2):
    '''
    Predict labels for the test set
    ===========
    Arguments
    ---------
    - `trainData`  training set samples
    - `trainLabel` training set labels
    - `testData`   test set samples
    - `kernel`     kernel function
    - `C`          soft-margin penalty parameter
    - `epsilon`    slack tolerance
    - `sigma`      Gaussian kernel parameter
    - `p`          polynomial kernel parameter

    Returns
    -------
    - `predictLabel` predicted labels
    '''
    predictLabel = []
    machine = SupportVectorMachine(kernel=kernel, C=C, epsilon=epsilon, sigma=sigma)
    machine.train(trainData, trainLabel)
    progress = Progress(
        "[progress.description]{task.description}",
        BarColumn(bar_width=None),
        "[progress.percentage]{task.completed}/{task.total}",
        "•",
        TimeRemainingColumn(),
    )  # rich progress bar
    progress.start()
    testTask = progress.add_task("[cyan]predicting...", total=len(testData))
    for testDatum in testData:
        predictLabel.append(machine.classify(testDatum))  # classify this test sample
        progress.update(testTask, advance=1)
    progress.stop()
    return predictLabel
async def advance_progress(self, data: Dict, progress: Progress) -> None:
    """advances the progress bar for the given tasks by the current simulation progress"""
    simulation_id = data["data"]["simulation_id"]
    sim_progress = data["data"]["progress"]
    progress_task = self.tasks.get(int(simulation_id))
    if not progress_task.task.started:
        progress.start_task(progress_task.task.id)
    # progress.console.print(f">>>>>>>>>> {sim_progress}")
    if int(sim_progress) <= progress_task.task.percentage:
        return
    advance_by = int(sim_progress) - int(progress_task.task.percentage)
    progress.update(progress_task.task.id, advance=advance_by)
    # auto_refresh is False, so refresh manually
    progress.refresh()
def _download(url: str, root: str):
    os.makedirs(root, exist_ok=True)
    filename = os.path.basename(url)
    download_target = os.path.join(root, filename)

    if os.path.isfile(download_target):
        return download_target

    if os.path.exists(download_target) and not os.path.isfile(download_target):
        raise FileExistsError(f'{download_target} exists and is not a regular file')

    from rich.progress import (
        DownloadColumn,
        Progress,
        TextColumn,
        TimeRemainingColumn,
        TransferSpeedColumn,
    )

    progress = Progress(
        TextColumn("[bold blue]{task.fields[filename]}", justify="right"),
        "[progress.percentage]{task.percentage:>3.1f}%",
        "•",
        DownloadColumn(),
        "•",
        TransferSpeedColumn(),
        "•",
        TimeRemainingColumn(),
    )

    with progress:
        task = progress.add_task('download', filename=url, start=False)
        with urllib.request.urlopen(url) as source, open(download_target, 'wb') as output:
            progress.update(task, total=int(source.info().get('Content-Length')))
            progress.start_task(task)
            while True:
                buffer = source.read(8192)
                if not buffer:
                    break
                output.write(buffer)
                progress.update(task, advance=len(buffer))

    return download_target
def create_subsample_dataset(self, x, y, celltypes):
    """
    Generate many artificial bulk samples with known fractions.
    This function will create normal and sparse samples (no_samples)
    @param x:
    @param y:
    @param celltypes:
    @return:
    """
    sim_x = []
    sim_y = []

    progress_bar = Progress(
        "[bold blue]{task.description}",
        "[bold cyan]{task.fields[samples]}",
        BarColumn(bar_width=None),
    )

    with progress_bar:
        normal_samples_progress = progress_bar.add_task(
            "Normal samples", total=self.num_samples, samples=0)
        sparse_samples_progress = progress_bar.add_task(
            "Sparse samples", total=self.num_samples, samples=0)

        # Create normal samples
        for i in range(self.num_samples):
            progress_bar.update(normal_samples_progress, advance=1, samples=i + 1)
            sample, label = self.create_subsample(x, y, celltypes)
            sim_x.append(sample)
            sim_y.append(label)

        # Create sparse samples
        for i in range(self.num_samples):
            progress_bar.update(sparse_samples_progress, advance=1, samples=i + 1)
            sample, label = self.create_subsample(x, y, celltypes, sparse=True)
            sim_x.append(sample)
            sim_y.append(label)

    sim_x = pd.concat(sim_x, axis=1).T
    sim_y = pd.DataFrame(sim_y, columns=celltypes)
    return sim_x, sim_y
def run(self, manager: "pwncat.manager.Manager", args):

    # Create a progress bar for the upload
    progress = Progress(
        TextColumn("[bold cyan]{task.fields[filename]}", justify="right"),
        BarColumn(bar_width=None),
        "[progress.percentage]{task.percentage:>3.1f}%",
        "•",
        DownloadColumn(),
        "•",
        TransferSpeedColumn(),
        "•",
        TimeRemainingColumn(),
    )

    if not args.destination:
        args.destination = f"./{os.path.basename(args.source)}"

    try:
        length = os.path.getsize(args.source)
        started = time.time()
        with progress:
            task_id = progress.add_task(
                "upload", filename=args.destination, total=length, start=False
            )
            with open(args.source, "rb") as source:
                with manager.target.platform.open(args.destination, "wb") as destination:
                    progress.start_task(task_id)
                    copyfileobj(
                        source,
                        destination,
                        lambda count: progress.update(task_id, advance=count),
                    )
                    progress.update(task_id, filename="draining buffers...")
                    progress.stop_task(task_id)

            progress.start_task(task_id)
            progress.update(task_id, filename=args.destination)

        elapsed = time.time() - started
        console.log(
            f"uploaded [cyan]{human_readable_size(length)}[/cyan] "
            f"in [green]{human_readable_delta(elapsed)}[/green]"
        )
    except (FileNotFoundError, PermissionError, IsADirectoryError) as exc:
        self.parser.error(str(exc))
def make_progress() -> Progress:
    console = Console(file=io.StringIO(), force_terminal=True)
    progress = Progress(console=console)
    task1 = progress.add_task("foo")
    # total must be passed by keyword; a bare positional would be taken as `start`
    task2 = progress.add_task("bar", total=30)
    progress.advance(task2, 16)
    task3 = progress.add_task("baz", visible=False)
    task4 = progress.add_task("egg")
    progress.remove_task(task4)
    task4 = progress.add_task("foo2", completed=50, start=False)
    progress.start_task(task4)
    progress.update(
        task4, total=200, advance=50, completed=200, visible=True, refresh=True
    )
    return progress
def parse_apk(apkfile, workers, fn_match=None, outfile=None):
    console = Console()
    console.log(f"Parsing {apkfile} with {workers} workers ...")
    dexes = list(extract_dex_files(apkfile))
    console.log(f"Found {len(dexes)} DEX file(s).")
    total = sum(map(lambda d: len(d.data), dexes))
    progress = Progress(
        TextColumn("[progress.description]{task.description}"),
        BarColumn(
            complete_style='bar.complete',
            finished_style='bar.finished',
            pulse_style='bar.pulse',
        ),
        TextColumn("[progress.percentage]{task.percentage:>3.0f}%"),
        TimeRemainingColumn(),
        TimeElapsedColumn(),
        console=console,
    )
    out = {}
    out.update(JNI_COMMON)
    num_classes = 0
    t0 = datetime.now()
    with progress:
        task = progress.add_task("Analyzing...", total=total)
        with multiprocessing.Pool(workers) as pool:
            result = pool.imap(parse_dex_proc, dexes)
            for dex, count, res in result:
                if count == 0:
                    console.log("Parse {} ({} bytes) [bold red]failed: {}".format(
                        dex.name, len(dex.data), res))
                    continue
                console.log("Parse {} ({} bytes), found {} classes.".format(
                    dex.name, len(dex.data), count))
                num_classes += count
                progress.update(task, advance=len(dex.data))
                for cname, data in res.items():
                    if fn_match and not fn_match(cname):
                        continue
                    out.update(data)
    console.log("Analyzed {} classes, cost: {}".format(num_classes, datetime.now() - t0))
    console.log("Found {} JNI methods.".format(len(out)))
    if not outfile:
        console.print_json(data=out)
    else:
        with open(outfile, 'w') as f:
            json.dump(out, f, indent=2, ensure_ascii=False)
def progress_step(self, step_key, step_name, total):
    """Context manager to show a progress bar during a step execution."""
    step_progress = Progress(
        '{task.description}',
        SpinnerColumn(spinner_name='earth'),
        BarColumn(),
        TextColumn('[progress.percentage]{task.percentage:>3.0f}%'),
    )
    step_job = step_progress.add_task(step_name, total=total)
    self.progress_bars[step_key] = {
        C.PROGRESS_KEY: step_progress,
        C.JOB_KEY: step_job,
    }
    self.progress_table.add_row(step_progress)

    yield

    # Mark the step as complete once the body of the `with` block finishes
    step_progress.update(step_job, completed=step_progress.tasks[0].total)
def retrieve_over_http(url, output_file_path):
    """Download file from remote location, with progress bar.

    Parameters
    ----------
    url : str
        Remote URL.
    output_file_path : str or Path
        Full file destination for download.
    """
    # Make Rich progress bar
    progress = Progress(
        TextColumn("[bold]Downloading...", justify="right"),
        BarColumn(bar_width=None),
        "{task.percentage:>3.1f}%",
        "•",
        DownloadColumn(),
        "• speed:",
        TransferSpeedColumn(),
        "• ETA:",
        TimeRemainingColumn(),
    )

    CHUNK_SIZE = 4096
    response = requests.get(url, stream=True)

    try:
        with progress:
            task_id = progress.add_task(
                "download",
                filename=output_file_path.name,
                start=True,
                total=int(response.headers.get("content-length", 0)),
            )
            with open(output_file_path, "wb") as fout:
                for chunk in response.iter_content(chunk_size=CHUNK_SIZE):
                    fout.write(chunk)
                    progress.update(task_id, advance=len(chunk), refresh=True)
    except requests.exceptions.ConnectionError:
        output_file_path.unlink()
        raise requests.exceptions.ConnectionError(
            f"Could not download file from {url}"
        )
class RichLogger(Logger):
    def __init__(self) -> None:
        self.console = autogoal.logging.console()
        self.logger = autogoal.logging.logger()

    def begin(self, generations, pop_size):
        self.progress = Progress(console=self.console)
        self.pop_counter = self.progress.add_task("Generation", total=pop_size)
        self.total_counter = self.progress.add_task("Overall", total=pop_size * generations)
        self.progress.start()
        self.console.rule("Search starting", style="blue")

    def sample_solution(self, solution):
        self.progress.advance(self.pop_counter)
        self.progress.advance(self.total_counter)
        self.console.rule("Evaluating pipeline")
        self.console.print(repr(solution))

    def eval_solution(self, solution, fitness):
        self.console.print(Panel(f"📈 Fitness=[blue]{fitness:.3f}"))

    def error(self, e: Exception, solution):
        self.console.print(f"⚠️[red bold]Error:[/] {e}")

    def start_generation(self, generations, best_fn):
        # Merged from two duplicate definitions: announce the new generation
        # and reset the per-generation counter.
        self.console.rule(
            f"New generation - Remaining={generations} - Best={best_fn or 0:.3f}"
        )
        self.progress.update(self.pop_counter, completed=0)

    def update_best(self, new_best, new_fn, previous_best, previous_fn):
        self.console.print(
            Panel(
                f"🔥 Best improved from [red bold]{previous_fn or 0:.3f}[/] to [green bold]{new_fn:.3f}[/]"
            ))

    def end(self, best, best_fn):
        self.console.rule("Search finished")
        self.console.print(repr(best))
        self.console.print(Panel(f"🌟 Best=[green bold]{best_fn or 0:.3f}"))
        self.progress.stop()
        self.console.rule("Search finished", style="red")