def test_justify_renderable_left():
    """A panel printed with justify="left" sits flush against the left edge."""
    console = Console(
        file=io.StringIO(),
        force_terminal=True,
        width=10,
        legacy_windows=False,
        _environ={},
    )
    console.print(Panel("FOO", expand=False, padding=0), justify="left")
    # BUG FIX: the expected string contained mojibake ("â•â”€â”€â”€â•®") —
    # the UTF-8 box-drawing characters re-decoded as Latin-1. Restored to
    # the intended characters, consistent with the center-justified test.
    # NOTE(review): trailing padding kept exactly as it appeared — confirm
    # against a live run (a width-10 console may pad lines with spaces).
    assert console.file.getvalue() == "╭───╮ \n│FOO│ \n╰───╯ \n"
def test_justify_renderable_center():
    """A panel printed with justify="center" is padded on both sides."""
    out = io.StringIO()
    console = Console(
        file=out,
        force_terminal=True,
        width=10,
        legacy_windows=False,
        _environ={},
    )
    panel = Panel("FOO", expand=False, padding=0)
    console.print(panel, justify="center")
    expected = " ╭───╮ \n │FOO│ \n ╰───╯ \n"
    assert out.getvalue() == expected
def test_save_svg():
    """Saving a recorded console to SVG writes the expected markup.

    NOTE(review): another test with this exact name appears later in the
    file; pytest only collects the last definition, so one of the two
    should be renamed.
    """
    console = Console(record=True, width=100)
    console.print(
        "[b red on blue reverse]foo[/] [blink][link=https://example.org]Click[/link]"
    )
    # Consistency fix: substitute the hard-coded code hash into the
    # template, as test_export_svg and the sibling test_save_svg do.
    # (A no-op if EXPECTED_SVG carries no "${SVG_HASH}" placeholder.)
    expected_svg = EXPECTED_SVG.replace("${SVG_HASH}", "857433718")
    with tempfile.TemporaryDirectory() as path:
        export_path = os.path.join(path, "example.svg")
        console.save_svg(export_path)
        with open(export_path, "rt") as svg_file:
            assert svg_file.read() == expected_svg
def test_no_color():
    """With no_color=True only non-color attributes (bold) are emitted."""
    console = Console(
        file=io.StringIO(),
        color_system="truecolor",
        force_terminal=True,
        no_color=True,
    )
    console.print("[bold magenta on red]FOO")
    result = console.file.getvalue()
    print(repr(result))
    assert result == "\x1b[1mFOO\x1b[0m\n"
def cli_match(cfg, index_name, q):
    """Print every object in the named index matching query *q*."""
    engine = migrate.migrate(cfg)
    # lookup the table class from the schema
    idx = index.Index.lookup(engine, index_name)
    try:
        for obj in query.match(engine, idx, q):
            console.print(obj)
    except AssertionError:
        console.log(f'Index {index_name} is not indexed by value!')
def cli_all(cfg, index_name):
    """Dump every record stored under the index's S3 prefix as JSON lines."""
    engine = migrate.migrate(cfg)
    # lookup the table class from the schema
    idx = index.Index.lookup(engine, index_name)
    # read all records
    reader = query.fetch_all(cfg.s3_bucket, idx.s3_prefix)
    for rec in reader.records:
        console.print(orjson.dumps(rec).decode('utf-8'))
def cli_query(cfg, index_name, q):
    """Run query *q* against the named index and dump matching records."""
    engine = migrate.migrate(cfg)
    idx = index.Index.lookup(engine, index_name)
    # query the index
    reader = query.fetch(cfg, engine, idx, q)
    # dump all the records
    for rec in reader.records:
        console.print(orjson.dumps(rec).decode('utf-8'))
def create_key_mgr(self, keys):
    """Build, sign, write, and verify the "key_mgr" delegation metadata.

    ``keys`` maps role names ("key_mgr", "pkg_mgr") to lists of key dicts
    with hex "private"/"public" fields.  Writes ``key_mgr.json`` into
    ``self.folder``, then verifies it against the existing ``1.root.json``
    in the same folder.  Returns the signed key_mgr metadata dict.
    Raises ValueError if root metadata lacks the expected delegations.
    """
    # Signing key for the key_mgr role itself.
    private_key_key_mgr = cct_common.PrivateKey.from_hex(
        keys["key_mgr"][0]["private"])
    # Public keys that key_mgr will delegate to for the pkg_mgr role.
    pkg_mgr_pub_keys = [k["public"] for k in keys["pkg_mgr"]]
    key_mgr = cct_metadata_construction.build_delegating_metadata(
        metadata_type="key_mgr",  # 'root' or 'key_mgr'
        delegations={
            "pkg_mgr": {
                "pubkeys": pkg_mgr_pub_keys,
                "threshold": 1
            }
        },
        version=1,
        # timestamp default: now
        # expiration default: now plus root expiration default duration
    )
    key_mgr = cct_signing.wrap_as_signable(key_mgr)
    # sign dictionary in place
    cct_signing.sign_signable(key_mgr, private_key_key_mgr)
    key_mgr_serialized = cct_common.canonserialize(key_mgr)
    with open(self.folder / "key_mgr.json", "wb") as fobj:
        fobj.write(key_mgr_serialized)
    # let's run a verification: load both files back and check that root
    # actually delegates to key_mgr before verifying the signature chain.
    root_metadata = cct_common.load_metadata_from_file(self.folder / "1.root.json")
    key_mgr_metadata = cct_common.load_metadata_from_file(self.folder / "key_mgr.json")
    cct_common.checkformat_signable(root_metadata)
    if "delegations" not in root_metadata["signed"]:
        raise ValueError('Expected "delegations" entry in root metadata.')
    root_delegations = root_metadata["signed"][
        "delegations"]  # for brevity
    cct_common.checkformat_delegations(root_delegations)
    if "key_mgr" not in root_delegations:
        raise ValueError(
            'Missing expected delegation to "key_mgr" in root metadata.')
    cct_common.checkformat_delegation(root_delegations["key_mgr"])
    # Doing delegation processing.
    cct_authentication.verify_delegation("key_mgr", key_mgr_metadata,
                                         root_metadata)
    console.print(
        "[green]Success: key mgr metadata verified based on root metadata."
    )
    return key_mgr
def _print_error_table(self, data_results=None, label_results=None):
    """Render failed data/label uploads as paginated rich tables.

    ``data_results`` / ``label_results`` map file names to error info, or
    are None when that kind of upload was not attempted.  Shows 10 rows
    per page and waits for a key press between pages ('q' quits early).
    """
    # Merge both result dicts into one {file: {'data':..., 'label':...}} map.
    results = {}
    if isinstance(data_results, dict):
        for key in data_results:
            results[key] = {'data': data_results[key], 'label': None}
    if isinstance(label_results, dict):
        for key in label_results:
            if key in results:
                results[key]['label'] = label_results[key]
            else:
                results[key] = {'label': label_results[key], 'data': None}
    if not next(iter(results), None):
        return  # nothing failed — no table to show
    console.print('\n[b red]** Error Table **[/b red]')
    page = 1
    page_length = math.ceil(len(results) / 10)
    while True:
        table = rich.table.Table(show_header=True, header_style="bold magenta")
        table.add_column("FILE NAME")
        if isinstance(data_results, dict):
            table.add_column("DATA UPLOAD")
        if isinstance(label_results, dict):
            table.add_column("LABEL UPLOAD")
        # Consume up to 10 entries from the merged map for this page.
        for _ in range(10):
            key = next(iter(results), None)
            if not key:
                break
            if isinstance(data_results, dict) and isinstance(label_results, dict):
                data = results[key]['data']
                label = results[key]['label']
                table.add_row(key, f"{data if data else '-'}",
                              f"{label if label else '-'}")
            elif isinstance(data_results, dict):
                data = results[key]['data']
                table.add_row(key, f"{data if data else '-'}")
            else:
                label = results[key]['label']
                table.add_row(key, f"{label if label else '-'}")
            del results[key]
        console.print(table)
        if not next(iter(results), None):
            break
        else:
            click.echo(f'Press any button to continue to the next page ({page}/{page_length}). Otherwise press ‘Q’ to quit.', nl=False)
            key = click.getchar()
            click.echo()
            if key == 'q' or key == 'Q':
                break
            # BUG FIX: the page counter was never advanced, so the prompt
            # always reported page 1.
            page += 1
    console.log('[b]Check the log file for more details[/b]')
    console.log(f'- {simple_logger.handlers[0].baseFilename}')
    console.log(f'- {logger.handlers[0].baseFilename}')
def test_screen():
    """Entering/leaving the alternate screen emits the right control codes."""
    console = Console(
        color_system=None, force_terminal=True, force_interactive=True, _environ={}
    )
    with console.capture() as capture:
        with console.screen():
            console.print("Don't panic")
    result = capture.get()
    print(repr(result))
    assert result == "\x1b[?1049h\x1b[H\x1b[?25lDon't panic\n\x1b[?1049l\x1b[?25h"
def test_export_svg():
    """Exporting a recorded console yields the expected SVG document."""
    console = Console(record=True, width=100)
    console.print(
        "[b red on blue reverse]foo[/] [blink][link=https://example.org]Click[/link]"
    )
    # hard-coded here after the 1st time we ran this test
    svg_main_code_hash = "857433718"
    svg = console.export_svg()
    assert svg == EXPECTED_SVG.replace("${SVG_HASH}", svg_main_code_hash)
def print_request_headers(request: httpcore.Request, http2: bool = False) -> None:
    """Pretty-print the outgoing request headers, then an empty spacer block."""
    console = rich.console.Console()
    http_text = format_request_headers(request, http2=http2)
    # Render the headers, then an empty Syntax as a trailing separator.
    for text in (http_text, ""):
        syntax = rich.syntax.Syntax(text, "http", theme="ansi_dark", word_wrap=True)
        console.print(syntax)
def print_response_headers(
    http_version: bytes,
    status: int,
    reason_phrase: typing.Optional[bytes],
    headers: typing.List[typing.Tuple[bytes, bytes]],
) -> None:
    """Pretty-print the received response headers, then an empty spacer block."""
    console = rich.console.Console()
    http_text = format_response_headers(http_version, status, reason_phrase, headers)
    # Render the headers, then an empty Syntax as a trailing separator.
    for text in (http_text, ""):
        syntax = rich.syntax.Syntax(text, "http", theme="ansi_dark", word_wrap=True)
        console.print(syntax)
def test_save_svg():
    """save_svg writes the same SVG markup that export_svg would produce."""
    console = Console(record=True, width=100)
    console.print(
        "[b red on blue reverse]foo[/] [blink][link=https://example.org]Click[/link]"
    )
    expected_svg = EXPECTED_SVG.replace("${SVG_HASH}", "857433718")
    with tempfile.TemporaryDirectory() as path:
        export_path = os.path.join(path, "example.svg")
        console.save_svg(export_path)
        with open(export_path, "rt") as svg_file:
            saved = svg_file.read()
        assert saved == expected_svg
def sign_repodata(self, repodata_fn, keys):
    """Copy repodata into this repo's folder and sign it with the pkg_mgr key."""
    target_folder = self.folder / repodata_fn.parent.name
    if not target_folder.exists():
        target_folder.mkdir()
    final_fn = target_folder / repodata_fn.name
    print("copy", repodata_fn, final_fn)
    shutil.copyfile(repodata_fn, final_fn)
    # Sign every entry in place with the first pkg_mgr private key.
    pkg_mgr_key = keys["pkg_mgr"][0]["private"]
    cct_signing.sign_all_in_repodata(str(final_fn), pkg_mgr_key)
    console.print(f"[green]Signed [bold]{final_fn}[/bold]")
def _get_workspace_conf(self):
    """Read the '.workspace' file and return the project config, or None.

    Returns None (after printing a message) when the workspace has not
    been initiated or when the file cannot be read/parsed.
    """
    workspace_conf_path = '.workspace'
    if not os.path.isfile(workspace_conf_path):
        console.print("Workspace is not initiated. 'spb init' first.")
        return None
    try:
        # FIX: use a context manager so the handle is always closed (the
        # original leaked the file object), and catch Exception instead of
        # a bare except (which also swallowed KeyboardInterrupt/SystemExit).
        with open(workspace_conf_path, 'r') as f:
            return get_project_config(f.readline())
    except Exception:
        console.print("Error while reading workspace configuration. Try again")
        return None
def test_screen_update_class():
    """ScreenUpdate stores its origin and renders cursor-positioned segments."""
    screen_update = ScreenUpdate([[Segment("foo")], [Segment("bar")]], 5, 10)
    assert (screen_update.x, screen_update.y) == (5, 10)
    console = Console(force_terminal=True)
    console.begin_capture()
    console.print(screen_update)
    result = console.end_capture()
    print(repr(result))
    expected = "\x1b[11;6Hfoo\x1b[12;6Hbar"
    assert result == expected
def _execute(state: State, p: ast.Print):
    """Evaluate a print statement's value and display its representation."""
    # TODO Can be done better. Maybe cast to ReprText?
    inst = evaluate(state, p.value)
    if inst.type <= T.string:
        repr_ = cast_to_python(state, inst)
    else:
        repr_ = inst.repr(state)
    # Rich tables need a rich console to render; everything else is plain.
    if not isinstance(repr_, rich.table.Table):
        print(repr_)
    else:
        rich.console.Console().print(repr_)
def menu_or_quit(self):
    """Prompt for '!m' (menu) or '!q' (quit); re-prompt on anything else."""
    option = console.input(
        "[slate_blue1]\nEnter '!m' for menu or '!q' for exit: [/slate_blue1]"
    ).lower()
    print("")
    if option == "!q":
        exit()
    if option == "!m":
        return self.menu()
    console.print("\n[red3][INVALID][/red3]\n")
    return self.menu_or_quit()
def init_project(self, directory_path, project):
    """Create a workspace directory containing a '.workspace' marker file.

    Refuses to overwrite an existing directory; the marker stores the
    project name and id separated by a tab.
    """
    if os.path.isdir(directory_path):
        # FIX: typo "whilte" -> "while" in the user-facing message.
        console.print(
            "Error while initiating project. directory already exists. Try again"
        )
        return
    os.mkdir(directory_path)
    # FIX: context manager guarantees the file closes even if the write fails.
    with open(f"{directory_path}/.workspace", 'w') as f:
        f.write(f"{project.name}\t{project.id}")
    console.print(
        f"Workspace '{directory_path}' for project '{project.name}' has been created."
    )
def dump(console, client, parser) -> None:
    """Dump all entries for the month given in 'date'."""
    # Blank line above and below the rule between day/week groups.
    separator = rich.padding.Padding(rich.rule.Rule(), (1, 0))
    # Page the output only when the user asked for it.
    pager = console.pager(
        styles=True) if parser.pager else contextlib.nullcontext()
    if parser.dump_mode == parser.DumpMode.YEAR:
        entries = client.get_entries_year(parser.dump)
    elif parser.dump_mode == parser.DumpMode.MONTH:
        entries = client.get_entries_month(parser.dump)
    else:
        assert False  # unreachable
    # Apply the optional project and tag filters (tags must all be present).
    filtered = [
        entry for entry in entries
        if (parser.project is None or entry.project == parser.project) and (
            not parser.tags or set(parser.tags).issubset(entry.tags))
    ]
    with pager:
        print_header(console, client, parser)
        # Group by calendar day for month dumps, by week number ("%W") for
        # year dumps.  groupby needs consecutive keys, hence reversed()
        # to put entries in chronological order first.
        for key, grouped_entries in itertools.groupby(
                reversed(filtered),
                key=lambda e: (e.start.date()
                               if parser.dump_mode == parser.DumpMode.MONTH
                               else e.start.date().strftime("%W")),
        ):
            if parser.dump_mode == parser.DumpMode.MONTH:
                title = key.strftime(DAY_TITLE_FORMAT)
            else:
                title = f"week {key}"
            print_entries(
                console=console,
                title=title,
                entries=reversed(list(grouped_entries)),
                debug=parser.debug,
                center=True,
                add_date=parser.dump_mode == parser.DumpMode.YEAR,
            )
            console.print(separator)
        # FIXME this feels a bit hackish - can we split print_entries?
        print_entries(
            console=console,
            title="",
            entries=filtered,
            debug=False,
            only_totals=True,
            center=True,
        )
def verify_key(self):
    """Prompt for the secret key; open the menu on success, print error otherwise."""
    key = console.input("\n[deep_sky_blue1]ENTER Key: [/deep_sky_blue1]",
                        password=True)
    with console.status("[bold green]Verifying...") as status:
        sleep(0.5)  # brief pause so the spinner is visible
        if key == SECRET_KEY_MAIN:
            # FIX: typo "SUCCESSFULL" -> "SUCCESSFUL" in the user-facing message.
            console.print(
                "\n[bright_green][VERIFICATION SUCCESSFUL][/bright_green]\n"
            )
            status.stop()
            return self.menu()
        else:
            console.print("\n[red][KEY ERROR]\n\n[EXITING]\n[/red]")
def view(self):
    """Show all stored password rows (decrypted) after a master-password check."""
    option = console.input(
        "\n[deep_sky_blue1]Enter you master password to continue: [/deep_sky_blue1]",
        password=True)
    # NOTE(review): this header prints before the password is verified —
    # confirm whether it should move inside the success branch.
    console.print("\n[pale_turquoise1]Your password(s)[/pale_turquoise1]")
    if option == SECRET_KEY_MAIN:
        with open(self.file_path, mode="r") as file:
            csvreader = csv.DictReader(file)
            count = 0
            for row in csvreader:
                if row != {}:
                    # Decrypt every column of the row in place before display.
                    for col in row.keys():
                        row[col] = self.decrypt_data(row[col])
                    console.print(
                        f"\n[dark_sea_green2]>[/dark_sea_green2] [pale_green1]{row}[/pale_green1]"
                    )
                    count += 1
            console.print("\n[aquamarine1]# Total:[/aquamarine1]", count, "\n")
            return app.menu_or_quit()
    else:
        console.print("\n[red3][WRONG PASSWORD]\n[/red3]")
        # Typing the escape sequence returns to the menu instead of retrying.
        if option == self.escape_loop:
            return app.menu()
        return self.view()
def _print_rich_exception(console, e):
    """Render exception *e* (with pinpointed source refs) to *console*."""
    console.print('[bold]Exception traceback:[/bold]')
    for ref in e.text_refs:
        # A missing ref is shown as a placeholder line.
        lines = ref.get_pinpoint_text(rich=True) if ref else ['???']
        for line in lines:
            console.print(line)
    console.print()
    console.print(rich.text.Text('%s: %s' % (e.type, e.message)))
def _sequencial_executor(tasks: List[_Task]) -> bool:
    """Run *tasks* one after another; return True if any task raised.

    Each task's outcome is rendered as a colored panel ('success'/'failed')
    in a summary column printed at the end.
    """
    console = rich.console.Console()
    result_columns = rich.columns.Columns()
    fail = False
    for task in tasks:
        console.print(f'[bold dark_orange]> executing {task.name}')
        result = rich.panel.Panel('', title=f'[bold]{task.name}')
        try:
            task.func()
        except Exception:
            exc_type, exc_value, tb = sys.exc_info()
            assert exc_type and exc_value
            console.print(
                rich.traceback.Traceback.from_exception(
                    exc_type,
                    exc_value,
                    tb.tb_next if tb else tb,
                ))
            console.print(f"[bold red]error executing '{task.name}'!")
            result.renderable = 'failed'
            result.style = 'red'
            # BUG FIX: the failure flag was previously set on the success
            # path, inverting the function's return value.
            fail = True
        else:
            result.renderable = 'success'
            result.style = 'green'
        result_columns.add_renderable(result)
    console.print(result_columns)
    return fail
def test_xaxis_console_render() -> None:
    """It renders an x axis."""
    width = 80
    console = rich.console.Console(file=io.StringIO(), width=width)
    x_axis = axis.XAxis(
        min_data=15, max_data=150, tick_padding=3, min_tick_margin=2, width=width
    )
    console.print(x_axis)
    output = console.file.getvalue()  # type: ignore[attr-defined]
    expected = (
        " ━━━┳━━━━━━━━┳━━━━━━━━┳━━━━━━━━┳━━━━━━━━┳━━━━━━━━┳━━━━━━━━┳━━━"
        "━━━━━┳━━━━━━━━┳━━━\n 0.00 20.00 40.00 60.00"
        " 80.00 100.00 120.00 140.00 160.00 \n"
    )
    assert output == expected
def print_header(console, client, parser) -> None:
    """Print an overview of configured filters."""
    rows = [("[yellow]Workspace: [/yellow]", client.workspace_name)]
    if parser.project is not None:
        # FIXME get project color?
        rows.append(("[cyan]Project: [/cyan]", parser.project))
    if parser.tags:
        rows.append(("[blue]Tags: [/blue]", ", ".join(parser.tags)))
    grid = rich.table.Table.grid()
    grid.add_column()
    grid.add_column()
    for label, value in rows:
        grid.add_row(label, value)
    separator = rich.padding.Padding(rich.rule.Rule(), (0, 0, 1, 0))
    console.print(grid)
    console.print(separator)
def cli_list(cfg):
    """Show every known index with its build status, S3 prefix, and schema."""
    engine = migrate.migrate(cfg)
    indexes = index.Index.list_indexes(engine, False)
    table = rich.table.Table(title='Indexes')
    for heading in ('Last Built', 'Index', 'S3 Prefix', 'Schema'):
        table.add_column(heading)
    for idx in sorted(indexes, key=lambda idx: idx.name):
        built = f'[green]{idx.built}[/]' if idx.built else '[red]Not built[/]'
        table.add_row(built, idx.name, idx.s3_prefix, str(idx.schema))
    console.print(table)
def upload_data(self, project, dataset_name, directory_path, include_label, is_forced):
    """Upload videos (and optionally labels) under *directory_path* to a dataset.

    Prompts for confirmation unless *is_forced*.  Uploads run in a process
    pool; per-file errors are collected into shared dicts and reported via
    a success summary and an error table.
    """
    video_paths = recursive_glob_video_paths(directory_path)
    if not is_forced:
        if not click.confirm(f"Uploading {len(video_paths)} data and {len(recursive_glob_label_files(directory_path)) if include_label else 0 } labels to dataset '{dataset_name}' under project '{project.name}'. Proceed?"):
            return
    asset_videos = [
        {'files': video_paths[key], 'data_key': key, 'dataset': dataset_name}
        for key in video_paths
    ]
    manager = Manager()
    if asset_videos:
        data_results = manager.list([manager.dict()] * len(asset_videos))
        console.print("Uploading data:")
        with Pool(NUM_MULTI_PROCESS) as p:
            # BUG FIX: the label_interface list was sized with
            # len(labels_path), which is not defined until the label-upload
            # phase below — a NameError at runtime.  It must match the
            # number of assets being uploaded.
            list(tqdm.tqdm(
                p.imap(_upload_asset,
                       zip([project.id] * len(asset_videos),
                           [project.label_interface] * len(asset_videos),
                           asset_videos,
                           data_results)),
                total=len(asset_videos)))
    else:
        data_results = [{}]
    label_results = None
    if include_label:
        labels_path = recursive_glob_label_files(directory_path)
        console.print("Uploading labels:")
        if labels_path:
            label_results = manager.list([manager.dict()] * len(labels_path))
            with Pool(NUM_MULTI_PROCESS) as p:
                list(tqdm.tqdm(
                    p.imap(_update_label,
                           zip(labels_path,
                               [project.id] * len(labels_path),
                               [project.label_interface] * len(labels_path),
                               [dataset_name] * len(labels_path),
                               label_results)),
                    total=len(labels_path)))
        else:
            label_results = [{}]
    console.print('\n[b blue]** Result Summary **[/b blue]')
    success_data_count = len(asset_videos) - len(data_results[0])
    data_success_ratio = round(success_data_count / len(asset_videos) * 100, 2) if len(data_results[0]) != 0 else 100
    console.print(f'Successful upload of {success_data_count} out of {len(asset_videos)} data. ({data_success_ratio}%) - [b red]{len(data_results[0])} ERRORS[/b red]')
    if include_label:
        success_label_count = len(labels_path) - len(label_results[0])
        label_success_ratio = round(success_label_count / len(labels_path) * 100, 2) if len(label_results[0]) != 0 else 100
        console.print(f'Successful upload of {success_label_count} out of {len(labels_path)} labels. ({label_success_ratio}%) - [b red]{len(label_results[0])} ERRORS[/b red]')
        self._print_error_table(dict(data_results[0]), dict(label_results[0]))
    else:
        # BUG FIX: this call was duplicated in the original else-branch.
        self._print_error_table(data_results=dict(data_results[0]))
def print_response(response: httpx.Response) -> None:
    """Pretty-print a response body, syntax-highlighted when a lexer is known."""
    console = rich.console.Console()
    lexer_name = get_lexer_for_response(response)
    if not lexer_name:  # pragma: nocover
        console.print(response.text)
        return
    text = response.text
    if lexer_name.lower() == "json":
        # Re-indent valid JSON bodies; fall back to the raw text otherwise.
        try:
            text = json.dumps(response.json(), indent=4)
        except ValueError:  # pragma: nocover
            pass
    syntax = rich.syntax.Syntax(text, lexer_name, theme="ansi_dark", word_wrap=True)
    console.print(syntax)