def test_pager() -> None:
    """Check that console.pager() sends the rendered text to the pager."""
    console = Console()
    captured: Optional[str] = None

    def fake_pager(text: str) -> None:
        # Capture hook standing in for the real system pager process.
        nonlocal captured
        captured = text

    system_pager = SystemPager()
    system_pager._pager = fake_pager  # divert output into `captured`

    # Default pager: styles are dropped, plain text goes to the pager.
    with console.pager(system_pager):
        console.print("[bold]Hello World")
    assert captured == "Hello World\n"

    # styles=True / links=False: link markup is stripped from the output
    # (console is not a terminal here, so no ANSI styling is emitted).
    with console.pager(system_pager, styles=True, links=False):
        console.print("[bold link https:/example.org]Hello World")
    assert captured == "Hello World\n"
def dump(console, client, parser) -> None:
    """Dump all entries for the month given in 'date'.

    Fetches a year's or a month's worth of entries depending on
    ``parser.dump_mode``, filters them by project and tags, groups them per
    day (month mode) or per week number (year mode), prints each group —
    optionally through a pager — and finishes with a totals-only summary.
    """
    separator = rich.padding.Padding(rich.rule.Rule(), (1, 0))
    # Page the output only when requested; nullcontext keeps the `with`
    # statement below uniform for the unpaged case.
    pager = console.pager(
        styles=True) if parser.pager else contextlib.nullcontext()
    if parser.dump_mode == parser.DumpMode.YEAR:
        entries = client.get_entries_year(parser.dump)
    elif parser.dump_mode == parser.DumpMode.MONTH:
        entries = client.get_entries_month(parser.dump)
    else:
        # `assert False` would be stripped under `python -O`, leaving
        # `entries` unbound; raise explicitly for the unreachable mode.
        raise ValueError(f"unsupported dump mode: {parser.dump_mode}")
    # Keep an entry when no project filter is set (or it matches) and when
    # no tag filter is set (or all requested tags are present).
    filtered = [
        entry for entry in entries
        if (parser.project is None or entry.project == parser.project) and (
            not parser.tags or set(parser.tags).issubset(entry.tags))
    ]
    with pager:
        print_header(console, client, parser)
        # groupby needs runs of equal keys; entries are iterated reversed,
        # and each group is reversed again to restore chronological order.
        for key, grouped_entries in itertools.groupby(
                reversed(filtered),
                key=lambda e: (e.start.date()
                               if parser.dump_mode == parser.DumpMode.MONTH
                               else e.start.date().strftime("%W")),
        ):
            if parser.dump_mode == parser.DumpMode.MONTH:
                title = key.strftime(DAY_TITLE_FORMAT)
            else:
                title = f"week {key}"
            print_entries(
                console=console,
                title=title,
                entries=reversed(list(grouped_entries)),
                debug=parser.debug,
                center=True,
                add_date=parser.dump_mode == parser.DumpMode.YEAR,
            )
            console.print(separator)
        # FIXME this feels a bit hackish - can we split print_entries?
        print_entries(
            console=console,
            title="",
            entries=filtered,
            debug=False,
            only_totals=True,
            center=True,
        )
def main() -> NoReturn:
    """The main entry point"""
    args = parser.parse_args()
    # NOTE(review): annotated NoReturn, but the help path and the paged/
    # unpaged print paths fall off the end and return None — confirm no
    # caller relies on this function always exiting.
    if not args.query:
        parser.print_help()
    else:
        sites = set(args.sites)
        try:
            with console.status(f"Searching {', '.join(sites)}\n"):
                returned_data = {
                    site: sync_search(args.query, search_on_site=site)
                    for site in sites
                }
        except errors.RecaptchaError:
            # Let recaptcha failures propagate past the generic handler
            # below; bare `raise` preserves the original traceback.
            raise
        except errors.StackSearchBaseError as error:
            console.print(rich.markup.escape(repr(error)), style="bold red")
            sys.exit(1)
        if args.raw:
            print(json.dumps(returned_data))
            sys.exit(0)

        def print_questions_and_answers(
                data: Dict[str, Dict[str, List[str]]]) -> None:
            """Render each site's questions and their answers as Markdown."""
            for site, questions in data.items():
                console.rule(f"[bold]Site: [blue]{site}[/]\n\n")
                for question, answers in questions.items():
                    console.rule("Question")
                    console.print(rich.markdown.Markdown(question))
                    console.rule("Answer(s)")
                    if not answers:
                        console.print(
                            "[bold red]There were no answers for this question[/]"
                        )
                    else:
                        for index, answer in enumerate(answers):
                            console.rule(
                                f"Answer [yellow]#{index}[/]", align="left")
                            console.print(rich.markdown.Markdown(answer))

        if args.pager:
            with console.pager(styles=args.pager_colors):
                print_questions_and_answers(returned_data)
        else:
            print_questions_and_answers(returned_data)
def dump(console, client, parser) -> None:
    """Dump all entries for the month given in 'date'."""
    entries = client.get_entries_month(parser.dump)
    separator = rich.padding.Padding(rich.rule.Rule(), (1, 0))
    # Only page when asked to; otherwise fall back to a do-nothing context
    # manager so the `with` block below works either way.
    if parser.pager:
        pager = console.pager(styles=True)
    else:
        pager = contextlib.nullcontext()
    with pager:
        # Walk the entries oldest-first, one group per calendar day.
        chronological = reversed(list(entries))
        for date, day_entries in itertools.groupby(
                chronological, key=lambda entry: entry.start.date()):
            print_entries(
                console,
                date,
                reversed(list(day_entries)),
                debug=parser.debug,
                center=True,
                workspace_name=client.workspace_name,
            )
            console.print(separator)