Example #1
    def get_rich(self):
        out = Group()

        for c in self.certs:
            if c is None:
                out.renderables.append(
                    Panel(
                        "[red]Certificate could not be read. Please check the format.[/red]"
                    ))
                continue

            try:
                if (c.not_valid_after -
                        datetime.now()).days < self.RED_THRESHOLD_DAYS:
                    color = "red"
                else:
                    color = "green"

                fp = c.fingerprint(hashes.SHA1()).hex(":").upper()

                out.renderables.append(
                    f"{c.subject.rfc4514_string()}\n  [{color}]valid until {c.not_valid_after}[/{color}]\n  {fp}"
                )
            except Exception as e:
                out.renderables.append(
                    Panel(f"[red](unable to decode: {e})[/red]"))
        return out
Example #2
    def get_renderable(self):
        if self.tasks == {}:
            return None
        else:
            return Panel(
                Group(*(display.get_renderable()
                        for display in self.tasks.values())))
Example #3
    def _update_live(self) -> None:
        elements = []
        if self._crawl_progress.task_ids:
            elements.append(self._crawl_progress)
        if self._download_progress.task_ids:
            elements.append(self._download_progress)

        group = Group(*elements)
        self._live.update(group)
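
A self-contained sketch of the same pattern (the bare Live() setup and the progress-bar names below are illustrative assumptions, not part of the excerpt above):

from rich.console import Group
from rich.live import Live
from rich.progress import Progress

crawl_progress = Progress()
download_progress = Progress()

with Live() as live:
    crawl_progress.add_task("crawling", total=10)
    # Only include bars that actually have tasks, mirroring _update_live above.
    elements = [p for p in (crawl_progress, download_progress) if p.task_ids]
    live.update(Group(*elements))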
Example #4
    def __init__(
        self,
        *renderables: "RenderableType",
        style: Optional[StyleType] = None,
        application_mode: bool = False,
    ) -> None:
        from rich.console import Group

        self.renderable = Group(*renderables)
        self.style = style
        self.application_mode = application_mode
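
A minimal sketch of what Group(*renderables) produces when printed; the Console setup below is an illustrative assumption, not part of the excerpt:

from rich.console import Console, Group
from rich.panel import Panel

# Group collapses several renderables into a single renderable, which is
# what the constructor above stores in self.renderable.
console = Console()
console.print(Group(Panel("Hello"), Panel("World"), "plain text"))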
Example #5
File: _terminal.py  Project: AABur/ward
    def get_pretty_comparison_failure(
            self, err: TestAssertionFailure) -> RenderableType:
        diff = self.get_diff(err)
        parts = [
            self.get_operands(err) if not diff else None,
            diff,
        ]
        return Padding(
            Group(*(part for part in parts if part)),
            pad=(0, 0, 1, 2),
        )
Example #6
def timing_stats_expected_panel(expected_table=timing_stats_expected_table):
    return Panel(
        Group(
            Padding(
                "Median: [b]4000.00[/b]ms"
                " [muted]|[/muted] "
                "99th Percentile: [b]5000.00[/b]ms",
                pad=(0, 0, 1, 0),
            ),
            expected_table,
        ),
        title="[b white]3 Slowest Tests[/b white]",
        style="none",
        border_style="rule.line",
    )
Example #7
def rich_live_cm():
    """
    Return Live instance to use as context manager.
    """
    if show_progress_bars():
        pbar_group = Group(
            download_one_progress_bar(),
            download_one_progress_bar_unknown_size(),
            download_all_progress_bar(),
            extensions_progress_bar(),
            easyconfig_progress_bar(),
            status_bar(),
        )
        live = Live(pbar_group)
    else:
        live = DummyRich()

    return live
Example #8
File: _terminal.py  Project: AABur/ward
    def __rich_console__(self, c: Console, co: ConsoleOptions) -> RenderResult:
        def sort_key(r: TestResult) -> float:
            assert r.test.timer, "test must've been run already"
            return r.test.timer.duration

        test_results = sorted(self.all_tests_in_session,
                              key=sort_key,
                              reverse=True)
        grid = Table.grid(padding=(0, 2, 0, 0))
        grid.add_column(justify="right")  # Time taken
        grid.add_column()  # Test ID
        grid.add_column()  # Test description

        for result in test_results[:self.num_tests_to_show]:
            assert result.test.timer, "test must've been run already"
            time_taken_secs = result.test.timer.duration
            time_taken_millis = time_taken_secs * 1000
            test_id = format_test_id(result)
            description = result.test.description
            grid.add_row(
                f"[b]{time_taken_millis:.0f}[/b]ms",
                Text(test_id, style="muted"),
                description,
            )

        num_slowest_displayed = min(len(self.all_tests_in_session),
                                    self.num_tests_to_show)
        panel = Panel(
            Group(
                Padding(
                    f"Median: [b]{self._median_secs * 1000:.2f}[/b]ms"
                    f" [muted]|[/muted] "
                    f"99th Percentile: [b]{self._percentile99_secs * 1000:.2f}[/b]ms",
                    pad=(0, 0, 1, 0),
                ),
                grid,
            ),
            title=f"[b white]{num_slowest_displayed} Slowest Tests[/b white]",
            style="none",
            border_style="rule.line",
        )

        yield panel
Example #9
def make_sponsor_message() -> Panel:
    """Some example content."""
    sponsor_message = Table.grid(padding=1)
    sponsor_message.add_column(style="green", justify="right")
    sponsor_message.add_column(no_wrap=True)
    sponsor_message.add_row(
        "Sponsor me",
        "[u blue link=https://github.com/sponsors/willmcgugan]https://github.com/sponsors/willmcgugan",
    )
    sponsor_message.add_row(
        "Buy me a :coffee:",
        "[u blue link=https://ko-fi.com/willmcgugan]https://ko-fi.com/willmcgugan",
    )
    sponsor_message.add_row(
        "Twitter",
        "[u blue link=https://twitter.com/willmcgugan]https://twitter.com/willmcgugan",
    )
    sponsor_message.add_row(
        "Blog",
        "[u blue link=https://www.willmcgugan.com]https://www.willmcgugan.com")

    intro_message = Text.from_markup(
        """Consider supporting my work via Github Sponsors (ask your company / organization), or buy me a coffee to say thanks. - Will McGugan"""
    )

    message = Table.grid(padding=1)
    message.add_column()
    message.add_column(no_wrap=True)
    message.add_row(intro_message, sponsor_message)

    message_panel = Panel(
        Align.center(
            Group(intro_message, "\n", Align.center(sponsor_message)),
            vertical="middle",
        ),
        box=box.ROUNDED,
        padding=(1, 2),
        title="[b red]Thanks for trying out Rich!",
        border_style="bright_blue",
    )
    return message_panel
Example #10
File: dbgcui.py  Project: profiles/qiling
def make_memory_panel(debugger) -> Panel:
    addr = 0
    mem_bytes = debugger.executor.vm_context.memory_read_bytes(0, 16*8)
    byte = ''.join(['%02X' % b for b in mem_bytes])
    res = hexdump(byte, start=int(addr), to_list=True)
    mem_table = Table.grid(padding=0)
    
    for i in res:
        mem_table.add_row(i)

    memory_panel = Panel(
        Align.center(
            Group('', "\n", Align.center(mem_table)),
            vertical="top",
        ),
        box=box.ROUNDED,
        padding=(0, 1),
        title="[b red]Memory",
        border_style="bright_blue",
    )
    return memory_panel
Example #11
File: __main__.py  Project: baojd42/rich
def make_test_card() -> Table:
    """Get a renderable that demonstrates a number of features."""
    table = Table.grid(padding=1, pad_edge=True)
    table.title = "Rich features"
    table.add_column("Feature",
                     no_wrap=True,
                     justify="center",
                     style="bold red")
    table.add_column("Demonstration")

    color_table = Table(
        box=None,
        expand=False,
        show_header=False,
        show_edge=False,
        pad_edge=False,
    )
    color_table.add_row(
        # "[bold yellow]256[/] colors or [bold green]16.7 million[/] colors [blue](if supported by your terminal)[/].",
        ("✓ [bold green]4-bit color[/]\n"
         "✓ [bold blue]8-bit color[/]\n"
         "✓ [bold magenta]Truecolor (16.7 million)[/]\n"
         "✓ [bold yellow]Dumb terminals[/]\n"
         "✓ [bold cyan]Automatic color conversion"),
        ColorBox(),
    )

    table.add_row("Colors", color_table)

    table.add_row(
        "Styles",
        "All ansi styles: [bold]bold[/], [dim]dim[/], [italic]italic[/italic], [underline]underline[/], [strike]strikethrough[/], [reverse]reverse[/], and even [blink]blink[/].",
    )

    lorem = "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Quisque in metus sed sapien ultricies pretium a at justo. Maecenas luctus velit et auctor maximus."
    lorem_table = Table.grid(padding=1, collapse_padding=True)
    lorem_table.pad_edge = False
    lorem_table.add_row(
        Text(lorem, justify="left", style="green"),
        Text(lorem, justify="center", style="yellow"),
        Text(lorem, justify="right", style="blue"),
        Text(lorem, justify="full", style="red"),
    )
    table.add_row(
        "Text",
        Group(
            Text.from_markup(
                """Word wrap text. Justify [green]left[/], [yellow]center[/], [blue]right[/] or [red]full[/].\n"""
            ),
            lorem_table,
        ),
    )

    def comparison(renderable1: RenderableType,
                   renderable2: RenderableType) -> Table:
        table = Table(show_header=False, pad_edge=False, box=None, expand=True)
        table.add_column("1", ratio=1)
        table.add_column("2", ratio=1)
        table.add_row(renderable1, renderable2)
        return table

    table.add_row(
        "Asian\nlanguage\nsupport",
        ":flag_for_china:  该库支持中文,日文和韩文文本!\n:flag_for_japan:  ライブラリは中国語、日本語、韓国語のテキストをサポートしています\n:flag_for_south_korea:  이 라이브러리는 중국어, 일본어 및 한국어 텍스트를 지원합니다",
    )

    markup_example = (
        "[bold magenta]Rich[/] supports a simple [i]bbcode[/i]-like [b]markup[/b] for [yellow]color[/], [underline]style[/], and emoji! "
        ":+1: :apple: :ant: :bear: :baguette_bread: :bus: ")
    table.add_row("Markup", markup_example)

    example_table = Table(
        show_edge=False,
        show_header=True,
        expand=False,
        row_styles=["none", "dim"],
        box=box.SIMPLE,
    )
    example_table.add_column("[green]Date", style="green", no_wrap=True)
    example_table.add_column("[blue]Title", style="blue")
    example_table.add_column(
        "[cyan]Production Budget",
        style="cyan",
        justify="right",
        no_wrap=True,
    )
    example_table.add_column(
        "[magenta]Box Office",
        style="magenta",
        justify="right",
        no_wrap=True,
    )
    example_table.add_row(
        "Dec 20, 2019",
        "Star Wars: The Rise of Skywalker",
        "$275,000,000",
        "$375,126,118",
    )
    example_table.add_row(
        "May 25, 2018",
        "[b]Solo[/]: A Star Wars Story",
        "$275,000,000",
        "$393,151,347",
    )
    example_table.add_row(
        "Dec 15, 2017",
        "Star Wars Ep. VIII: The Last Jedi",
        "$262,000,000",
        "[bold]$1,332,539,889[/bold]",
    )
    example_table.add_row(
        "May 19, 1999",
        "Star Wars Ep. [b]I[/b]: [i]The phantom Menace",
        "$115,000,000",
        "$1,027,044,677",
    )

    table.add_row("Tables", example_table)

    code = '''\
def iter_last(values: Iterable[T]) -> Iterable[Tuple[bool, T]]:
    """Iterate and generate a tuple with a flag for last value."""
    iter_values = iter(values)
    try:
        previous_value = next(iter_values)
    except StopIteration:
        return
    for value in iter_values:
        yield False, previous_value
        previous_value = value
    yield True, previous_value'''

    pretty_data = {
        "foo": [
            3.1427,
            (
                "Paul Atreides",
                "Vladimir Harkonnen",
                "Thufir Hawat",
            ),
        ],
        "atomic": (False, True, None),
    }
    table.add_row(
        "Syntax\nhighlighting\n&\npretty\nprinting",
        comparison(
            Syntax(code, "python3", line_numbers=True, indent_guides=True),
            Pretty(pretty_data, indent_guides=True),
        ),
    )

    markdown_example = """\
# Markdown

Supports much of the *markdown*, __syntax__!

- Headers
- Basic formatting: **bold**, *italic*, `code`
- Block quotes
- Lists, and more...
    """
    table.add_row(
        "Markdown",
        comparison("[cyan]" + markdown_example, Markdown(markdown_example)))

    table.add_row(
        "+more!",
        """Progress bars, columns, styled logging handler, tracebacks, etc...""",
    )
    return table
Example #12
def test_end():
    console = Console(width=20, file=StringIO())
    test = Group(Text.from_markup("foo", end=" "), Text.from_markup("bar"))
    console.print(test)
    assert console.file.getvalue() == "foo bar\n"
Example #13
    def __rich_measure__(
        self, console: "Console", options: "ConsoleOptions"
    ) -> Measurement:
        measurement = Measurement.get(console, options, self.renderable)
        return measurement


if __name__ == "__main__":  # pragma: no cover
    from rich.console import Console, Group
    from rich.highlighter import ReprHighlighter
    from rich.panel import Panel

    highlighter = ReprHighlighter()
    console = Console()

    panel = Panel(
        Group(
            Align.left(highlighter("align='left'")),
            Align.center(highlighter("align='center'")),
            Align.right(highlighter("align='right'")),
        ),
        width=60,
        style="on dark_blue",
        title="Algin",
    )

    console.print(
        Align.center(panel, vertical="middle", style="on red", height=console.height)
    )
Example #14
table.add_row("May 25, 2018", "Solo: A Star Wars Story", "$393,151,347")
table.add_row("Dec 15, 2017", "Star Wars Ep. V111: The Last Jedi",
              "$1,332,539,889")
table.add_row("Dec 16, 2016", "Rogue One: A Star Wars Story", "$1,332,439,889")

console.print(table)

from rich.panel import Panel
from rich.text import Text
panel = Panel(Text("Hello", justify="right"))
print(panel)

from rich.console import Group

panel_group = Group(
    Panel("Hello", style="on blue"),
    Panel("World", style="on red"),
)
print(Panel(panel_group))

from rich.padding import Padding
test = Padding("Hello", (0, 4))
print(test)

import time

from rich.live import Live
from rich.table import Table

table = Table()
table.add_column("Row ID")
table.add_column("Description")
Example #15
    style: Optional[Style] = None    
    is_control: bool = False    
"""
    syntax = Syntax(code, "python", theme="monokai", line_numbers=True)

    markdown = Markdown("""\
### example.md
> Hello, World!
> 
> Markdown _all_ the things
""")

    root = Tree("🌲 [b green]Rich Tree", highlight=True, hide_root=True)

    node = root.add(":file_folder: Renderables", guide_style="red")
    simple_node = node.add(":file_folder: [bold yellow]Atomic",
                           guide_style="uu green")
    simple_node.add(Group("📄 Syntax", syntax))
    simple_node.add(Group("📄 Markdown", Panel(markdown, border_style="green")))

    containers_node = node.add(":file_folder: [bold magenta]Containers",
                               guide_style="bold magenta")
    containers_node.expanded = True
    panel = Panel.fit("Just a panel", border_style="red")
    containers_node.add(Group("📄 Panels", panel))

    containers_node.add(Group("📄 [b magenta]Table", table))

    console = Console()
    console.print(root)
Example #16
File: info.py  Project: nf-core/tools
    def generate_module_info_help(self):
        """Take the parsed meta.yml and generate rich help.

        Returns:
            rich renderable
        """

        renderables = []

        # Intro panel
        intro_text = Text()
        if self.local_path:
            intro_text.append(
                Text.from_markup(f"Location: [blue]{self.local_path}\n"))
        elif self.remote_location:
            intro_text.append(
                Text.from_markup(
                    f":globe_with_meridians: Repository: [link=https://github.com/{self.remote_location}]{self.remote_location}[/]\n"
                ))

        if self.meta.get("tools"):
            tools_strings = []
            for tool in self.meta["tools"]:
                for tool_name, tool_meta in tool.items():
                    tools_strings.append(
                        f"[link={tool_meta['homepage']}]{tool_name}")
            intro_text.append(
                Text.from_markup(
                    f":wrench: Tools: {', '.join(tools_strings)}\n",
                    style="dim"))

        if self.meta.get("description"):
            intro_text.append(
                Text.from_markup(
                    f":book: Description: {self.meta['description']}",
                    style="dim"))

        renderables.append(
            Panel(
                intro_text,
                title=f"[bold]Module: [green]{self.module}\n",
                title_align="left",
            ))

        # Inputs
        if self.meta.get("input"):
            inputs_table = Table(expand=True,
                                 show_lines=True,
                                 box=box.MINIMAL_HEAVY_HEAD,
                                 padding=0)
            inputs_table.add_column(":inbox_tray: Inputs")
            inputs_table.add_column("Description")
            inputs_table.add_column("Pattern", justify="right", style="green")
            for input in self.meta["input"]:
                for key, info in input.items():
                    inputs_table.add_row(
                        f"[orange1 on black] {key} [/][dim i] ({info['type']})",
                        Markdown(info["description"]
                                 if info["description"] else ""),
                        info.get("pattern", ""),
                    )

            renderables.append(inputs_table)

        # Outputs
        if self.meta.get("output"):
            outputs_table = Table(expand=True,
                                  show_lines=True,
                                  box=box.MINIMAL_HEAVY_HEAD,
                                  padding=0)
            outputs_table.add_column(":outbox_tray: Outputs")
            outputs_table.add_column("Description")
            outputs_table.add_column("Pattern", justify="right", style="green")
            for output in self.meta["output"]:
                for key, info in output.items():
                    outputs_table.add_row(
                        f"[orange1 on black] {key} [/][dim i] ({info['type']})",
                        Markdown(info["description"]
                                 if info["description"] else ""),
                        info.get("pattern", ""),
                    )

            renderables.append(outputs_table)

        # Installation command
        if self.remote_location:
            cmd_base = "nf-core modules"
            if self.remote_location != "nf-core/modules":
                cmd_base = f"nf-core modules --github-repository {self.remote_location}"
            renderables.append(
                Text.from_markup(
                    f"\n :computer:  Installation command: [magenta]{cmd_base} install {self.module}\n"
                ))

        return Group(*renderables)
Example #17
)
# progress bar for current app (progress in steps)
app_steps_progress = Progress(
    TextColumn(
        "[bold blue]Progress for app {task.fields[name]}: {task.percentage:.0f}%"
    ),
    BarColumn(),
    TextColumn("({task.completed} of {task.total} steps done)"),
)
# overall progress bar
overall_progress = Progress(TimeElapsedColumn(), BarColumn(),
                            TextColumn("{task.description}"))
# group of progress bars;
# some are always visible, others will disappear when progress is complete
progress_group = Panel(
    Group(
        Group(current_app_progress, step_progress, app_steps_progress),
        Panel(overall_progress, box=box.HORIZONTALS),
    ),
    box=box.ASCII,
)

# tuple specifies how long each step takes for that app
step_actions = ("downloading", "configuring", "building", "installing")
apps = [
    ("one", (2, 1, 4, 2)),
    ("two", (1, 3, 8, 4)),
    ("three", (2, 1, 3, 2)),
]

# create overall progress bar
overall_task_id = overall_progress.add_task("", total=len(apps))

# use own live instance as context manager with group of progress bars,
Example #18
    def update(self):
        self.lv.update(Group(self.header, self.stack, self.footer),
                       refresh=True)
Example #19
class YataiClient:
    log_progress = ProgressWrapper(Progress(
        TextColumn("{task.description}"), ))

    spinner_progress = ProgressWrapper(
        Progress(
            TextColumn("  "),
            TimeElapsedColumn(),
            TextColumn("[bold purple]{task.fields[action]}"),
            SpinnerColumn("simpleDots"),
        ))

    transmission_progress = ProgressWrapper(
        Progress(
            TextColumn("[bold blue]{task.description}", justify="right"),
            BarColumn(bar_width=None),
            "[progress.percentage]{task.percentage:>3.1f}%",
            "•",
            DownloadColumn(),
            "•",
            TransferSpeedColumn(),
            "•",
            TimeRemainingColumn(),
        ))

    progress_group = Group(Panel(Group(log_progress, spinner_progress)),
                           transmission_progress)

    @contextmanager
    def spin(self, *, text: str):
        task_id = self.spinner_progress.add_task("", action=text)
        try:
            yield
        finally:
            self.spinner_progress.stop_task(task_id)
            self.spinner_progress.update(task_id, visible=False)

    @inject
    def push_bento(
        self,
        bento: "Bento",
        *,
        force: bool = False,
        model_store: "ModelStore" = Provide[BentoMLContainer.model_store],
    ):
        with Live(self.progress_group):
            upload_task_id = self.transmission_progress.add_task(
                f'Pushing Bento "{bento.tag}"', start=False, visible=False)
            self._do_push_bento(bento,
                                upload_task_id,
                                force=force,
                                model_store=model_store)

    @inject
    def _do_push_bento(
        self,
        bento: "Bento",
        upload_task_id: TaskID,
        *,
        force: bool = False,
        model_store: "ModelStore" = Provide[BentoMLContainer.model_store],
    ):
        yatai_rest_client = get_current_yatai_rest_api_client()
        name = bento.tag.name
        version = bento.tag.version
        if version is None:
            raise BentoMLException(f"Bento {bento.tag} version cannot be None")
        info = bento.info
        model_names = info.models
        with ThreadPoolExecutor(
                max_workers=max(len(model_names), 1)) as executor:

            def push_model(model: "Model"):
                model_upload_task_id = self.transmission_progress.add_task(
                    f'Pushing model "{model.tag}"', start=False, visible=False)
                self._do_push_model(model, model_upload_task_id, force=force)

            futures = executor.map(push_model, (model_store.get(name)
                                                for name in model_names))
            list(futures)
        with self.spin(text=f'Fetching Bento repository "{name}"'):
            bento_repository = yatai_rest_client.get_bento_repository(
                bento_repository_name=name)
        if not bento_repository:
            with self.spin(
                    text=f'Bento repository "{name}" not found, creating now..'
            ):
                bento_repository = yatai_rest_client.create_bento_repository(
                    req=CreateBentoRepositorySchema(name=name, description=""))
        with self.spin(text=f'Try fetching Bento "{bento.tag}" from Yatai..'):
            remote_bento = yatai_rest_client.get_bento(
                bento_repository_name=name, version=version)
        if (not force and remote_bento
                and remote_bento.upload_status == BentoUploadStatus.SUCCESS):
            self.log_progress.add_task(
                f'[bold blue]Push failed: Bento "{bento.tag}" already exists in Yatai'
            )
            return
        if not remote_bento:
            labels: t.List[LabelItemSchema] = [
                LabelItemSchema(key=key, value=value)
                for key, value in info.labels.items()
            ]
            apis: t.Dict[str, BentoApiSchema] = {}
            models = [str(m) for m in info.models]
            with self.spin(
                    text=f'Registering Bento "{bento.tag}" with Yatai..'):
                yatai_rest_client.create_bento(
                    bento_repository_name=bento_repository.name,
                    req=CreateBentoSchema(
                        description="",
                        version=version,
                        build_at=info.creation_time,
                        manifest=BentoManifestSchema(
                            service=info.service,
                            bentoml_version=info.bentoml_version,
                            apis=apis,
                            models=models,
                            size_bytes=calc_dir_size(bento.path),
                        ),
                        labels=labels,
                    ),
                )
        with self.spin(
                text=f'Getting a presigned upload url for "{bento.tag}" ..'):
            remote_bento = yatai_rest_client.presign_bento_upload_url(
                bento_repository_name=bento_repository.name, version=version)
        with io.BytesIO() as tar_io:
            bento_dir_path = bento.path
            if bento_dir_path is None:
                raise BentoMLException(f'Bento "{bento}" path cannot be None')
            with self.spin(
                    text=f'Creating tar archive for Bento "{bento.tag}"..'):
                with tarfile.open(fileobj=tar_io, mode="w:gz") as tar:

                    def filter_(
                        tar_info: tarfile.TarInfo,
                    ) -> t.Optional[tarfile.TarInfo]:
                        if tar_info.path == "./models" or tar_info.path.startswith(
                                "./models/"):
                            return None
                        return tar_info

                    tar.add(bento_dir_path, arcname="./", filter=filter_)
            tar_io.seek(0, 0)
            with self.spin(text=f'Start uploading Bento "{bento.tag}"..'):
                yatai_rest_client.start_upload_bento(
                    bento_repository_name=bento_repository.name,
                    version=version)

            file_size = tar_io.getbuffer().nbytes

            self.transmission_progress.update(upload_task_id,
                                              completed=0,
                                              total=file_size,
                                              visible=True)
            self.transmission_progress.start_task(upload_task_id)

            def io_cb(x: int):
                self.transmission_progress.update(upload_task_id, advance=x)

            wrapped_file = CallbackIOWrapper(
                io_cb,
                tar_io,
                "read",
            )
            finish_req = FinishUploadBentoSchema(
                status=BentoUploadStatus.SUCCESS,
                reason="",
            )
            try:
                resp = requests.put(remote_bento.presigned_upload_url,
                                    data=wrapped_file)
                if resp.status_code != 200:
                    finish_req = FinishUploadBentoSchema(
                        status=BentoUploadStatus.FAILED,
                        reason=resp.text,
                    )
            except Exception as e:  # pylint: disable=broad-except
                finish_req = FinishUploadBentoSchema(
                    status=BentoUploadStatus.FAILED,
                    reason=str(e),
                )
            if finish_req.status is BentoUploadStatus.FAILED:
                self.log_progress.add_task(
                    f'[bold red]Failed to upload Bento "{bento.tag}"')
            with self.spin(text="Submitting upload status to Yatai"):
                yatai_rest_client.finish_upload_bento(
                    bento_repository_name=bento_repository.name,
                    version=version,
                    req=finish_req,
                )
            if finish_req.status != BentoUploadStatus.SUCCESS:
                self.log_progress.add_task(
                    f'[bold red]Failed pushing Bento "{bento.tag}": {finish_req.reason}'
                )
            else:
                self.log_progress.add_task(
                    f'[bold green]Successfully pushed Bento "{bento.tag}"')

    @inject
    def pull_bento(
        self,
        tag: t.Union[str, Tag],
        *,
        force: bool = False,
        bento_store: "BentoStore" = Provide[BentoMLContainer.bento_store],
        model_store: "ModelStore" = Provide[BentoMLContainer.model_store],
    ) -> "Bento":
        with Live(self.progress_group):
            download_task_id = self.transmission_progress.add_task(
                f'Pulling bento "{tag}"', start=False, visible=False)
            return self._do_pull_bento(
                tag,
                download_task_id,
                force=force,
                bento_store=bento_store,
                model_store=model_store,
            )

    @inject
    def _do_pull_bento(
        self,
        tag: t.Union[str, Tag],
        download_task_id: TaskID,
        *,
        force: bool = False,
        bento_store: "BentoStore" = Provide[BentoMLContainer.bento_store],
        model_store: "ModelStore" = Provide[BentoMLContainer.model_store],
    ) -> "Bento":
        try:
            bento = bento_store.get(tag)
            if not force:
                self.log_progress.add_task(
                    f'[bold blue]Bento "{tag}" exists in local model store')
                return bento
            bento_store.delete(tag)
        except NotFound:
            pass
        _tag = Tag.from_taglike(tag)
        name = _tag.name
        version = _tag.version
        if version is None:
            raise BentoMLException(f'Bento "{_tag}" version can not be None')
        yatai_rest_client = get_current_yatai_rest_api_client()
        with self.spin(text=f'Fetching bento "{_tag}"'):
            remote_bento = yatai_rest_client.get_bento(
                bento_repository_name=name, version=version)
        if not remote_bento:
            raise BentoMLException(f'Bento "{_tag}" not found on Yatai')
        with ThreadPoolExecutor(max_workers=max(
                len(remote_bento.manifest.models), 1)) as executor:

            def pull_model(model_tag: Tag):
                model_download_task_id = self.transmission_progress.add_task(
                    f'Pulling model "{model_tag}"', start=False, visible=False)
                self._do_pull_model(
                    model_tag,
                    model_download_task_id,
                    force=force,
                    model_store=model_store,
                )

            futures = executor.map(pull_model, remote_bento.manifest.models)
            list(futures)
        with self.spin(
                text=f'Getting a presigned download url for bento "{_tag}"'):
            remote_bento = yatai_rest_client.presign_bento_download_url(
                name, version)
        url = remote_bento.presigned_download_url
        response = requests.get(url, stream=True)
        if response.status_code != 200:
            raise BentoMLException(
                f'Failed to download bento "{_tag}": {response.text}')
        total_size_in_bytes = int(response.headers.get("content-length", 0))
        block_size = 1024  # 1 Kibibyte
        with NamedTemporaryFile() as tar_file:
            self.transmission_progress.update(download_task_id,
                                              completed=0,
                                              total=total_size_in_bytes,
                                              visible=True)
            self.transmission_progress.start_task(download_task_id)
            for data in response.iter_content(block_size):
                self.transmission_progress.update(download_task_id,
                                                  advance=len(data))
                tar_file.write(data)
            self.log_progress.add_task(
                f'[bold green]Finished downloading all bento "{_tag}" files')
            tar_file.seek(0, 0)
            tar = tarfile.open(fileobj=tar_file, mode="r:gz")
            with fs.open_fs("temp://") as temp_fs:
                for member in tar.getmembers():
                    f = tar.extractfile(member)
                    if f is None:
                        continue
                    p = Path(member.name)
                    if p.parent != Path("."):
                        temp_fs.makedirs(str(p.parent), recreate=True)
                    temp_fs.writebytes(member.name, f.read())
                bento = Bento.from_fs(temp_fs)
                for model_tag in remote_bento.manifest.models:
                    with self.spin(
                            text=f'Copying model "{model_tag}" to bento'):
                        copy_model(
                            model_tag,
                            src_model_store=model_store,
                            target_model_store=bento._model_store,  # type: ignore
                        )
                bento = bento.save(bento_store)
                self.log_progress.add_task(
                    f'[bold green]Successfully pulled bento "{_tag}"')
                return bento

    def push_model(self, model: "Model", *, force: bool = False):
        with Live(self.progress_group):
            upload_task_id = self.transmission_progress.add_task(
                f'Pushing model "{model.tag}"', start=False, visible=False)
            self._do_push_model(model, upload_task_id, force=force)

    def _do_push_model(self,
                       model: "Model",
                       upload_task_id: TaskID,
                       *,
                       force: bool = False):
        yatai_rest_client = get_current_yatai_rest_api_client()
        name = model.tag.name
        version = model.tag.version
        if version is None:
            raise BentoMLException(
                f'Model "{model.tag}" version cannot be None')
        info = model.info
        with self.spin(text=f'Fetching model repository "{name}"'):
            model_repository = yatai_rest_client.get_model_repository(
                model_repository_name=name)
        if not model_repository:
            with self.spin(
                    text=f'Model repository "{name}" not found, creating now..'
            ):
                model_repository = yatai_rest_client.create_model_repository(
                    req=CreateModelRepositorySchema(name=name, description=""))
        with self.spin(text=f'Try fetching model "{model.tag}" from Yatai..'):
            remote_model = yatai_rest_client.get_model(
                model_repository_name=name, version=version)
        if (not force and remote_model
                and remote_model.upload_status == ModelUploadStatus.SUCCESS):
            self.log_progress.add_task(
                f'[bold blue]Model "{model.tag}" already exists in Yatai, skipping'
            )
            return
        if not remote_model:
            labels: t.List[LabelItemSchema] = [
                LabelItemSchema(key=key, value=value)
                for key, value in info.labels.items()
            ]
            with self.spin(
                    text=f'Registering model "{model.tag}" with Yatai..'):
                yatai_rest_client.create_model(
                    model_repository_name=model_repository.name,
                    req=CreateModelSchema(
                        description="",
                        version=version,
                        build_at=info.creation_time,
                        manifest=ModelManifestSchema(
                            module=info.module,
                            metadata=info.metadata,
                            context=info.context,
                            options=info.options,
                            api_version=info.api_version,
                            bentoml_version=info.bentoml_version,
                            size_bytes=calc_dir_size(model.path),
                        ),
                        labels=labels,
                    ),
                )
        with self.spin(
                text=f'Getting a presigned upload url for model "{model.tag}"..'
        ):
            remote_model = yatai_rest_client.presign_model_upload_url(
                model_repository_name=model_repository.name, version=version)
        with io.BytesIO() as tar_io:
            bento_dir_path = model.path
            with self.spin(
                    text=f'Creating tar archive for model "{model.tag}"..'):
                with tarfile.open(fileobj=tar_io, mode="w:gz") as tar:
                    tar.add(bento_dir_path, arcname="./")
            tar_io.seek(0, 0)
            with self.spin(text=f'Start uploading model "{model.tag}"..'):
                yatai_rest_client.start_upload_model(
                    model_repository_name=model_repository.name,
                    version=version)
            file_size = tar_io.getbuffer().nbytes
            self.transmission_progress.update(
                upload_task_id,
                description=f'Uploading model "{model.tag}"',
                total=file_size,
                visible=True,
            )
            self.transmission_progress.start_task(upload_task_id)

            def io_cb(x: int):
                self.transmission_progress.update(upload_task_id, advance=x)

            wrapped_file = CallbackIOWrapper(
                io_cb,
                tar_io,
                "read",
            )
            finish_req = FinishUploadModelSchema(
                status=ModelUploadStatus.SUCCESS,
                reason="",
            )
            try:
                resp = requests.put(remote_model.presigned_upload_url,
                                    data=wrapped_file)
                if resp.status_code != 200:
                    finish_req = FinishUploadModelSchema(
                        status=ModelUploadStatus.FAILED,
                        reason=resp.text,
                    )
            except Exception as e:  # pylint: disable=broad-except
                finish_req = FinishUploadModelSchema(
                    status=ModelUploadStatus.FAILED,
                    reason=str(e),
                )
            if finish_req.status is ModelUploadStatus.FAILED:
                self.log_progress.add_task(
                    f'[bold red]Failed to upload model "{model.tag}"')
            with self.spin(text="Submitting upload status to Yatai"):
                yatai_rest_client.finish_upload_model(
                    model_repository_name=model_repository.name,
                    version=version,
                    req=finish_req,
                )
            if finish_req.status != ModelUploadStatus.SUCCESS:
                self.log_progress.add_task(
                    f'[bold red]Failed pushing model "{model.tag}" : {finish_req.reason}'
                )
            else:
                self.log_progress.add_task(
                    f'[bold green]Successfully pushed model "{model.tag}"')

    @inject
    def pull_model(
        self,
        tag: t.Union[str, Tag],
        *,
        force: bool = False,
        model_store: "ModelStore" = Provide[BentoMLContainer.model_store],
    ) -> "Model":
        with Live(self.progress_group):
            download_task_id = self.transmission_progress.add_task(
                f'Pulling model "{tag}"', start=False, visible=False)
            return self._do_pull_model(tag,
                                       download_task_id,
                                       force=force,
                                       model_store=model_store)

    @inject
    def _do_pull_model(
        self,
        tag: t.Union[str, Tag],
        download_task_id: TaskID,
        *,
        force: bool = False,
        model_store: "ModelStore" = Provide[BentoMLContainer.model_store],
    ) -> "Model":
        try:
            model = model_store.get(tag)
            if not force:
                self.log_progress.add_task(
                    f'[bold blue]Model "{tag}" already exists locally, skipping'
                )
                return model
            else:
                model_store.delete(tag)
        except NotFound:
            pass
        yatai_rest_client = get_current_yatai_rest_api_client()
        _tag = Tag.from_taglike(tag)
        name = _tag.name
        version = _tag.version
        if version is None:
            raise BentoMLException(f'Model "{_tag}" version cannot be None')
        with self.spin(
                text=f'Getting a presigned download url for model "{_tag}"..'):
            remote_model = yatai_rest_client.presign_model_download_url(
                name, version)
        if not remote_model:
            raise BentoMLException(f'Model "{_tag}" not found on Yatai')
        url = remote_model.presigned_download_url
        response = requests.get(url, stream=True)
        if response.status_code != 200:
            raise BentoMLException(
                f'Failed to download model "{_tag}": {response.text}')
        total_size_in_bytes = int(response.headers.get("content-length", 0))
        block_size = 1024  # 1 Kibibyte
        with NamedTemporaryFile() as tar_file:
            self.transmission_progress.update(
                download_task_id,
                description=f'Downloading model "{_tag}"',
                total=total_size_in_bytes,
                visible=True,
            )
            self.transmission_progress.start_task(download_task_id)
            for data in response.iter_content(block_size):
                self.transmission_progress.update(download_task_id,
                                                  advance=len(data))
                tar_file.write(data)
            self.log_progress.add_task(
                f'[bold green]Finished downloading model "{_tag}" files')
            tar_file.seek(0, 0)
            tar = tarfile.open(fileobj=tar_file, mode="r:gz")
            with fs.open_fs("temp://") as temp_fs:
                for member in tar.getmembers():
                    f = tar.extractfile(member)
                    if f is None:
                        continue
                    p = Path(member.name)
                    if p.parent != Path("."):
                        temp_fs.makedirs(str(p.parent), recreate=True)
                    temp_fs.writebytes(member.name, f.read())
                model = Model.from_fs(temp_fs).save(model_store)
                self.log_progress.add_task(
                    f'[bold green]Successfully pulled model "{_tag}"')
                return model
Example #20
def main():
    p = argparse.ArgumentParser()
    p.add_argument("--report", type=Path, required=True)
    p.add_argument("--config", type=Path, required=True)
    p.add_argument("--strip-prefix-path", type=Path)

    args = p.parse_args()

    console = Console()

    with args.config.open() as fh:
        config = yaml.safe_load(fh)

    data = []
    with args.report.open() as fh:
        data = ItemCollection(__root__=json.load(fh)).__root__
        for item in data:
            if args.strip_prefix_path and not item.path.is_absolute():
                item.path = item.path.relative_to(args.strip_prefix_path)

    counts = {pattern: 0 for pattern in config["limits"]}  # occurrences per limit pattern, starting at zero

    kf = lambda i: i.path
    for file, items in itertools.groupby(sorted(data, key=kf), key=kf):

        output = []
        for item in items:
            if item.code in config["ignore"]:
                continue

            emoji = Emoji({
                "warning": "yellow_circle",
                "error": "red_circle"
            }[item.severity])

            style = "bold "
            if item.severity == "warning":
                style += "yellow"
            elif item.severity == "error":
                style += "red"

            s = Text()
            s.append(f"{emoji}")
            s.append(f" {item.path}:{item.line}:{item.col}", style="bold")
            s.append(f" {item.severity.upper()} ", style=style)
            s.append("[")
            s.append(item.code, style="bold")
            s.append(f"]")

            output.append(s)

            def subpath(m):
                return f"[bold]{m.group(1)}[/bold]:"

            message = re.sub(r"([\w/.\-+]+:\d+:\d+):", subpath, item.message)
            output.append(Panel(message))
            output.append(Rule())

            for pattern in counts.keys():

                if not fnmatch.fnmatch(item.code, pattern):
                    continue
                counts[pattern] += 1

        output = output[:-1]
        console.print(Panel(Group(*output), title=str(file)))

    table = Table()
    table.add_column("", width=2)
    table.add_column("code / pattern")
    table.add_column("count", justify="right")
    table.add_column("limit", justify="right")
    exit = 0
    for pattern, count in counts.items():
        limit = config["limits"][pattern]
        emoji = Emoji("green_circle")
        style = "green"
        if count > limit:
            exit = 1
            emoji = Emoji("red_circle")
            style = "red bold"
        table.add_row(emoji, pattern, str(count), str(limit), style=style)

    console.rule()
    console.print(Panel.fit(table, title="Results"), justify="center")

    if exit != 0:
        console.print(
            Panel(
                Text(f"{Emoji('red_circle')} FAILURE", justify="center"),
                style="red bold",
            ))
    else:
        console.print(
            Panel(
                Text(f"{Emoji('green_circle')} SUCCESS", justify="center"),
                style="green bold",
            ))

    sys.exit(exit)
Example #21
)
# progress bar for current app (progress in steps)
app_steps_progress = Progress(
    TextColumn(
        "[bold blue]Progress for app {task.fields[name]}: {task.percentage:.0f}%"
    ),
    BarColumn(),
    TextColumn("({task.completed} of {task.total} steps done)"),
)
# overall progress bar
overall_progress = Progress(TimeElapsedColumn(), BarColumn(),
                            TextColumn("{task.description}"))
# group of progress bars;
# some are always visible, others will disappear when progress is complete
progress_group = Group(
    Panel(Group(current_app_progress, step_progress, app_steps_progress)),
    overall_progress,
)

# tuple specifies how long each step takes for that app
step_actions = ("downloading", "configuring", "building", "installing")
apps = [
    ("one", (2, 1, 4, 2)),
    ("two", (1, 3, 8, 4)),
    ("three", (2, 1, 3, 2)),
]

# create overall progress bar
overall_task_id = overall_progress.add_task("", total=len(apps))

# use own live instance as context manager with group of progress bars,
# which allows for running multiple different progress bars in parallel,