Example #1
    def _format_action(self, action: Action):
        if isinstance(action, argparse._SubParsersAction):  # pylint: disable=protected-access
            parts = []
            subactions = action._get_subactions()  # pylint: disable=protected-access
            action_subcommands, group_subcommands = partition(
                lambda d: isinstance(ALL_COMMANDS_DICT[d.dest], GroupCommand), subactions
            )
            parts.append("\n")
            parts.append('%*s%s:\n' % (self._current_indent, '', "Groups"))
            self._indent()
            for subaction in group_subcommands:
                parts.append(self._format_action(subaction))
            self._dedent()

            parts.append("\n")
            parts.append('%*s%s:\n' % (self._current_indent, '', "Commands"))
            self._indent()

            for subaction in action_subcommands:
                parts.append(self._format_action(subaction))
            self._dedent()

            # return a single string
            return self._join_parts(parts)

        return super()._format_action(action)
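All of the examples on this page rely on a partition helper that splits an iterable into two lazy iterators by a predicate, yielding the "falsy" items first and the "truthy" items second (the ordering used by the classic itertools recipe and by more_itertools.partition). A minimal sketch, assuming those semantics rather than the project's actual import:

from itertools import filterfalse, tee
from typing import Callable, Iterable, Iterator, Tuple, TypeVar

T = TypeVar("T")

def partition(pred: Callable[[T], bool], iterable: Iterable[T]) -> Tuple[Iterator[T], Iterator[T]]:
    """Split iterable into (items where pred is False, items where pred is True)."""
    iter_false, iter_true = tee(iterable)
    return filterfalse(pred, iter_false), filter(pred, iter_true)

# In Example #1 above, plain commands land in the first iterator and
# GroupCommand entries in the second:
# action_subcommands, group_subcommands = partition(
#     lambda d: isinstance(ALL_COMMANDS_DICT[d.dest], GroupCommand), subactions
# )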
Example #2
    def display_recursive(
        prefix: List[str],
        commands: Iterable[Union[GroupCommand, ActionCommand]],
        help_msg: Optional[str] = None,
    ):
        actions: List[ActionCommand]
        groups: List[GroupCommand]
        actions_iter, groups_iter = partition(
            lambda x: isinstance(x, GroupCommand), commands)
        actions, groups = list(actions_iter), list(groups_iter)

        console = Console()
        if actions:
            table = SimpleTable(title=help_msg or "Miscellaneous commands")
            table.add_column(width=40)
            table.add_column()
            for action_command in sorted(actions, key=lambda d: d.name):
                table.add_row(" ".join([*prefix, action_command.name]),
                              action_command.help)
            console.print(table)

        if groups:
            for group_command in sorted(groups, key=lambda d: d.name):
                group_prefix = [*prefix, group_command.name]
                display_recursive(group_prefix, group_command.subcommands,
                                  group_command.help)
Example #3
    def display_recursive(prefix: List[str],
                          commands: Iterable[Union[GroupCommand,
                                                   ActionCommand]]):
        actions: List[ActionCommand]
        groups: List[GroupCommand]
        actions_iter, groups_iter = partition(
            lambda x: isinstance(x, GroupCommand), commands)
        actions, groups = list(actions_iter), list(groups_iter)

        if actions:
            for action_command in sorted(actions, key=lambda d: d.name):
                print("  ", end="")
                cprint(" ".join([*prefix, action_command.name]),
                       attrs=["bold"],
                       end="")
                print(f" - {action_command.help}")
            print()

        if groups:
            for group_command in sorted(groups, key=lambda d: d.name):
                group_prefix = [*prefix, group_command.name]
                # print(bold(" ".join(group_prefix)), end="")
                cprint(group_command.help, attrs=["bold", "underline"])
                print()
                display_recursive(group_prefix, group_command.subcommands)

            print()
Example #4
def fetch_inventories():
    """Fetch all inventories for Airflow documentation packages and store in cache."""
    os.makedirs(os.path.dirname(CACHE_DIR), exist_ok=True)
    to_download: List[Tuple[str, str, str]] = []

    for pkg_name in get_available_providers_packages():
        to_download.append((
            pkg_name,
            S3_DOC_URL_VERSIONED.format(package_name=pkg_name),
            f'{CACHE_DIR}/{pkg_name}/objects.inv',
        ))
    for pkg_name in ['apache-airflow', 'helm-chart']:
        to_download.append((
            pkg_name,
            S3_DOC_URL_VERSIONED.format(package_name=pkg_name),
            f'{CACHE_DIR}/{pkg_name}/objects.inv',
        ))
    for pkg_name in ['apache-airflow-providers', 'docker-stack']:
        to_download.append((
            pkg_name,
            S3_DOC_URL_NON_VERSIONED.format(package_name=pkg_name),
            f'{CACHE_DIR}/{pkg_name}/objects.inv',
        ))
    to_download.extend((
        pkg_name,
        f"{doc_url}/objects.inv",
        f'{CACHE_DIR}/{pkg_name}/objects.inv',
    ) for pkg_name, doc_url in THIRD_PARTY_INDEXES.items())

    to_download = [(pkg_name, url, path) for pkg_name, url, path in to_download
                   if _is_outdated(path)]
    if not to_download:
        print("Nothing to do")
        return []

    print(f"To download {len(to_download)} inventories")

    with requests.Session() as session, concurrent.futures.ThreadPoolExecutor(
            DEFAULT_POOLSIZE) as pool:
        download_results: Iterator[Tuple[str, bool]] = pool.map(
            _fetch_file,
            repeat(session, len(to_download)),
            (pkg_name for pkg_name, _, _ in to_download),
            (url for _, url, _ in to_download),
            (path for _, _, path in to_download),
        )
    failed, success = partition(lambda d: d[1], download_results)
    failed, success = list(failed), list(success)
    print(f"Result: {len(success)} success, {len(failed)} failed")
    if failed:
        print("Failed packages:")
        for pkg_no, (pkg_name, _) in enumerate(failed, start=1):
            print(f"{pkg_no}. {pkg_name}")

    return [pkg_name for pkg_name, status in failed]
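The pool.map call above zips four argument streams (a repeated session plus the per-package name, URL, and cache path) and expects each worker call to return a (package_name, success) pair. A hypothetical sketch of that _fetch_file contract, assuming a plain requests download (the real implementation is not shown in this example):

import os
from typing import Tuple

import requests

def _fetch_file(session: requests.Session, pkg_name: str, url: str, path: str) -> Tuple[str, bool]:
    """Hypothetical sketch: download one objects.inv and report (package name, success)."""
    response = session.get(url, allow_redirects=True)
    if not response.ok:
        print(f"Failed to fetch inventory: {url}")
        return pkg_name, False
    os.makedirs(os.path.dirname(path), exist_ok=True)
    with open(path, "wb") as file:
        file.write(response.content)
    return pkg_name, True

With results of this shape, partition(lambda d: d[1], download_results) puts the failed (pkg_name, False) pairs in the first iterator and the successful ones in the second, which is what the summary printing at the end relies on.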
Example #5
def _sort_args(args: Iterable[Arg]) -> Iterable[Arg]:
    """
    Sort a subcommand's optional args alphabetically, keeping positional args in their original order
    """
    def get_long_option(arg: Arg):
        """
        Get long option from Arg.flags
        """
        return arg.flags[0] if len(arg.flags) == 1 else arg.flags[1]
    positional, optional = partition(lambda x: x.flags[0].startswith("-"), args)
    yield from positional
    yield from sorted(optional, key=lambda x: get_long_option(x).lower())
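A small usage sketch, assuming Arg.flags is simply a tuple of flag strings (a hypothetical stand-in for the real Arg class, which is not shown here): positional args keep their original order, while optional args are sorted case-insensitively by their long option.

from collections import namedtuple

Arg = namedtuple("Arg", ["flags"])  # hypothetical stand-in: only .flags matters here

args = [
    Arg(flags=("dag_id",)),          # positional: no leading "-", kept first and in order
    Arg(flags=("-v", "--verbose")),
    Arg(flags=("-o", "--output")),
    Arg(flags=("--dry-run",)),       # single flag, so it is its own "long option"
]
print([a.flags for a in _sort_args(args)])
# [('dag_id',), ('--dry-run',), ('-o', '--output'), ('-v', '--verbose')]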
Example #6
def main():
    """Main code"""
    args = _get_parser().parse_args()
    available_packages = get_available_packages()
    docs_only = args.docs_only
    spellcheck_only = args.spellcheck_only
    disable_checks = args.disable_checks
    package_filters = args.package_filter
    for_production = args.for_production

    with with_group("Available packages"):
        for pkg in sorted(available_packages):
            console.print(f" - {pkg}")

    if package_filters:
        console.print("Current package filters: ", package_filters)
    current_packages = process_package_filters(available_packages,
                                               package_filters)

    with with_group("Fetching inventories"):
        # Packages whose inventories could not be retrieved should be built first, as a missing
        # inventory may mean the package is new.
        packages_without_inventories = fetch_inventories()
    normal_packages, priority_packages = partition(
        lambda d: d in packages_without_inventories, current_packages)
    normal_packages, priority_packages = list(normal_packages), list(
        priority_packages)
    jobs = args.jobs if args.jobs != 0 else os.cpu_count()

    with with_group(
            f"Documentation will be built for {len(current_packages)} package(s) with {jobs} parallel jobs"
    ):
        for pkg_no, pkg in enumerate(current_packages, start=1):
            console.print(f"{pkg_no}. {pkg}")

    all_build_errors: Dict[Optional[str], List[DocBuildError]] = {}
    all_spelling_errors: Dict[Optional[str], List[SpellingError]] = {}
    if priority_packages:
        # Build priority packages
        package_build_errors, package_spelling_errors = build_docs_for_packages(
            current_packages=priority_packages,
            docs_only=docs_only,
            spellcheck_only=spellcheck_only,
            for_production=for_production,
            jobs=jobs,
            verbose=args.verbose,
        )
        if package_build_errors:
            all_build_errors.update(package_build_errors)
        if package_spelling_errors:
            all_spelling_errors.update(package_spelling_errors)

    # Build normal packages
    # If only one inventory is missing, the remaining packages are assumed to be correct. If two
    # or more inventories are missing, it is better to build all packages, as the previously
    # built packages may have failed as well.
    package_build_errors, package_spelling_errors = build_docs_for_packages(
        current_packages=current_packages
        if len(priority_packages) > 1 else normal_packages,
        docs_only=docs_only,
        spellcheck_only=spellcheck_only,
        for_production=for_production,
        jobs=jobs,
        verbose=args.verbose,
    )
    if package_build_errors:
        all_build_errors.update(package_build_errors)
    if package_spelling_errors:
        all_spelling_errors.update(package_spelling_errors)

    # Build documentation for some packages again if it can help them.
    to_retry_packages = [
        package_name for package_name, errors in package_build_errors.items()
        if any(
            any((m in e.message) for m in ERRORS_ELIGIBLE_TO_REBUILD)
            for e in errors)
    ]
    if to_retry_packages:
        for package_name in to_retry_packages:
            if package_name in all_build_errors:
                del all_build_errors[package_name]
            if package_name in all_spelling_errors:
                del all_spelling_errors[package_name]

        package_build_errors, package_spelling_errors = build_docs_for_packages(
            current_packages=to_retry_packages,
            docs_only=docs_only,
            spellcheck_only=spellcheck_only,
            for_production=for_production,
            jobs=jobs,
            verbose=args.verbose,
        )
        if package_build_errors:
            all_build_errors.update(package_build_errors)
        if package_spelling_errors:
            all_spelling_errors.update(package_spelling_errors)

    if not disable_checks:
        general_errors = lint_checks.run_all_check()
        if general_errors:
            all_build_errors[None] = general_errors

    dev_index_generator.generate_index(f"{DOCS_DIR}/_build/index.html")

    if not package_filters:
        _promote_new_flags()

    if os.path.exists(PROVIDER_INIT_FILE):
        os.remove(PROVIDER_INIT_FILE)

    print_build_errors_and_exit(
        all_build_errors,
        all_spelling_errors,
    )