Example #1
    def __init__(self, directory: str, theme: str) -> None:
        """
        Initialization method.

        If the given theme is not supported (it does not exist), it will look for a `FALLBACK_THEME` attribute
        in `self` to use as a fallback theme.

        Arguments:
            directory: The name of the directory containing the themes for this renderer.
            theme: The name of the theme to use.
        """
        themes_dir = Path(__file__).parent.parent / "templates" / directory
        theme_dir = themes_dir / theme
        if not theme_dir.exists():
            if hasattr(self, "FALLBACK_THEME"):
                log.warning(
                    f"mkdocstrings.handlers: No '{theme}' theme in '{directory}', "
                    f"falling back to theme '{self.FALLBACK_THEME}'")
                theme_dir = themes_dir / self.FALLBACK_THEME
            else:
                raise ThemeNotSupported(theme)

        self.env = Environment(autoescape=True,
                               loader=FileSystemLoader(theme_dir))
        self.env.filters["highlight"] = do_highlight
        self.env.filters["any"] = do_any
Example #2
    def on_post_page(self, output: str, page: Page, config: Config,
                     **kwargs) -> str:
        """
        Hook for the [`on_post_page` event](https://www.mkdocs.org/user-guide/plugins/#on_post_page).

        In this hook, we try to fix unresolved references of the form `[title][identifier]` or `[identifier][]`.
        Doing that allows users of `mkdocstrings` to cross-reference objects in their docstrings,
        using native Markdown syntax that is easy to remember and use.

        We log a warning for each reference that we couldn't map to a URL, but try to be smart and ignore identifiers
        that do not look legitimate (sometimes documentation can contain strings matching
        our [`AUTO_REF`][mkdocstrings.plugin.AUTO_REF] regular expression that were never intended to reference anything).
        We currently ignore references whose identifier contains a space or a slash.
        """
        log.debug(
            f"mkdocstrings.plugin: Fixing references in page {page.file.src_path}"
        )

        placeholder = Placeholder()
        while re.search(placeholder.seed, output) or any(
                placeholder.seed in url for url in self.url_map.values()):
            placeholder.set_seed()

        unmapped, unintended = [], []
        soup = BeautifulSoup(output, "html.parser")
        placeholder.replace_code_tags(soup)
        fixed_soup = AUTO_REF.sub(self.fix_ref(unmapped, unintended),
                                  str(soup))

        if unmapped or unintended:
            # We do nothing with unintended refs
            if unmapped and log.isEnabledFor(logging.WARNING):
                for ref in unmapped:
                    log.warning(
                        f"mkdocstrings.plugin: {page.file.src_path}: Could not fix ref '[{ref}]'.\n    "
                        f"The referenced object was not both collected and rendered."
                    )

        return placeholder.restore_code_tags(fixed_soup)
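
For reference, here is a minimal runnable sketch of the kind of substitution this hook performs. The pattern and callback are simplified stand-ins, not the plugin's actual `AUTO_REF` regular expression or `fix_ref` method:

import re

# Simplified stand-in for AUTO_REF: matches [title][identifier] and [identifier][].
AUTO_REF_SKETCH = re.compile(r"\[([^\[\]]+)\]\[([^\[\]]*)\]")

def fix_ref_sketch(url_map, unmapped):
    def substitute(match):
        title = match.group(1)
        identifier = match.group(2) or title  # the [identifier][] form
        if identifier in url_map:
            return f'<a href="{url_map[identifier]}">{title}</a>'
        unmapped.append(identifier)
        return match.group(0)  # leave unresolved references untouched
    return substitute

url_map = {"mkdocstrings.plugin.AUTO_REF": "reference/plugin/#autoref"}
unmapped = []
html = "See [AUTO_REF][mkdocstrings.plugin.AUTO_REF] and [unknown][]."
print(AUTO_REF_SKETCH.sub(fix_ref_sketch(url_map, unmapped), html))
print(unmapped)  # ['unknown']
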
Example #3
    def collect(self, identifier: str, config: dict) -> dict:
        """
        Collect the documentation tree given an identifier and selection options.

        In this method, we feed one line of JSON to the standard input of the subprocess that was opened
        during instantiation of the collector. Then we read one line of JSON on its standard output.

        We load back the JSON text into a Python dictionary.
        If there is a decoding error, we log it as an error and raise a CollectionError.

        If the dictionary contains an `error` key, we log it as an error (with the optional `traceback` value),
        and raise a CollectionError.

        If the dictionary values for keys `loading_errors` and `parsing_errors` are not empty,
        we log them as warnings.

        Then we pick up the only object within the `objects` list (there's always only one, because we collect
        them one by one), rebuild its category lists
        (see [`rebuild_category_lists()`][mkdocstrings.handlers.python.rebuild_category_lists]),
        and return it.

        Arguments:
            identifier: The dotted-path of a Python object available in the Python path.
            config: Selection options, used to alter the data collection done by `pytkdocs`.

        Returns:
            The collected object-tree.
        """
        final_config = dict(self.DEFAULT_CONFIG)
        final_config.update(config)

        log.debug("mkdocstrings.handlers.python: Preparing input")
        json_input = json.dumps(
            {"objects": [{
                "path": identifier,
                **final_config
            }]})

        log.debug("mkdocstrings.handlers.python: Writing to process' stdin")
        print(json_input, file=self.process.stdin, flush=True)

        log.debug("mkdocstrings.handlers.python: Reading process' stdout")
        stdout = self.process.stdout.readline()

        log.debug(
            "mkdocstrings.handlers.python: Loading JSON output as Python object"
        )
        try:
            result = json.loads(stdout)
        except json.decoder.JSONDecodeError as error:
            log.error(
                f"mkdocstrings.handlers.python: Error while loading JSON: {stdout}"
            )
            raise CollectionError(str(error))

        if "error" in result:
            message = f"mkdocstrings.handlers.python: Collection failed: {result['error']}"
            if "traceback" in result:
                message += f"\n{result['traceback']}"
            log.error(message)
            raise CollectionError(result["error"])

        if result["loading_errors"]:
            for error in result["loading_errors"]:
                log.warning(f"mkdocstrings.handlers.python: {error}")

        if result["parsing_errors"]:
            for path, errors in result["parsing_errors"].items():
                for error in errors:
                    log.warning(f"mkdocstrings.handlers.python: {error}")

        # We always collect only one object at a time
        result = result["objects"][0]

        log.debug(
            "mkdocstrings.handlers.python: Rebuilding categories and children lists"
        )
        rebuild_category_lists(result)

        return result
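
Driven by hand, the line-based protocol used above looks roughly like this. The sketch assumes `pytkdocs` is installed and that the subprocess was started in a line-by-line mode at instantiation (the exact command-line flag is an assumption, not taken from the code above):

import json
import subprocess

# One JSON request per line on stdin, one JSON result per line on stdout.
process = subprocess.Popen(
    ["pytkdocs", "--line-by-line"],  # assumed flag, see the note above
    stdin=subprocess.PIPE,
    stdout=subprocess.PIPE,
    text=True,
)

request = {"objects": [{"path": "mkdocstrings.plugin"}]}
print(json.dumps(request), file=process.stdin, flush=True)

result = json.loads(process.stdout.readline())
# On success, expect "objects", "loading_errors" and "parsing_errors" keys;
# on failure, an "error" key (optionally with a "traceback").
process.terminate()
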
Example #4
    def get_class_documentation(
            self,
            class_: Type[Any],
            module: Optional[ModuleType] = None,
            parent_classes: Optional[List[str]] = None) -> Class:
        """
        Get the documentation for a class and its members.

        Nested classes are documented recursively; methods, static methods, class methods
        and properties are documented with their signatures, docstrings and source lines.

        Arguments:
            class_: The class to document.
            module: The module the class belongs to (detected with `inspect.getmodule` if not given).
            parent_classes: The names of the classes enclosing `class_`, used to build its dotted path.

        Returns:
            The documented class as an object-tree.
        """
        if module is None:
            module = inspect.getmodule(class_)
        class_name = class_.__name__
        path = module.__name__
        parent_classes = parent_classes or []
        if parent_classes:
            path = ".".join([path] + parent_classes)
        path = f"{path}.{class_name}"
        file_path = module.__file__
        try:
            signature = inspect.signature(class_)
        except ValueError:
            print(f"Failed to get signature for {class_name}")
            signature = inspect.Signature()
        docstring = Docstring(textwrap.dedent(class_.__doc__ or ""), signature)
        root_object = Class(
            name=class_name,
            path=path,
            file_path=file_path,
            docstring=docstring,
        )

        for member_name, member in sorted(
                filter(lambda m: not self.filter_name_out(m[0]),
                       class_.__dict__.items())):
            if inspect.isclass(member):
                root_object.add_child(
                    self.get_class_documentation(
                        member, module, parent_classes + [class_name]))
                continue

            member_class = properties = signature = None
            member_path = f"{path}.{member_name}"
            actual_member = getattr(class_, member_name)
            docstring = inspect.getdoc(actual_member) or ""
            try:
                source = inspect.getsourcelines(actual_member)
            except OSError as error:
                log.warning(
                    f"Could not read source for object {member_path}: {error}")
                source = ""
            except TypeError:
                source = ""

            if isinstance(member, classmethod):
                properties = ["classmethod"]
                member_class = Method
                signature = inspect.signature(actual_member)
            elif isinstance(member, staticmethod):
                properties = ["staticmethod"]
                member_class = Method
                signature = inspect.signature(actual_member)
            elif isinstance(member, type(lambda: 0)):  # regular method
                if RE_SPECIAL.match(member_name):
                    # For special methods, drop the docstring when it is simply
                    # inherited from a parent class in the MRO.
                    for mro_class in class_.__mro__[1:]:
                        try:
                            parent_member = getattr(mro_class, member_name)
                        except AttributeError:
                            continue
                        else:
                            if docstring == inspect.getdoc(parent_member):
                                docstring = ""
                            break
                member_class = Method
                signature = inspect.signature(actual_member)
            elif isinstance(member, property):
                properties = [
                    "property",
                    "readonly" if member.fset is None else "writable"
                ]
                signature = inspect.signature(actual_member.fget)
                member_class = Attribute

            if member_class:
                root_object.add_child(
                    member_class(
                        name=member_name,
                        path=member_path,
                        file_path=file_path,
                        docstring=Docstring(docstring, signature),
                        properties=properties,
                        source=source,
                    ))
        return root_object
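
Finally, a hedged usage sketch for the method above. The `Loader` import location and the `path` attribute on the returned object are assumptions about the surrounding pytkdocs-style module, not confirmed API:

# Assumption: the method above lives on pytkdocs' Loader class and the returned
# object exposes a `path` attribute.
from pytkdocs.loader import Loader


class Greeter:
    """Say hello."""

    def greet(self, name: str) -> str:
        """Return a greeting for `name`."""
        return f"Hello, {name}!"


loader = Loader()
documented = loader.get_class_documentation(Greeter)
print(documented.path)  # e.g. "__main__.Greeter"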