Code Example #1
    def lookup(self, identifier: Union[str, DocPath]) -> DocItem:
        """Find an item by its identifier, relative to this item or the root.

        Params:
            identifier: The item to search for.
        Returns:
            An object that's a subclass of DocItem.
        Raises:
            CollectionError: When an item by that identifier couldn't be found.
        """
        if isinstance(identifier, DocPath):
            identifier = "::" + identifier.abs_id
        obj = self.root if identifier.startswith("::") else self
        ret_obj = obj
        path = re.split(r"(::|#|\.|:|^)", identifier)
        for sep, name in zip(path[1::2], path[2::2]):
            if isinstance(obj, DocType):
                try:
                    order = _LOOKUP_ORDER[sep]
                except KeyError:
                    raise CollectionError(
                        f"{identifier!r} - unknown separator {sep!r}"
                    ) from None
                mapp = collections.ChainMap(*(getattr(obj, a) for a in order))
                obj = mapp.get(name.replace(" ", "")) or mapp.get(name.split("(", 1)[0])
            else:
                obj = None
            if obj is None:
                if self.parent:
                    return self.parent.lookup(identifier)
                raise CollectionError(f"{identifier!r} - can't find {name!r}")
            ret_obj = obj
            if isinstance(obj, DocAlias):
                try:
                    obj = self.lookup(str(obj.aliased))
                except CollectionError:
                    pass
        return ret_obj
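
The key step above is splitting the identifier while keeping the separators, so that each name can be resolved through the matching `_LOOKUP_ORDER` entry. A minimal standalone sketch of that parsing step (the identifier string is a made-up example, not taken from the source):

import re

identifier = "Foo::Bar#baz"  # hypothetical identifier, for illustration only
path = re.split(r"(::|#|\.|:|^)", identifier)
# The capture group keeps the separators, so pairing the odd and even slices
# yields (separator, name) tuples; the first separator is "" because "^"
# matches the empty string at the start of the identifier (Python 3.7+).
print(list(zip(path[1::2], path[2::2])))
# [('', 'Foo'), ('::', 'Bar'), ('#', 'baz')]
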
Code Example #2
File: collector.py  Project: mkdocstrings/crystal
    def _filter(
        cls,
        filters: Union[bool, Sequence[str]],
        mapp: DocMapping[D],
        getter: Callable[[D], Sequence[str]],
    ) -> DocMapping[D]:
        if filters is False:
            return DocMapping(())
        if filters is True:
            return mapp
        try:
            re.compile(filters[0])
        except (TypeError, IndexError):
            raise CollectionError(
                f"Expected a non-empty list of strings as filters, not {filters!r}"
            )

        return DocMapping(
            [item for item in mapp if _apply_filter(filters, getter(item))])
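
The `_apply_filter` helper is not part of this excerpt. As a rough sketch of the same idea, the version below works on plain lists and assumes a simple rule: keep an item if any filter regex matches any of the strings produced by the getter; the real helper's semantics may differ.

import re

def filter_items(filters, items, getter):
    # Hypothetical stand-in for the DocMapping-based _filter above: False drops
    # everything, True keeps everything, otherwise each filter is a regex.
    if filters is False:
        return []
    if filters is True:
        return list(items)
    return [
        item
        for item in items
        if any(re.search(pattern, s) for pattern in filters for s in getter(item))
    ]

names = ["render", "to_json", "initialize"]
print(filter_items([r"^to_"], names, lambda name: [name]))  # ['to_json']
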
Code Example #3
File: python.py  Project: balrok/mkdocstrings
    def collect(self, identifier: str, config: dict) -> CollectorItem:
        """
        Collect the documentation tree given an identifier and selection options.

        In this method, we feed one line of JSON to the standard input of the subprocess that was opened
        during instantiation of the collector. Then we read one line of JSON on its standard output.

        We load back the JSON text into a Python dictionary.
        If there is a decoding error, we log it as an error and raise a CollectionError.

        If the dictionary contains an `error` key, we log it as an error (with the optional `traceback` value),
        and raise a CollectionError.

        If the dictionary values for keys `loading_errors` and `parsing_errors` are not empty,
        we log them as warnings.

        Then we pick up the only object within the `objects` list (there's always only one, because we collect
        them one by one), rebuild its category lists
        (see [`rebuild_category_lists()`][mkdocstrings.handlers.python.rebuild_category_lists]),
        and return it.

        Arguments:
            identifier: The dotted-path of a Python object available in the Python path.
            config: Selection options, used to alter the data collection done by `pytkdocs`.

        Raises:
            CollectionError: When there was a problem collecting the object documentation.

        Returns:
            The collected object-tree.
        """
        final_config = ChainMap(config, self.default_config)

        log.debug("Preparing input")
        json_input = json.dumps({"objects": [{"path": identifier, **final_config}]})

        log.debug("Writing to process' stdin")
        self.process.stdin.write(json_input + "\n")  # type: ignore
        self.process.stdin.flush()  # type: ignore

        log.debug("Reading process' stdout")
        stdout = self.process.stdout.readline()  # type: ignore

        log.debug("Loading JSON output as Python object")
        try:
            result = json.loads(stdout)
        except json.decoder.JSONDecodeError as exception:
            log.error(f"Error while loading JSON: {stdout}")
            raise CollectionError(str(exception)) from exception

        error = result.get("error")
        if error:
            message = f"Collection failed: {error}"
            if "traceback" in result:
                message += f"\n{result['traceback']}"
            log.error(message)
            raise CollectionError(error)

        for loading_error in result["loading_errors"]:
            log.warning(loading_error)

        for errors in result["parsing_errors"].values():
            for parsing_error in errors:
                log.warning(parsing_error)

        # We always collect only one object at a time
        result = result["objects"][0]

        log.debug("Rebuilding categories and children lists")
        rebuild_category_lists(result)

        return result
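
The line-by-line exchange described in the docstring can also be reproduced by hand. A rough sketch, assuming pytkdocs is installed and accepts the --line-by-line flag the handler passes when it spawns the subprocess (the object path here is only an example):

import json
import subprocess
import sys

# Spawn pytkdocs in line-by-line mode: one JSON request per input line,
# one JSON response per output line.
process = subprocess.Popen(
    [sys.executable, "-m", "pytkdocs", "--line-by-line"],
    stdin=subprocess.PIPE,
    stdout=subprocess.PIPE,
    text=True,
)
request = json.dumps({"objects": [{"path": "itertools.chain"}]})
process.stdin.write(request + "\n")
process.stdin.flush()
response = json.loads(process.stdout.readline())
print(sorted(response))  # expected keys include "objects", "loading_errors", "parsing_errors"
process.terminate()
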