Esempio n. 1
0
def enable_docs(
    repo_path: pathlib.Path,
    templates: Environment,
    init_repo_templates: Environment,
) -> List[str]:
    """
    Create the documentation sources for the repository.

    :param repo_path: Path to the repository root.
    :param templates: The general template environment.
    :param init_repo_templates: Template environment used when initialising a repository.

    :return: The managed files, as paths relative to the repository root.
    """

    docs_dir = PathPlus(repo_path / templates.globals["docs_dir"])
    docs_dir.maybe_make()
    (docs_dir / "api").maybe_make()

    # Render the top-level documentation pages from their templates.
    for page in ("index.rst", ):
        rendered = init_repo_templates.get_template(page).render()
        (docs_dir / page).write_clean(rendered)

    import_name = templates.globals["import_name"]
    rule = '=' * (len(import_name) + 1)

    # Build the "api/<modname>.rst" page containing the automodule directive.
    api_buf = StringList()
    api_buf.append(rule)
    api_buf.append(import_name)
    api_buf.append(rule)
    api_buf.blankline(ensure_single=True)
    api_buf.append(f".. automodule:: {import_name}")
    api_buf.blankline(ensure_single=True)

    api_file = (docs_dir / "api" / templates.globals["modname"]).with_suffix(".rst")
    api_file.write_lines(api_buf)

    docs_dir_name = templates.globals["docs_dir"]
    modname = templates.globals["modname"]

    return [
        posixpath.join(docs_dir_name, "api", f"{modname}.rst"),
        posixpath.join(docs_dir_name, "index.rst"),
    ]
Esempio n. 2
0
    def run(self) -> bool:
        """
        Run the reformatter.

        :return: Whether the file was changed.
        """

        # Reformat quotes first, so yapf sees consistent quoting
        # (reformat_quotes presumably normalises quote style — defined elsewhere).
        quote_formatted_code = reformat_quotes(self._unformatted_source)
        # yapf returns a (formatted_code, changed) tuple; we only need the code.
        yapfed_code = FormatCode(quote_formatted_code,
                                 style_config=self.yapf_style)[0]
        generic_formatted_code = reformat_generics(yapfed_code)
        # TODO: support spaces

        try:
            isorted_code = StringList(
                isort.code(generic_formatted_code, config=self.isort_config))
        except FileSkipComment:
            # The file opts out of isort via a skip comment; leave imports untouched.
            isorted_code = StringList(generic_formatted_code)

        isorted_code.blankline(ensure_single=True)

        self._reformatted_source = str(isorted_code)

        # Fix for noqa comments being pushed to new line
        self._reformatted_source = noqa_reformat(self._reformatted_source)

        return self._reformatted_source != self._unformatted_source
Esempio n. 3
0
def configure(app: Sphinx, config: Config):
    """
    Configure :mod:`sphinx_toolbox.code`.

    .. versionadded:: 2.11.0

    :param app: The Sphinx application.
    :param config:
    """

    latex_elements = getattr(app.config, "latex_elements", {})

    # Colour definitions appended to the LaTeX preamble; the names suggest they
    # style the components of regular expressions in the PDF output.
    colours = {
        "regex_literal": "696969",
        "regex_at": "FF4500",
        "regex_repeat_brace": "FF4500",
        "regex_branch": "FF4500",
        "regex_subpattern": "1e90ff",
        "regex_in": "ff8c00",
        "regex_category": "8fbc8f",
        "regex_repeat": "FF4500",
        "regex_any": "FF4500",
    }

    latex_preamble = StringList(latex_elements.get("preamble", ''))
    latex_preamble.blankline()

    for name, colour in colours.items():
        latex_preamble.append(rf"\definecolor{{{name}}}{{HTML}}{{{colour}}}")

    latex_elements["preamble"] = str(latex_preamble)
    app.config.latex_elements = latex_elements  # type: ignore
    add_nbsp_substitution(config)
def make_installation_instructions(options: Dict[str, Any], env: BuildEnvironment) -> List[str]:
	"""
	Make the content of an installation node.

	:param options:
	:param env: The Sphinx build environment.

	:return: The lines of the ``.. tabs::`` block, as a list.
	"""

	tabs: Dict[str, List[str]] = _get_installation_instructions(options, env)

	if not tabs:
		warnings.warn("No installation source specified. No installation instructions will be shown.")
		return []

	content = StringList([".. tabs::", ''])
	content.set_indent_type("    ")

	for tab_name, tab_content in tabs.items():
		# Tab header, one indent level below the ``.. tabs::`` directive.
		with content.with_indent_size(1):
			content.append(f".. tab:: {tab_name}")
			content.blankline(ensure_single=True)

		# Tab body, indented under its header.
		with content.with_indent_size(2):
			content.extend([line if line else '' for line in tab_content])

	return list(content)
Esempio n. 5
0
    def make_mypy(self) -> PathPlus:
        """
		Create, update or remove the mypy action, as appropriate.

		.. versionadded:: 2020.1.27
		"""

        ci_file = self.workflows_dir / "mypy.yml"
        template = self.templates.get_template(ci_file.name)
        # TODO: handle case where Linux is not a supported platform

        # mypy is not run on macOS.
        platforms = set(self.templates.globals["platforms"])
        if "macOS" in platforms:
            platforms.remove("macOS")

        # Map configured platform names to their CI runner names,
        # dropping any platform without a known mapping.
        platforms = set(
            filter(None, (platform_ci_names.get(p, None) for p in platforms)))

        dependency_lines = self.get_linux_mypy_requirements()
        linux_platform = platform_ci_names["Linux"]

        if dependency_lines == self.standard_python_install_lines:
            # Same dependencies on every platform: one unconditional install step.
            dependencies_block = StringList([
                "- name: Install dependencies 🔧",
                "  run: |",
            ])
            with dependencies_block.with_indent("  ", 2):
                dependencies_block.extend(self.standard_python_install_lines)
        else:
            # Linux needs extra requirements: emit a Linux-only step ...
            dependencies_block = StringList([
                "- name: Install dependencies (Linux) 🔧",
                f"  if: ${{{{ matrix.os == '{linux_platform}' && steps.changes.outputs.code == 'true' }}}}",
                "  run: |",
            ])
            with dependencies_block.with_indent("  ", 2):
                dependencies_block.extend(dependency_lines)

            # ... and, if other platforms are configured, a separate step for them.
            if self.templates.globals["platforms"] != ["Linux"]:
                dependencies_block.blankline(ensure_single=True)
                dependencies_block.extend([
                    "- name: Install dependencies (Win/mac) 🔧",
                    f"  if: ${{{{ matrix.os != '{linux_platform}' && steps.changes.outputs.code == 'true' }}}}",
                    "  run: |",
                ])
                with dependencies_block.with_indent("  ", 2):
                    dependencies_block.extend(
                        self.standard_python_install_lines)

        # The template receives the dependency block pre-indented for YAML.
        ci_file.write_clean(
            template.render(
                platforms=sorted(platforms),
                linux_platform=platform_ci_names["Linux"],
                dependencies_block=indent(str(dependencies_block), "      "),
                code_file_filter=self._code_file_filter,
            ))

        return ci_file
Esempio n. 6
0
def sort_requirements(filename: PathLike, allow_git: bool = False) -> int:
    """
    Sort the requirements in the given file alphabetically.

    :param filename: The file to sort the requirements in.
    :param allow_git: Whether to allow lines that start with ``git+``, which are allowed by pip but not :pep:`508`.

    :return: ``PASS`` (``0``) if the file was already clean and sorted, otherwise ``FAIL``.
    """

    ret = PASS
    filename = PathPlus(filename)
    comments: List[str]
    requirements: Set[ComparableRequirement]
    git_lines: List[str] = []

    requirements, comments, invalid_lines = read_requirements(
        req_file=filename,
        include_invalid=True,
        normalize_func=normalize_keep_dot,
    )

    for line in invalid_lines:
        if line.startswith("git+") and allow_git:
            git_lines.append(line)
        else:
            # Any other invalid line marks the file as failing.
            ret |= FAIL

    # find and remove pkg-resources==0.0.0
    # which is automatically added by broken pip package under Debian
    pkg_resources = ComparableRequirement("pkg-resources==0.0.0")
    if pkg_resources in requirements:
        requirements.remove(pkg_resources)
        ret |= FAIL

    sorted_requirements = sorted(requirements)

    buf = StringList(
        [*comments, *git_lines, *[str(req) for req in sorted_requirements]])
    buf.blankline(ensure_single=True)

    # NOTE: a previous version also compared ``requirements`` (a set) against
    # ``sorted_requirements`` (a list); a set never equals a list, so that
    # check was always True. The effective condition is unchanged here.
    if buf != filename.read_lines() or ret:
        print('\n'.join(buf))
        ret |= FAIL
        filename.write_lines(buf)

    return ret
Esempio n. 7
0
def test_check_file_regression(tmp_pathplus: PathPlus,
                               file_regression: FileRegressionFixture):
    # A missing file should raise FileNotFoundError with the expected message.
    with pytest.raises(FileNotFoundError, match=no_such_file_pattern):
        check_file_output(tmp_pathplus / "file.txt", file_regression)

    # Plain strings are accepted directly.
    check_file_regression("Success!\n\nThis is a test.", file_regression)

    # StringList objects with an equivalent value should also be accepted.
    expected = StringList("Success!")
    expected.blankline()
    expected.blankline(ensure_single=True)
    expected.append("This is a test.")

    check_file_regression(expected, file_regression)
Esempio n. 8
0
def clean_writer(string: str, fp: IO) -> None:
    """
    Write string to ``fp`` without trailing spaces.

    :param string: The string to write.
    :param fp: The file-like object to write to.
    """

    # this package
    from domdf_python_tools.stringlist import StringList

    # StringList normalises the lines; ensure_single leaves one trailing newline.
    cleaned = StringList(string)
    cleaned.blankline(ensure_single=True)
    fp.write(str(cleaned))
Esempio n. 9
0
	def run(self) -> bool:
		"""
		Run the reformatter.

		:return: Whether the file was changed.
		"""

		# Apply each configured hook to the source in turn.
		hooks = parse_hooks(self.config)
		new_source = StringList(call_hooks(hooks, self._unformatted_source, self.filename))
		new_source.blankline(ensure_single=True)

		self._reformatted_source = str(new_source)

		return self._reformatted_source != self._unformatted_source
Esempio n. 10
0
def create_docs_install_block(
    repo_name: str,
    username: str,
    conda: bool = True,
    pypi: bool = True,
    pypi_name: Optional[str] = None,
    conda_channels: Optional[Sequence[str]] = None,
) -> str:
    """
    Create the installation instructions for insertion into the documentation.

    :param repo_name: The name of the GitHub repository.
    :param username: The username of the GitHub account that owns the repository.
        (Not used; ensures API compatibility with :func:`~.create_readme_install_block`)
    :param conda: Whether to show Anaconda installation instructions.
    :param pypi: Whether to show PyPI installation instructions.
    :param pypi_name: The name of the project on PyPI. Defaults to the value of ``repo_name`` if unset.
    :param conda_channels: List of required Conda channels.

    :return: The installation block created from the above settings.
    """

    # Conda instructions cannot be generated without knowing the channels.
    if conda and not conda_channels:
        raise ValueError(
            "Please supply a list of 'conda_channels' if Conda builds are supported"
        )

    pypi_name = pypi_name or repo_name
    channels = DelimitedList(conda_channels or [])

    block = StringList(
        [".. start installation", '', f".. installation:: {pypi_name}"])

    # Options for the ``.. installation::`` directive, one indent level in.
    with block.with_indent_size(1):
        if pypi:
            block.append(":pypi:")

        block.append(":github:")

        if conda:
            block.append(":anaconda:")
            block.append(f":conda-channels: {channels:, }")

    block.blankline()
    block.append(".. end installation")

    return str(block)
Esempio n. 11
0
def format_signature(obj: Union[type, FunctionType]) -> StringList:
    """
    Format the signature of the given object, for insertion into the highlight panel.

    :param obj: The function or class whose signature is to be formatted.

    :return: A list of reStructuredText lines.
    """

    with monkeypatcher():
        # Resolve string/postponed annotations so they render fully.
        obj.__annotations__ = get_type_hints(obj)

    signature: inspect.Signature = inspect.signature(obj)

    buf = StringList(".. parsed-literal::")
    buf.blankline()
    buf.indent_type = "    "
    buf.indent_size = 1

    # Classes show the constructor signature; no return annotation is rendered for them.
    if signature.return_annotation is not inspect.Signature.empty and not isinstance(
            obj, type):
        return_annotation = f") -> {format_annotation(signature.return_annotation)}"
    else:
        return_annotation = ")"  # fixed: was f")" — an f-string with no placeholders

    total_length = len(obj.__name__) + len(return_annotation)

    arguments_buf: DelimitedList[str] = DelimitedList()

    param: inspect.Parameter
    for param in signature.parameters.values():
        arguments_buf.append(f"{format_parameter(param)}")
        total_length += len(arguments_buf[-1])

    # Short signatures go on one line; longer ones get one parameter per line.
    if total_length <= 60:
        signature_buf = StringList(''.join(
            [f"{obj.__name__}(", f"{arguments_buf:, }", return_annotation]))
    else:
        signature_buf = StringList([f"{obj.__name__}("])
        signature_buf.indent_type = "  "
        with signature_buf.with_indent_size(1):
            signature_buf.extend(
                [f"{arguments_buf:,\n}" + ',', return_annotation])

    buf.extend(signature_buf)

    return buf
    def run(self) -> List[nodes.Node]:
        """
		Create the installation node.
		"""

        # The sidebar toctree can only be attached to the master document.
        if self.env.docname != self.env.config.master_doc:  # pragma: no cover
            warnings.warn(
                "The 'sidebar-links' directive can only be used on the Sphinx master doc. "
                "No links will be shown.",
                UserWarning,
            )
            return []

        # Build a hidden toctree holding the sidebar links.
        body = StringList([
            ".. toctree::",
            "    :hidden:",
        ])

        with body.with_indent("    ", 1):
            if "caption" in self.options:
                body.append(f":caption: {self.options['caption']}")
            else:  # pragma: no cover
                body.append(":caption: Links")

            body.blankline()

            if "github" in self.options:
                body.append(self.process_github_option())
            if "pypi" in self.options:
                body.append(
                    f"PyPI <https://pypi.org/project/{self.options['pypi']}>")

            # Extra entries supplied in the directive body.
            body.extend(self.content)

        body.blankline()
        body.blankline()

        # Wrap in an ``only:: html`` node and let Sphinx parse the generated reST.
        only_node = addnodes.only(expr="html")
        content_node = nodes.paragraph(rawsource=str(body))
        only_node += content_node
        self.state.nested_parse(docutils.statemachine.StringList(body),
                                self.content_offset, content_node)

        return [only_node]
Esempio n. 13
0
def configure(app: Sphinx, config: Config):
    """
    Configure :mod:`sphinx_toolbox.code`.

    .. versionadded:: 2.9.0

    :param app: The Sphinx application.
    :param config:
    """

    latex_elements = getattr(config, "latex_elements", {})

    # Colour definitions appended to the LaTeX preamble; the names suggest
    # they style nbsphinx input/output prompts in the PDF output.
    colours = {
        "nbsphinxin": "307FC1",
        "nbsphinxout": "BF5B3D",
    }

    latex_preamble = StringList(latex_elements.get("preamble", ''))
    latex_preamble.blankline()

    for name, colour in colours.items():
        latex_preamble.append(rf"\definecolor{{{name}}}{{HTML}}{{{colour}}}")

    latex_elements["preamble"] = str(latex_preamble)
    config.latex_elements = latex_elements  # type: ignore
Esempio n. 14
0
    def check_modules(self) -> Iterator[Tuple[str, int]]:
        """
		Checks modules can be imported.

		:returns: An iterator of 2-element tuples comprising the name of the module and the import status:

			0. The module was imported successfully.
			1. The module could not be imported. If :attr:`~.show` is :py:obj:`True` the traceback will be shown.
		"""

        # Minimum width of the "Checking 'name'..." column, so results line up.
        longest_name = 15
        echo = functools.partial(click.echo,
                                 color=resolve_color_default(self.colour))

        if self.modules:
            longest_name += max(map(len, self.modules))
        else:
            # Nothing to check.
            return

        for module_name in self.modules:
            # Print "Checking 'name'....." without a newline; the result follows on the same line.
            echo(Style.BRIGHT(f"Checking {module_name!r}".ljust(
                longest_name, '.')),
                 nl=False)

            ret = check_module(module_name, combine_output=True)

            if ret:
                echo(Back.RED("Failed"))
                self.stats["failed"] += 1  # pylint: disable=loop-invariant-statement

                if self.show:
                    # Show the combined stdout/stderr captured during the import.
                    echo(Style.BRIGHT("Captured output:"))
                    stdout = StringList(ret.stdout)
                    stdout.blankline(ensure_single=True)
                    echo(stdout)

                yield module_name, 1

            else:
                echo(Back.GREEN("Passed"))
                self.stats["passed"] += 1  # pylint: disable=loop-invariant-statement
                yield module_name, 0
Esempio n. 15
0
def append_doctring_from_another(target: Union[Type, Callable],
                                 original: Union[Type, Callable]):
    """
    Sets the docstring of the ``target`` function to that of the ``original`` function.

    This may be useful for subclasses or wrappers that use the same arguments.

    Any indentation in either docstring is removed to
    ensure consistent indentation between the two docstrings.
    Bear this in mind if additional indentation is used in the docstring.

    :param target: The object to append the docstring to
    :param original: The object to copy the docstring from
    """  # noqa — "doctring" typo in the public name is kept for compatibility

    # this package
    from domdf_python_tools.stringlist import StringList

    target_doc = target.__doc__
    original_doc = original.__doc__

    if not isinstance(original_doc, str):
        # Nothing to copy from.
        return

    if isinstance(target_doc, str):
        # Append the original's docstring to the target's existing one.
        combined = StringList(cleandoc(target_doc))
        combined.blankline(ensure_single=True)
        combined.append(cleandoc(original_doc))
        combined.blankline(ensure_single=True)
        target.__doc__ = str(combined)
    else:
        # The target has no docstring of its own; just copy the original's.
        copied = StringList(cleandoc(original_doc))
        copied.blankline(ensure_single=True)
        target.__doc__ = str(copied)
Esempio n. 16
0
def make_pr_details() -> str:
	"""
	Returns the body of a pull request.
	"""

	body = StringList()

	# Collapsible "Commands" section describing the bot commands.
	body.extend([
			"<details>",
			"  <summary>Commands</summary>",
			'',
			"  * `@repo-helper recreate` will recreate the pull request by checking"
			" out the current master branch and running `repo-helper` on that.",
			"</details>",
			])
	body.blankline(ensure_single=True)

	# Horizontal rule separating the commands from the footer links.
	body.append("---")
	body.blankline(ensure_single=True)

	body.append(make_footer_links("repo-helper", "repo-helper-bot", event_date=date.today(), type="app"))
	return str(body)
Esempio n. 17
0
def parse_yaml(repo_path: PathLike, allow_unknown_keys: bool = False) -> Dict:
    """
	Parse configuration values from ``repo_helper.yml``.

	:param repo_path: Path to the repository root.
	:param allow_unknown_keys: Whether unknown keys should be allowed in the configuration file.

	:returns: Mapping of configuration keys to values.

	.. versionchanged:: 2021.2.18  Added the ``allow_unknown_keys`` argument.
	"""

    repo_path = PathPlus(repo_path)

    # Migrate the legacy config filename to the current one.
    if (repo_path / "git_helper.yml").is_file():
        (repo_path / "git_helper.yml").rename(repo_path / "repo_helper.yml")

    config_file = repo_path / "repo_helper.yml"

    if not config_file.is_file():
        raise FileNotFoundError(f"'repo_helper.yml' not found in {repo_path}")

    content_lines = config_file.read_lines()

    # Strip out keys that are no longer supported, then rewrite the file
    # on disk if anything was removed.
    lines_without_removed_keys = StringList(
        itertools.filterfalse(
            _REMOVED_KEYS_RE.match,  # type: ignore
            content_lines,
        ), )
    lines_without_removed_keys.blankline(ensure_single=True)

    if lines_without_removed_keys != content_lines:
        config_file.write_lines(lines_without_removed_keys)

    parser = RepoHelperParser(allow_unknown_keys=allow_unknown_keys)
    return parser.run(config_file)
Esempio n. 18
0
	def create_body_overloads(self) -> StringList:
		"""
		Create the overloaded implementations for insertion into the body of the documenter's output.
		"""

		output = StringList()
		formatted_overloads = []

		output.blankline()
		# output.append(":Overloaded Implementations:")
		output.append(":Overloads:")
		output.blankline()

		# Size varies depending on docutils config
		output.indent_type = ' '
		output.indent_size = self.env.app.config.docutils_tab_width  # type: ignore

		if self.analyzer and '.'.join(self.objpath) in self.analyzer.overloads:

			for overload in self.analyzer.overloads.get('.'.join(self.objpath)):  # type: ignore
				overload = self.process_overload_signature(overload)

				# Build the overload as escaped reST, e.g. ``Type\(**a**\: T = d\, ...\)``.
				buf = [format_annotation(self.object), r"\("]

				for name, param in overload.parameters.items():
					buf.append(f"**{name}**")
					if param.annotation is not Parameter.empty:
						buf.append(r"\: ")
						buf.append(format_annotation(param.annotation))
					if param.default is not Parameter.empty:
						buf.append(" = ")
						buf.append(param.default)
					buf.append(r"\, ")

				# Replace the trailing "\, " separator with the closing paren, adding a
				# space when the preceding chunk ends with a role's backtick so the
				# markup stays valid.
				# NOTE(review): this assumes at least one parameter was appended;
				# with none, buf[-1] is the opening "\(" and would be overwritten — confirm.
				if buf[-2][-1] != '`':
					buf[-1] = r" )"
				else:
					buf[-1] = r")"

				if overload.return_annotation is not Parameter.empty:
					buf.append(" -> ")
					buf.append(format_annotation(overload.return_annotation))

				formatted_overloads.append(''.join(buf))

			# A single overload is shown inline; multiple become a bullet list.
			if len(formatted_overloads) == 1:
				output.append(formatted_overloads[0])
			else:
				for line in formatted_overloads:
					output.append(f"* {line}")
					output.blankline(ensure_single=True)

			return output

		return StringList()
Esempio n. 19
0
def make_property(buf: StringList, name: str) -> None:
    """
    Add the signature of a property to the given
    :class:`domdf_python_tools.stringlist.StringList`.

    :param buf:
    :param name:
    """  # noqa: D400

    # Emit the getter stub then the setter stub, each one indent level deeper
    # than the current indent and surrounded by single blank lines.
    stubs = (
        f"@property\ndef {name}(self): ...",
        f"@{name}.setter\ndef {name}(self, value): ...",
    )

    for stub in stubs:
        with buf.with_indent_size(buf.indent_size + 1):
            buf.blankline(ensure_single=True)
            buf.append(stub)
            buf.blankline(ensure_single=True)
def make_rest_example(
    options: Dict[str, Any],
    env: sphinx.environment.BuildEnvironment,
    content: Sequence[str],
) -> List[str]:
    """
	Make the content of a reST Example node.

	:param options:
	:param env: The Sphinx build environment.
	:param content: The user-provided content of the directive.
	"""

    output = StringList(".. container:: rest-example")
    output.indent_type = ' ' * env.config.docutils_tab_width

    output.blankline()

    with output.with_indent_size(1):

        output.append(".. code-block:: rest")

        with output.with_indent_size(2):
            # Show the directive's options verbatim inside the code block.
            for option, value in options.items():
                if value is None:
                    output.append(f":{option}:")
                else:
                    output.append(f":{option}: {value}")

            output.blankline()

            # The example source, shown literally inside the code block.
            for line in content:
                output.append(line)

        output.blankline(ensure_single=True)

        # The content appears a second time, at container level, so it is
        # also *rendered* by Sphinx below the literal listing.
        for line in content:
            output.append(line)

        output.blankline(ensure_single=True)

    return list(output)
Esempio n. 21
0
    def run_generic(self) -> List[nodes.Node]:
        """
		Generate generic reStructuredText output.
		"""

        content = StringList()
        content.indent_type = ' '

        for obj_name in get_random_sample(sorted(set(self.content))):
            # Expand relative names (".foo") using the "module" option.
            if self.options.get("module", '') and obj_name.startswith('.'):
                obj_name = obj_name.replace('.', f"{self.options['module']}.",
                                            1)

            # Import the containing module and fetch the object itself.
            name_parts = obj_name.split('.')
            module = import_module('.'.join(name_parts[:-1]))
            obj = getattr(module, name_parts[-1])

            # Choose the cross-reference role matching the object's type.
            if isinstance(obj, FunctionType):
                content.append(
                    f"* :func:`{'.'.join(name_parts[1:])}() <.{obj_name}>`")
            elif isinstance(obj, type):
                content.append(
                    f"* :class:`{'.'.join(name_parts[1:])} <.{obj_name}>`")
            else:
                content.append(
                    f"* :py:obj:`{'.'.join(name_parts[1:])} <.{obj_name}>`")

            with content.with_indent_size(2):
                content.blankline()
                content.append(format_signature(obj))
                content.blankline()
                # First paragraph of the object's docstring serves as the summary.
                content.append(
                    inspect.cleandoc(obj.__doc__ or '').split("\n\n")[0])
                content.blankline()

        targetid = f'sphinx-highlights-{self.env.new_serialno("sphinx-highlights"):d}'
        targetnode = nodes.target('', '', ids=[targetid])

        # Let Sphinx parse the generated reST into the container node.
        view = ViewList(content)
        body_node = nodes.container(rawsource=str(content))
        self.state.nested_parse(view, self.content_offset,
                                body_node)  # type: ignore

        # Register the node so it can be purged on incremental rebuilds.
        sphinx_highlights_purger.add_node(self.env, body_node, targetnode,
                                          self.lineno)

        return [targetnode, body_node]
Esempio n. 22
0
def add_autosummary(self):
	"""
	Add the :rst:dir:`autosummary` table of this documenter.
	"""

	# Opt-in via the "autosummary" option.
	if not self.options.get("autosummary", False):
		return

	content = StringList()
	content.indent_type = ' ' * 4
	sourcename = self.get_sourcename()
	grouped_documenters = self.get_grouped_documenters()

	# One autosummary table per member group (e.g. Methods, Attributes).
	for section, documenters in grouped_documenters.items():
		if not self.options.get("autosummary-no-titles", False):
			content.append(f"**{section}:**")

		content.blankline(ensure_single=True)

		content.append(".. autosummary::")
		content.blankline(ensure_single=True)

		# First explicitly-set ordering wins; "groupwise" is already applied
		# by the grouping above, so it is skipped here.
		member_order = get_first_matching(
				lambda x: x != "groupwise",
				[
						self.options.get("member-order", ''),
						self.env.config.autodocsumm_member_order,
						self.env.config.autodoc_member_order,
						],
				default="alphabetical",
				)

		with content.with_indent_size(content.indent_size + 1):
			for documenter, _ in self.sort_members(documenters, member_order):
				content.append(f"~{documenter.fullname}")

		content.blankline()

	for line in content:
		self.add_line(line, sourcename)
Esempio n. 23
0
def stubs(add: Optional[bool] = None,
          force_tty: bool = False,
          no_pager: bool = False):
    """
	Suggest :pep:`561` type stubs.

	Collects the project's requirements, queries PyPI for matching
	``{name}-stubs`` / ``types-{name}`` packages, and optionally records
	the suggestions in the ``stubs.txt`` file.
	"""

    # stdlib
    import shutil
    import sys
    from itertools import chain

    # 3rd party
    import tabulate
    from apeye import URL
    from apeye.requests_url import TrailingRequestsURL
    from domdf_python_tools.paths import PathPlus
    from domdf_python_tools.stringlist import StringList
    from shippinglabel import normalize
    from shippinglabel.pypi import PYPI_API
    from shippinglabel.requirements import combine_requirements, read_requirements

    # this package
    from repo_helper.core import RepoHelper

    rh = RepoHelper(PathPlus.cwd())
    rh.load_settings()
    config = rh.templates.globals

    # Gather every requirements.txt the project uses.
    requirements_files = [rh.target_repo / "requirements.txt"]

    if config["enable_tests"]:
        requirements_files.append(rh.target_repo / config["tests_dir"] /
                                  "requirements.txt")

    requirements_files.extend(
        (rh.target_repo /
         config["import_name"]).iterchildren("**/requirements.txt"))

    all_requirements = set(
        chain.from_iterable(
            read_requirements(file, include_invalid=True)[0]
            for file in requirements_files))

    stubs_file = rh.target_repo / "stubs.txt"

    if stubs_file.is_file():
        existing_stubs, stub_comments, invalid_stubs = read_requirements(
            stubs_file, include_invalid=True)
    else:
        existing_stubs = set()
        stub_comments, invalid_stubs = [], []

    suggestions = {}

    for requirement in all_requirements:
        # typing-extensions ships its own type information.
        if normalize(requirement.name) == "typing-extensions":
            continue

        # Prefer '{name}-stubs', falling back to 'types-{name}'.
        # (The two lookups previously duplicated the recording logic.)
        candidate_urls = (
            TrailingRequestsURL(
                PYPI_API / f"{requirement.name.lower()}-stubs" / "json/"),
            TrailingRequestsURL(
                PYPI_API / f"types-{requirement.name.lower()}" / "json/"),
        )

        for candidate in candidate_urls:
            response = candidate.head()
            if response.status_code != 404:
                # e.g. https://pypi.org/pypi/types-foo/json/ -> 'types-foo'
                suggestions[str(requirement)] = URL(response.url).parent.name
                break

    if not suggestions:
        if sys.stdout.isatty() or force_tty:
            click.echo("No stubs to suggest.")
        sys.exit(1)

    if sys.stdout.isatty() or force_tty:

        table = StringList([
            "Suggestions",
            "-----------",
            tabulate.tabulate(suggestions.items(),
                              headers=["Requirement", "Stubs"]),
        ])
        table.blankline(ensure_single=True)

        # Page the table only when it doesn't fit on the screen.
        if no_pager or len(table) <= shutil.get_terminal_size().lines:
            click.echo('\n'.join(table))
        else:
            click.echo_via_pager('\n'.join(table))

        if add is None:
            add = confirm("Do you want to add these to the 'stubs.txt' file?")

        if add:
            new_stubs = sorted(
                combine_requirements(*existing_stubs, *suggestions.values()))

            stubs_file.write_lines([
                *stub_comments,
                *invalid_stubs,
                *map(str, new_stubs),
            ])

    else:
        # Non-interactive: just print the suggested stub package names.
        for stub in suggestions.values():
            click.echo(stub)

    sys.exit(0)
Esempio n. 24
0
    def make(self) -> StringList:
        """
		Constructs the contents of the shields block.
		"""

        buf = StringList()
        # sections maps a section title to the names of the shields it contains;
        # substitutions maps a shield name to its reST substitution definition.
        sections = {}
        substitutions = {}

        repo_name = self.repo_name
        username = self.username
        pypi_name = self.pypi_name

        # Opening marker, optionally qualified so multiple blocks can coexist.
        if self.unique_name:
            buf.append(f".. start shields {self.unique_name.lstrip('_')}")
        else:
            buf.append(f".. start shields")

        buf.blankline(ensure_single=True)

        buf.extend(
            [".. list-table::", "\t:stub-columns: 1", "\t:widths: 10 90"])
        buf.blankline(ensure_single=True)

        # Activity shields.
        sections["Activity"] = [
            "commits-latest", "commits-since", "maintained"
        ]
        substitutions["commits-since"] = self.make_activity_shield(
            repo_name, username, self.version)
        substitutions["commits-latest"] = self.make_last_commit_shield(
            repo_name, username)
        substitutions["maintained"] = self.make_maintained_shield()

        # Miscellaneous shields.
        sections["Other"] = ["license", "language", "requires"]
        substitutions["requires"] = self.make_requires_shield(
            repo_name, username)
        substitutions["license"] = self.make_license_shield(
            repo_name, username)
        substitutions["language"] = self.make_language_shield(
            repo_name, username)

        # Code-quality shields.
        sections["QA"] = ["codefactor", "actions_flake8", "actions_mypy"]
        substitutions["codefactor"] = self.make_codefactor_shield(
            repo_name, username)
        substitutions["actions_flake8"] = self.make_actions_shield(
            repo_name, username, "Flake8", "Flake8 Status")
        substitutions["actions_mypy"] = self.make_actions_shield(
            repo_name, username, "mypy", "mypy status")

        # Documentation shields, only when docs are enabled.
        if self.docs:
            sections["Docs"] = ["docs", "docs_check"]
            substitutions["docs"] = self.make_rtfd_shield(
                repo_name, self.docs_url)
            substitutions["docs_check"] = self.make_docs_check_shield(
                repo_name, username)

        # Per-platform test-status shields.
        sections["Tests"] = []

        if "Linux" in self.platforms:
            sections["Tests"].append("actions_linux")
            substitutions["actions_linux"] = self.make_actions_shield(
                repo_name,
                username,
                "Linux",
                "Linux Test Status",
            )
        if "Windows" in self.platforms:
            sections["Tests"].append("actions_windows")
            substitutions["actions_windows"] = self.make_actions_shield(
                repo_name,
                username,
                "Windows",
                "Windows Test Status",
            )
        if "macOS" in self.platforms:
            sections["Tests"].append("actions_macos")
            substitutions["actions_macos"] = self.make_actions_shield(
                repo_name,
                username,
                "macOS",
                "macOS Test Status",
            )

        if self.tests:
            sections["Tests"].append("coveralls")
            substitutions["coveralls"] = self.make_coveralls_shield(
                repo_name, username)

        # PyPI shields, only for projects published on PyPI.
        if self.on_pypi:
            sections["PyPI"] = [
                "pypi-version", "supported-versions",
                "supported-implementations", "wheel"
            ]
            substitutions["pypi-version"] = self.make_pypi_version_shield(
                pypi_name)
            substitutions[
                "supported-versions"] = self.make_python_versions_shield(
                    pypi_name)
            substitutions[
                "supported-implementations"] = self.make_python_implementations_shield(
                    pypi_name)
            substitutions["wheel"] = self.make_wheel_shield(pypi_name)

            sections["Activity"].append("pypi-downloads")
            substitutions["pypi-downloads"] = self.make_pypi_downloads_shield(
                pypi_name)

        # Conda shields.
        if self.conda:
            sections["Anaconda"] = ["conda-version", "conda-platform"]
            substitutions["conda-version"] = self.make_conda_version_shield(
                pypi_name, self.primary_conda_channel)
            substitutions["conda-platform"] = self.make_conda_platform_shield(
                pypi_name, self.primary_conda_channel)

        # Docker shields.
        if self.docker_shields:
            docker_name = self.docker_name
            sections["Docker"] = [
                "docker_build", "docker_automated", "docker_size"
            ]
            substitutions[
                "docker_build"] = self.make_docker_build_status_shield(
                    docker_name, username)
            substitutions[
                "docker_automated"] = self.make_docker_automated_build_shield(
                    docker_name, username)
            substitutions["docker_size"] = self.make_docker_size_shield(
                docker_name, username)

        # Emit one list-table row per configured section, referencing the
        # shields via reST substitutions.
        for section in self.sections:
            if section not in sections or not sections[section]:
                continue

            images = DelimitedList(
                [f"|{name}{self.unique_name}|" for name in sections[section]])
            buf.extend([f"	* - {section}", f"	  - {images: }"])

        # Emit the substitution definitions themselves.
        # The [3:] strips the substitution's leading reST prefix — presumably
        # ".. " produced by the make_*_shield helpers; confirm against them.
        for sub_name in self.substitutions:
            if sub_name not in substitutions:
                continue

            buf.blankline(ensure_single=True)
            buf.append(
                f".. |{sub_name}{self.unique_name}| {substitutions[sub_name][3:]}"
            )

        buf.blankline(ensure_single=True)

        buf.append(".. end shields")
        # buf.blankline(ensure_single=True)

        return buf
Esempio n. 25
0
def walk_attrs(module: ModuleType, attr_name: str, converter=Converter()) -> str:
    """
	Create a class stub for the given attribute of ``module``, including stubs for its methods.

	:param module: The module containing the attribute.
	:param attr_name: The name of the attribute (expected to be a class) to create stubs for.
	:param converter: Maps non-Python type names to their Python equivalents.

	:return: The stub source for the class, or ``''`` if ``attr_name`` is a dunder name.
	"""

    buf = StringList(convert_indents=True)
    buf.indent_type = "    "

    # Dunder attributes are skipped entirely (empty string returned at the bottom).
    if not is_dunder(attr_name):
        obj = getattr(module, attr_name)

        # TODO: case where obj is not a class
        if not isinstance(obj, FunctionType):
            # Collect base-class names, translating known names via the converter.
            bases = []
            for base in obj.__bases__:
                if base not in {System.Object, object}:
                    if base.__name__ in converter.type_mapping:
                        bases.append(converter.type_mapping[base.__name__])
                    else:
                        bases.append(base.__name__)

            # NOTE(review): this keeps only entries that *are* typing.Any, discarding
            # every named base collected above — possibly intended to be ``x is not Any``;
            # confirm against the desired stub output before changing.
            bases = list(filter(lambda x: x is Any, bases))

            if bases:
                buf.append(f"class {attr_name}({', '.join(bases)}):\n")
            else:
                buf.append(f"class {attr_name}:\n")

            for child_attr_name in get_child_attrs(obj):
                try:
                    child_obj = getattr(obj, child_attr_name)
                except TypeError as e:
                    # Some attributes raise TypeError on class-level access; dispatch
                    # on the exact error message text.
                    if str(e) in {
                            "instance property must be accessed through a class instance",
                            "property cannot be read",
                    }:

                        # Unreadable instance property: emit a property stub instead.
                        make_property(buf, child_attr_name)
                        continue

                    elif str(
                            e
                    ) == "instance attribute must be accessed through a class instance":
                        # Plain instance attribute: nothing to introspect; log and skip.
                        print(
                            f"{e.__class__.__name__}: '{e}' occurred for {attr_name}.{child_attr_name}"
                        )
                        continue

                    else:
                        raise e

                # TODO: if isinstance(child_obj, FunctionType):

                return_type, arguments = get_signature(child_obj,
                                                       child_attr_name,
                                                       converter)

                with buf.with_indent_size(buf.indent_size + 1):

                    if arguments is not None and arguments:
                        # Positional parameters are named ``_``, ``__``, ``___``, ...
                        signature = []

                        for idx, argument in enumerate(arguments.split(", ")):
                            signature.append(
                                f"{'_' * (idx + 1)}: {converter.convert_type(argument)}"
                            )

                        line = f"def {child_attr_name}(self, {', '.join(signature)}) -> {return_type}: ..."

                        # Wrap signatures longer than 88 characters over several lines.
                        if len(line) > 88:
                            buf.blankline(ensure_single=True)
                            buf.append(f"def {child_attr_name}(")

                            with buf.with_indent_size(buf.indent_size + 2):
                                buf.append("self,")
                                for line in signature:
                                    buf.append(f"{line},")
                                buf.append(f") -> {return_type}: ...\n")
                        else:
                            buf.append(line)

                    elif arguments is None:
                        # Signature could not be determined; fall back to *args/**kwargs.
                        buf.append(
                            f"def {child_attr_name}(self, *args, **kwargs) -> {return_type}: ..."
                        )

                    elif not arguments:
                        # i.e. takes no arguments
                        buf.append(
                            f"def {child_attr_name}(self) -> {return_type}: ..."
                        )

        buf.blankline(ensure_single=True)
        return str(buf)

    return ''
Esempio n. 26
0
def make_module(
    name: str,
    module: ModuleType,
    attr_list: Iterable[str] = (),
    first_party_imports: Iterable[str] = (),
    converter=Converter()
) -> bool:
    """
	Create type stubs for a module and write them to ``<package>-stubs/....pyi``.

	:param name: The name of the module.
	:param module: The module object.
	:param attr_list: A list of attributes to create stubs for.
	:param first_party_imports: A list of first-party imports to include at the top of the file.
	:param converter: Maps non-Python type names to their Python equivalents.

	:return: :py:obj:`True`, to indicate the stub file was written.
	"""

    buf = StringList()
    path = name.split('.')

    # e.g. "System.Drawing.Imaging" -> "System-stubs/Drawing/Imaging.pyi"
    stubs_dir = PathPlus(f"{path[0]}-stubs")
    stubs_dir.maybe_make()
    (stubs_dir / '/'.join(path[1:-1])).maybe_make(parents=True)
    stub_file = stubs_dir / '/'.join(path[1:-1]) / f"{path[-1]}.pyi"

    import_name = name.replace(".__init__", '')

    # Rewrite absolute imports of this package's submodules as relative imports,
    # and drop a bare import of the module itself.
    for imp in (*make_imports(name), *first_party_imports):
        imp = re.sub(
            fr"import {import_name}\.([A-Za-z_]+)\.([A-Za-z_]+)\.([A-Za-z_]+)",
            r"from .\1.\2 import \3", imp)
        imp = re.sub(fr"import {import_name}\.([A-Za-z_]+)\.([A-Za-z_]+)",
                     r"from .\1 import \2", imp)
        imp = re.sub(fr"import {import_name}\.([A-Za-z_]+)",
                     r"from . import \1", imp)
        imp = re.sub(fr"import {import_name}$", '', imp)
        buf.append(imp)

    # Every stub (except System.ComponentModel itself) imports MarshalByValueComponent.
    if import_name != "System.ComponentModel":
        if import_name == "System":
            buf.append("from .ComponentModel import MarshalByValueComponent")
        else:
            buf.append(
                "from System.ComponentModel import MarshalByValueComponent")

    for attr_name in dedup(attr_list):
        stub_code = walk_attrs(module, attr_name, converter=converter)
        # Strip this module's own prefix from annotations so they refer to local names.
        stub_code = stub_code.replace(f": {import_name}.", ": ")
        stub_code = stub_code.replace(f" -> {import_name}.", " -> ")
        stub_code = stub_code.replace(f"[{import_name}.", '[')
        # Fix: the result of this replacement was previously discarded —
        # str.replace returns a new string and does not modify in place.
        stub_code = stub_code.replace(
            "System.Collections.Generic.IDictionary[System.String,System.String]",
            "Any")

        buf.blankline(ensure_single=True)
        buf.blankline()

        buf.append(stub_code)

    # Sort the imports, then strip any left unused by the rewriting above.
    sorted_code = isort.code(str(buf), config=isort_config)
    sans_unneeded_imports = fix_code(
        sorted_code,
        additional_imports=None,
        expand_star_imports=False,
        remove_all_unused_imports=False,
        remove_duplicate_keys=False,
        remove_unused_variables=False,
        ignore_init_module_imports=False,
    )

    stub_file.write_text(sans_unneeded_imports)

    return True
Esempio n. 27
0
def _reformat_blocks(blocks: List[List[str]]) -> StringList:
    """
	Insert blank lines between the given blocks of source lines and join them into a
	:class:`~.StringList`.

	:param blocks: Blocks of source lines. Elements may be plain lists or the
		specialised subclasses (``_Variables``, ``_Class``, ``_Function``,
		``_DecoratedFunction``, ``_MultilineFunction``) — assumed to be list
		subclasses, since they are indexed and inserted alongside plain lists.

	:return: The reformatted source as a :class:`~.StringList`.
	"""

    cursor = 1

    # Each insertion of a blank block also advances ``cursor`` past it, so the
    # same pair of blocks is never re-examined and the loop terminates.
    while cursor < len(blocks):

        if isinstance(blocks[cursor - 1],
                      (_MultilineFunction, _DecoratedFunction, _Class)):
            # Add a blank line after _Variables, a multi-line function, or a decorated function
            blocks.insert(cursor, [])
            cursor += 1

        if blocks[cursor] and blocks[cursor - 1] and re.match(
                "^[ \t]+", blocks[cursor - 1][-1]) and not re.match(
                    "^[ \t]+", blocks[cursor][0]):
            # Add a blank line after a dedent
            blocks.insert(cursor, [])
            cursor += 1

        if isinstance(blocks[cursor - 1], _Variables):
            # Add a blank line before and after _Variables
            blocks.insert(cursor - 1, [])
            blocks.insert(cursor + 1, [])
            cursor += 2

        if isinstance(blocks[cursor], _Variables):
            # Add a blank line before and after _Variables
            blocks.insert(cursor, [])
            blocks.insert(cursor + 2, [])
            cursor += 2

        if isinstance(blocks[cursor],
                      (_DecoratedFunction, _MultilineFunction)):
            # Add a blank line before a decorated function
            blocks.insert(cursor, [])
            cursor += 1

        if isinstance(blocks[cursor], _Class):

            # A class immediately followed by an indented plain function (i.e. its
            # first method) only needs a blank line before the class; otherwise
            # blank lines go both before and after.
            if (cursor + 1 < len(blocks)
                    and isinstance(blocks[cursor + 1], _Function) and
                    not isinstance(blocks[cursor + 1],
                                   (_DecoratedFunction, _MultilineFunction))
                    and blocks[cursor][-1].lstrip().startswith("class")
                    and blocks[cursor + 1][0][0].isspace()):
                blocks.insert(cursor, [])
                cursor += 2
            else:
                blocks.insert(cursor, [])
                blocks.insert(cursor + 2, [])
                cursor += 3

        cursor += 1

    output = StringList()

    # Remove trailing whitespace from each block
    for block in blocks:
        if output and not block and not output[-1]:
            # Remove duplicate new lines
            continue

        output.append('\n'.join(block).rstrip())

    # Strip a single leading blank line.
    # NOTE(review): raises IndexError if ``output`` is empty (i.e. ``blocks`` was
    # empty) — confirm callers never pass an empty list.
    if not output[0]:
        output.pop(0)

    output.blankline(ensure_single=True)

    return output
Esempio n. 28
0
def coloured_diff(
    a: Sequence[str],
    b: Sequence[str],
    fromfile: str = '',
    tofile: str = '',
    fromfiledate: str = '',
    tofiledate: str = '',
    n: int = 3,
    lineterm: str = '\n',
    removed_colour: terminal_colours.Colour = terminal_colours.Fore.RED,
    added_colour: terminal_colours.Colour = terminal_colours.Fore.GREEN,
) -> str:
    r"""
	Compare two sequences of lines; generate the delta as a unified diff.

	Unified diffs are a compact way of showing line changes and a few
	lines of context. The number of context lines is set by ``n`` which
	defaults to three.

	By default, the diff control lines (those with ``---``, ``+++``, or ``@@``)
	are created with a trailing newline. This is helpful so that inputs
	created from ``file.readlines()`` result in diffs that are suitable for
	``file.writelines()`` since both the inputs and outputs have trailing
	newlines.

	For inputs that do not have trailing newlines, set the lineterm
	argument to ``''`` so that the output will be uniformly newline free.

	The unidiff format normally has a header for filenames and modification
	times. Any or all of these may be specified using strings for
	``fromfile``, ``tofile``, ``fromfiledate``, and ``tofiledate``.
	The modification times are normally expressed in the ISO 8601 format.

	.. versionadded:: 0.3.0
	.. latex:clearpage::

	**Example:**

	>>> for line in coloured_diff(
	...     'one two three four'.split(),
	...     'zero one tree four'.split(), 'Original', 'Current',
	...     '2005-01-26 23:30:50', '2010-04-02 10:20:52',
	...     lineterm='',
	...     ):
	...     print(line)                 # doctest: +NORMALIZE_WHITESPACE
	--- Original        2005-01-26 23:30:50
	+++ Current         2010-04-02 10:20:52
	@@ -1,4 +1,4 @@
	+zero
	one
	-two
	-three
	+tree
	four

	:param a:
	:param b:
	:param fromfile:
	:param tofile:
	:param fromfiledate:
	:param tofiledate:
	:param n:
	:param lineterm:
	:param removed_colour: The :class:`~consolekit.terminal_colours.Colour` to use for lines that were removed.
	:param added_colour: The :class:`~consolekit.terminal_colours.Colour` to use for lines that were added.
	"""

    output = StringList()

    # Dispatch on the first character: additions and removals get coloured,
    # everything else passes through untouched.
    colour_for = {'+': added_colour, '-': removed_colour}

    for line in difflib.unified_diff(a, b, fromfile, tofile, fromfiledate,
                                     tofiledate, n, lineterm):
        paint = colour_for.get(line[:1])
        output.append(line if paint is None else paint(line))

    output.blankline(ensure_single=True)

    return str(output)
Esempio n. 29
0
def make_pre_commit(repo_path: pathlib.Path,
                    templates: Environment) -> List[str]:
    """
	Add configuration for ``pre-commit``.

	https://github.com/pre-commit/pre-commit

	# See https://pre-commit.com for more information
	# See https://pre-commit.com/hooks.html for more hooks

	:param repo_path: Path to the repository root.
	:param templates:

	:return: A list of the names of the files managed by this function.
	"""

    docs_dir = templates.globals["docs_dir"]
    import_name = templates.globals["import_name"]
    stubs_package = templates.globals["stubs_package"]

    # Modules excluded from source-only checks (docs config, metadata, setup script).
    non_source_files = [
        posixpath.join(docs_dir, "conf"), "__pkginfo__", "setup"
    ]

    domdfcoding_hooks = Repo(
        repo=make_github_url("domdfcoding", "pre-commit-hooks"),
        rev="v0.3.0",
        hooks=[
            {
                "id": "requirements-txt-sorter",
                "args": ["--allow-git"]
            },
            {
                "id":
                "check-docstring-first",
                "exclude":
                fr"^({'|'.join(non_source_files)}|{templates.globals['tests_dir']}/.*)\.py$"
            },
            "bind-requirements",
        ])

    flake8_dunder_all = Repo(
        repo=make_github_url("domdfcoding", "flake8-dunder-all"),
        rev="v0.1.8",
        hooks=[{
            "id":
            "ensure-dunder-all",
            "files":
            fr"^{import_name}{'-stubs' if stubs_package else ''}/.*\.py$"
        }])

    snippet_fmt = Repo(
        repo=make_github_url("python-formate", "snippet-fmt"),
        rev="v0.1.4",
        hooks=["snippet-fmt"],
    )

    formate_excludes = fr"^({'|'.join([*templates.globals['yapf_exclude'], *non_source_files])})\.(_)?py$"

    formate = Repo(
        repo=make_github_url("python-formate", "formate"),
        rev="v0.4.9",
        hooks=[{
            "id": "formate",
            "exclude": formate_excludes
        }],
    )

    dep_checker_args = [templates.globals["import_name"].replace('.', '/')]

    if templates.globals["source_dir"]:
        dep_checker_args.extend(
            ["--work-dir", templates.globals["source_dir"]])

    dep_checker = Repo(repo=make_github_url("domdfcoding", "dep_checker"),
                       rev="v0.6.2",
                       hooks=[{
                           "id": "dep_checker",
                           "args": dep_checker_args
                       }])

    pre_commit_file = PathPlus(repo_path / ".pre-commit-config.yaml")

    if not pre_commit_file.is_file():
        pre_commit_file.touch()

    dumper = ruamel.yaml.YAML()
    dumper.indent(mapping=2, sequence=3, offset=1)

    output = StringList([
        f"# {templates.globals['managed_message']}",
        "---",
        '',
        f"exclude: {templates.globals['pre_commit_exclude']}",
        '',
        "repos:",
    ])

    indent_re = re.compile("^ {3}")

    def append_hook(hook_dict) -> None:
        # Serialise a single hook mapping as YAML and append it to ``output``,
        # re-indented as a list item (" - ") under the "repos:" key.
        yaml_buf = StringIO()
        dumper.dump(hook_dict, yaml_buf)
        output.append(indent_re.sub(" - ", indent(yaml_buf.getvalue(), "   ")))
        output.blankline(ensure_single=True)

    managed_hooks = [
        pyproject_parser,
        pre_commit_hooks,
        domdfcoding_hooks,
        flake8_dunder_all,
        flake2lint,
        pygrep_hooks,
        pyupgrade,
        lucas_c_hooks,
        snippet_fmt,
        formate,
    ]

    if not templates.globals["stubs_package"]:
        managed_hooks.append(dep_checker)

    managed_hooks_urls = [str(hook.repo) for hook in managed_hooks]

    custom_hooks_comment = "# Custom hooks can be added below this comment"

    for hook in managed_hooks:
        append_hook(hook.to_dict())

    output.append(custom_hooks_comment)
    output.blankline(ensure_single=True)

    raw_yaml = pre_commit_file.read_text()

    if custom_hooks_comment in raw_yaml:
        # Preserve hooks the user added below the marker comment.
        # (Reuse raw_yaml rather than reading the file a second time.)
        custom_hooks_yaml = raw_yaml.split(custom_hooks_comment)[1]

        custom_hooks = []
        local_hooks = []

        for repo in yaml_safe_loader.load(custom_hooks_yaml) or []:
            if repo["repo"] == "local":
                local_hooks.append(repo)

            elif repo["repo"] not in managed_hooks_urls:
                custom_hooks.append(Repo(**repo))

        for hook in custom_hooks:
            append_hook(hook.to_dict())

        for hook in local_hooks:
            # Local hooks are already plain mappings; dump them as-is.
            append_hook(hook)

    pre_commit_file.write_lines(output)

    return [pre_commit_file.name]
Esempio n. 30
0
    def make_documentation(cls):
        """
		Returns the reStructuredText documentation for the :class:`~.ConfigVar`.
		"""

        docstring = cls.__doc__ or ''
        docstring = (indent(dedent(docstring), tab))

        if not docstring.startswith('\n'):
            docstring = '\n' + docstring

        buf = StringList()
        buf.indent_type = "    "
        buf.blankline(ensure_single=True)
        buf.append(f".. conf:: {cls.__name__}")
        buf.append(docstring)
        buf.blankline()

        buf.indent_size += 1

        buf.append(f"**Required**: {'yes' if cls.required else 'no'}")
        buf.blankline()
        buf.blankline()

        if not cls.required:
            if cls.default == []:
                buf.append("**Default**: [ ]")
            elif cls.default == {}:
                buf.append("**Default**: { }")
            elif isinstance(cls.default, Callable):  # type: ignore
                buf.append(
                    f"**Default**: The value of :conf:`{cls.default.__name__}`"
                )
            elif isinstance(cls.default, bool):
                buf.append(f"**Default**: :py:obj:`{cls.default}`")
            elif isinstance(cls.default, str):
                if cls.default == '':
                    buf.append("**Default**: <blank>")
                else:
                    buf.append(f"**Default**: ``{cls.default}``")
            else:
                buf.append(f"**Default**: {cls.default}")

            buf.blankline()
            buf.blankline()

        buf.append(f"**Type**: {get_yaml_type(cls.dtype)}")

        if is_literal_type(cls.dtype):
            valid_values = ", ".join(f"``{x}``" for x in cls.dtype.__args__)
            buf.blankline()
            buf.blankline()
            buf.append(f"**Allowed values**: {valid_values}")
        elif hasattr(cls.dtype, "__args__") and is_literal_type(
                cls.dtype.__args__[0]):
            valid_values = ", ".join(f"``{x}``"
                                     for x in cls.dtype.__args__[0].__args__)
            buf.blankline()
            buf.blankline()
            buf.append(f"**Allowed values**: {valid_values}")

        buf.indent_size -= 1

        return str(buf)