def pformat_tabs(
        obj: object,
        width: int = 80,
        depth: Optional[int] = None,
        *,
        compact: bool = False,
        ) -> str:
    """
    Format a Python object into a pretty-printed representation.

    Indentation is set at one tab.

    :param obj: The object to format.
    :param width: The maximum width of the output.
    :param depth: Passed through to the pretty-printer; limits how deeply nested structures are printed.
    :param compact: Passed through to the pretty-printer; fits more items on each line when true.
    """

    prettyprinter = FancyPrinter(indent=4, width=width, depth=depth, compact=compact)

    buf = StringList()

    for line in prettyprinter.pformat(obj).splitlines():
        # Convert *every* leading group of four spaces to a tab.
        # The previous pattern ("^ {4}") only matched the first group, so lines
        # nested more than one level deep kept a mix of tabs and spaces.
        buf.append(re.sub("^(?: {4})+", lambda m: '\t' * (len(m.group(0)) // 4), line))

    return str(buf)
def append_doctring_from_another(target: Union[Type, Callable], original: Union[Type, Callable]):
    """
    Sets the docstring of the ``target`` function to that of the ``original`` function.

    This may be useful for subclasses or wrappers that use the same arguments.

    Any indentation in either docstring is removed to ensure consistent indentation between the two docstrings.
    Bear this in mind if additional indentation is used in the docstring.

    :param target: The object to append the docstring to
    :param original: The object to copy the docstring from
    """

    # this package
    from domdf_python_tools.stringlist import StringList

    original_doc = original.__doc__

    # Nothing to copy unless the original actually has a string docstring.
    if not isinstance(original_doc, str):
        return

    target_doc = target.__doc__

    if isinstance(target_doc, str):
        # Keep the target's own docstring, then append the original's after a blank line.
        docstring = StringList(cleandoc(target_doc))
        docstring.blankline(ensure_single=True)
        docstring.append(cleandoc(original_doc))
    else:
        # The target has no usable docstring; take the original's wholesale.
        docstring = StringList(cleandoc(original_doc))

    docstring.blankline(ensure_single=True)
    target.__doc__ = str(docstring)
def dump_list(self, v) -> str:
    """
    Serialize a list to TOML.

    :param v:
    :rtype:

    .. latex:clearpage::
    """

    # Prefer the compact single-line rendering when it fits within the limit.
    as_one_line = super().dump_list(v)

    if len(as_one_line) <= self.max_width:
        return as_one_line

    # Too wide: emit one element per line between the brackets.
    as_block = StringList(['['])

    with as_block.with_indent(" ", 1):
        for element in v:
            as_block.append(f"{str(self.dump_value(element))},")

    as_block.append(']')

    return str(as_block)
def make_installation_instructions(options: Dict[str, Any], env: BuildEnvironment) -> List[str]:
    """
    Make the content of an installation node.

    :param options:
    :param env: The Sphinx build environment.
    """

    tabs: Dict[str, List[str]] = _get_installation_instructions(options, env)

    if not tabs:
        warnings.warn("No installation source specified. No installation instructions will be shown.")
        return []

    content = StringList([".. tabs::", ''])
    content.set_indent_type(" ")

    for title, instructions in tabs.items():
        with content.with_indent_size(1):
            content.append(f".. tab:: {title}")
            content.blankline(ensure_single=True)

            with content.with_indent_size(2):
                # Preserve genuinely empty lines as '' rather than indenting them.
                for line in instructions:
                    content.append(f"{line}" if line else '')

    return list(content)
def document_keys(self, keys: List[str], types: Dict[str, Type], docstrings: Dict[str, List[str]]):
    """
    Document keys in a :class:`typing.TypedDict`.

    :param keys: List of key names to document.
    :param types: Mapping of key names to types.
    :param docstrings: Mapping of key names to docstrings.
    """

    bullet_list = StringList()

    for key in keys:
        # Type annotation, if one is known for this key.
        type_part = f"({format_annotation(types[key])}) " if key in types else ''

        if key in docstrings:
            bullet_list.append(f" * **{key}** {type_part}-- {' '.join(docstrings[key])}")
        else:
            bullet_list.append(f" * **{key}** {type_part}")

    sourcename = self.get_sourcename()

    for line in bullet_list:
        self.add_line(line, sourcename)
def test_check_file_regression(tmp_pathplus: PathPlus, file_regression: FileRegressionFixture):
    # A nonexistent file must raise before any regression comparison happens.
    with pytest.raises(FileNotFoundError, match=no_such_file_pattern):
        check_file_output(tmp_pathplus / "file.txt", file_regression)

    # Plain strings are accepted directly...
    check_file_regression("Success!\n\nThis is a test.", file_regression)

    # ...as are StringList instances holding the equivalent content.
    sl = StringList("Success!")
    sl.blankline()
    sl.blankline(ensure_single=True)
    sl.append("This is a test.")
    check_file_regression(sl, file_regression)
def create_docs_install_block(
        repo_name: str,
        username: str,
        conda: bool = True,
        pypi: bool = True,
        pypi_name: Optional[str] = None,
        conda_channels: Optional[Sequence[str]] = None,
        ) -> str:
    """
    Create the installation instructions for insertion into the documentation.

    :param repo_name: The name of the GitHub repository.
    :param username: The username of the GitHub account that owns the repository.
        (Not used; ensures API compatibility with :func:`~.create_readme_install_block`)
    :param conda: Whether to show Anaconda installation instructions.
    :param pypi: Whether to show PyPI installation instructions.
    :param pypi_name: The name of the project on PyPI. Defaults to the value of ``repo_name`` if unset.
    :param conda_channels: List of required Conda channels.

    :return: The installation block created from the above settings.
    """

    if conda and not conda_channels:
        raise ValueError("Please supply a list of 'conda_channels' if Conda builds are supported")

    pypi_name = pypi_name or repo_name
    channels = DelimitedList(conda_channels or [])

    block = StringList([".. start installation", '', f".. installation:: {pypi_name}"])

    with block.with_indent_size(1):
        if pypi:
            block.append(":pypi:")

        block.append(":github:")

        if conda:
            block.append(":anaconda:")
            block.append(f":conda-channels: {channels:, }")

    block.blankline()
    block.append(".. end installation")

    return str(block)
def enable_docs(
        repo_path: pathlib.Path,
        templates: Environment,
        init_repo_templates: Environment,
        ) -> List[str]:
    # Create the documentation tree and seed it with an index page plus an
    # autodoc stub page for the top-level module.
    docs_dir = PathPlus(repo_path / templates.globals["docs_dir"])
    docs_dir.maybe_make()
    (docs_dir / "api").maybe_make()

    for filename in {"index.rst"}:
        template = init_repo_templates.get_template(filename)
        (docs_dir / filename).write_clean(template.render())

    import_name = templates.globals["import_name"]
    modname = templates.globals["modname"]

    # Build "api/<modname>.rst": a titled page containing a single automodule directive.
    api_buf = StringList()
    header_line: str = '=' * (len(import_name) + 1)
    api_buf.append(header_line)
    api_buf.append(import_name)
    api_buf.append(header_line)
    api_buf.blankline(ensure_single=True)
    api_buf.append(f".. automodule:: {import_name}")
    api_buf.blankline(ensure_single=True)

    (docs_dir / "api" / modname).with_suffix(".rst").write_lines(api_buf)

    return [
        posixpath.join(templates.globals["docs_dir"], "api", f"{modname}.rst"),
        posixpath.join(templates.globals["docs_dir"], "index.rst"),
    ]
def configure(app: Sphinx, config: Config):
    """
    Configure Sphinx Extension.

    :param app: The Sphinx application.
    :param config:
    """

    latex_elements = getattr(config, "latex_elements", {})

    # Ensure the LaTeX builder loads the "needspace" package.
    extrapackages = StringList(latex_elements.get("extrapackages", ''))
    extrapackages.append(r"\usepackage{needspace}")
    latex_elements["extrapackages"] = str(extrapackages)

    config.latex_elements = latex_elements  # type: ignore
def dump_list(self, v):
    """
    Serialize a list to TOML, preferring a single line when it fits within ``self.max_width``.
    """

    values = DelimitedList(str(self.dump_value(item)) for item in v)
    one_line = f"[{values:, }]"

    if len(one_line) <= self.max_width:
        return one_line

    # Fall back to one element per line between the brackets.
    block = StringList(['['])

    with block.with_indent(" ", 1):
        for item in v:
            block.append(f"{str(self.dump_value(item))},")

    block.append(']')

    return str(block)
def validate_config(app: Sphinx, config: Config):
    r"""
    Validate the provided configuration values.

    :param app: The Sphinx app.
    :param config:
    """

    prolog: Union[str, StringList] = config.rst_prolog or ''
    nbsp_sub = ".. |nbsp| unicode:: 0xA0\n :trim:"

    # Only touch rst_prolog when the |nbsp| substitution is not already defined.
    if nbsp_sub not in prolog:
        prolog = StringList(prolog)
        prolog.append(nbsp_sub)
        config.rst_prolog = str(prolog)  # type: ignore
def run_generic(self) -> List[nodes.Node]:
    """
    Generate generic reStructuredText output.

    Builds a bullet list for a random sample of the directive's content
    (each entry: cross-reference, signature, first docstring paragraph),
    parses it, and returns the target and container nodes.
    """

    content = StringList()
    content.indent_type = ' '

    for obj_name in get_random_sample(sorted(set(self.content))):
        # A leading '.' is shorthand for the module given via the :module: option.
        if self.options.get("module", '') and obj_name.startswith('.'):
            obj_name = obj_name.replace('.', f"{self.options['module']}.", 1)

        name_parts = obj_name.split('.')
        module = import_module('.'.join(name_parts[:-1]))
        obj = getattr(module, name_parts[-1])

        # Pick the cross-reference role based on the object's runtime type.
        if isinstance(obj, FunctionType):
            content.append(f"* :func:`{'.'.join(name_parts[1:])}() <.{obj_name}>`")
        elif isinstance(obj, type):
            content.append(f"* :class:`{'.'.join(name_parts[1:])} <.{obj_name}>`")
        else:
            content.append(f"* :py:obj:`{'.'.join(name_parts[1:])} <.{obj_name}>`")

        with content.with_indent_size(2):
            content.blankline()
            content.append(format_signature(obj))
            content.blankline()
            # Only the first paragraph of the docstring is shown.
            content.append(inspect.cleandoc(obj.__doc__ or '').split("\n\n")[0])
            content.blankline()

    targetid = f'sphinx-highlights-{self.env.new_serialno("sphinx-highlights"):d}'
    targetnode = nodes.target('', '', ids=[targetid])

    view = ViewList(content)
    body_node = nodes.container(rawsource=str(content))
    self.state.nested_parse(view, self.content_offset, body_node)  # type: ignore

    # Register the node so it can be purged on env updates.
    sphinx_highlights_purger.add_node(self.env, body_node, targetnode, self.lineno)

    return [targetnode, body_node]
def test_append(self):
    sl = StringList()
    expected: list = []

    # Each append splits its argument on newlines; trailing newlines become
    # empty entries (as the "world\n\n\n" step demonstrates).
    steps = [
        ('', ['']),
        ('', ['']),
        ("hello", ["hello"]),
        ("world\n\n\n", ["world", '', '', '']),
        ("1234", ["1234"]),
    ]

    for text, new_lines in steps:
        sl.append(text)
        expected = expected + new_lines
        assert sl == expected
def get_linux_ci_requirements(self) -> List[str]:
    """
    Returns the Python requirements to run tests for on Linux.
    """

    linux_reqs = self.templates.globals["github_ci_requirements"]["Linux"]

    lines = StringList(linux_reqs["pre"])
    lines.extend(self.standard_python_install_lines)

    # coverage_pyver_pragma is only needed when the test suite is enabled.
    if self.templates.globals["enable_tests"]:
        lines.append("python -m pip install --upgrade coverage_pyver_pragma")

    lines.extend(self._get_additional_requirements())
    lines.extend(linux_reqs["post"])

    return lines
def make_property(buf: StringList, name: str) -> None:
    """
    Add the signature of a property to the given :class:`domdf_python_tools.stringlist.StringList`.

    :param buf:
    :param name:
    """  # noqa: D400

    # Emit the getter and setter stubs one indent level deeper than the
    # current level, separated by single blank lines.
    with buf.with_indent_size(buf.indent_size + 1):
        buf.blankline(ensure_single=True)
        buf.append(f"@property\ndef {name}(self): ...")
        buf.blankline(ensure_single=True)
        buf.append(f"@{name}.setter\ndef {name}(self, value): ...")
        buf.blankline(ensure_single=True)
def configure(app: Sphinx, config: Config):
    """
    Configure :mod:`sphinx_toolbox.code`.

    .. versionadded:: 2.9.0

    :param app: The Sphinx application.
    :param config:
    """

    latex_elements = getattr(config, "latex_elements", {})

    # Define the colours used for notebook-style input/output prompts in LaTeX output.
    preamble = StringList(latex_elements.get("preamble", ''))
    preamble.blankline()
    preamble.append(r"\definecolor{nbsphinxin}{HTML}{307FC1}")
    preamble.append(r"\definecolor{nbsphinxout}{HTML}{BF5B3D}")
    latex_elements["preamble"] = str(preamble)

    config.latex_elements = latex_elements  # type: ignore
def make_rest_example(
        options: Dict[str, Any],
        env: sphinx.environment.BuildEnvironment,
        content: Sequence[str],
        ) -> List[str]:
    """
    Make the content of a reST Example node.

    :param options:
    :param content: The user-provided content of the directive.
    """

    output = StringList(".. container:: rest-example")
    output.indent_type = ' ' * env.config.docutils_tab_width
    output.blankline()

    with output.with_indent_size(1):
        output.append(".. code-block:: rest")

        with output.with_indent_size(2):
            # Forward any directive options onto the code-block.
            for option, value in options.items():
                if value is None:
                    output.append(f":{option}:")
                else:
                    output.append(f":{option}: {value}")

            output.blankline()

            # First copy of the content: shown verbatim inside the code-block.
            for line in content:
                output.append(line)

        output.blankline(ensure_single=True)

        # Second copy of the content: rendered as actual reST below the source.
        # (The duplication is intentional — source above, rendered result below.)
        for line in content:
            output.append(line)

        output.blankline(ensure_single=True)

    return list(output)
def test_blankline(self):
    populated = ['', '', "hello", "world", '', '', '', "1234"]
    sl = StringList(populated)

    # A plain blankline() always appends one empty line.
    sl.blankline()
    assert sl == [*populated, '']
    sl.blankline()
    assert sl == [*populated, '', '']

    # ensure_single=True collapses any run of trailing blank lines to one.
    sl.blankline(ensure_single=True)
    assert sl == [*populated, '']
    sl.blankline(ensure_single=True)
    assert sl == [*populated, '']

    # Trailing whitespace-only lines are also swallowed by ensure_single=True.
    for whitespace in ('\t', ' '):
        sl.append(whitespace)
        sl.blankline(ensure_single=True)
        assert sl == [*populated, '']

    sl.append(' ')
    sl.blankline(ensure_single=True)
    sl.blankline()
    assert sl == [*populated, '', '']
def make_pr_details() -> str:
    """
    Returns the body of a pull request.
    """

    buf = StringList()

    # Collapsible "Commands" section explaining the bot's recreate command.
    buf.extend([
        "<details>",
        " <summary>Commands</summary>",
        '',
        " * `@repo-helper recreate` will recreate the pull request by checking"
        " out the current master branch and running `repo-helper` on that.",
        "</details>",
    ])

    buf.blankline(ensure_single=True)
    buf.append("---")
    buf.blankline(ensure_single=True)

    footer = make_footer_links("repo-helper", "repo-helper-bot", event_date=date.today(), type="app")
    buf.append(footer)

    return str(buf)
def create_body_overloads(self) -> StringList:
    """
    Create the overloaded implementations for insertion into to the body of the documenter's output.
    """

    output = StringList()
    formatted_overloads = []

    output.blankline()
    # output.append(":Overloaded Implementations:")
    output.append(":Overloads:")
    output.blankline()

    # Size varies depending on docutils config
    output.indent_type = ' '
    output.indent_size = self.env.app.config.docutils_tab_width  # type: ignore

    if self.analyzer and '.'.join(self.objpath) in self.analyzer.overloads:
        for overload in self.analyzer.overloads.get('.'.join(self.objpath)):  # type: ignore
            overload = self.process_overload_signature(overload)

            # Assemble the signature as escaped-reST fragments, joined below.
            buf = [format_annotation(self.object), r"\("]

            for name, param in overload.parameters.items():
                buf.append(f"**{name}**")

                if param.annotation is not Parameter.empty:
                    buf.append(r"\: ")
                    buf.append(format_annotation(param.annotation))

                if param.default is not Parameter.empty:
                    buf.append(" = ")
                    # NOTE(review): assumes the processed default is already a string,
                    # otherwise ''.join(buf) below would fail — confirm
                    # process_overload_signature's contract.
                    buf.append(param.default)

                buf.append(r"\, ")

            # Replace the trailing "\, " separator with a closing paren; the extra
            # space is kept when the preceding fragment ends with a backtick.
            if buf[-2][-1] != '`':
                buf[-1] = r" )"
            else:
                buf[-1] = r")"

            if overload.return_annotation is not Parameter.empty:
                buf.append(" -> ")
                buf.append(format_annotation(overload.return_annotation))

            formatted_overloads.append(''.join(buf))

        # A single overload is emitted inline; multiple become a bullet list.
        if len(formatted_overloads) == 1:
            output.append(formatted_overloads[0])
        else:
            for line in formatted_overloads:
                output.append(f"* {line}")

        output.blankline(ensure_single=True)
        return output

    # No analyzer or no recorded overloads for this object.
    return StringList()
def run(self) -> List[nodes.Node]:
    """
    Create the installation node.

    Builds a hidden ``toctree`` of sidebar links (HTML output only) and
    returns the parsed node; emits a warning and returns nothing when used
    outside the master doc.
    """

    if self.env.docname != self.env.config.master_doc:  # pragma: no cover
        warnings.warn(
            "The 'sidebar-links' directive can only be used on the Sphinx master doc. "
            "No links will be shown.",
            UserWarning,
        )
        return []

    body = StringList([
        ".. toctree::",
        " :hidden:",
    ])

    with body.with_indent(" ", 1):
        if "caption" in self.options:
            body.append(f":caption: {self.options['caption']}")
        else:  # pragma: no cover
            body.append(":caption: Links")

        body.blankline()

        if "github" in self.options:
            body.append(self.process_github_option())

        if "pypi" in self.options:
            body.append(f"PyPI <https://pypi.org/project/{self.options['pypi']}>")

        # Any user-supplied directive content becomes additional toctree entries.
        body.extend(self.content)

    body.blankline()
    body.blankline()

    # Wrap in an HTML-only node so the sidebar links don't appear in LaTeX output.
    only_node = addnodes.only(expr="html")
    content_node = nodes.paragraph(rawsource=str(body))
    only_node += content_node
    self.state.nested_parse(docutils.statemachine.StringList(body), self.content_offset, content_node)

    return [only_node]
def add_autosummary(self):
    """
    Add the :rst:dir:`autosummary` table of this documenter.
    """

    # Only emit the table when the :autosummary: option was given.
    if not self.options.get("autosummary", False):
        return

    content = StringList()
    content.indent_type = ' ' * 4
    sourcename = self.get_sourcename()

    grouped_documenters = self.get_grouped_documenters()

    for section, documenters in grouped_documenters.items():
        # Section heading, unless suppressed via :autosummary-no-titles:.
        if not self.options.get("autosummary-no-titles", False):
            content.append(f"**{section}:**")
            content.blankline(ensure_single=True)

        content.append(".. autosummary::")
        content.blankline(ensure_single=True)

        # First non-"groupwise" order wins: directive option, then the
        # autodocsumm config value, then autodoc's; "alphabetical" otherwise.
        member_order = get_first_matching(
            lambda x: x != "groupwise",
            [
                self.options.get("member-order", ''),
                self.env.config.autodocsumm_member_order,
                self.env.config.autodoc_member_order,
            ],
            default="alphabetical",
        )

        with content.with_indent_size(content.indent_size + 1):
            for documenter, _ in self.sort_members(documenters, member_order):
                content.append(f"~{documenter.fullname}")
            content.blankline()

    for line in content:
        self.add_line(line, sourcename)
def make(self) -> StringList:
    """
    Constructs the contents of the shields block.

    Builds a reST ``list-table`` of shield images (one row per section) followed
    by the ``|name|`` substitution definitions, bracketed by
    ``.. start shields`` / ``.. end shields`` markers.
    """

    buf = StringList()

    # ``sections`` maps a section title to the shield names shown in its row;
    # ``substitutions`` maps a shield name to its reST image definition.
    sections = {}
    substitutions = {}

    repo_name = self.repo_name
    username = self.username
    pypi_name = self.pypi_name

    if self.unique_name:
        buf.append(f".. start shields {self.unique_name.lstrip('_')}")
    else:
        buf.append(f".. start shields")

    buf.blankline(ensure_single=True)

    buf.extend([".. list-table::", "\t:stub-columns: 1", "\t:widths: 10 90"])
    buf.blankline(ensure_single=True)

    # Shields that are always shown.
    sections["Activity"] = ["commits-latest", "commits-since", "maintained"]
    substitutions["commits-since"] = self.make_activity_shield(repo_name, username, self.version)
    substitutions["commits-latest"] = self.make_last_commit_shield(repo_name, username)
    substitutions["maintained"] = self.make_maintained_shield()

    sections["Other"] = ["license", "language", "requires"]
    substitutions["requires"] = self.make_requires_shield(repo_name, username)
    substitutions["license"] = self.make_license_shield(repo_name, username)
    substitutions["language"] = self.make_language_shield(repo_name, username)

    sections["QA"] = ["codefactor", "actions_flake8", "actions_mypy"]
    substitutions["codefactor"] = self.make_codefactor_shield(repo_name, username)
    substitutions["actions_flake8"] = self.make_actions_shield(repo_name, username, "Flake8", "Flake8 Status")
    substitutions["actions_mypy"] = self.make_actions_shield(repo_name, username, "mypy", "mypy status")

    if self.docs:
        sections["Docs"] = ["docs", "docs_check"]
        substitutions["docs"] = self.make_rtfd_shield(repo_name, self.docs_url)
        substitutions["docs_check"] = self.make_docs_check_shield(repo_name, username)

    # Per-platform test shields.
    sections["Tests"] = []

    if "Linux" in self.platforms:
        sections["Tests"].append("actions_linux")
        substitutions["actions_linux"] = self.make_actions_shield(
            repo_name,
            username,
            "Linux",
            "Linux Test Status",
        )

    if "Windows" in self.platforms:
        sections["Tests"].append("actions_windows")
        substitutions["actions_windows"] = self.make_actions_shield(
            repo_name,
            username,
            "Windows",
            "Windows Test Status",
        )

    if "macOS" in self.platforms:
        sections["Tests"].append("actions_macos")
        substitutions["actions_macos"] = self.make_actions_shield(
            repo_name,
            username,
            "macOS",
            "macOS Test Status",
        )

    if self.tests:
        sections["Tests"].append("coveralls")
        substitutions["coveralls"] = self.make_coveralls_shield(repo_name, username)

    if self.on_pypi:
        sections["PyPI"] = ["pypi-version", "supported-versions", "supported-implementations", "wheel"]
        substitutions["pypi-version"] = self.make_pypi_version_shield(pypi_name)
        substitutions["supported-versions"] = self.make_python_versions_shield(pypi_name)
        substitutions["supported-implementations"] = self.make_python_implementations_shield(pypi_name)
        substitutions["wheel"] = self.make_wheel_shield(pypi_name)

        sections["Activity"].append("pypi-downloads")
        substitutions["pypi-downloads"] = self.make_pypi_downloads_shield(pypi_name)

    if self.conda:
        sections["Anaconda"] = ["conda-version", "conda-platform"]
        substitutions["conda-version"] = self.make_conda_version_shield(pypi_name, self.primary_conda_channel)
        substitutions["conda-platform"] = self.make_conda_platform_shield(pypi_name, self.primary_conda_channel)

    if self.docker_shields:
        docker_name = self.docker_name
        sections["Docker"] = ["docker_build", "docker_automated", "docker_size"]
        substitutions["docker_build"] = self.make_docker_build_status_shield(docker_name, username)
        substitutions["docker_automated"] = self.make_docker_automated_build_shield(docker_name, username)
        substitutions["docker_size"] = self.make_docker_size_shield(docker_name, username)

    # Emit the table rows in the configured section order, skipping empty sections.
    for section in self.sections:
        if section not in sections or not sections[section]:
            continue

        images = DelimitedList([f"|{name}{self.unique_name}|" for name in sections[section]])
        buf.extend([f" * - {section}", f" - {images: }"])

    # Emit the substitution definitions in the configured order.
    # ``[3:]`` strips the leading ".. " from each shield definition since
    # it is re-added as part of the substitution directive.
    for sub_name in self.substitutions:
        if sub_name not in substitutions:
            continue

        buf.blankline(ensure_single=True)
        buf.append(f".. |{sub_name}{self.unique_name}| {substitutions[sub_name][3:]}")

    buf.blankline(ensure_single=True)
    buf.append(".. end shields")
    # buf.blankline(ensure_single=True)

    return buf
def _reformat_blocks(blocks: List[List[str]]):
    """
    Normalise blank lines between logical blocks of stub source.

    Walks ``blocks`` inserting empty-list separators around variables, classes
    and multi-line/decorated functions, then joins each block's lines and
    returns the result as a ``StringList`` (duplicate blank lines removed).

    :param blocks: The blocks (lists of source lines) to reformat, typed as
        ``_Variables``/``_Class``/``_Function`` etc. subclasses of ``list``.
    """

    # ``cursor`` walks the list while separators are inserted in place, so the
    # list grows as we go; the order of these checks is significant.
    cursor = 1

    while cursor < len(blocks):
        if isinstance(blocks[cursor - 1], (_MultilineFunction, _DecoratedFunction, _Class)):
            # Add a blank line after _Variables, a multi-line function, or a decorated function
            blocks.insert(cursor, [])
            cursor += 1

        if blocks[cursor] and blocks[cursor - 1] and re.match(
                "^[ \t]+", blocks[cursor - 1][-1]) and not re.match(
                "^[ \t]+", blocks[cursor][0]):
            # Add a blank line after a dedent
            blocks.insert(cursor, [])
            cursor += 1

        if isinstance(blocks[cursor - 1], _Variables):
            # Add a blank line before and after _Variables
            blocks.insert(cursor - 1, [])
            blocks.insert(cursor + 1, [])
            cursor += 2

        if isinstance(blocks[cursor], _Variables):
            # Add a blank line before and after _Variables
            blocks.insert(cursor, [])
            blocks.insert(cursor + 2, [])
            cursor += 2

        if isinstance(blocks[cursor], (_DecoratedFunction, _MultilineFunction)):
            # Add a blank line before a decorated function
            blocks.insert(cursor, [])
            cursor += 1

        if isinstance(blocks[cursor], _Class):
            # A class immediately followed by a plain, indented function (i.e. a
            # method) only needs a separator before it; otherwise surround it.
            if (cursor + 1 < len(blocks) and isinstance(blocks[cursor + 1], _Function)
                    and not isinstance(blocks[cursor + 1], (_DecoratedFunction, _MultilineFunction))
                    and blocks[cursor][-1].lstrip().startswith("class")
                    and blocks[cursor + 1][0][0].isspace()):
                blocks.insert(cursor, [])
                cursor += 2
            else:
                blocks.insert(cursor, [])
                blocks.insert(cursor + 2, [])
                cursor += 3

        cursor += 1

    output = StringList()

    # Remove trailing whitespace from each block
    for block in blocks:
        if output and not block and not output[-1]:
            # Remove duplicate new lines
            continue

        output.append('\n'.join(block).rstrip())

    # Drop a leading blank line, if any, and ensure a single trailing one.
    if not output[0]:
        output.pop(0)

    output.blankline(ensure_single=True)

    return output
def rewrite_docs_index(repo_path: pathlib.Path, templates: Environment) -> List[str]:
    """
    Update blocks in the documentation ``index.rst`` file.

    :param repo_path: Path to the repository root.
    :param templates:
    """

    index_rst_file = PathPlus(repo_path / templates.globals["docs_dir"] / "index.rst")
    index_rst_file.parent.maybe_make()

    # Set up the blocks
    sb = ShieldsBlock(
        username=templates.globals["username"],
        repo_name=templates.globals["repo_name"],
        version=templates.globals["version"],
        conda=templates.globals["enable_conda"],
        tests=templates.globals["enable_tests"] and not templates.globals["stubs_package"],
        docs=templates.globals["enable_docs"],
        pypi_name=templates.globals["pypi_name"],
        docker_shields=templates.globals["docker_shields"],
        docker_name=templates.globals["docker_name"],
        platforms=templates.globals["platforms"],
        pre_commit=templates.globals["enable_pre_commit"],
        on_pypi=templates.globals["on_pypi"],
        primary_conda_channel=templates.globals["primary_conda_channel"],
    )

    sb.set_docs_mode()
    make_out = sb.make()

    # Wrap the body of the shields block in an HTML-only directive,
    # keeping the start/end markers unindented.
    shield_block_list = StringList([*make_out[0:2], ".. only:: html"])

    with shield_block_list.with_indent_size(1):
        shield_block_list.extend(make_out[1:-1])

    shield_block_list.append(make_out[-1])

    shields_block = str(shield_block_list)

    if templates.globals["license"] == "GNU General Public License v2 (GPLv2)":
        source = f"https://img.shields.io/github/license/{templates.globals['username']}/{templates.globals['repo_name']}"
        # BUGFIX: str.replace returns a new string; previously the result was
        # discarded, so the GPLv2 badge was never substituted.
        shields_block = shields_block.replace(source, "https://img.shields.io/badge/license-GPLv2-orange")
        # .. image:: https://img.shields.io/badge/License-LGPL%20v3-blue.svg

    install_block = create_docs_install_block(
        templates.globals["repo_name"],
        templates.globals["username"],
        templates.globals["enable_conda"],
        templates.globals["on_pypi"],
        templates.globals["pypi_name"],
        templates.globals["conda_channels"],
    ) + '\n'

    links_block = create_docs_links_block(
        templates.globals["username"],
        templates.globals["repo_name"],
    )

    # Do the replacement
    index_rst = index_rst_file.read_text(encoding="UTF-8")
    index_rst = shields_regex.sub(shields_block, index_rst)
    index_rst = installation_regex.sub(install_block, index_rst)
    index_rst = links_regex.sub(links_block, index_rst)
    index_rst = short_desc_regex.sub(
        ".. start short_desc\n\n.. documentation-summary::\n\t:meta:\n\n.. end short_desc",
        index_rst,
    )

    if ":caption: Links" not in index_rst and not templates.globals["preserve_custom_theme"]:
        # NOTE(review): the ':pypi:' line below uses space indentation while its
        # siblings use tabs — looks suspicious, but preserved to avoid changing output.
        index_rst = index_rst.replace(
            ".. start links",
            '\n'.join([
                ".. sidebar-links::",
                "\t:caption: Links",
                "\t:github:",
                (f" :pypi: {templates.globals['pypi_name']}" if templates.globals["on_pypi"] else ''),
                '',
                '',
                ".. start links",
            ]),
        )

    index_rst_file.write_clean(index_rst)

    return [index_rst_file.relative_to(repo_path).as_posix()]
def coloured_diff(
        a: Sequence[str],
        b: Sequence[str],
        fromfile: str = '',
        tofile: str = '',
        fromfiledate: str = '',
        tofiledate: str = '',
        n: int = 3,
        lineterm: str = '\n',
        removed_colour: terminal_colours.Colour = terminal_colours.Fore.RED,
        added_colour: terminal_colours.Colour = terminal_colours.Fore.GREEN,
        ) -> str:
    r"""
    Compare two sequences of lines; generate the delta as a unified diff.

    Unified diffs are a compact way of showing line changes and a few lines of context.
    The number of context lines is set by ``n`` which defaults to three.

    By default, the diff control lines (those with ``---``, ``+++``, or ``@@``)
    are created with a trailing newline.
    This is helpful so that inputs created from ``file.readlines()`` result in
    diffs that are suitable for ``file.writelines()`` since both the inputs and
    outputs have trailing newlines.

    For inputs that do not have trailing newlines, set the lineterm argument to ``''``
    so that the output will be uniformly newline free.

    The unidiff format normally has a header for filenames and modification times.
    Any or all of these may be specified using strings for
    ``fromfile``, ``tofile``, ``fromfiledate``, and ``tofiledate``.
    The modification times are normally expressed in the ISO 8601 format.

    .. versionadded:: 0.3.0

    .. latex:clearpage::

    **Example:**

    >>> for line in coloured_diff(
    ...     'one two three four'.split(),
    ...     'zero one tree four'.split(), 'Original', 'Current',
    ...     '2005-01-26 23:30:50', '2010-04-02 10:20:52',
    ...     lineterm='',
    ...     ):
    ...     print(line)  # doctest: +NORMALIZE_WHITESPACE
    --- Original 2005-01-26 23:30:50
    +++ Current 2010-04-02 10:20:52
    @@ -1,4 +1,4 @@
    +zero
     one
    -two
    -three
    +tree
     four

    :param a:
    :param b:
    :param fromfile:
    :param tofile:
    :param fromfiledate:
    :param tofiledate:
    :param n:
    :param lineterm:
    :param removed_colour: The :class:`~consolekit.terminal_colours.Colour` to use for lines that were removed.
    :param added_colour: The :class:`~consolekit.terminal_colours.Colour` to use for lines that were added.
    """

    buf = StringList()

    diff = difflib.unified_diff(a, b, fromfile, tofile, fromfiledate, tofiledate, n, lineterm)

    # Colour by prefix. Note that the '---'/'+++' file headers also start with
    # '-'/'+' and so are coloured like removed/added lines.
    for line in diff:
        if line.startswith('+'):
            buf.append(added_colour(line))
        elif line.startswith('-'):
            buf.append(removed_colour(line))
        else:
            buf.append(line)

    buf.blankline(ensure_single=True)

    return str(buf)
def make_pre_commit(repo_path: pathlib.Path, templates: Environment) -> List[str]:
    """
    Add configuration for ``pre-commit``.

    https://github.com/pre-commit/pre-commit

    # See https://pre-commit.com for more information
    # See https://pre-commit.com/hooks.html for more hooks

    :param repo_path: Path to the repository root.
    :param templates:
    """

    docs_dir = templates.globals["docs_dir"]
    import_name = templates.globals["import_name"]
    stubs_package = templates.globals["stubs_package"]

    # Files that contain no importable source and are excluded from some hooks.
    non_source_files = [posixpath.join(docs_dir, "conf"), "__pkginfo__", "setup"]

    domdfcoding_hooks = Repo(
        repo=make_github_url("domdfcoding", "pre-commit-hooks"),
        rev="v0.3.0",
        hooks=[
            {"id": "requirements-txt-sorter", "args": ["--allow-git"]},
            {
                "id": "check-docstring-first",
                "exclude": fr"^({'|'.join(non_source_files)}|{templates.globals['tests_dir']}/.*)\.py$",
            },
            "bind-requirements",
        ],
    )

    flake8_dunder_all = Repo(
        repo=make_github_url("domdfcoding", "flake8-dunder-all"),
        rev="v0.1.8",
        hooks=[{
            "id": "ensure-dunder-all",
            "files": fr"^{import_name}{'-stubs' if stubs_package else ''}/.*\.py$",
        }],
    )

    snippet_fmt = Repo(
        repo=make_github_url("python-formate", "snippet-fmt"),
        rev="v0.1.4",
        hooks=["snippet-fmt"],
    )

    formate_excludes = fr"^({'|'.join([*templates.globals['yapf_exclude'], *non_source_files])})\.(_)?py$"

    formate = Repo(
        repo=make_github_url("python-formate", "formate"),
        rev="v0.4.9",
        hooks=[{"id": "formate", "exclude": formate_excludes}],
    )

    dep_checker_args = [templates.globals["import_name"].replace('.', '/')]

    if templates.globals["source_dir"]:
        dep_checker_args.extend(["--work-dir", templates.globals["source_dir"]])

    dep_checker = Repo(
        repo=make_github_url("domdfcoding", "dep_checker"),
        rev="v0.6.2",
        hooks=[{"id": "dep_checker", "args": dep_checker_args}],
    )

    pre_commit_file = PathPlus(repo_path / ".pre-commit-config.yaml")

    if not pre_commit_file.is_file():
        pre_commit_file.touch()

    dumper = ruamel.yaml.YAML()
    dumper.indent(mapping=2, sequence=3, offset=1)

    output = StringList([
        f"# {templates.globals['managed_message']}",
        "---",
        '',
        f"exclude: {templates.globals['pre_commit_exclude']}",
        '',
        "repos:",
    ])

    # Used to rewrite ruamel's indentation into pre-commit's list style.
    indent_re = re.compile("^ {3}")

    managed_hooks = [
        pyproject_parser,
        pre_commit_hooks,
        domdfcoding_hooks,
        flake8_dunder_all,
        flake2lint,
        pygrep_hooks,
        pyupgrade,
        lucas_c_hooks,
        snippet_fmt,
        formate,
    ]

    if not templates.globals["stubs_package"]:
        managed_hooks.append(dep_checker)

    managed_hooks_urls = [str(hook.repo) for hook in managed_hooks]

    custom_hooks_comment = "# Custom hooks can be added below this comment"

    # Dump each managed hook as YAML and append it to the output.
    for hook in managed_hooks:
        buf = StringIO()
        dumper.dump(hook.to_dict(), buf)
        output.append(indent_re.sub(" - ", indent(buf.getvalue(), " ")))
        output.blankline(ensure_single=True)

    output.append(custom_hooks_comment)
    output.blankline(ensure_single=True)

    raw_yaml = pre_commit_file.read_text()

    # Preserve any user-added hooks found below the marker comment,
    # partitioned into "local" repos and other custom repos.
    if custom_hooks_comment in raw_yaml:
        custom_hooks_yaml = pre_commit_file.read_text().split(custom_hooks_comment)[1]

        custom_hooks = []
        local_hooks = []

        for repo in yaml_safe_loader.load(custom_hooks_yaml) or []:
            if repo["repo"] == "local":
                local_hooks.append(repo)
            elif repo["repo"] not in managed_hooks_urls:
                custom_hooks.append(Repo(**repo))

        for hook in custom_hooks:
            buf = StringIO()
            dumper.dump(hook.to_dict(), buf)
            output.append(indent_re.sub(" - ", indent(buf.getvalue(), " ")))
            output.blankline(ensure_single=True)

        for hook in local_hooks:
            buf = StringIO()
            dumper.dump(hook, buf)
            output.append(indent_re.sub(" - ", indent(buf.getvalue(), " ")))
            output.blankline(ensure_single=True)

    pre_commit_file.write_lines(output)

    return [pre_commit_file.name]
def make_documentation(cls):
    """
    Returns the reStructuredText documentation for the :class:`~.ConfigVar`.
    """

    docstring = cls.__doc__ or ''
    docstring = (indent(dedent(docstring), tab))

    if not docstring.startswith('\n'):
        docstring = '\n' + docstring

    buf = StringList()
    buf.indent_type = " "
    buf.blankline(ensure_single=True)
    buf.append(f".. conf:: {cls.__name__}")
    buf.append(docstring)
    buf.blankline()
    buf.indent_size += 1
    buf.append(f"**Required**: {'yes' if cls.required else 'no'}")
    buf.blankline()
    buf.blankline()

    # The default value is only documented for optional config vars.
    if not cls.required:
        if cls.default == []:
            buf.append("**Default**: [ ]")
        elif cls.default == {}:
            buf.append("**Default**: { }")
        elif isinstance(cls.default, Callable):  # type: ignore
            # A callable default derives its value from another config var.
            buf.append(f"**Default**: The value of :conf:`{cls.default.__name__}`")
        elif isinstance(cls.default, bool):
            buf.append(f"**Default**: :py:obj:`{cls.default}`")
        elif isinstance(cls.default, str):
            if cls.default == '':
                buf.append("**Default**: <blank>")
            else:
                buf.append(f"**Default**: ``{cls.default}``")
        else:
            buf.append(f"**Default**: {cls.default}")

        buf.blankline()
        buf.blankline()

    buf.append(f"**Type**: {get_yaml_type(cls.dtype)}")

    # For Literal types (bare, or as the first argument of e.g. List[Literal[...]]),
    # enumerate the permitted values.
    if is_literal_type(cls.dtype):
        valid_values = ", ".join(f"``{x}``" for x in cls.dtype.__args__)
        buf.blankline()
        buf.blankline()
        buf.append(f"**Allowed values**: {valid_values}")
    elif hasattr(cls.dtype, "__args__") and is_literal_type(cls.dtype.__args__[0]):
        valid_values = ", ".join(f"``{x}``" for x in cls.dtype.__args__[0].__args__)
        buf.blankline()
        buf.blankline()
        buf.append(f"**Allowed values**: {valid_values}")

    buf.indent_size -= 1

    return str(buf)
def make_module(
		name: str,
		module: ModuleType,
		attr_list: Iterable[str] = (),
		first_party_imports: Iterable[str] = (),
		converter=Converter(),
		) -> bool:
	"""
	Create type stubs for a module.

	:param name: The name of the module.
	:param module: The module object.
	:param attr_list: A list of attributes to create stubs for.
	:param first_party_imports: A list of first-party imports to include at the top of the file.
	:param converter: The converter used to map .NET types to Python types.

	:return: :py:obj:`True`, to indicate the stub file was written.
	"""

	buf = StringList()

	path = name.split('.')

	# Stubs live in a PEP 561-style "<package>-stubs" tree mirroring the module path.
	stubs_dir = PathPlus(f"{path[0]}-stubs")
	stubs_dir.maybe_make()

	pkg_subdir = stubs_dir / '/'.join(x for x in path[1:-1])
	pkg_subdir.maybe_make(parents=True)
	stub_file = pkg_subdir / f"{path[-1]}.pyi"

	import_name = name.replace(".__init__", '')
	# Escape the module name so its dots match literally in the patterns below,
	# rather than acting as regex wildcards.
	escaped_name = re.escape(import_name)

	for imp in (*make_imports(name), *first_party_imports):
		# Rewrite absolute submodule imports as relative imports, deepest first
		# so the three-level pattern isn't partially consumed by the shallower ones.
		imp = re.sub(
				fr"import {escaped_name}\.([A-Za-z_]+)\.([A-Za-z_]+)\.([A-Za-z_]+)",
				r"from .\1.\2 import \3",
				imp,
				)
		imp = re.sub(fr"import {escaped_name}\.([A-Za-z_]+)\.([A-Za-z_]+)", r"from .\1 import \2", imp)
		imp = re.sub(fr"import {escaped_name}\.([A-Za-z_]+)", r"from . import \1", imp)
		# A bare import of the module itself is unnecessary in its own stub.
		imp = re.sub(fr"import {escaped_name}$", '', imp)
		buf.append(imp)

	if import_name != "System.ComponentModel":
		if import_name == "System":
			buf.append("from .ComponentModel import MarshalByValueComponent")
		else:
			buf.append("from System.ComponentModel import MarshalByValueComponent")

	for attr_name in dedup(attr_list):
		stub_code = walk_attrs(module, attr_name, converter=converter)

		# Strip the module's own namespace prefix from annotations.
		stub_code = stub_code.replace(f": {import_name}.", ": ")
		stub_code = stub_code.replace(f" -> {import_name}.", " -> ")
		stub_code = stub_code.replace(f"[{import_name}.", '[')
		# BUGFIX: str.replace returns a new string; previously the result of this
		# call was discarded, so the IDictionary -> Any substitution never happened.
		stub_code = stub_code.replace(
				"System.Collections.Generic.IDictionary[System.String,System.String]",
				"Any",
				)

		buf.blankline(ensure_single=True)
		buf.blankline()
		buf.append(stub_code)

	sorted_code = isort.code(str(buf), config=isort_config)
	sans_unneeded_imports = fix_code(
			sorted_code,
			additional_imports=None,
			expand_star_imports=False,
			remove_all_unused_imports=False,
			remove_duplicate_keys=False,
			remove_unused_variables=False,
			ignore_init_module_imports=False,
			)

	stub_file.write_text(sans_unneeded_imports)

	return True
def walk_attrs(module: ModuleType, attr_name, converter=Converter()) -> str:
	"""
	Create stubs for given class, including all attributes.

	:param module: The module containing the attribute.
	:param attr_name: The name of the attribute (usually a class) to generate stubs for.
	:param converter: The converter used to map .NET types to Python types.

	:return: The stub source for the class, or an empty string for dunder attributes.
	"""

	buf = StringList(convert_indents=True)
	buf.indent_type = " "

	if not is_dunder(attr_name):
		obj = getattr(module, attr_name)

		# TODO: case where obj is not a class
		if not isinstance(obj, FunctionType):
			bases = []

			for base in obj.__bases__:
				if base not in {System.Object, object}:
					if base.__name__ in converter.type_mapping:
						bases.append(converter.type_mapping[base.__name__])
					else:
						bases.append(base.__name__)

			# BUGFIX: the predicate was inverted ("x is Any"), which discarded every
			# real base name — and had an Any ever been kept, ', '.join(bases) would
			# raise TypeError on the non-str element. Keep the names, drop Any.
			bases = list(filter(lambda x: x is not Any, bases))

			if bases:
				buf.append(f"class {attr_name}({', '.join(bases)}):\n")
			else:
				buf.append(f"class {attr_name}:\n")

			for child_attr_name in get_child_attrs(obj):
				try:
					child_obj = getattr(obj, child_attr_name)
				except TypeError as e:
					# pythonnet raises TypeError (with these exact messages) for
					# attributes that can only be read from an instance.
					if str(e) in {
							"instance property must be accessed through a class instance",
							"property cannot be read",
							}:
						# Unreadable properties still get a property stub.
						make_property(buf, child_attr_name)
						continue
					elif str(e) == "instance attribute must be accessed through a class instance":
						print(f"{e.__class__.__name__}: '{e}' occurred for {attr_name}.{child_attr_name}")
						continue
					else:
						raise

				# TODO:
				if isinstance(child_obj, FunctionType):
					return_type, arguments = get_signature(child_obj, child_attr_name, converter)

					with buf.with_indent_size(buf.indent_size + 1):
						if arguments is not None and arguments:
							# Positional parameters are named _, __, ___, ...
							signature = []
							for idx, argument in enumerate(arguments.split(", ")):
								signature.append(f"{'_' * (idx + 1)}: {converter.convert_type(argument)}")

							line = f"def {child_attr_name}(self, {', '.join(signature)}) -> {return_type}: ..."

							if len(line) > 88:
								# Too long for one line; emit a multi-line signature.
								buf.blankline(ensure_single=True)
								buf.append(f"def {child_attr_name}(")

								with buf.with_indent_size(buf.indent_size + 2):
									buf.append("self,")
									for line in signature:
										buf.append(f"{line},")
									buf.append(f") -> {return_type}: ...\n")
							else:
								buf.append(line)

						elif arguments is None:
							# Signature could not be determined; fall back to *args/**kwargs.
							buf.append(f"def {child_attr_name}(self, *args, **kwargs) -> {return_type}: ...")
						elif not arguments:  # i.e. takes no arguments
							buf.append(f"def {child_attr_name}(self) -> {return_type}: ...")

			buf.blankline(ensure_single=True)

		return str(buf)

	return ''