Example #1
def visit_asset_node(translator: HTMLTranslator, node: AssetNode):
	"""
	Visit an :class:`~.AssetNode`.

	:param translator:
	:param node: The node being visited.
	"""

	if not hasattr(translator, "_asset_node_seen_files"):
		# Files that have already been seen
		translator._asset_node_seen_files = []  # type: ignore

	assets_out_dir = PathPlus(translator.builder.outdir) / "_assets"
	assets_out_dir.maybe_make(parents=True)

	source_file = PathPlus(translator.builder.confdir) / node["source_file"]

	if source_file not in translator._asset_node_seen_files and source_file.is_file():  # type: ignore
		# Avoid unnecessary copies of potentially large files.
		translator._asset_node_seen_files.append(source_file)  # type: ignore
		shutil.copy2(source_file, assets_out_dir)
	elif not source_file.is_file():
		stderr_writer(Fore.RED(f"{translator.builder.current_docname}: Asset file '{source_file}' not found."))
		translator.context.append('')
		return

	# Create the HTML
	current_uri = (pathlib.PurePosixPath('/') / translator.builder.current_docname).parent
	refuri = posixpath.relpath(f"/_assets/{node['refuri']}", str(current_uri))
	translator.body.append(f'<a class="reference external" href="{refuri}">')
	translator.context.append("</a>")
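The visitor pushes the closing tag (or an empty string) onto ``translator.context``, so a matching depart function has to pop it back off. Below is a minimal sketch of that depart function and the Sphinx registration; ``depart_asset_node`` and the ``setup`` wiring are assumptions for illustration, not taken from the source.

def depart_asset_node(translator: HTMLTranslator, node: AssetNode):
	"""
	Depart an :class:`~.AssetNode` (hypothetical counterpart to the visitor above).
	"""

	# Emit whichever closing tag (or empty string) the visitor pushed.
	translator.body.append(translator.context.pop())


def setup(app):
	# Hypothetical wiring: register the node with the HTML builder.
	app.add_node(AssetNode, html=(visit_asset_node, depart_asset_node))

Example #2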
def process_multiple_notebooks(
		notebooks: Iterable[PathLike],
		outdir: PathLike,
		overwrite: bool = False,
		) -> int:
	"""
	Process multiple Jupyter notebooks for conversion into Python scripts.

	:param notebooks: An iterable of notebook filenames to process.
	:param outdir: The directory to store the Python output in.
	:param overwrite: Whether to overwrite existing files.
	"""

	ret = 0
	outdir = PathPlus(outdir)

	for notebook in notebooks:
		notebook = PathPlus(notebook)
		outfile = outdir / f"{notebook.stem}.py"

		if outfile.is_file() and not overwrite:
			print(f"Info: Skipping existing file {outfile}")
		else:
			if notebook.is_file():
				print(f"Converting {notebook} to {outfile}")
				convert_notebook(notebook, outfile)
			else:
				print(f"{notebook} not found")
				ret |= 1

	return ret
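A minimal usage sketch (filenames hypothetical); the return value is designed to double as a process exit code:

if __name__ == "__main__":
	import sys
	sys.exit(process_multiple_notebooks(["demo.ipynb", "analysis.ipynb"], outdir="scripts", overwrite=True))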
Example #3
def requirements_from_flit(
    package_root: pathlib.Path,
    options: Dict,
    env: sphinx.environment.BuildEnvironment,
    extra: str,
) -> List[str]:
    """
	Load requirements from the ``[tool.flit.metadata.requires-extra]`` section of
	a ``pyproject.toml`` file in the root of the repository.

	:param package_root: The path to the package root.
	:param options:
	:param env:
	:param extra: The name of the "extra" that the requirements are for.

	:return: List of requirements.
	"""  # noqa D400

    pyproject_file = PathPlus(env.srcdir).parent / "pyproject.toml"

    if not pyproject_file.is_file():
        raise FileNotFoundError(
            f"Cannot find pyproject.toml in '{pyproject_file.parent}'")

    flit_extras = parse_pyproject_extras(pyproject_file,
                                         flavour="flit",
                                         normalize_func=normalize_keep_dot)

    if extra not in flit_extras:
        raise ValueError(
            f"'{extra}' not found in '[tool.flit.metadata.requires-extra]'")

    requirements = flit_extras[extra]

    return list(map(str, sorted(combine_requirements(requirements))))
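The same lookup can be done outside Sphinx. A standalone sketch using ``dom_toml`` (the function name and error handling are illustrative):

import dom_toml
from typing import List

def flit_extra_requirements(pyproject_file, extra: str) -> List[str]:
    # Walk to [tool.flit.metadata.requires-extra] in a pyproject.toml file.
    config = dom_toml.load(pyproject_file)
    extras = config["tool"]["flit"]["metadata"]["requires-extra"]

    if extra not in extras:
        raise ValueError(f"'{extra}' not found in '[tool.flit.metadata.requires-extra]'")

    return sorted(extras[extra])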
Example #4
def requirements_from_setup_cfg(
    package_root: pathlib.Path,
    options: Dict,
    env: sphinx.environment.BuildEnvironment,
    extra: str,
) -> List[str]:
    """
	Load requirements from a ``setup.cfg`` file in the root of the repository.

	:param package_root: The path to the package root.
	:param options:
	:param env:
	:param extra: The name of the "extra" that the requirements are for.

	:return: List of requirements.
	"""

    setup_cfg_file = PathPlus(env.srcdir).parent / "setup.cfg"
    assert setup_cfg_file.is_file()

    setup_cfg = read_configuration(setup_cfg_file)

    if "options" in setup_cfg and "extras_require" in setup_cfg["options"]:
        if extra in setup_cfg["options"]["extras_require"]:
            return setup_cfg["options"]["extras_require"][extra]
        else:
            raise ValueError(
                f"'{extra}' not found in '[options.extras_require]'")
    else:
        raise ValueError(
            "'options.extras_require' section not found in 'setup.cfg")
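``read_configuration`` here is presumably ``setuptools.config.read_configuration``. A rough standard-library-only equivalent, for illustration (the function name is hypothetical):

from configparser import ConfigParser
from typing import List

def extras_from_setup_cfg(path, extra: str) -> List[str]:
    parser = ConfigParser()
    parser.read(path)

    if not parser.has_section("options.extras_require"):
        raise ValueError("'options.extras_require' section not found in 'setup.cfg'")
    if not parser.has_option("options.extras_require", extra):
        raise ValueError(f"'{extra}' not found in '[options.extras_require]'")

    # setup.cfg lists one requirement per line, typically indented.
    lines = parser.get("options.extras_require", extra).splitlines()
    return [line.strip() for line in lines if line.strip()]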
Example #5
def make_docutils_conf(repo_path: pathlib.Path,
                       templates: Environment) -> List[str]:
    """
	Add configuration for ``Docutils``.

	:param repo_path: Path to the repository root.
	:param templates:
	"""

    file = PathPlus(repo_path / templates.globals["docs_dir"] /
                    "docutils.conf")
    file.parent.maybe_make(parents=True)

    if not file.is_file():
        file.write_text('\n'.join([
            "[restructuredtext parser]",
            "tab_width = 4",
            '',
            '',
        ]))

    conf = ConfigUpdater()
    conf.read(str(file))
    required_sections = ["restructuredtext parser"]

    for section in required_sections:
        if section not in conf.sections():
            conf.add_section(section)

    conf["restructuredtext parser"]["tab_width"] = 4

    file.write_clean(str(conf))

    return [file.relative_to(repo_path).as_posix()]
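``ConfigUpdater`` is used instead of ``configparser`` because it round-trips comments and layout. A quick demonstration with hypothetical file contents:

from configupdater import ConfigUpdater

conf = ConfigUpdater()
conf.read_string("[restructuredtext parser]\n# width of a tab\ntab_width = 8\n")
conf["restructuredtext parser"]["tab_width"] = 4
print(str(conf))  # the comment survives; only the value changes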
Example #6
def test_is_char_device_true():
    # Under Unix, /dev/null should generally be a char device.
    P = PathPlus("/dev/null")
    if not P.exists():
        pytest.skip("/dev/null required")
    assert P.is_char_device()
    assert not P.is_block_device()
    assert not P.is_file()
Example #7
def requirement(requirement: str, file: Optional[str] = None) -> int:
    """
	Add a requirement.
	"""

    # 3rd party
    from consolekit.utils import abort
    from domdf_python_tools.paths import PathPlus, traverse_to_file
    from domdf_python_tools.stringlist import StringList
    from packaging.requirements import InvalidRequirement
    from packaging.specifiers import SpecifierSet
    from shippinglabel import normalize, normalize_keep_dot
    from shippinglabel.requirements import ComparableRequirement, combine_requirements, read_requirements

    repo_dir: PathPlus = traverse_to_file(PathPlus.cwd(), "repo_helper.yml",
                                          "git_helper.yml")

    if file is None:
        requirements_file = repo_dir / "requirements.txt"

        if not requirements_file.is_file():
            raise abort(f"'{file}' not found.")

    else:
        requirements_file = PathPlus(file)

        if not requirements_file.is_file():
            raise abort("'requirements.txt' not found.")

    try:
        req = ComparableRequirement(requirement)
    except InvalidRequirement as e:
        raise BadRequirement(requirement, e)

    response = (PYPI_API / req.name / "json/").get()
    if response.status_code != 200:
        raise click.BadParameter(f"No such project {req.name}")
    else:
        req.name = normalize(response.json()["info"]["name"])
        if not req.specifier:
            req.specifier = SpecifierSet(
                f">={response.json()['info']['version']}")

        click.echo(f"Adding requirement '{req}'")

    requirements, comments, invalid_lines = read_requirements(
        req_file=requirements_file,
        include_invalid=True,
        normalize_func=normalize_keep_dot,
    )

    requirements.add(req)

    buf = StringList([*comments, *invalid_lines])
    buf.extend(str(req) for req in sorted(combine_requirements(requirements)))
    requirements_file.write_lines(buf)

    return 0
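Usage sketch (requires network access to PyPI; the requirement string and filename are illustrative):

exit_code = requirement("pytest>=6.0", file="requirements.txt")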
Example #8
def remove_lint_roller(repo_path: pathlib.Path,
                       templates: Environment) -> List[str]:
    """
	Remove the old lint_roller.sh script from the desired repo.

	:param repo_path: Path to the repository root.
	:param templates:
	"""

    lint_file = PathPlus(repo_path / "lint_roller.sh")
    if lint_file.is_file():
        lint_file.unlink()

    return [lint_file.name]
Example #9
def make_isort(repo_path: pathlib.Path, templates: Environment) -> List[str]:
	"""
	Remove the ``isort`` configuration file.

	https://github.com/timothycrosley/isort

	:param repo_path: Path to the repository root.
	:param templates:
	"""

	isort_file = PathPlus(repo_path / ".isort.cfg")
	isort_file.unlink(missing_ok=True)
	assert not isort_file.is_file()
	return [isort_file.name]
Example #10
def test_is_socket_true(BASE):
    P = PathPlus(BASE, "mysock")
    sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    try:
        try:
            sock.bind(str(P))
        except OSError as e:
            if isinstance(e, PermissionError) or "AF_UNIX path too long" in str(e):
                pytest.skip("cannot bind Unix socket: " + str(e))
        assert P.is_socket()
        assert not P.is_fifo()
        assert not P.is_file()
    finally:
        sock.close()
Example #11
def remove_autodoc_augment_defaults(repo_path: pathlib.Path,
                                    templates: Environment) -> List[str]:
    """
	Remove the redundant "autodoc_augment_defaults" extension.

	:param repo_path: Path to the repository root.
	:param templates:
	"""

    target_file = PathPlus(repo_path / templates.globals["docs_dir"] /
                           "autodoc_augment_defaults.py")

    if target_file.is_file():
        target_file.unlink()

    return [target_file.relative_to(repo_path).as_posix()]
Example #12
def remove_copy_pypi_2_github(repo_path: pathlib.Path,
                              templates: Environment) -> List[str]:
    """
	Remove the deprecated copy_pypi_2_github.py script.

	Use octocheese and its GitHub Action instead.

	:param repo_path: Path to the repository root.
	:param templates:
	"""

    copier = PathPlus(repo_path / ".ci" / "copy_pypi_2_github.py")
    if copier.is_file():
        copier.unlink()

    return [copier.relative_to(repo_path).as_posix()]
Example #13
def travis_bad(repo_path: pathlib.Path, templates: Environment) -> List[str]:
    """
	Removes Travis CI configuration.

	:param repo_path: Path to the repository root.
	:param templates:
	"""

    if PathPlus(repo_path / ".travis.yml").is_file():
        PathPlus(repo_path / ".travis.yml").unlink()

    conda_file = PathPlus(repo_path / ".ci" / "travis_deploy_conda.sh")
    if conda_file.is_file():
        conda_file.unlink()

    return [".travis.yml", conda_file.relative_to(repo_path).as_posix()]
def paths_to_modules(*paths: PathLike) -> Iterator[str]:
    r"""
	Convert filesystem paths into dotted import names.

	For example, ``foo/bar.py`` becomes ``foo.bar``.

	.. versionadded:: 0.3.0

	:param \*paths: The paths to convert.
	"""

    for path in paths:
        path = PathPlus(path)

        if path.is_file() and path.suffix == ".py":
            path = path.with_suffix('')

        yield '.'.join(path.parts)
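Example usage, assuming ``foo/bar.py`` exists on disk (paths illustrative):

print(list(paths_to_modules("foo/bar.py", "foo/baz")))
# -> ['foo.bar', 'foo.baz']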
Example #15
def make_github_octocheese(repo_path: pathlib.Path,
                           templates: Environment) -> List[str]:
    """
	Add configuration for the OctoCheese GitHub Action.

	:param repo_path: Path to the repository root.
	:param templates:
	"""

    file = PathPlus(repo_path / ".github" / "workflows" / "octocheese.yml")
    file.parent.maybe_make(parents=True)

    if templates.globals["on_pypi"]:
        file.write_clean(templates.get_template(file.name).render())
    elif file.is_file():
        file.unlink()

    return [file.relative_to(repo_path).as_posix()]
Example #16
def make_conda_actions_ci(repo_path: pathlib.Path,
                          templates: Environment) -> List[str]:
    """
	Add configuration for testing conda packages on `GitHub Actions` to the desired repo.

	:param repo_path: Path to the repository root.
	:param templates:
	"""

    workflows_dir = PathPlus(repo_path / ".github" / "workflows")
    conda_ci_file = workflows_dir / "conda_ci.yml"

    if templates.globals["enable_conda"]:
        actions = templates.get_template("github_conda_ci.yml")
        workflows_dir.maybe_make(parents=True)

        def no_pypy_versions(versions):
            """
			Returns the subset of ``versions`` which are neither PyPy versions nor ``-dev`` pre-releases.

			:param versions:
			"""

            return [
                v for v in no_dev_versions(versions)
                if "pypy" not in v.lower()
            ]

        pip_dependencies = ["whey-conda"]

        pyproject_file = PathPlus(repo_path / "pyproject.toml")
        if pyproject_file.is_file():
            data: DefaultDict[str,
                              Any] = DefaultDict(dom_toml.load(pyproject_file))
            pip_dependencies.extend(data["build-system"]["requires"])

        conda_ci_file.write_clean(
            actions.render(no_dev_versions=no_pypy_versions,
                           pip_dependencies=pip_dependencies))

    else:
        conda_ci_file.unlink(missing_ok=True)

    return [conda_ci_file.relative_to(repo_path).as_posix()]
Example #17
def requirements_from_pkginfo(
    package_root: pathlib.Path,
    options: Dict,
    env: sphinx.environment.BuildEnvironment,
    extra: str,
) -> List[str]:
    """
	Load requirements from a ``__pkginfo__.py`` file in the root of the repository.

	:param package_root: The path to the package root.
	:param options:
	:param env:
	:param extra: The name of the "extra" that the requirements are for.

	:return: List of requirements.
	"""

    __pkginfo___file = PathPlus(env.srcdir).parent / "__pkginfo__.py"

    if not __pkginfo___file.is_file():
        raise FileNotFoundError(
            f"Cannot find __pkginfo__.py in '{__pkginfo___file.parent}'")

    try:
        spec = importlib.util.spec_from_file_location("__pkginfo__",
                                                      str(__pkginfo___file))

        if spec is not None:
            __pkginfo__ = importlib.util.module_from_spec(spec)

            if spec.loader:
                spec.loader.exec_module(__pkginfo__)
                requirements = __pkginfo__.extras_require[extra]
                return requirements
                # TODO: handle extra not found

    except ValueError:
        pass

    raise ImportError("Could not import __pkginfo__.py")
Example #18
def make_github_manylinux(repo_path: pathlib.Path,
                          templates: Environment) -> List[str]:
    """
	Add configuration for `GitHub Actions` manylinux wheel builds to the desired repo.

	:param repo_path: Path to the repository root.
	:param templates:
	"""

    # TODO: deploys from other supported platforms for not pure python

    file = PathPlus(repo_path / ".github" / "workflows" /
                    "manylinux_build.yml")
    file.parent.maybe_make(parents=True)

    if not templates.globals["pure_python"] and "Linux" in templates.globals[
            "platforms"]:
        actions = templates.get_template(file.name)
        wheel_py_versions = []
        PYVERSIONS = []

        for pyver in range(6, 8):
            if f"3.{pyver}" in templates.globals["python_versions"]:
                wheel_py_versions.append(f"cp3{pyver}-cp3{pyver}m")
                PYVERSIONS.append(f'"3{pyver}"')

        for pyver in range(8, 10):
            if f"3.{pyver}" in templates.globals["python_versions"]:
                wheel_py_versions.append(f"cp3{pyver}-cp3{pyver}")
                PYVERSIONS.append(f'"3{pyver}"')

        file.write_clean(
            actions.render(
                wheel_py_versions=wheel_py_versions,
                PYVERSIONS=' '.join(PYVERSIONS),
            ))
    elif file.is_file():
        file.unlink()

    return [file.relative_to(repo_path).as_posix()]
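Example #19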
def is_docker() -> bool:
    """
	Is this current environment running in docker?

	>>> type(is_docker())
	<class 'bool'>

	.. versionadded:: 0.6.0
	"""  # noqa: D400

    if os.path.exists("/.dockerenv"):
        return True

    cgroup = PathPlus("/proc/self/cgroup")

    if cgroup.is_file():
        try:
            return any("docker" in line for line in cgroup.read_lines())
        except FileNotFoundError:
            return False

    return False
Example #20
    def make_linux(self) -> PathPlus:
        """
		Create, update or remove the Linux action, as appropriate.
		"""

        platform_name = "Linux"
        ci_file = self.workflows_dir / f"python_ci_{platform_name.lower()}.yml"

        if platform_name in self.templates.globals["platforms"]:

            conda_pip_dependencies = ["mkrecipe"]

            pyproject_file = PathPlus(self.repo_path / "pyproject.toml")
            if pyproject_file.is_file():
                data: DefaultDict[str, Any] = DefaultDict(
                    dom_toml.load(pyproject_file))
                conda_pip_dependencies.extend(data["build-system"]["requires"])

            ci_file.write_clean(
                self.actions.render(
                    no_dev_versions=no_dev_versions,
                    python_versions=set_gh_actions_versions(
                        self.get_linux_ci_versions()),
                    ci_platform=platform_ci_names[platform_name],
                    ci_name=platform_name,
                    dependency_lines=self.get_linux_ci_requirements(),
                    gh_actions_versions=self.get_gh_actions_matrix(),
                    code_file_filter=self._code_file_filter,
                    run_on_tags="    tags:\n      - '*'",
                    is_experimental=self._is_experimental,
                    conda_pip_dependencies=conda_pip_dependencies,
                ))
        elif ci_file.is_file():
            ci_file.unlink()

        return ci_file
Example #21
class History:
	"""
	Represents a readline history file.

	.. versionadded:: 0.6.0
	"""

	#: The underlying file.
	file: PathPlus

	def __init__(self):
		self.file = PathPlus(appdirs.user_config_dir("github", "repo-helper")) / "interactive.hist"

	def read(self):
		"""
		Read the history file.
		"""

		if self.file.is_file():
			readline.read_history_file(str(self.file))

	def write(self):
		"""
		Write the modified history file to disk.
		"""

		self.file.parent.maybe_make()
		readline.write_history_file(str(self.file))

	@staticmethod
	def get_history_items():
		"""
		Returns a list of items in the readline history.
		"""

		return [readline.get_history_item(i) for i in range(1, readline.get_current_history_length() + 1)]
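Usage sketch (assumes an interactive session where the ``readline`` module is available):

history = History()
history.read()    # restore any previous session
# ... interactive input() calls happen here ...
history.write()   # persist the combined history
print(History.get_history_items())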
Example #22
def make_pyproject(repo_path: pathlib.Path,
                   templates: Environment) -> List[str]:
    """
	Create the ``pyproject.toml`` file for :pep:`517`.

	:param repo_path: Path to the repository root.
	:param templates:
	"""

    pyproject_file = PathPlus(repo_path / "pyproject.toml")

    data: DefaultDict[str, Any]

    if pyproject_file.is_file():
        data = DefaultDict(dom_toml.load(pyproject_file))
    else:
        data = DefaultDict()

    data.set_default("build-system", {})
    build_backend = "setuptools.build_meta"

    build_requirements_ = {
        "setuptools>=40.6.0", "wheel>=0.34.2", "whey", "repo-helper",
        *templates.globals["tox_build_requirements"],
        *data["build-system"].get("requires", [])
    }

    build_requirements = sorted(
        combine_requirements(
            ComparableRequirement(req) for req in build_requirements_))

    if templates.globals["use_whey"]:
        for old_dep in ["setuptools", "wheel"]:
            if old_dep in build_requirements:
                build_requirements.remove(old_dep)  # type: ignore

    if templates.globals["use_whey"]:
        build_backend = "whey"
    elif "whey" in build_requirements:
        build_requirements.remove("whey")  # type: ignore

    if "repo-helper" in build_requirements:
        build_requirements.remove("repo-helper")  # type: ignore

    data["build-system"]["requires"] = list(map(str, build_requirements))
    data["build-system"]["build-backend"] = build_backend

    data["project"] = DefaultDict(data.get("project", {}))
    data["project"]["name"] = templates.globals["pypi_name"]
    data["project"]["version"] = templates.globals["version"]
    data["project"]["description"] = templates.globals["short_desc"]
    data["project"]["readme"] = "README.rst"
    data["project"]["keywords"] = sorted(templates.globals["keywords"])
    data["project"]["dynamic"] = [
        "requires-python", "classifiers", "dependencies"
    ]
    data["project"]["authors"] = [{
        "name": templates.globals["author"],
        "email": templates.globals["email"]
    }]
    data["project"]["license"] = {"file": "LICENSE"}

    if templates.globals["requires_python"] is not None:
        data["project"]["dynamic"].pop(0)
        data["project"][
            "requires-python"] = f">={templates.globals['requires_python']}"

    url = "https://github.com/{username}/{repo_name}".format_map(
        templates.globals)
    data["project"]["urls"] = {
        "Homepage":
        url,
        "Issue Tracker":
        "https://github.com/{username}/{repo_name}/issues".format_map(
            templates.globals),
        "Source Code":
        url,
    }

    if templates.globals["enable_docs"]:
        data["project"]["urls"]["Documentation"] = templates.globals[
            "docs_url"]

    # extras-require

    data["project"]["optional-dependencies"] = {}

    for extra, dependencies in templates.globals["extras_require"].items():
        data["project"]["optional-dependencies"][extra] = list(
            map(str, dependencies))

    if not data["project"]["optional-dependencies"]:
        del data["project"]["optional-dependencies"]

    # entry-points

    if templates.globals["console_scripts"]:
        data["project"]["scripts"] = dict(
            split_entry_point(e) for e in templates.globals["console_scripts"])

    data["project"]["entry-points"] = {}

    for group, entry_points in templates.globals["entry_points"].items():
        data["project"]["entry-points"][group] = dict(
            split_entry_point(e) for e in entry_points)

    if not data["project"]["entry-points"]:
        del data["project"]["entry-points"]

    # tool
    data.set_default("tool", {})

    # tool.mkrecipe
    if templates.globals["enable_conda"]:
        data["tool"].setdefault("mkrecipe", {})
        data["tool"]["mkrecipe"]["conda-channels"] = templates.globals[
            "conda_channels"]

        if templates.globals["conda_extras"] in (["none"], ["all"]):
            data["tool"]["mkrecipe"]["extras"] = templates.globals[
                "conda_extras"][0]
        else:
            data["tool"]["mkrecipe"]["extras"] = templates.globals[
                "conda_extras"]
    else:
        if "mkrecipe" in data["tool"]:
            del data["tool"]["mkrecipe"]

    # tool.whey
    data["tool"].setdefault("whey", {})

    data["tool"]["whey"]["base-classifiers"] = templates.globals["classifiers"]

    python_versions = set()
    python_implementations = set()

    for py_version in templates.globals["python_versions"]:
        py_version = str(py_version)

        if pre_release_re.match(py_version):
            continue

        pypy_version_m = _pypy_version_re.match(py_version)

        if py_version.startswith('3'):
            python_versions.add(py_version)
            python_implementations.add("CPython")

        elif pypy_version_m:
            python_implementations.add("PyPy")
            python_versions.add(f"3.{pypy_version_m.group(1)}")

    data["tool"]["whey"]["python-versions"] = natsorted(python_versions)
    data["tool"]["whey"]["python-implementations"] = sorted(
        python_implementations)

    data["tool"]["whey"]["platforms"] = templates.globals["platforms"]

    license_ = templates.globals["license"]
    data["tool"]["whey"]["license-key"] = {
        v: k
        for k, v in license_lookup.items()
    }.get(license_, license_)

    if templates.globals["source_dir"]:
        raise NotImplementedError(
            "Whey does not support custom source directories")

    elif templates.globals["import_name"] != templates.globals["pypi_name"]:
        if templates.globals["stubs_package"]:
            data["tool"]["whey"]["package"] = "{import_name}-stubs".format_map(
                templates.globals)
        else:
            data["tool"]["whey"]["package"] = posixpath.join(
                # templates.globals["source_dir"],
                templates.globals["import_name"].split('.', 1)[0], )

    if templates.globals["manifest_additional"]:
        data["tool"]["whey"]["additional-files"] = templates.globals[
            "manifest_additional"]
    elif "additional-files" in data["tool"]["whey"]:
        del data["tool"]["whey"]["additional-files"]

    if not templates.globals["enable_tests"] and not templates.globals[
            "stubs_package"]:
        data["tool"]["importcheck"] = data["tool"].get("importcheck", {})

    if templates.globals["enable_docs"]:
        data["tool"]["sphinx-pyproject"] = make_sphinx_config_dict(templates)
    else:
        data["tool"].pop("sphinx-pyproject", None)

    # [tool.mypy]
    # This is added regardless of the supported mypy version.
    # It isn't removed from setup.cfg unless the version is 0.901 or above
    data["tool"].setdefault("mypy", {})

    data["tool"]["mypy"].update(_get_mypy_config(templates.globals))

    if templates.globals["mypy_plugins"]:
        data["tool"]["mypy"]["plugins"] = templates.globals["mypy_plugins"]

    # [tool.dependency-dash]
    data["tool"].setdefault("dependency-dash", {})
    data["tool"]["dependency-dash"]["requirements.txt"] = {"order": 10}

    if templates.globals["enable_tests"]:
        data["tool"]["dependency-dash"]["tests/requirements.txt"] = {
            "order": 20,
            "include": False,
        }

    if templates.globals["enable_docs"]:
        data["tool"]["dependency-dash"]["doc-source/requirements.txt"] = {
            "order": 30,
            "include": False,
        }

    # [tool.snippet-fmt]
    data["tool"].setdefault("snippet-fmt", {})
    data["tool"]["snippet-fmt"].setdefault("languages", {})
    data["tool"]["snippet-fmt"].setdefault("directives", ["code-block"])

    data["tool"]["snippet-fmt"]["languages"]["python"] = {"reformat": True}
    data["tool"]["snippet-fmt"]["languages"]["TOML"] = {"reformat": True}
    data["tool"]["snippet-fmt"]["languages"]["ini"] = {}
    data["tool"]["snippet-fmt"]["languages"]["json"] = {}

    if not data["tool"]:
        del data["tool"]

    # TODO: managed message
    dom_toml.dump(data, pyproject_file, encoder=dom_toml.TomlEncoder)

    return [pyproject_file.name]
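``split_entry_point`` is defined elsewhere; a plausible sketch of its behaviour, inferred from how it is called above (the name and implementation are assumptions):

from typing import Tuple

def split_entry_point(entry_point: str) -> Tuple[str, str]:
    # Hypothetical: "demo-script = demo.module:main" -> ("demo-script", "demo.module:main")
    name, _, target = entry_point.partition('=')
    return name.strip(), target.strip()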
Example #23
def make_pre_commit(repo_path: pathlib.Path,
                    templates: Environment) -> List[str]:
    """
	Add configuration for ``pre-commit``.

	https://github.com/pre-commit/pre-commit

	# See https://pre-commit.com for more information
	# See https://pre-commit.com/hooks.html for more hooks

	:param repo_path: Path to the repository root.
	:param templates:
	"""

    docs_dir = templates.globals["docs_dir"]
    import_name = templates.globals["import_name"]
    stubs_package = templates.globals["stubs_package"]

    non_source_files = [
        posixpath.join(docs_dir, "conf"), "__pkginfo__", "setup"
    ]

    domdfcoding_hooks = Repo(
        repo=make_github_url("domdfcoding", "pre-commit-hooks"),
        rev="v0.3.0",
        hooks=[
            {
                "id": "requirements-txt-sorter",
                "args": ["--allow-git"]
            },
            {
                "id":
                "check-docstring-first",
                "exclude":
                fr"^({'|'.join(non_source_files)}|{templates.globals['tests_dir']}/.*)\.py$"
            },
            "bind-requirements",
        ])

    flake8_dunder_all = Repo(
        repo=make_github_url("domdfcoding", "flake8-dunder-all"),
        rev="v0.1.8",
        hooks=[{
            "id":
            "ensure-dunder-all",
            "files":
            fr"^{import_name}{'-stubs' if stubs_package else ''}/.*\.py$"
        }])

    snippet_fmt = Repo(
        repo=make_github_url("python-formate", "snippet-fmt"),
        rev="v0.1.4",
        hooks=["snippet-fmt"],
    )

    formate_excludes = fr"^({'|'.join([*templates.globals['yapf_exclude'], *non_source_files])})\.(_)?py$"

    formate = Repo(
        repo=make_github_url("python-formate", "formate"),
        rev="v0.4.9",
        hooks=[{
            "id": "formate",
            "exclude": formate_excludes
        }],
    )

    dep_checker_args = [templates.globals["import_name"].replace('.', '/')]

    if templates.globals["source_dir"]:
        dep_checker_args.extend(
            ["--work-dir", templates.globals["source_dir"]])

    dep_checker = Repo(repo=make_github_url("domdfcoding", "dep_checker"),
                       rev="v0.6.2",
                       hooks=[{
                           "id": "dep_checker",
                           "args": dep_checker_args
                       }])

    pre_commit_file = PathPlus(repo_path / ".pre-commit-config.yaml")

    if not pre_commit_file.is_file():
        pre_commit_file.touch()

    dumper = ruamel.yaml.YAML()
    dumper.indent(mapping=2, sequence=3, offset=1)

    output = StringList([
        f"# {templates.globals['managed_message']}",
        "---",
        '',
        f"exclude: {templates.globals['pre_commit_exclude']}",
        '',
        "repos:",
    ])

    indent_re = re.compile("^ {3}")

    managed_hooks = [
        pyproject_parser,
        pre_commit_hooks,
        domdfcoding_hooks,
        flake8_dunder_all,
        flake2lint,
        pygrep_hooks,
        pyupgrade,
        lucas_c_hooks,
        snippet_fmt,
        formate,
    ]

    if not templates.globals["stubs_package"]:
        managed_hooks.append(dep_checker)

    managed_hooks_urls = [str(hook.repo) for hook in managed_hooks]

    custom_hooks_comment = "# Custom hooks can be added below this comment"

    for hook in managed_hooks:
        buf = StringIO()
        dumper.dump(hook.to_dict(), buf)
        output.append(indent_re.sub(" - ", indent(buf.getvalue(), "   ")))
        output.blankline(ensure_single=True)
    output.append(custom_hooks_comment)
    output.blankline(ensure_single=True)

    raw_yaml = pre_commit_file.read_text()

    if custom_hooks_comment in raw_yaml:
        custom_hooks_yaml = pre_commit_file.read_text().split(
            custom_hooks_comment)[1]

        custom_hooks = []
        local_hooks = []

        for repo in yaml_safe_loader.load(custom_hooks_yaml) or []:
            if repo["repo"] == "local":
                local_hooks.append(repo)

            elif repo["repo"] not in managed_hooks_urls:
                custom_hooks.append(Repo(**repo))

        for hook in custom_hooks:
            buf = StringIO()
            dumper.dump(hook.to_dict(), buf)
            output.append(indent_re.sub(" - ", indent(buf.getvalue(), "   ")))
            output.blankline(ensure_single=True)

        for hook in local_hooks:
            buf = StringIO()
            dumper.dump(hook, buf)
            output.append(indent_re.sub(" - ", indent(buf.getvalue(), "   ")))
            output.blankline(ensure_single=True)

    pre_commit_file.write_lines(output)

    return [pre_commit_file.name]
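``make_github_url`` is also defined elsewhere; a minimal sketch under the assumption that it builds an ``apeye`` URL:

from apeye.url import URL

def make_github_url(username: str, repository: str) -> URL:
    # e.g. make_github_url("domdfcoding", "pre-commit-hooks")
    #   -> URL("https://github.com/domdfcoding/pre-commit-hooks")
    return URL("https://github.com") / username / repository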
Example #24
def ensure_bumpversion(repo_path: pathlib.Path,
                       templates: Environment) -> List[str]:
    """
	Add configuration for ``bumpversion`` to the desired repo.

	https://pypi.org/project/bumpversion/

	:param repo_path: Path to the repository root.
	:param templates:
	"""

    bumpversion_file = PathPlus(repo_path / ".bumpversion.cfg")

    if not bumpversion_file.is_file():
        bumpversion_file.write_lines([
            "[bumpversion]",
            f"current_version = {templates.globals['version']}",
            "commit = True",
            "tag = True",
        ])

    bv = ConfigUpdater()
    bv.read(str(bumpversion_file))

    old_sections = [
        "bumpversion:file:git_helper.yml", "bumpversion:file:__pkginfo__.py"
    ]
    required_sections = {
        f"bumpversion:file:{filename}"
        for filename in get_bumpversion_filenames(templates)
    }

    if not templates.globals["enable_docs"]:
        old_sections.append(
            f"bumpversion:file:{templates.globals['docs_dir']}/index.rst")

    if not templates.globals["enable_conda"]:
        old_sections.append(f"bumpversion:file:.github/workflows/conda_ci.yml")

    if templates.globals["use_whey"]:
        old_sections.append("bumpversion:file:setup.cfg")

    for section in old_sections:
        if section in bv.sections():
            bv.remove_section(section)
        if section in required_sections:
            required_sections.remove(section)

    for section in sorted(required_sections):
        if section not in bv.sections():
            bv.add_section(section)

    init_filename = get_init_filename(templates)
    if init_filename is not None:
        init_section = bv[f"bumpversion:file:{init_filename}"]
        if "search" not in init_section:
            init_section["search"] = ': str = "{current_version}"'
            init_section["replace"] = ': str = "{new_version}"'

    if "bumpversion:file:setup.cfg" in bv.sections():
        setup_cfg_section = bv["bumpversion:file:setup.cfg"]
        if ("search" not in setup_cfg_section or
            ("search" in setup_cfg_section and
             setup_cfg_section["search"].value == "name = {current_version}")):
            setup_cfg_section["search"] = "version = {current_version}"
            setup_cfg_section["replace"] = "version = {new_version}"

    if "bumpversion:file:pyproject.toml" in bv.sections():
        pp_toml_section = bv["bumpversion:file:pyproject.toml"]
        if "search" not in pp_toml_section:
            pp_toml_section["search"] = 'version = "{current_version}"'
            pp_toml_section["replace"] = 'version = "{new_version}"'

    bv["bumpversion"]["current_version"] = templates.globals["version"]
    bv["bumpversion"]["commit"] = "True"
    bv["bumpversion"]["tag"] = "True"

    bumpversion_file.write_clean(str(bv))

    return [bumpversion_file.name]
Example #25
def make_formate_toml(repo_path: pathlib.Path, templates: Environment) -> List[str]:
	"""
	Add configuration for ``formate``.

	https://formate.readthedocs.io

	:param repo_path: Path to the repository root.
	:param templates:
	"""

	known_third_party = set()

	isort_file = PathPlus(repo_path / ".isort.cfg")
	formate_file = PathPlus(repo_path / "formate.toml")

	isort_config = get_isort_config(repo_path, templates)
	known_third_party.update(isort_config["known_third_party"])

	if formate_file.is_file():
		formate_config = dom_toml.load(formate_file)
	else:
		formate_config = {}

	# Read the isort config file and get "known_third_party" from there
	if isort_file.is_file():
		isort = ConfigUpdater()
		isort.read(str(isort_file))

		if "settings" in isort.sections() and "known_third_party" in isort["settings"]:
			known_third_party.update(re.split(r"(\n|,\s*)", isort["settings"]["known_third_party"].value))

	isort_file.unlink(missing_ok=True)

	if "hooks" in formate_config and "isort" in formate_config["hooks"]:
		if "kwargs" in formate_config["hooks"]["isort"]:
			known_third_party.update(formate_config["hooks"]["isort"]["kwargs"].get("known_third_party", ()))

			for existing_key, value in formate_config["hooks"]["isort"]["kwargs"].items():
				if existing_key not in isort_config:
					isort_config[existing_key] = value

	def normalise_underscore(name: str) -> str:
		return normalize(name.strip()).replace('-', '_')

	isort_config["known_third_party"] = sorted(set(filter(bool, map(normalise_underscore, known_third_party))))

	hooks = {
			"dynamic_quotes": 10,
			"collections-import-rewrite": 20,
			"yapf": {"priority": 30, "kwargs": {"yapf_style": ".style.yapf"}},
			"reformat-generics": 40,
			"isort": {"priority": 50, "kwargs": isort_config},
			"noqa-reformat": 60,
			"ellipsis-reformat": 70,
			"squish_stubs": 80,
			}

	config = {"indent": '\t', "line_length": 115}

	formate_config["hooks"] = hooks
	formate_config["config"] = config

	formate_file = PathPlus(repo_path / "formate.toml")
	dom_toml.dump(formate_config, formate_file, encoder=dom_toml.TomlEncoder)

	return [formate_file.name, isort_file.name]