Code example #1
    def check_file(self,
                   filename: str,
                   mode: black.Mode,
                   kwargs: dict,
                   *,
                   data: bool = True) -> None:
        source, expected = read_data(filename, data=data)

        result: Result

        with TemporaryPathPlus() as tmp_pathplus:
            (tmp_pathplus / filename).write_text(source)
            toml_data = dom_toml.load(
                PathPlus(__file__).parent / "example_formate.toml")
            toml_data["hooks"]["black"]["kwargs"] = kwargs
            dom_toml.dump(toml_data, tmp_pathplus / "formate.toml")

            with in_directory(tmp_pathplus):
                runner = CliRunner(mix_stderr=False)
                result = runner.invoke(
                    main,
                    args=[filename, "--no-colour", "--diff", "--verbose", "-v"],
                )

            # TODO: check stdout
            actual = (tmp_pathplus / filename).read_text()

        self.assertFormatEqual(expected, actual)
        if source != actual:
            black.assert_equivalent(source, actual)
            black.assert_stable(source, actual, mode)
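
A minimal sketch (not from the project) of the load-modify-dump pattern the helper above relies on; the file names mirror the test fixture, and the kwargs value is purely illustrative.

import dom_toml

# Parse the TOML file into a plain dict, override one nested table, and write it back out.
# "example_formate.toml" and the line_length value are assumptions for illustration only.
config = dom_toml.load("example_formate.toml")
config["hooks"]["black"]["kwargs"] = {"line_length": 100}
dom_toml.dump(config, "formate.toml")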
Code example #2
def test_bad_config_cli(tmp_pathplus: PathPlus, config: Dict, match: str):
    dom_toml.dump({"project": config}, tmp_pathplus / "pyproject.toml")

    with in_directory(tmp_pathplus):
        runner = CliRunner()
        result: Result = runner.invoke(main)
        assert result.exit_code == 1
        assert match in result.stdout
Code example #3
def test_bad_config_cli_traceback(tmp_pathplus: PathPlus, config: Dict,
                                  match: str):
    dom_toml.dump({"project": config}, tmp_pathplus / "pyproject.toml")

    with in_directory(tmp_pathplus):
        runner = CliRunner()

        with pytest.raises(BadConfigError, match=match):
            runner.invoke(main, args=["-T"])
Code example #4
    def test_dump_encoder_none(self, tmp_pathplus: PathPlus):

        with pytest.warns(DeprecationWarning, match=self.match):
            dom_toml.dump(
                {"hello": "world"},
                filename=tmp_pathplus / "config.toml",
                encoder=None,  # type: ignore[arg-type]
            )

        assert (tmp_pathplus / "config.toml").read_text() == self.expected
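
For context, a generic sketch of the pytest.warns pattern exercised above; the warning text below is made up and is not dom_toml's actual message.

import warnings

import pytest


def test_deprecation_warning_is_raised():
    # pytest.warns fails the test unless a DeprecationWarning whose message
    # matches the regex is emitted inside the block.
    with pytest.warns(DeprecationWarning, match="encoder"):
        warnings.warn("The 'encoder' argument is deprecated.", DeprecationWarning)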
Code example #5
def test_no_requirements_txt(tmp_pathplus: PathPlus):
    dom_toml.dump(
        {"project": {"name": "foo", "dynamic": ["dependencies"]}},
        tmp_pathplus / "pyproject.toml",
    )

    with pytest.raises(
            BadConfigError,
            match="'project.dependencies' was listed as a dynamic field but no 'requirements.txt' file was found.",
            ):
        load_toml(tmp_pathplus / "pyproject.toml")
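
Since pytest.raises treats match as a regular expression, the dots in the message above match any character; a generic sketch (not project code) of making the comparison strictly literal with re.escape:

import re

import pytest


def test_message_is_matched_literally():
    msg = "'project.dependencies' was listed as a dynamic field but no 'requirements.txt' file was found."
    # re.escape neutralises '.' and other metacharacters, so the assertion
    # only passes on the exact message text.
    with pytest.raises(ValueError, match=re.escape(msg)):
        raise ValueError(msg)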
Code example #6
def make_pyproject(repo_path: pathlib.Path,
                   templates: Environment) -> List[str]:
    """
	Create the ``pyproject.toml`` file for :pep:`517`.

	:param repo_path: Path to the repository root.
	:param templates:
	"""

    pyproject_file = PathPlus(repo_path / "pyproject.toml")

    data: DefaultDict[str, Any]

    if pyproject_file.is_file():
        data = DefaultDict(dom_toml.load(pyproject_file))
    else:
        data = DefaultDict()

    data.set_default("build-system", {})
    build_backend = "setuptools.build_meta"

    build_requirements_ = {
        "setuptools>=40.6.0", "wheel>=0.34.2", "whey", "repo-helper",
        *templates.globals["tox_build_requirements"],
        *data["build-system"].get("requires", [])
    }

    build_requirements = sorted(
        combine_requirements(
            ComparableRequirement(req) for req in build_requirements_))

    if templates.globals["use_whey"]:
        for old_dep in ["setuptools", "wheel"]:
            if old_dep in build_requirements:
                build_requirements.remove(old_dep)  # type: ignore

    if templates.globals["use_whey"]:
        build_backend = "whey"
    elif "whey" in build_requirements:
        build_requirements.remove("whey")  # type: ignore

    if "repo-helper" in build_requirements:
        build_requirements.remove("repo-helper")  # type: ignore

    data["build-system"]["requires"] = list(map(str, build_requirements))
    data["build-system"]["build-backend"] = build_backend

    data["project"] = DefaultDict(data.get("project", {}))
    data["project"]["name"] = templates.globals["pypi_name"]
    data["project"]["version"] = templates.globals["version"]
    data["project"]["description"] = templates.globals["short_desc"]
    data["project"]["readme"] = "README.rst"
    data["project"]["keywords"] = sorted(templates.globals["keywords"])
    data["project"]["dynamic"] = [
        "requires-python", "classifiers", "dependencies"
    ]
    data["project"]["authors"] = [{
        "name": templates.globals["author"],
        "email": templates.globals["email"]
    }]
    data["project"]["license"] = {"file": "LICENSE"}

    if templates.globals["requires_python"] is not None:
        data["project"]["dynamic"].pop(0)
        data["project"][
            "requires-python"] = f">={templates.globals['requires_python']}"

    url = "https://github.com/{username}/{repo_name}".format_map(
        templates.globals)
    data["project"]["urls"] = {
        "Homepage":
        url,
        "Issue Tracker":
        "https://github.com/{username}/{repo_name}/issues".format_map(
            templates.globals),
        "Source Code":
        url,
    }

    if templates.globals["enable_docs"]:
        data["project"]["urls"]["Documentation"] = templates.globals[
            "docs_url"]

    # extras-require

    data["project"]["optional-dependencies"] = {}

    for extra, dependencies in templates.globals["extras_require"].items():
        data["project"]["optional-dependencies"][extra] = list(
            map(str, dependencies))

    if not data["project"]["optional-dependencies"]:
        del data["project"]["optional-dependencies"]

    # entry-points

    if templates.globals["console_scripts"]:
        data["project"]["scripts"] = dict(
            split_entry_point(e) for e in templates.globals["console_scripts"])

    data["project"]["entry-points"] = {}

    for group, entry_points in templates.globals["entry_points"].items():
        data["project"]["entry-points"][group] = dict(
            split_entry_point(e) for e in entry_points)

    if not data["project"]["entry-points"]:
        del data["project"]["entry-points"]

    # tool
    data.set_default("tool", {})

    # tool.mkrecipe
    if templates.globals["enable_conda"]:
        data["tool"].setdefault("mkrecipe", {})
        data["tool"]["mkrecipe"]["conda-channels"] = templates.globals[
            "conda_channels"]

        if templates.globals["conda_extras"] in (["none"], ["all"]):
            data["tool"]["mkrecipe"]["extras"] = templates.globals[
                "conda_extras"][0]
        else:
            data["tool"]["mkrecipe"]["extras"] = templates.globals[
                "conda_extras"]
    else:
        if "mkrecipe" in data["tool"]:
            del data["tool"]["mkrecipe"]

    # tool.whey
    data["tool"].setdefault("whey", {})

    data["tool"]["whey"]["base-classifiers"] = templates.globals["classifiers"]

    python_versions = set()
    python_implementations = set()

    for py_version in templates.globals["python_versions"]:
        py_version = str(py_version)

        if pre_release_re.match(py_version):
            continue

        pypy_version_m = _pypy_version_re.match(py_version)

        if py_version.startswith('3'):
            python_versions.add(py_version)
            python_implementations.add("CPython")

        elif pypy_version_m:
            python_implementations.add("PyPy")
            python_versions.add(f"3.{pypy_version_m.group(1)}")

    data["tool"]["whey"]["python-versions"] = natsorted(python_versions)
    data["tool"]["whey"]["python-implementations"] = sorted(
        python_implementations)

    data["tool"]["whey"]["platforms"] = templates.globals["platforms"]

    license_ = templates.globals["license"]
    data["tool"]["whey"]["license-key"] = {
        v: k
        for k, v in license_lookup.items()
    }.get(license_, license_)

    if templates.globals["source_dir"]:
        raise NotImplementedError(
            "Whey does not support custom source directories")

    elif templates.globals["import_name"] != templates.globals["pypi_name"]:
        if templates.globals["stubs_package"]:
            data["tool"]["whey"]["package"] = "{import_name}-stubs".format_map(
                templates.globals)
        else:
            data["tool"]["whey"]["package"] = posixpath.join(
                # templates.globals["source_dir"],
                templates.globals["import_name"].split('.', 1)[0], )

    if templates.globals["manifest_additional"]:
        data["tool"]["whey"]["additional-files"] = templates.globals[
            "manifest_additional"]
    elif "additional-files" in data["tool"]["whey"]:
        del data["tool"]["whey"]["additional-files"]

    if not templates.globals["enable_tests"] and not templates.globals[
            "stubs_package"]:
        data["tool"]["importcheck"] = data["tool"].get("importcheck", {})

    if templates.globals["enable_docs"]:
        data["tool"]["sphinx-pyproject"] = make_sphinx_config_dict(templates)
    else:
        data["tool"].pop("sphinx-pyproject", None)

    # [tool.mypy]
    # This is added regardless of the supported mypy version.
    # It isn't removed from setup.cfg unless the version is 0.901 or above
    data["tool"].setdefault("mypy", {})

    data["tool"]["mypy"].update(_get_mypy_config(templates.globals))

    if templates.globals["mypy_plugins"]:
        data["tool"]["mypy"]["plugins"] = templates.globals["mypy_plugins"]

    # [tool.dependency-dash]
    data["tool"].setdefault("dependency-dash", {})
    data["tool"]["dependency-dash"]["requirements.txt"] = {"order": 10}

    if templates.globals["enable_tests"]:
        data["tool"]["dependency-dash"]["tests/requirements.txt"] = {
            "order": 20,
            "include": False,
        }

    if templates.globals["enable_docs"]:
        data["tool"]["dependency-dash"]["doc-source/requirements.txt"] = {
            "order": 30,
            "include": False,
        }

    # [tool.snippet-fmt]
    data["tool"].setdefault("snippet-fmt", {})
    data["tool"]["snippet-fmt"].setdefault("languages", {})
    data["tool"]["snippet-fmt"].setdefault("directives", ["code-block"])

    data["tool"]["snippet-fmt"]["languages"]["python"] = {"reformat": True}
    data["tool"]["snippet-fmt"]["languages"]["TOML"] = {"reformat": True}
    data["tool"]["snippet-fmt"]["languages"]["ini"] = {}
    data["tool"]["snippet-fmt"]["languages"]["json"] = {}

    if not data["tool"]:
        del data["tool"]

    # TODO: managed message
    dom_toml.dump(data, pyproject_file, encoder=dom_toml.TomlEncoder)

    return [pyproject_file.name]
Code example #7
File: test_api.py  Project: domdfcoding/dom_toml
def test_dump(tmp_pathplus):
	dump(TEST_DICT, tmp_pathplus / "file.toml")
	dump(load(tmp_pathplus / "file.toml", dict_=OrderedDict), tmp_pathplus / "file2.toml")
	dump(load(tmp_pathplus / "file2.toml", dict_=OrderedDict), tmp_pathplus / "file3.toml")

	assert (tmp_pathplus / "file2.toml").read_text() == (tmp_pathplus / "file3.toml").read_text()
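
A related in-memory round trip, assuming dom_toml also exposes dumps/loads string helpers mirroring dump/load:

import dom_toml

original = {"project": {"name": "demo", "version": "1.2.3"}}
text = dom_toml.dumps(original)            # serialise the dict to a TOML string
assert dom_toml.loads(text) == original    # parsing it back yields an equal dict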
Code example #8
File: testing.py  Project: repo-helper/repo_helper
def make_formate_toml(repo_path: pathlib.Path, templates: Environment) -> List[str]:
	"""
	Add configuration for ``formate``.

	https://formate.readthedocs.io

	:param repo_path: Path to the repository root.
	:param templates:
	"""

	known_third_party = set()

	isort_file = PathPlus(repo_path / ".isort.cfg")
	formate_file = PathPlus(repo_path / "formate.toml")

	isort_config = get_isort_config(repo_path, templates)
	known_third_party.update(isort_config["known_third_party"])

	if formate_file.is_file():
		formate_config = dom_toml.load(formate_file)
	else:
		formate_config = {}

	# Read the isort config file and get "known_third_party" from there
	if isort_file.is_file():
		isort = ConfigUpdater()
		isort.read(str(isort_file))

		if "settings" in isort.sections() and "known_third_party" in isort["settings"]:
			known_third_party.update(re.split(r"(\n|,\s*)", isort["settings"]["known_third_party"].value))

	isort_file.unlink(missing_ok=True)

	if "hooks" in formate_config and "isort" in formate_config["hooks"]:
		if "kwargs" in formate_config["hooks"]["isort"]:
			known_third_party.update(formate_config["hooks"]["isort"]["kwargs"].get("known_third_party", ()))

			for existing_key, value in formate_config["hooks"]["isort"]["kwargs"].items():
				if existing_key not in isort_config:
					isort_config[existing_key] = value

	def normalise_underscore(name: str) -> str:
		return normalize(name.strip()).replace('-', '_')

	isort_config["known_third_party"] = sorted(set(filter(bool, map(normalise_underscore, known_third_party))))

	hooks = {
			"dynamic_quotes": 10,
			"collections-import-rewrite": 20,
			"yapf": {"priority": 30, "kwargs": {"yapf_style": ".style.yapf"}},
			"reformat-generics": 40,
			"isort": {"priority": 50, "kwargs": isort_config},
			"noqa-reformat": 60,
			"ellipsis-reformat": 70,
			"squish_stubs": 80,
			}

	config = {"indent": '\t', "line_length": 115}

	formate_config["hooks"] = hooks
	formate_config["config"] = config

	formate_file = PathPlus(repo_path / "formate.toml")
	dom_toml.dump(formate_config, formate_file, encoder=dom_toml.TomlEncoder)

	return [formate_file.name, isort_file.name]
Code example #9
File: add.py  Project: repo-helper/repo_helper
def typed():
    """
	Add a 'py.typed' file and the associated trove classifier.
	"""

    # 3rd party
    from domdf_python_tools.paths import PathPlus
    from domdf_python_tools.stringlist import StringList
    from natsort import natsorted

    # this package
    from repo_helper.configupdater2 import ConfigUpdater
    from repo_helper.core import RepoHelper
    from repo_helper.utils import indent_join, stage_changes

    rh = RepoHelper(PathPlus.cwd())
    rh.load_settings()

    py_typed = rh.target_repo / rh.templates.globals["import_name"] / "py.typed"
    if not py_typed.is_file():
        py_typed.touch()

    stage_changes(rh.target_repo, [py_typed])

    setup_cfg = rh.target_repo / "setup.cfg"
    pyproject_file = rh.target_repo / "pyproject.toml"

    if setup_cfg.is_file() and not rh.templates.globals["use_whey"]:
        content = setup_cfg.read_text()

        config = ConfigUpdater()
        config.read_string(content)

        existing_classifiers = config["metadata"]["classifiers"]
        existing_classifiers_string = str(existing_classifiers)

        classifiers = set(
            map(str.strip, existing_classifiers.value.split('\n')))
        classifiers.add("Typing :: Typed")

        new_classifiers_lines = StringList(
            indent_join(natsorted(classifiers)).expandtabs(4))
        new_classifiers_lines[0] = "classifiers ="
        new_classifiers_lines.blankline(ensure_single=True)

        setup_cfg.write_clean(
            content.replace(existing_classifiers_string,
                            str(new_classifiers_lines)))

    if pyproject_file.is_file() and rh.templates.globals["use_whey"]:
        pyproject_config = dom_toml.load(pyproject_file)
        if "whey" in pyproject_config.get("tool", {}):
            classifiers = set(
                pyproject_config["tool"]["whey"]["base-classifiers"])
            classifiers.add("Typing :: Typed")
            pyproject_config["tool"]["whey"]["base-classifiers"] = natsorted(
                classifiers)

        dom_toml.dump(pyproject_config,
                      pyproject_file,
                      encoder=dom_toml.TomlEncoder)