def check_file(self, filename: str, mode: black.Mode, kwargs: dict, *, data: bool = True) -> None:
	source, expected = read_data(filename, data=data)

	result: Result

	with TemporaryPathPlus() as tmp_pathplus:
		(tmp_pathplus / filename).write_text(source)

		toml_data = dom_toml.load(PathPlus(__file__).parent / "example_formate.toml")
		toml_data["hooks"]["black"]["kwargs"] = kwargs
		dom_toml.dump(toml_data, tmp_pathplus / "formate.toml")

		with in_directory(tmp_pathplus):
			runner = CliRunner(mix_stderr=False)
			result = runner.invoke(main, args=[filename, "--no-colour", "--diff", "--verbose", "-v"])

		# TODO: check stdout

		actual = (tmp_pathplus / filename).read_text()

	self.assertFormatEqual(expected, actual)

	if source != actual:
		black.assert_equivalent(source, actual)
		black.assert_stable(source, actual, mode)

def test_buildsystem_parser_errors(config: str, expects: Type[Exception], match: str, tmp_pathplus: PathPlus):
	(tmp_pathplus / "pyproject.toml").write_clean(config)

	with in_directory(tmp_pathplus), pytest.raises(expects, match=match):
		BuildSystemParser().parse(dom_toml.load(tmp_pathplus / "pyproject.toml")["build-system"])

def test_load_decoder_none(self, tmp_pathplus: PathPlus):
	(tmp_pathplus / "config.toml").write_clean("hello = 'world'")

	with pytest.warns(DeprecationWarning, match=self.match):
		result = dom_toml.load(tmp_pathplus / "config.toml", decoder=None)  # type: ignore[call-overload]

	assert result == self.expected

def load_toml(filename: PathLike) -> Dict[str, Any]:  # TODO: TypedDict
	"""
	Load the ``mkrecipe`` configuration mapping from the given TOML file.

	:param filename:
	"""

	filename = PathPlus(filename)
	project_dir = filename.parent
	config = dom_toml.load(filename)

	parsed_config: Dict[str, Any] = {}
	tool_table = config.get("tool", {})

	with in_directory(project_dir):
		parsed_config.update(BuildSystemParser().parse(config.get("build-system", {}), set_defaults=True))
		parsed_config.update(whey.config.WheyParser().parse(tool_table.get("whey", {})))
		parsed_config.update(MkrecipeParser().parse(tool_table.get("mkrecipe", {}), set_defaults=True))

		if "project" in config:
			parsed_config.update(PEP621Parser().parse(config["project"], set_defaults=True))
		else:
			raise KeyError(f"'project' table not found in '{filename!s}'")

	# set defaults
	parsed_config.setdefault("package", config["project"]["name"].split('.', 1)[0])
	parsed_config.setdefault("license-key", None)

	if "dependencies" in parsed_config.get("dynamic", []):
		if (project_dir / "requirements.txt").is_file():
			dependencies = read_requirements(project_dir / "requirements.txt", include_invalid=True)[0]
			parsed_config["dependencies"] = sorted(combine_requirements(dependencies))
		else:
			raise BadConfigError(
					"'project.dependencies' was listed as a dynamic field "
					"but no 'requirements.txt' file was found."
					)

	parsed_config["version"] = str(parsed_config["version"])
	parsed_config["requires"] = sorted(
			set(
					combine_requirements(
							parsed_config["requires"],
							ComparableRequirement("setuptools"),
							ComparableRequirement("wheel"),
							)
					)
			)

	return parsed_config

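# Usage sketch for the mkrecipe ``load_toml`` above — a hedged example, not part
# of the original source. Assumes load_toml's module-level imports are in scope;
# the file contents and the ``demo_`` helper are hypothetical.
def demo_mkrecipe_load_toml(tmp_pathplus: PathPlus) -> None:
	(tmp_pathplus / "pyproject.toml").write_clean(
			'[project]\n'
			'name = "spam"\n'
			'version = "2020.0.0"\n'
			'\n'
			'[build-system]\n'
			'requires = ["whey"]\n'
			'build-backend = "whey"\n'
			)

	config = load_toml(tmp_pathplus / "pyproject.toml")

	# "package" defaults to the first dotted component of the project name,
	# and setuptools/wheel are always merged into "requires".
	print(config["package"], config["version"], config["requires"])
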
def test_pep621_class_bad_config(
		config: str,
		expects: Type[Exception],
		match: str,
		tmp_pathplus: PathPlus,
		):
	(tmp_pathplus / "pyproject.toml").write_clean(config)

	with in_directory(tmp_pathplus), pytest.raises(expects, match=match):
		PEP621Parser().parse(dom_toml.load(tmp_pathplus / "pyproject.toml")["project"])

def test_pep621_subclass(
		toml_config: str,
		tmp_pathplus: PathPlus,
		advanced_data_regression: AdvancedDataRegressionFixture,
		):
	(tmp_pathplus / "pyproject.toml").write_clean(toml_config)

	with in_directory(tmp_pathplus):
		config = ReducedPEP621Parser().parse(dom_toml.load(tmp_pathplus / "pyproject.toml")["project"])

	advanced_data_regression.check(config)

def test_parse_config_readme_errors(filename: str, tmp_pathplus: PathPlus):
	config = dedent(
			f"""
			[project]
			name = "spam"
			version = "2020.0.0"
			readme = "{filename}"
			"""
			)

	(tmp_pathplus / "pyproject.toml").write_clean(config)
	(tmp_pathplus / filename).write_text("This is the readme.")

	with in_directory(tmp_pathplus), pytest.raises(ValueError, match=f"Unsupported extension for '{filename}'"):
		PEP621Parser().parse(dom_toml.load(tmp_pathplus / "pyproject.toml")["project"])

def load_toml(filename: PathLike) -> ConfigDict:
	"""
	Load the ``importcheck`` configuration mapping from the given TOML file.

	:param filename:
	"""

	config = dom_toml.load(filename)

	if "importcheck" in config:
		return cast(ConfigDict, config["importcheck"])
	elif "tool" in config and "importcheck" in config["tool"]:
		return cast(ConfigDict, config["tool"]["importcheck"])
	else:
		raise KeyError("No such table 'importcheck' or 'tool.importcheck'")

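# Usage sketch for the importcheck ``load_toml`` above — a hedged example, not
# part of the original source. The configuration may live in a top-level
# [importcheck] table or in [tool.importcheck]; both resolve to the same mapping.
def demo_importcheck_load_toml(tmp_pathplus: PathPlus) -> None:  # hypothetical helper
	(tmp_pathplus / "importcheck.toml").write_clean('[importcheck]\nalways = ["collections"]\n')
	(tmp_pathplus / "pyproject.toml").write_clean('[tool.importcheck]\nalways = ["collections"]\n')

	assert load_toml(tmp_pathplus / "importcheck.toml") == load_toml(tmp_pathplus / "pyproject.toml")
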
def test_buildsystem_parser_valid_config(
		toml_config: str,
		tmp_pathplus: PathPlus,
		advanced_data_regression: AdvancedDataRegressionFixture,
		set_defaults: bool,
		):
	(tmp_pathplus / "pyproject.toml").write_clean(toml_config)

	config = BuildSystemParser().parse(
			dom_toml.load(tmp_pathplus / "pyproject.toml")["build-system"],
			set_defaults=set_defaults,
			)
	config["requires"] = list(map(str, config["requires"]))  # type: ignore

	advanced_data_regression.check(config)

def check(
		pyproject_file: "PathLike" = "pyproject.toml",
		parser_class: str = "pyproject_parser:PyProject",
		show_traceback: bool = False,
		):
	"""
	Validate the given ``pyproject.toml`` file.
	"""

	# 3rd party
	import dom_toml
	from dom_toml.parser import BadConfigError
	from domdf_python_tools.paths import PathPlus
	from domdf_python_tools.words import Plural, word_join

	# this package
	from pyproject_parser.parsers import BuildSystemParser, PEP621Parser

	pyproject_file = PathPlus(pyproject_file)

	click.echo(f"Validating {str(pyproject_file)!r}")

	with handle_tracebacks(show_traceback, CustomTracebackHandler):
		parser: Type[PyProject] = resolve_class(parser_class, "parser-class")
		parser.load(filename=pyproject_file)

		raw_config = dom_toml.load(pyproject_file)

		_keys = Plural("key", "keys")

		def error_on_unknown(keys: Iterable[str], expected_keys: Iterable[str], table_name: str) -> None:
			unknown_keys = set(keys) - set(expected_keys)

			if unknown_keys:
				raise BadConfigError(
						f"Unknown {_keys(len(unknown_keys))} in '[{table_name}]': "
						f"{word_join(sorted(unknown_keys), use_repr=True)}",
						)

		# Implements PEPs 517 and 518
		error_on_unknown(raw_config.get("build-system", {}).keys(), BuildSystemParser.keys, "build-system")

		# Implements PEP 621
		error_on_unknown(raw_config.get("project", {}).keys(), {*PEP621Parser.keys, "dynamic"}, "project")

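# Behaviour sketch for the validation above — a worked example, not from the
# original source. Given a [build-system] table containing a key the parser
# does not recognise:
#
#     [build-system]
#     requires = ["whey"]
#     backend = "whey"  # should be "build-backend"
#
# error_on_unknown() raises BadConfigError with the message:
#
#     Unknown key in '[build-system]': 'backend'
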
def test_pep621_class_valid_config(
		toml_config: str,
		tmp_pathplus: PathPlus,
		advanced_data_regression: AdvancedDataRegressionFixture,
		set_defaults: bool,
		):
	(tmp_pathplus / "pyproject.toml").write_clean(toml_config)

	with in_directory(tmp_pathplus):
		config = PEP621Parser().parse(
				dom_toml.load(tmp_pathplus / "pyproject.toml")["project"],
				set_defaults=set_defaults,
				)

	advanced_data_regression.check(config)

def test_pep621_class_bad_config_readme(
		readme: str,
		expected: str,
		exception: Type[Exception],
		tmp_pathplus: PathPlus,
		):
	(tmp_pathplus / "pyproject.toml").write_lines([
			"[project]",
			'name = "spam"',
			'version = "2020.0.0"',
			readme,
			])

	with in_directory(tmp_pathplus), pytest.raises(exception, match=expected):
		PEP621Parser().parse(dom_toml.load(tmp_pathplus / "pyproject.toml")["project"])

def test_pep621_class_valid_config_license_dict(
		tmp_pathplus: PathPlus,
		advanced_data_regression: AdvancedDataRegressionFixture,
		):
	(tmp_pathplus / "pyproject.toml").write_lines([
			"[project]",
			'name = "spam"',
			'version = "2020.0.0"',
			'license = {text = "This is the MIT License"}',
			])

	with in_directory(tmp_pathplus):
		config = PEP621Parser().parse(dom_toml.load(tmp_pathplus / "pyproject.toml")["project"])

	advanced_data_regression.check(config)

def test_pep621_class_bad_config_license(
		license_key: str,
		expected: str,
		tmp_pathplus: PathPlus,
		):
	(tmp_pathplus / "pyproject.toml").write_lines([
			"[project]",
			'name = "spam"',
			'version = "2020.0.0"',
			license_key,
			])

	with in_directory(tmp_pathplus), pytest.raises(BadConfigError, match=expected):
		PEP621Parser().parse(dom_toml.load(tmp_pathplus / "pyproject.toml")["project"])

def make_conda_actions_ci(repo_path: pathlib.Path, templates: Environment) -> List[str]:
	"""
	Add configuration for testing conda packages on `GitHub Actions` to the desired repo.

	:param repo_path: Path to the repository root.
	:param templates:
	"""

	workflows_dir = PathPlus(repo_path / ".github" / "workflows")
	conda_ci_file = workflows_dir / "conda_ci.yml"

	if templates.globals["enable_conda"]:
		actions = templates.get_template("github_conda_ci.yml")
		workflows_dir.maybe_make(parents=True)

		def no_pypy_versions(versions):
			"""
			Returns the subset of ``versions`` which are neither development versions nor PyPy versions.

			:param versions:
			"""

			return [v for v in no_dev_versions(versions) if "pypy" not in v.lower()]

		pip_dependencies = ["whey-conda"]

		pyproject_file = PathPlus(repo_path / "pyproject.toml")
		if pyproject_file.is_file():
			data: DefaultDict[str, Any] = DefaultDict(dom_toml.load(pyproject_file))
			pip_dependencies.extend(data["build-system"]["requires"])

		conda_ci_file.write_clean(actions.render(no_dev_versions=no_pypy_versions, pip_dependencies=pip_dependencies))
	else:
		conda_ci_file.unlink(missing_ok=True)

	return [conda_ci_file.relative_to(repo_path).as_posix()]

def test_pep621_class_valid_config_readme(
		filename: str,
		tmp_pathplus: PathPlus,
		advanced_data_regression: AdvancedDataRegressionFixture,
		):
	(tmp_pathplus / "pyproject.toml").write_lines([
			"[project]",
			'name = "spam"',
			'version = "2020.0.0"',
			f'readme = {filename!r}',
			])
	(tmp_pathplus / filename).write_text("This is the readme.")

	with in_directory(tmp_pathplus):
		config = PEP621Parser().parse(dom_toml.load(tmp_pathplus / "pyproject.toml")["project"])

	advanced_data_regression.check(config)

def load_toml(filename: PathLike) -> FormateConfigDict:
	"""
	Load the ``formate`` configuration mapping from the given TOML file.

	:param filename:
	"""

	config = dom_toml.load(filename)

	if "formate" in config.get("tool", {}):
		config = config["tool"]["formate"]

	formate_config: FormateConfigDict = {}

	if "hooks" in config:
		formate_config["hooks"] = config["hooks"]
	if "config" in config:
		formate_config["config"] = config["config"]

	return formate_config

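# Usage sketch for the formate ``load_toml`` above — a hedged example, not part
# of the original source. Only the "hooks" and "config" tables are retained;
# anything else under [tool.formate] is silently dropped.
def demo_formate_load_toml(tmp_pathplus: PathPlus) -> None:  # hypothetical helper
	(tmp_pathplus / "pyproject.toml").write_clean(
			'[tool.formate.hooks]\n'
			'reformat-generics = 40\n'
			'\n'
			'[tool.formate.config]\n'
			'line_length = 115\n'
			)

	assert load_toml(tmp_pathplus / "pyproject.toml") == {
			"hooks": {"reformat-generics": 40},
			"config": {"line_length": 115},
			}
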
def test_pep621_class_valid_config_readme_dict(
		readme,
		tmp_pathplus: PathPlus,
		advanced_data_regression: AdvancedDataRegressionFixture,
		):
	(tmp_pathplus / "pyproject.toml").write_lines([
			"[project]",
			'name = "spam"',
			'version = "2020.0.0"',
			readme,
			])
	(tmp_pathplus / "README.rst").write_text("This is the reStructuredText README.")
	(tmp_pathplus / "README.md").write_text("This is the markdown README.")
	(tmp_pathplus / "README.txt").write_text("This is the plaintext README.")
	(tmp_pathplus / "README").write_text("This is the README.")

	with in_directory(tmp_pathplus):
		config = PEP621Parser().parse(dom_toml.load(tmp_pathplus / "pyproject.toml")["project"])

	advanced_data_regression.check(config)

def write_pth_files(self) -> None:
	"""
	Write ``.pth`` files, and their associated files, into the build directory.

	.. latex:clearpage::
	"""

	config = dom_toml.load(self.project_dir / "pyproject.toml")

	if "whey-pth" not in config.get("tool", {}):
		return

	parsed_config = WheyPthParser().parse(config["tool"]["whey-pth"], set_defaults=True)

	pth_filename = self.build_dir / parsed_config["name"]

	if pth_filename.suffix != ".pth":
		pth_filename = pth_filename.with_suffix(".pth")

	pth_filename.write_clean(parsed_config["pth-content"])
	self.report_written(pth_filename)

	self.parse_additional_files(*parsed_config["additional-wheel-files"])

def make_linux(self) -> PathPlus:
	"""
	Create, update or remove the Linux action, as appropriate.
	"""

	platform_name = "Linux"
	ci_file = self.workflows_dir / f"python_ci_{platform_name.lower()}.yml"

	if platform_name in self.templates.globals["platforms"]:
		conda_pip_dependencies = ["mkrecipe"]

		pyproject_file = PathPlus(self.repo_path / "pyproject.toml")
		if pyproject_file.is_file():
			data: DefaultDict[str, Any] = DefaultDict(dom_toml.load(pyproject_file))
			conda_pip_dependencies.extend(data["build-system"]["requires"])

		ci_file.write_clean(
				self.actions.render(
						no_dev_versions=no_dev_versions,
						python_versions=set_gh_actions_versions(self.get_linux_ci_versions()),
						ci_platform=platform_ci_names[platform_name],
						ci_name=platform_name,
						dependency_lines=self.get_linux_ci_requirements(),
						gh_actions_versions=self.get_gh_actions_matrix(),
						code_file_filter=self._code_file_filter,
						run_on_tags=" tags:\n - '*'",
						is_experimental=self._is_experimental,
						conda_pip_dependencies=conda_pip_dependencies,
						)
				)
	elif ci_file.is_file():
		ci_file.unlink()

	return ci_file

def test_decimal():
	PLACES = Decimal(10)**-4

	d = {'a': Decimal("0.1")}
	o: Dict[str, Any] = loads(dumps(d))

	assert o == loads(dumps(o))
	assert Decimal(o['a']).quantize(PLACES) == d['a'].quantize(PLACES)

	with pytest.raises(TypeError):
		loads(2)  # type: ignore[call-overload]

	with pytest.raises(TypeError, match="expected str, bytes or os.PathLike object, not int"):
		load(2)  # type: ignore[call-overload]

	with pytest.raises(TypeError, match="expected str, bytes or os.PathLike object, not list"):
		load([])  # type: ignore[call-overload]

	with pytest.raises(
			TypeError,
			match="argument should be a str object or an os.PathLike object returning str, not <class 'bytes'>"
			):
		load(b"test.toml")  # type: ignore[call-overload]

def test_dump(tmp_pathplus):
	dump(TEST_DICT, tmp_pathplus / "file.toml")
	dump(load(tmp_pathplus / "file.toml", dict_=OrderedDict), tmp_pathplus / "file2.toml")
	dump(load(tmp_pathplus / "file2.toml", dict_=OrderedDict), tmp_pathplus / "file3.toml")

	assert (tmp_pathplus / "file2.toml").read_text() == (tmp_pathplus / "file3.toml").read_text()

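# Round-trip sketch for ``dump``/``load`` above — a hedged example, not part of
# the original source. ``dict_=OrderedDict`` in the test preserves the key order
# of the file, which is why "file2.toml" and "file3.toml" compare equal
# byte-for-byte; for simple data a plain round trip is lossless too:
def demo_roundtrip(tmp_pathplus) -> None:  # hypothetical helper
	original = {"table": {"key": "value", "number": 42}}
	dump(original, tmp_pathplus / "roundtrip.toml")
	assert load(tmp_pathplus / "roundtrip.toml") == original
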
def make_formate_toml(repo_path: pathlib.Path, templates: Environment) -> List[str]:
	"""
	Add configuration for ``formate``.

	https://formate.readthedocs.io

	:param repo_path: Path to the repository root.
	:param templates:
	"""

	known_third_party = set()

	isort_file = PathPlus(repo_path / ".isort.cfg")
	formate_file = PathPlus(repo_path / "formate.toml")

	isort_config = get_isort_config(repo_path, templates)
	known_third_party.update(isort_config["known_third_party"])

	if formate_file.is_file():
		formate_config = dom_toml.load(formate_file)
	else:
		formate_config = {}

	# Read the isort config file and get "known_third_party" from there
	if isort_file.is_file():
		isort = ConfigUpdater()
		isort.read(str(isort_file))

		if "settings" in isort.sections() and "known_third_party" in isort["settings"]:
			known_third_party.update(re.split(r"(\n|,\s*)", isort["settings"]["known_third_party"].value))

	isort_file.unlink(missing_ok=True)

	if "hooks" in formate_config and "isort" in formate_config["hooks"]:
		if "kwargs" in formate_config["hooks"]["isort"]:
			known_third_party.update(formate_config["hooks"]["isort"]["kwargs"].get("known_third_party", ()))

			for existing_key, value in formate_config["hooks"]["isort"]["kwargs"].items():
				if existing_key not in isort_config:
					isort_config[existing_key] = value

	def normalise_underscore(name: str) -> str:
		return normalize(name.strip()).replace('-', '_')

	isort_config["known_third_party"] = sorted(set(filter(bool, map(normalise_underscore, known_third_party))))

	hooks = {
			"dynamic_quotes": 10,
			"collections-import-rewrite": 20,
			"yapf": {"priority": 30, "kwargs": {"yapf_style": ".style.yapf"}},
			"reformat-generics": 40,
			"isort": {"priority": 50, "kwargs": isort_config},
			"noqa-reformat": 60,
			"ellipsis-reformat": 70,
			"squish_stubs": 80,
			}

	config = {"indent": '\t', "line_length": 115}

	formate_config["hooks"] = hooks
	formate_config["config"] = config

	dom_toml.dump(formate_config, formate_file, encoder=dom_toml.TomlEncoder)

	return [formate_file.name, isort_file.name]

@classmethod
def load(
		cls: Type[_PP],
		filename: PathLike,
		set_defaults: bool = False,
		) -> _PP:
	"""
	Load the ``pyproject.toml`` configuration mapping from the given file.

	:param filename:
	:param set_defaults: If :py:obj:`True`, passes ``set_defaults=True`` to the
		:meth:`parse() <dom_toml.parser.AbstractConfigParser.parse>` method
		on :attr:`~.build_system_table_parser` and :attr:`~.project_table_parser`.
	"""

	filename = PathPlus(filename)
	project_dir = filename.parent
	config = dom_toml.load(filename)

	keys = set(config.keys())

	build_system_table: Optional[BuildSystemDict] = None
	project_table: Optional[ProjectDict] = None
	tool_table: Dict[str, Dict[str, Any]] = {}

	with in_directory(project_dir):
		if "build-system" in config:
			build_system_table = cls.build_system_table_parser.parse(
					config["build-system"], set_defaults=set_defaults)
			keys.remove("build-system")

		if "project" in config:
			project_table = cls.project_table_parser.parse(config["project"], set_defaults=set_defaults)
			keys.remove("project")

		if "tool" in config:
			tool_table = config["tool"]
			keys.remove("tool")

			for tool_name, tool_subtable in tool_table.items():
				if tool_name in cls.tool_parsers:
					tool_table[tool_name] = cls.tool_parsers[tool_name].parse(tool_subtable)

	if keys:
		allowed_top_level = ("build-system", "project", "tool")

		for top_level_key in sorted(keys):
			if top_level_key in allowed_top_level:
				continue

			if normalize(top_level_key) in allowed_top_level:
				raise BadConfigError(
						f"Unexpected top-level key {top_level_key!r}. "
						f"Did you mean {normalize(top_level_key)!r}?",
						)

			raise BadConfigError(
					f"Unexpected top-level key {top_level_key!r}. "
					f"Only {word_join(allowed_top_level, use_repr=True)} are allowed.",
					)

	return cls(
			build_system=build_system_table,
			project=project_table,
			tool=tool_table,
			)

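# Usage sketch for PyProject.load() above — a hedged example, not part of the
# original source (assumes a valid pyproject.toml in the current directory):
#
#     from pyproject_parser import PyProject
#
#     pyproject = PyProject.load("pyproject.toml")
#
#     if pyproject.project is not None:
#         print(pyproject.project["name"])
#
# Unknown top-level keys raise BadConfigError, with a "Did you mean ...?" hint
# when a key such as "Project" or "build_system" normalises to an allowed name.
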
def make_pyproject(repo_path: pathlib.Path, templates: Environment) -> List[str]:
	"""
	Create the ``pyproject.toml`` file for :pep:`517`.

	:param repo_path: Path to the repository root.
	:param templates:
	"""

	pyproject_file = PathPlus(repo_path / "pyproject.toml")

	data: DefaultDict[str, Any]

	if pyproject_file.is_file():
		data = DefaultDict(dom_toml.load(pyproject_file))
	else:
		data = DefaultDict()

	data.set_default("build-system", {})

	build_backend = "setuptools.build_meta"

	build_requirements_ = {
			"setuptools>=40.6.0",
			"wheel>=0.34.2",
			"whey",
			"repo-helper",
			*templates.globals["tox_build_requirements"],
			*data["build-system"].get("requires", []),
			}

	build_requirements = sorted(combine_requirements(ComparableRequirement(req) for req in build_requirements_))

	if templates.globals["use_whey"]:
		for old_dep in ["setuptools", "wheel"]:
			if old_dep in build_requirements:
				build_requirements.remove(old_dep)  # type: ignore

	if templates.globals["use_whey"]:
		build_backend = "whey"
	elif "whey" in build_requirements:
		build_requirements.remove("whey")  # type: ignore

	if "repo-helper" in build_requirements:
		build_requirements.remove("repo-helper")  # type: ignore

	data["build-system"]["requires"] = list(map(str, build_requirements))
	data["build-system"]["build-backend"] = build_backend

	data["project"] = DefaultDict(data.get("project", {}))
	data["project"]["name"] = templates.globals["pypi_name"]
	data["project"]["version"] = templates.globals["version"]
	data["project"]["description"] = templates.globals["short_desc"]
	data["project"]["readme"] = "README.rst"
	data["project"]["keywords"] = sorted(templates.globals["keywords"])
	data["project"]["dynamic"] = ["requires-python", "classifiers", "dependencies"]
	data["project"]["authors"] = [{"name": templates.globals["author"], "email": templates.globals["email"]}]
	data["project"]["license"] = {"file": "LICENSE"}

	if templates.globals["requires_python"] is not None:
		data["project"]["dynamic"].pop(0)
		data["project"]["requires-python"] = f">={templates.globals['requires_python']}"

	url = "https://github.com/{username}/{repo_name}".format_map(templates.globals)
	data["project"]["urls"] = {
			"Homepage": url,
			"Issue Tracker": "https://github.com/{username}/{repo_name}/issues".format_map(templates.globals),
			"Source Code": url,
			}

	if templates.globals["enable_docs"]:
		data["project"]["urls"]["Documentation"] = templates.globals["docs_url"]

	# extras-require
	data["project"]["optional-dependencies"] = {}

	for extra, dependencies in templates.globals["extras_require"].items():
		data["project"]["optional-dependencies"][extra] = list(map(str, dependencies))

	if not data["project"]["optional-dependencies"]:
		del data["project"]["optional-dependencies"]

	# entry-points
	if templates.globals["console_scripts"]:
		data["project"]["scripts"] = dict(split_entry_point(e) for e in templates.globals["console_scripts"])

	data["project"]["entry-points"] = {}

	for group, entry_points in templates.globals["entry_points"].items():
		data["project"]["entry-points"][group] = dict(split_entry_point(e) for e in entry_points)

	if not data["project"]["entry-points"]:
		del data["project"]["entry-points"]

	# tool
	data.set_default("tool", {})

	# tool.mkrecipe
	if templates.globals["enable_conda"]:
		data["tool"].setdefault("mkrecipe", {})
		data["tool"]["mkrecipe"]["conda-channels"] = templates.globals["conda_channels"]

		if templates.globals["conda_extras"] in (["none"], ["all"]):
			data["tool"]["mkrecipe"]["extras"] = templates.globals["conda_extras"][0]
		else:
			data["tool"]["mkrecipe"]["extras"] = templates.globals["conda_extras"]
	else:
		if "mkrecipe" in data["tool"]:
			del data["tool"]["mkrecipe"]

	# tool.whey
	data["tool"].setdefault("whey", {})

	data["tool"]["whey"]["base-classifiers"] = templates.globals["classifiers"]

	python_versions = set()
	python_implementations = set()

	for py_version in templates.globals["python_versions"]:
		py_version = str(py_version)

		if pre_release_re.match(py_version):
			continue

		pypy_version_m = _pypy_version_re.match(py_version)

		if py_version.startswith('3'):
			python_versions.add(py_version)
			python_implementations.add("CPython")
		elif pypy_version_m:
			python_implementations.add("PyPy")
			python_versions.add(f"3.{pypy_version_m.group(1)}")

	data["tool"]["whey"]["python-versions"] = natsorted(python_versions)
	data["tool"]["whey"]["python-implementations"] = sorted(python_implementations)

	data["tool"]["whey"]["platforms"] = templates.globals["platforms"]

	license_ = templates.globals["license"]
	data["tool"]["whey"]["license-key"] = {v: k for k, v in license_lookup.items()}.get(license_, license_)

	if templates.globals["source_dir"]:
		raise NotImplementedError("Whey does not support custom source directories")

	elif templates.globals["import_name"] != templates.globals["pypi_name"]:
		if templates.globals["stubs_package"]:
			data["tool"]["whey"]["package"] = "{import_name}-stubs".format_map(templates.globals)
		else:
			data["tool"]["whey"]["package"] = posixpath.join(
					# templates.globals["source_dir"],
					templates.globals["import_name"].split('.', 1)[0],
					)

	if templates.globals["manifest_additional"]:
		data["tool"]["whey"]["additional-files"] = templates.globals["manifest_additional"]
	elif "additional-files" in data["tool"]["whey"]:
		del data["tool"]["whey"]["additional-files"]

	if not templates.globals["enable_tests"] and not templates.globals["stubs_package"]:
		data["tool"]["importcheck"] = data["tool"].get("importcheck", {})

	if templates.globals["enable_docs"]:
		data["tool"]["sphinx-pyproject"] = make_sphinx_config_dict(templates)
	else:
		data["tool"].pop("sphinx-pyproject", None)

	# [tool.mypy]
	# This is added regardless of the supported mypy version.
	# It isn't removed from setup.cfg unless the version is 0.901 or above
	data["tool"].setdefault("mypy", {})

	data["tool"]["mypy"].update(_get_mypy_config(templates.globals))

	if templates.globals["mypy_plugins"]:
		data["tool"]["mypy"]["plugins"] = templates.globals["mypy_plugins"]

	# [tool.dependency-dash]
	data["tool"].setdefault("dependency-dash", {})
	data["tool"]["dependency-dash"]["requirements.txt"] = {"order": 10}

	if templates.globals["enable_tests"]:
		data["tool"]["dependency-dash"]["tests/requirements.txt"] = {
				"order": 20,
				"include": False,
				}

	if templates.globals["enable_docs"]:
		data["tool"]["dependency-dash"]["doc-source/requirements.txt"] = {
				"order": 30,
				"include": False,
				}

	# [tool.snippet-fmt]
	data["tool"].setdefault("snippet-fmt", {})
	data["tool"]["snippet-fmt"].setdefault("languages", {})
	data["tool"]["snippet-fmt"].setdefault("directives", ["code-block"])

	data["tool"]["snippet-fmt"]["languages"]["python"] = {"reformat": True}
	data["tool"]["snippet-fmt"]["languages"]["TOML"] = {"reformat": True}
	data["tool"]["snippet-fmt"]["languages"]["ini"] = {}
	data["tool"]["snippet-fmt"]["languages"]["json"] = {}

	if not data["tool"]:
		del data["tool"]

	# TODO: managed message
	dom_toml.dump(data, pyproject_file, encoder=dom_toml.TomlEncoder)

	return [pyproject_file.name]

def test_paths():
	load(test_toml)
	load(pathlib.Path(test_toml))

def test_nonexistent():
	load(test_toml)

	with pytest.raises(FileNotFoundError, match=r"No such file or directory: .*'nonexist.toml'\)?"):
		load("nonexist.toml")

def test_parse_config_errors(config: str, expects: Type[Exception], match: str, tmp_pathplus: PathPlus):
	(tmp_pathplus / "pyproject.toml").write_clean(config)

	with pytest.raises(expects, match=match):
		PEP621Parser().parse(load(tmp_pathplus / "pyproject.toml")["project"])

def typed():
	"""
	Add a 'py.typed' file and the associated trove classifier.
	"""

	# 3rd party
	from domdf_python_tools.paths import PathPlus
	from domdf_python_tools.stringlist import StringList
	from natsort import natsorted

	# this package
	from repo_helper.configupdater2 import ConfigUpdater
	from repo_helper.core import RepoHelper
	from repo_helper.utils import indent_join, stage_changes

	rh = RepoHelper(PathPlus.cwd())
	rh.load_settings()

	py_typed = rh.target_repo / rh.templates.globals["import_name"] / "py.typed"

	if not py_typed.is_file():
		py_typed.touch()

	stage_changes(rh.target_repo, [py_typed])

	setup_cfg = rh.target_repo / "setup.cfg"
	pyproject_file = rh.target_repo / "pyproject.toml"

	if setup_cfg.is_file() and not rh.templates.globals["use_whey"]:
		content = setup_cfg.read_text()

		config = ConfigUpdater()
		config.read_string(content)

		existing_classifiers = config["metadata"]["classifiers"]
		existing_classifiers_string = str(existing_classifiers)

		classifiers = set(map(str.strip, existing_classifiers.value.split('\n')))
		classifiers.add("Typing :: Typed")

		new_classifiers_lines = StringList(indent_join(natsorted(classifiers)).expandtabs(4))
		new_classifiers_lines[0] = "classifiers ="
		new_classifiers_lines.blankline(ensure_single=True)

		setup_cfg.write_clean(content.replace(existing_classifiers_string, str(new_classifiers_lines)))

	if pyproject_file.is_file() and rh.templates.globals["use_whey"]:
		pyproject_config = dom_toml.load(pyproject_file)

		if "whey" in pyproject_config.get("tool", {}):
			classifiers = set(pyproject_config["tool"]["whey"]["base-classifiers"])
			classifiers.add("Typing :: Typed")
			pyproject_config["tool"]["whey"]["base-classifiers"] = natsorted(classifiers)

		dom_toml.dump(pyproject_config, pyproject_file, encoder=dom_toml.TomlEncoder)