示例#1
0
def requirements_from_file(
    package_root: pathlib.Path,
    options: Dict,
    env: sphinx.environment.BuildEnvironment,
    extra: str,
) -> List[str]:
    """
	Load requirements from the specified file.

	:param package_root: The path to the package root
	:param options: The directive options; the requirements filename is read from ``options["file"]``.
	:param env: The Sphinx build environment (not used by this function).
	:param extra: The name of the "extra" that the requirements are for

	:return: List of requirements, combined and sorted.

	:raises FileNotFoundError: If the requirements file does not exist.
	:raises ValueError: If the requirements file is not a text file.
	"""

    requirements_file = package_root / options["file"]

    if not requirements_file.is_file():
        raise FileNotFoundError(
            f"Cannot find requirements file '{requirements_file}'")

    # Guard against accidentally pointing the directive at a binary file.
    mime_type = mimetypes.guess_type(str(requirements_file))[0]
    if not mime_type or not mime_type.startswith("text/"):
        raise ValueError(f"'{requirements_file}' is not a text file.")

    # Only the requirements themselves are needed here; the accompanying
    # comment lines returned by read_requirements are discarded.
    requirements = read_requirements(
        requirements_file,
        normalize_func=normalize_keep_dot,
    )[0]

    return list(map(str, sorted(combine_requirements(requirements))))
示例#2
0
    def merge_requirements(self) -> List[str]:
        """
        Merge the requirements from ``self.req_file`` into ``self.target_requirements``.

        :return: The comment lines from the requirements file
            (including any retained ``git+`` lines).
        """

        parsed, comments, invalid_lines = read_requirements(
            self.req_file, include_invalid=True)

        requirements = [*parsed, ComparableRequirement("sphinx>=3.0.3")]

        for entry in invalid_lines:
            if not entry.startswith("git+"):
                warnings.warn(f"Ignored invalid requirement {entry!r}")
            elif entry != "git+https://github.com/sphinx-toolbox/html-section.git":
                # Keep pip-style VCS requirements alongside the comments.
                comments.append(entry)

        # Themes other than the one actually in use should not be kept.
        other_themes = list(self.theme_versions.keys())
        current_theme = normalize(self._globals["sphinx_html_theme"])
        if current_theme in other_themes:
            other_themes.remove(current_theme)

        excluded_packages = {"sphinx-autodoc-typehints", "autodocsumm"}

        for req in requirements:
            req.name = normalize(req.name)
            if req.name in other_themes or req.name in excluded_packages:
                continue
            self.target_requirements.add(req)

        self.target_requirements = set(
            combine_requirements(self.target_requirements))

        return comments
示例#3
0
def get_isort_config(repo_path: pathlib.Path, templates: Environment) -> Dict[str, Any]:
	"""
	Returns a ``key: value`` mapping of configuration for ``isort``.

	https://github.com/timothycrosley/isort

	:param repo_path: Path to the repository root.
	:param templates: The template environment providing the project's global settings.
	"""

	isort: Dict[str, Any] = {
			"indent": "\t\t",  # To match what yapf uses
			# 8 is an undocumented multi_line_output option, with the closing bracket indented.
			"multi_line_output": 8,
			"import_heading_stdlib": "stdlib",
			"import_heading_thirdparty": "3rd party",
			"import_heading_firstparty": "this package",
			"import_heading_localfolder": "this package",
			"balanced_wrapping": False,
			"lines_between_types": 0,
			"use_parentheses": True,
			# float_to_top is deliberately omitted: it doesn't work properly
			# (no imports get sorted or floated to the top).
			"remove_redundant_aliases": True,
			"default_section": "THIRDPARTY",
			}

	if templates.globals["enable_tests"]:
		test_requirements = read_requirements(
				repo_path / templates.globals["tests_dir"] / "requirements.txt",
				include_invalid=True,
				)[0]
	else:
		test_requirements = set()

	main_requirements = read_requirements(repo_path / "requirements.txt")[0]

	# Collect the normalised names of every requirement, excluding the project itself.
	requirement_names = {normalize(req.name) for req in (*test_requirements, *main_requirements)}
	requirement_names.discard(templates.globals["import_name"])
	requirement_names.discard("iniconfig")

	isort["known_third_party"] = [name.replace('-', '_') for name in sorted(requirement_names)]
	isort["known_first_party"] = templates.globals["import_name"]

	return isort
示例#4
0
def load_toml(filename: PathLike) -> Dict[str, Any]:  # TODO: TypedDict
    """
	Load the ``mkrecipe`` configuration mapping from the given TOML file.

	:param filename: The TOML file (typically a ``pyproject.toml``) to load the configuration from.

	:raises KeyError: If the ``[project]`` table is missing from the file.
	:raises BadConfigError: If ``project.dependencies`` is listed as a dynamic field
		but no ``requirements.txt`` file exists alongside the TOML file.
	"""

    filename = PathPlus(filename)

    project_dir = filename.parent
    config = dom_toml.load(filename)

    parsed_config: Dict[str, Any] = {}
    tool_table = config.get("tool", {})

    # Parse from inside the TOML file's directory so any relative paths
    # referenced by the config resolve against the project, not the caller's cwd.
    with in_directory(filename.parent):

        parsed_config.update(BuildSystemParser().parse(config.get(
            "build-system", {}),
                                                       set_defaults=True))
        parsed_config.update(whey.config.WheyParser().parse(
            tool_table.get("whey", {})))
        parsed_config.update(MkrecipeParser().parse(tool_table.get(
            "mkrecipe", {}),
                                                    set_defaults=True))

        if "project" in config:
            parsed_config.update(PEP621Parser().parse(config["project"],
                                                      set_defaults=True))
        else:
            raise KeyError(f"'project' table not found in '{filename!s}'")

    # set defaults
    # "package" defaults to the top-level component of the project name.
    parsed_config.setdefault("package",
                             config["project"]["name"].split('.', 1)[0])
    parsed_config.setdefault("license-key", None)

    # Resolve dynamic dependencies from requirements.txt (PEP 621 "dynamic" field).
    if "dependencies" in parsed_config.get("dynamic", []):
        if (project_dir / "requirements.txt").is_file():
            dependencies = read_requirements(project_dir / "requirements.txt",
                                             include_invalid=True)[0]
            parsed_config["dependencies"] = sorted(
                combine_requirements(dependencies))
        else:
            raise BadConfigError(
                "'project.dependencies' was listed as a dynamic field "
                "but no 'requirements.txt' file was found.")

    parsed_config["version"] = str(parsed_config["version"])
    # setuptools and wheel are always added to the build requirements.
    parsed_config["requires"] = sorted(
        set(
            combine_requirements(
                parsed_config["requires"],
                ComparableRequirement("setuptools"),
                ComparableRequirement("wheel"),
            )))

    return parsed_config
示例#5
0
def requirement(requirement: str, file: Optional[str] = None) -> int:
    """
	Add a requirement.

	:param requirement: The requirement specifier to add.
	:param file: The requirements file to add it to.
		Defaults to ``requirements.txt`` in the repository root.

	:return: ``0`` on success.
	"""

    # 3rd party
    from consolekit.utils import abort
    from domdf_python_tools.paths import PathPlus, traverse_to_file
    from domdf_python_tools.stringlist import StringList
    from packaging.requirements import InvalidRequirement
    from packaging.specifiers import SpecifierSet
    from shippinglabel import normalize_keep_dot
    from shippinglabel.requirements import ComparableRequirement, combine_requirements, read_requirements

    repo_dir: PathPlus = traverse_to_file(PathPlus.cwd(), "repo_helper.yml",
                                          "git_helper.yml")

    if file is None:
        requirements_file = repo_dir / "requirements.txt"

        if not requirements_file.is_file():
            # Bugfix: this branch previously reported f"'{file}' not found.",
            # which printed "'None' not found." when no file was given.
            raise abort("'requirements.txt' not found.")

    else:
        requirements_file = PathPlus(file)

        if not requirements_file.is_file():
            # Bugfix: this branch previously reported the hard-coded
            # "'requirements.txt' not found." instead of the supplied filename.
            raise abort(f"'{file}' not found.")

    try:
        req = ComparableRequirement(requirement)
    except InvalidRequirement as e:
        raise BadRequirement(requirement, e)

    # Check the project exists on PyPI and pick up its canonical name.
    response = (PYPI_API / req.name / "json/").get()
    if response.status_code != 200:
        raise click.BadParameter(f"No such project {req.name}")
    else:
        req.name = normalize(response.json()["info"]["name"])
        if not req.specifier:
            # No version constraint given: default to ">= latest release".
            req.specifier = SpecifierSet(
                f">={response.json()['info']['version']}")

        click.echo(f"Adding requirement '{req}'")

    requirements, comments, invalid_lines = read_requirements(
        req_file=requirements_file,
        include_invalid=True,
        normalize_func=normalize_keep_dot,
    )

    requirements.add(req)

    # Preserve comments and unparseable lines at the top, then write the
    # combined, sorted requirements below them.
    buf = StringList([*comments, *invalid_lines])
    buf.extend(str(req) for req in sorted(combine_requirements(requirements)))
    requirements_file.write_lines(buf)

    return 0
示例#6
0
def sort_requirements(filename: PathLike, allow_git: bool = False) -> int:
    """
	Sort the requirements in the given file alphabetically.

	:param filename: The file to sort the requirements in.
	:param allow_git: Whether to allow lines that start with ``git+``, which are allowed by pip but not :pep:`508`.

	:return: ``PASS`` if the file was already clean and sorted, otherwise ``FAIL``
		(in which case the file is rewritten in sorted form).
	"""

    ret = PASS
    filename = PathPlus(filename)
    comments: List[str]
    requirements: Set[ComparableRequirement]
    git_lines: List[str] = []

    requirements, comments, invalid_lines = read_requirements(
        req_file=filename,
        include_invalid=True,
        normalize_func=normalize_keep_dot,
    )

    for line in invalid_lines:
        if line.startswith("git+") and allow_git:
            git_lines.append(line)
        else:
            ret |= FAIL

    # find and remove pkg-resources==0.0.0
    # which is automatically added by broken pip package under Debian
    if ComparableRequirement("pkg-resources==0.0.0") in requirements:
        requirements.remove(ComparableRequirement("pkg-resources==0.0.0"))
        ret |= FAIL

    sorted_requirements = sorted(requirements)

    # Comments first, then any permitted git+ lines, then the sorted requirements.
    buf = StringList(
        [*comments, *git_lines, *[str(req) for req in sorted_requirements]])
    buf.blankline(ensure_single=True)

    # Bugfix: the condition previously also tested
    # ``requirements != sorted_requirements``, but that compared a set with a
    # list and was therefore always True; the only meaningful check is whether
    # the rewritten content differs from what is currently on disk.
    if buf != filename.read_lines() or ret:
        print('\n'.join(buf))
        ret |= FAIL
        filename.write_lines(buf)

    return ret
示例#7
0
    def load_config(self) -> Dict[str, Any]:
        """
		Load the ``mkrecipe`` configuration.
		"""

        config = parse_yaml(self.project_dir, allow_unknown_keys=True)

        config["name"] = config["modname"]
        config["description"] = config["short_desc"]
        config["authors"] = [{"name": config["author"]}]
        config["maintainers"] = []
        config["conda-channels"] = config["conda_channels"]
        config["optional-dependencies"] = config["extras_require"]
        config["dependencies"] = sorted(
            read_requirements(self.project_dir / "requirements.txt")[0])
        config["requires"] = ["setuptools", "wheel"]

        if config["conda_extras"] in (["none"], ["all"]):
            config["extras"] = config["conda_extras"][0]
        else:
            config["extras"] = config["conda_extras"]

        if config["use_whey"]:
            config["requires"].append("whey")

        url = "https://github.com/{username}/{repo_name}".format_map(config)
        config["urls"] = {
            "Homepage":
            url,
            "Issue Tracker":
            "https://github.com/{username}/{repo_name}/issues".format_map(
                config),
            "Source Code":
            url,
        }

        if config["enable_docs"]:
            config["urls"]["Documentation"] = config["docs_url"]

        config["package"] = posixpath.join(
            # config["source_dir"],
            config["import_name"].split('.', 1)[0], )

        if config["import_name"] != config["pypi_name"] and config[
                "stubs_package"]:
            config["package"] = "{import_name}-stubs".format_map(config)

        license_ = config["license"]
        config["license-key"] = {v: k
                                 for k, v in license_lookup.items()
                                 }.get(license_, license_)

        return config
示例#8
0
	def merge_requirements(self) -> List[str]:
		"""
		Merge the requirements from ``self.req_file`` into ``self.target_requirements``.

		:return: The comment lines from the file, plus any ``git+`` lines kept verbatim.
		"""

		requirements, comments, invalid_lines = read_requirements(self.req_file, include_invalid=True)

		for entry in invalid_lines:
			# pip accepts git+ URLs even though they aren't valid PEP 508
			# requirements; keep them with the comments so they round-trip.
			if entry.startswith("git+"):
				comments.append(entry)
			else:
				warnings.warn(f"Ignored invalid requirement {entry!r}")

		self.target_requirements = set(combine_requirements(*requirements, *self.target_requirements))

		return comments
def parse_requirements_txt(base_dir: PathLike) -> List[str]:
    """
	Returns a list of package names listed as requirements in the ``requirements.txt`` file.

	:param base_dir: The directory in which to find the ``requirements.txt`` file.
	"""

    req_file = PathPlus(base_dir) / "requirements.txt"

    # Invalid lines are tolerated (include_invalid=True) but not returned.
    requirements = read_requirements(req_file=req_file, include_invalid=True)[0]

    return sorted(req.name for req in requirements)
示例#10
0
def load_toml(filename: PathLike) -> ConfigDict:
	"""
	Load the ``pyproject-devenv`` configuration mapping from the given TOML file.

	:param filename: The TOML file to load the configuration from.

	:raises BadConfigError: If the ``[project]`` table is missing, or if
		``project.dependencies`` is dynamic but no ``requirements.txt`` exists.
	"""

	filename = PathPlus(filename)
	devenv_config = _DevenvConfig.load(filename, set_defaults=True)

	if devenv_config.project is None:
		raise BadConfigError(f"The '[project]' table was not found in {filename.as_posix()!r}")

	project = devenv_config.project
	project_dir = filename.parent

	# Resolve dynamic dependencies from requirements.txt (PEP 621 "dynamic" field).
	if "dependencies" in set(project["dynamic"]):
		requirements_file = project_dir / "requirements.txt"
		if not requirements_file.is_file():
			raise BadConfigError(
					"'project.dependencies' was listed as a dynamic field "
					"but no 'requirements.txt' file was found."
					)
		dependencies = read_requirements(requirements_file, include_invalid=True)[0]
		project["dependencies"] = sorted(combine_requirements(dependencies))

	build_system = devenv_config.build_system
	build_dependencies = None if build_system is None else build_system["requires"]

	return {
			"name": project["name"],
			"dependencies": project["dependencies"],
			"optional_dependencies": project["optional-dependencies"],
			"build_dependencies": build_dependencies,
			}
示例#11
0
def classifiers(
    add: bool,
    status: Optional[int] = None,
    library: Optional[bool] = None,
):
    """
	Suggest trove classifiers based on repository metadata.

	:param add: Whether to add the suggested classifiers to the ``repo_helper.yml`` file.
		May be :py:obj:`None`, in which case the user is prompted when stdout is a tty.
	:param status: The "Development Status" value, a 1-based index into
		``development_status_options``; prompted for if unset and stdout is a tty.
	:param library: Whether the project is a library for developers.
	"""

    # stdlib
    import sys

    # 3rd party
    from consolekit.input import choice, confirm
    from domdf_python_tools.paths import PathPlus
    from natsort import natsorted
    from shippinglabel.classifiers import classifiers_from_requirements
    from shippinglabel.requirements import combine_requirements, read_requirements

    # this package
    from repo_helper.core import RepoHelper

    rh = RepoHelper(PathPlus.cwd())
    rh.load_settings()
    config = rh.templates.globals
    suggested_classifiers = set()
    pkg_dir = rh.target_repo / config["import_name"]

    # One "Programming Language" classifier per language detected in the package.
    for language in detect_languages(pkg_dir):
        suggested_classifiers.add(f"Programming Language :: {language}")

    # If not a tty, assume default options are False
    if not sys.stdout.isatty():
        if add is None:
            add = False
        if library is None:
            library = False

    # choice() returns a 0-based index, hence the +1 to get the status number.
    if status is None and sys.stdout.isatty():
        click.echo("What is the Development Status of this project?")
        status = choice(text="Status",
                        options=development_status_options,
                        start_index=1) + 1

    if status is not None:
        status_string = f"Development Status :: {status} - {development_status_options[status - 1]}"
        suggested_classifiers.add(status_string)

    # NOTE(review): uses click.confirm here but consolekit's confirm() further
    # down — possibly intentional, verify.
    if library is None:
        library = click.confirm("Is this a library for developers?")

    if library:
        suggested_classifiers.add(
            "Topic :: Software Development :: Libraries :: Python Modules")
        suggested_classifiers.add("Intended Audience :: Developers")

    # Add classifiers implied by the project's requirements.
    lib_requirements = combine_requirements(
        read_requirements(rh.target_repo / "requirements.txt")[0])

    suggested_classifiers.update(
        classifiers_from_requirements(lib_requirements))

    # file_content = dedent(
    # 		f"""\
    # # Remove any classifiers you don't think are relevant.
    # # Lines starting with a # will be discarded.
    # """
    # 		)
    # file_content += "\n".join(natsorted(suggested_classifiers))
    #
    # def remove_invalid_entries(line):
    # 	line = line.strip()
    # 	if not line:
    # 		return False
    # 	elif line.startswith("#"):
    # 		return False
    # 	else:
    # 		return True
    #
    # suggested_classifiers = set(
    # 		filter(remove_invalid_entries, (click.edit(file_content) or file_content).splitlines())
    # 		)

    if not suggested_classifiers:
        if sys.stdout.isatty():
            click.echo("Sorry, I've nothing to suggest 😢")

        sys.exit(1)

    # On a tty show a friendly listing; otherwise emit plain machine-readable lines.
    if sys.stdout.isatty():
        click.echo(
            "Based on what you've told me I think the following classifiers are appropriate:"
        )
        for classifier in natsorted(suggested_classifiers):
            click.echo(f" - {classifier}")
    else:
        for classifier in natsorted(suggested_classifiers):
            click.echo(classifier)

    if add is None:
        add = confirm(
            "Do you want to add these to the 'repo_helper.yml' file?")

    if add:

        # this package
        from repo_helper.configuration import YamlEditor

        yaml = YamlEditor()
        yaml.update_key(rh.target_repo / "repo_helper.yml",
                        "classifiers",
                        suggested_classifiers,
                        sort=True)
示例#12
0
def stubs(add: Optional[bool] = None,
          force_tty: bool = False,
          no_pager: bool = False):
    """
	Suggest :pep:`561` type stubs.

	:param add: Whether to add the suggested stub packages to the ``stubs.txt`` file.
		May be :py:obj:`None`, in which case the user is prompted.
	:param force_tty: Behave as if stdout were a tty even when it isn't.
	:param no_pager: Print the suggestions table directly rather than via a pager.
	"""

    # stdlib
    import shutil
    import sys
    from itertools import chain

    # 3rd party
    import tabulate
    from apeye import URL
    from apeye.requests_url import TrailingRequestsURL
    from domdf_python_tools.paths import PathPlus
    from domdf_python_tools.stringlist import StringList
    from shippinglabel import normalize
    from shippinglabel.pypi import PYPI_API
    from shippinglabel.requirements import combine_requirements, read_requirements

    # this package
    from repo_helper.core import RepoHelper

    rh = RepoHelper(PathPlus.cwd())
    rh.load_settings()
    config = rh.templates.globals

    # Gather every requirements.txt: the root one, the tests one (if enabled),
    # and any nested under the package directory.
    requirements_files = [rh.target_repo / "requirements.txt"]

    if config["enable_tests"]:
        requirements_files.append(rh.target_repo / config["tests_dir"] /
                                  "requirements.txt")

    requirements_files.extend(
        (rh.target_repo /
         config["import_name"]).iterchildren("**/requirements.txt"))

    all_requirements = set(
        chain.from_iterable(
            read_requirements(file, include_invalid=True)[0]
            for file in requirements_files))

    stubs_file = rh.target_repo / "stubs.txt"

    if stubs_file.is_file():
        existing_stubs, stub_comments, invalid_stubs = read_requirements(
            stubs_file, include_invalid=True)
    else:
        existing_stubs = set()
        stub_comments, invalid_stubs = [], []

    # requirement string -> name of the stub package found on PyPI
    suggestions = {}

    for requirement in all_requirements:
        if normalize(requirement.name) in {"typing-extensions"}:
            continue

        # Stub packages follow one of two naming conventions:
        # "<name>-stubs" (checked first) or "types-<name>".
        types_url = TrailingRequestsURL(
            PYPI_API / f"types-{requirement.name.lower()}" / "json/")
        stubs_url = TrailingRequestsURL(
            PYPI_API / f"{requirement.name.lower()}-stubs" / "json/")

        response = stubs_url.head()
        if response.status_code == 404:
            # No stubs found for -stubs
            response = types_url.head()
            if response.status_code == 404:
                # No stubs found for types-
                continue
            else:
                # The final URL's parent directory name is the canonical project name.
                response_url = URL(response.url)
                suggestions[str(requirement)] = response_url.parent.name
                # print(requirement, response.url)
        else:
            response_url = URL(response.url)
            suggestions[str(requirement)] = response_url.parent.name
            # print(requirement, response.url)

    if not suggestions:
        if sys.stdout.isatty() or force_tty:
            click.echo("No stubs to suggest.")
        sys.exit(1)

    if sys.stdout.isatty() or force_tty:

        table = StringList([
            "Suggestions",
            "-----------",
            tabulate.tabulate(suggestions.items(),
                              headers=["Requirement", "Stubs"]),
        ])
        table.blankline(ensure_single=True)

        # Only page the output when it doesn't fit on the screen.
        if no_pager or len(table) <= shutil.get_terminal_size().lines:
            click.echo('\n'.join(table))
        else:
            click.echo_via_pager('\n'.join(table))

        # NOTE(review): confirm() is not imported in this function — presumably
        # imported at module level; verify.
        if add is None:
            add = confirm("Do you want to add these to the 'stubs.txt' file?")

        if add:
            new_stubs = sorted(
                combine_requirements(*existing_stubs, *suggestions.values()))

            # Preserve existing comments and invalid lines, then the merged stubs.
            stubs_file.write_lines([
                *stub_comments,
                *invalid_stubs,
                *map(str, new_stubs),
            ])

    else:
        for stub in suggestions.values():
            click.echo(stub)

    sys.exit(0)
示例#13
0
# Re-pin the repo_helper VCS requirement in requirements.txt to the latest
# commit on its master branch, and rewrite the file in sorted form.

# stdlib
import os

# 3rd party
from apeye.requests_url import RequestsURL
from domdf_python_tools.paths import PathPlus
from domdf_python_tools.stringlist import StringList
from shippinglabel.requirements import read_requirements

COMMITS_URL = RequestsURL(
    "https://api.github.com/repos/domdfcoding/repo_helper/commits/master")

# SHA of the most recent commit on master.
head_sha = COMMITS_URL.get().json()["sha"]

requirements, comments, invalid = read_requirements("requirements.txt",
                                                    include_invalid=True)

buf = StringList(comments)

# Replace any repo_helper git+ pin with the latest commit; keep every other
# unparseable line untouched.
for line in invalid:
    if line.startswith("git+https://github.com/domdfcoding/repo_helper@"):
        buf.append(
            f"git+https://github.com/domdfcoding/repo_helper@{head_sha}")
    else:
        buf.append(line)

buf.extend(str(req) for req in sorted(requirements))

PathPlus("requirements.txt").write_lines(buf)
示例#14
0
def requirements(
    no_pager: bool = False,
    depth: int = -1,
    concise: bool = False,
    no_venv: bool = False,
):
    """
	Lists the requirements of this library, and their dependencies.

	:param no_pager: Print the output directly instead of via a pager.
	:param depth: How many levels of (sub-)dependencies to list.
		Presumably ``-1`` means unlimited — TODO confirm against ``list_requirements``.
	:param concise: Show a flat, deduplicated list rather than a tree.
	:param no_venv: Don't resolve installed distributions from the repository's
		``venv`` directory even if one exists.
	"""

    # stdlib
    import re
    import shutil

    # 3rd party
    from domdf_python_tools.compat import importlib_metadata
    from domdf_python_tools.iterative import make_tree
    from domdf_python_tools.paths import PathPlus, in_directory
    from domdf_python_tools.stringlist import StringList
    from packaging.requirements import Requirement
    from shippinglabel.requirements import (ComparableRequirement,
                                            combine_requirements,
                                            list_requirements,
                                            read_requirements)

    # this package
    from repo_helper.core import RepoHelper

    rh = RepoHelper(PathPlus.cwd())
    rh.load_settings(allow_unknown_keys=True)

    with in_directory(rh.target_repo):

        # The tree is rooted at this project's own pinned name==version line.
        buf = StringList([
            f"{rh.templates.globals['pypi_name']}=={rh.templates.globals['version']}"
        ])
        raw_requirements = sorted(read_requirements("requirements.txt")[0])
        tree: List[Union[str, List[str], List[Union[str, List]]]] = []
        venv_dir = (rh.target_repo / "venv")

        if venv_dir.is_dir() and not no_venv:
            # Use virtualenv as it exists
            # Point importlib.metadata's search path at the venv's site-packages
            # so dependency metadata is read from there instead of this process's env.
            search_path = []

            for directory in (venv_dir / "lib").glob("python3.*"):
                search_path.append(str(directory / "site-packages"))

            importlib_metadata.DistributionFinder.Context.path = search_path  # type: ignore

        if concise:
            concise_requirements = []

            # Recursively flatten the nested lists produced by list_requirements
            # into a single stream of requirement strings.
            def flatten(iterable: Iterable[Union[Requirement, Iterable]]):
                for item in iterable:
                    if isinstance(item, str):
                        yield item
                    else:
                        yield from flatten(item)  # type: ignore

            for requirement in raw_requirements:
                concise_requirements.append(requirement)
                # TODO: remove "extra == " marker
                for req in flatten(
                        list_requirements(str(requirement), depth=depth - 1)):
                    concise_requirements.append(
                        ComparableRequirement(
                            re.sub('; extra == ".*"', '', req)))

            # Deduplicate (combine_requirements merges specifiers) and sort.
            concise_requirements = sorted(
                set(combine_requirements(concise_requirements)))
            tree = list(map(str, concise_requirements))

        else:
            # Tree mode: each top-level requirement is followed by a nested
            # list of its own dependencies (if any).
            for requirement in raw_requirements:
                tree.append(str(requirement))
                deps = list(
                    list_requirements(str(requirement), depth=depth - 1))
                if deps:
                    tree.append(deps)

        buf.extend(make_tree(tree))

        # Don't use pager if there are fewer lines than the terminal height.
        if shutil.get_terminal_size().lines >= len(buf):
            no_pager = True

        if no_pager:
            click.echo(str(buf))
        else:
            click.echo_via_pager(str(buf))