Example #1
def load_toml(filename: PathLike) -> Dict[str, Any]:  # TODO: TypedDict
    """
	Load the ``mkrecipe`` configuration mapping from the given TOML file.

	:param filename:
	"""

    filename = PathPlus(filename)

    project_dir = filename.parent
    config = dom_toml.load(filename)

    parsed_config: Dict[str, Any] = {}
    tool_table = config.get("tool", {})

    with in_directory(filename.parent):
        parsed_config.update(BuildSystemParser().parse(config.get("build-system", {}), set_defaults=True))
        parsed_config.update(whey.config.WheyParser().parse(tool_table.get("whey", {})))
        parsed_config.update(MkrecipeParser().parse(tool_table.get("mkrecipe", {}), set_defaults=True))

        if "project" in config:
            parsed_config.update(PEP621Parser().parse(config["project"], set_defaults=True))
        else:
            raise KeyError(f"'project' table not found in '{filename!s}'")

    # set defaults
    parsed_config.setdefault("package",
                             config["project"]["name"].split('.', 1)[0])
    parsed_config.setdefault("license-key", None)

    if "dependencies" in parsed_config.get("dynamic", []):
        if (project_dir / "requirements.txt").is_file():
            dependencies = read_requirements(project_dir / "requirements.txt",
                                             include_invalid=True)[0]
            parsed_config["dependencies"] = sorted(
                combine_requirements(dependencies))
        else:
            raise BadConfigError(
                "'project.dependencies' was listed as a dynamic field "
                "but no 'requirements.txt' file was found.")

    parsed_config["version"] = str(parsed_config["version"])
    parsed_config["requires"] = sorted(
        set(
            combine_requirements(
                parsed_config["requires"],
                ComparableRequirement("setuptools"),
                ComparableRequirement("wheel"),
            )))

    return parsed_config
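
A minimal sketch of what combine_requirements does with the duplicate entries these loaders encounter, assuming shippinglabel is installed; the package names are purely illustrative:

# combine_requirements merges duplicate requirements into a single entry
# whose specifier set is the union of the individual specifiers.
from shippinglabel.requirements import ComparableRequirement, combine_requirements

reqs = [
    ComparableRequirement("requests>=2.0"),
    ComparableRequirement("requests<3.0"),
    ComparableRequirement("numpy"),
]

for req in sorted(combine_requirements(reqs)):
    print(req)  # numpy, then requests<3.0,>=2.0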
Example #2
    def merge_requirements(self) -> List[str]:
        current_requirements_, comments, invalid_lines = read_requirements(self.req_file, include_invalid=True)

        current_requirements = list(current_requirements_)
        current_requirements.append(ComparableRequirement("sphinx>=3.0.3"))

        for line in invalid_lines:
            if line.startswith("git+"):
                if line == "git+https://github.com/sphinx-toolbox/html-section.git":
                    continue
                comments.append(line)
            else:
                warnings.warn(f"Ignored invalid requirement {line!r}")

        other_themes = list(self.theme_versions.keys())
        theme_name = normalize(self._globals["sphinx_html_theme"])
        if theme_name in other_themes:
            other_themes.remove(theme_name)

        for req in current_requirements:
            req.name = normalize(req.name)
            # if req.name not in self.get_target_requirement_names() and req.name not in self.theme_versions.keys():
            if req.name not in other_themes:
                if req.name in {"sphinx-autodoc-typehints", "autodocsumm"}:
                    continue
                else:
                    self.target_requirements.add(req)

        self.target_requirements = set(
            combine_requirements(self.target_requirements))

        return comments
Example #3
def parse_extras(raw_config_vars: Mapping[str, Any], repo_path: pathlib.Path) -> Tuple[Dict, List[str]]:
	"""
	Returns parsed ``setuptools`` ``extras_require``.

	:param raw_config_vars: Dictionary to obtain the value from.
	:param repo_path: The path to the repository.
	"""

	additional_requirements_files = set(raw_config_vars.get("additional_requirements_files", ()))
	extras_require = raw_config_vars.get("extras_require", {})

	for extra, requires in extras_require.items():
		if isinstance(requires, str):
			if (repo_path / requires).is_file():
				# a path to the requirements file from the repo root
				extras_require[extra] = parse_extra_requirements_file(
						(repo_path / requires).read_text(encoding="UTF-8")
						)
				additional_requirements_files.add(requires)
			else:
				# A single requirement
				extras_require[extra] = [requires]
		else:
			extras_require[extra] = sorted(combine_requirements(map(ComparableRequirement, extras_require[extra])))

	extras_require["all"] = sorted(set(chain.from_iterable(extras_require.values())))

	if not extras_require["all"]:
		del extras_require["all"]

	return {k: list(map(str, v)) for k, v in extras_require.items()}, sorted(additional_requirements_files)
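
For context, a sketch of how the "all" extra is assembled above, using only the standard library; the extras data is made up for illustration:

# Flatten every extra's requirement list into one deduplicated "all" extra.
from itertools import chain

extras_require = {
    "pdf": ["reportlab>=3.0"],
    "excel": ["openpyxl", "reportlab>=3.0"],
}
extras_require["all"] = sorted(set(chain.from_iterable(extras_require.values())))
print(extras_require["all"])  # ['openpyxl', 'reportlab>=3.0']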
Example #4
def requirements_from_flit(
    package_root: pathlib.Path,
    options: Dict,
    env: sphinx.environment.BuildEnvironment,
    extra: str,
) -> List[str]:
    """
	Load requirements from the ``[tool.flit.metadata.requires-extra]`` section of
	a ``pyproject.toml`` file in the root of the repository.

	:param package_root: The path to the package root.
	:param options:
	:param env:
	:param extra: The name of the "extra" that the requirements are for.

	:return: List of requirements.
	"""  # noqa D400

    pyproject_file = PathPlus(env.srcdir).parent / "pyproject.toml"

    if not pyproject_file.is_file():
        raise FileNotFoundError(
            f"Cannot find pyproject.toml in '{pyproject_file.parent}'")

    flit_extras = parse_pyproject_extras(pyproject_file, flavour="flit", normalize_func=normalize_keep_dot)

    if extra not in flit_extras:
        raise ValueError(
            f"'{extra}' not found in '[tool.flit.metadata.requires-extra]'")

    requirements = flit_extras[extra]

    return list(map(str, sorted(combine_requirements(requirements))))
Example #5
def requirements_from_file(
    package_root: pathlib.Path,
    options: Dict,
    env: sphinx.environment.BuildEnvironment,
    extra: str,
) -> List[str]:
    """
	Load requirements from the specified file.

	:param package_root: The path to the package root
	:param options:
	:param env:
	:param extra: The name of the "extra" that the requirements are for

	:return: List of requirements
	"""

    requirements_file = package_root / options["file"]

    if not requirements_file.is_file():
        raise FileNotFoundError(
            f"Cannot find requirements file '{requirements_file}'")

    mime_type = mimetypes.guess_type(str(requirements_file))[0]
    if not mime_type or not mime_type.startswith("text/"):
        raise ValueError(f"'{requirements_file}' is not a text file.")

    requirements, comments = read_requirements(
        requirements_file,
        normalize_func=normalize_keep_dot,
    )

    return list(map(str, sorted(combine_requirements(requirements))))
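
The MIME-type guard above relies on mimetypes guessing from the file extension alone. A quick sketch (the .whl result may vary with the platform's type registry):

import mimetypes

print(mimetypes.guess_type("requirements.txt")[0])  # text/plain, so the file is accepted
print(mimetypes.guess_type("pkg-1.0-py3-none-any.whl")[0])  # typically None, so it is rejected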
Example #6
def requirement(requirement: str, file: Optional[str] = None) -> int:
    """
	Add a requirement.
	"""

    # 3rd party
    from consolekit.utils import abort
    from domdf_python_tools.paths import PathPlus, traverse_to_file
    from domdf_python_tools.stringlist import StringList
    from packaging.requirements import InvalidRequirement
    from packaging.specifiers import SpecifierSet
    from shippinglabel import normalize, normalize_keep_dot
    from shippinglabel.pypi import PYPI_API
    from shippinglabel.requirements import ComparableRequirement, combine_requirements, read_requirements

    repo_dir: PathPlus = traverse_to_file(PathPlus.cwd(), "repo_helper.yml",
                                          "git_helper.yml")

    if file is None:
        requirements_file = repo_dir / "requirements.txt"

        if not requirements_file.is_file():
            raise abort("'requirements.txt' not found.")

    else:
        requirements_file = PathPlus(file)

        if not requirements_file.is_file():
            raise abort(f"'{file}' not found.")

    try:
        req = ComparableRequirement(requirement)
    except InvalidRequirement as e:
        raise BadRequirement(requirement, e)

    response = (PYPI_API / req.name / "json/").get()
    if response.status_code != 200:
        raise click.BadParameter(f"No such project {req.name}")
    else:
        req.name = normalize(response.json()["info"]["name"])
        if not req.specifier:
            req.specifier = SpecifierSet(f">={response.json()['info']['version']}")

        click.echo(f"Adding requirement '{req}'")

    requirements, comments, invalid_lines = read_requirements(
        req_file=requirements_file,
        include_invalid=True,
        normalize_func=normalize_keep_dot,
    )

    requirements.add(req)

    buf = StringList([*comments, *invalid_lines])
    buf.extend(str(req) for req in sorted(combine_requirements(requirements)))
    requirements_file.write_lines(buf)

    return 0
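
A sketch of the version-pinning step above: when the user supplies a bare requirement, it is constrained to at least the latest release. The version string here is a stand-in for the value fetched from the PyPI JSON API.

from packaging.specifiers import SpecifierSet
from shippinglabel.requirements import ComparableRequirement

req = ComparableRequirement("click")
latest_version = "8.1.7"  # hypothetical value from response.json()["info"]["version"]

if not req.specifier:
    req.specifier = SpecifierSet(f">={latest_version}")

print(req)  # click>=8.1.7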
Example #7
	def merge_requirements(self) -> List[str]:
		current_requirements, comments, invalid_lines = read_requirements(self.req_file, include_invalid=True)

		for line in invalid_lines:
			if line.startswith("git+"):
				comments.append(line)
			else:
				warnings.warn(f"Ignored invalid requirement {line!r}")

		self.target_requirements = set(combine_requirements(*current_requirements, *self.target_requirements))

		return comments
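
Both merge_requirements variants rely on the three-tuple returned by read_requirements(..., include_invalid=True). A minimal sketch, with illustrative file contents:

from domdf_python_tools.paths import PathPlus
from shippinglabel.requirements import read_requirements

req_file = PathPlus("requirements.txt")
req_file.write_lines([
    "# runtime dependencies",
    "requests>=2.0",
    "git+https://github.com/octocat/example.git",  # illustrative VCS line
])

requirements, comments, invalid_lines = read_requirements(req_file, include_invalid=True)
print(requirements)    # {ComparableRequirement('requests>=2.0')}
print(comments)        # ['# runtime dependencies']
print(invalid_lines)   # ['git+https://github.com/octocat/example.git']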
Example #8
    def make_for_wheel(self) -> str:
        """
		Make the recipe for creating a conda package from a wheel.

		.. versionadded:: 0.3.0

		:returns: The ``meta.yaml`` recipe as a string.
		"""

        # find the download URL
        wheel_url = self.get_wheel_url()

        runtime_requirements = self.get_runtime_requirements()
        host_requirements = sorted(set(combine_requirements(
            runtime_requirements,
            "setuptools",
            "wheel",
            normalize_func=str,
        )))

        project_license = license_lookup.get(self.config["license-key"],
                                             self.config["license-key"])

        environment = Environment(loader=BaseLoader(), undefined=StrictUndefined)  # nosec: B701
        template = environment.from_string(importlib_resources.read_text("mkrecipe", "recipe_template.ymlt"))
        config = {k.replace('-', '_'): v for k, v in self.config.items() if k != "requires"}

        return template.render(
            wheel_url=wheel_url,
            host_requirements=host_requirements,
            runtime_requirements=runtime_requirements,
            conda_full_description=self.make_conda_description(),
            url_lines=list(self.get_urls()),
            all_maintainers=sorted(self.get_maintainers()),
            project_license=project_license,
            requires=["setuptools", "wheel"],
            wheel=True,
            **config,
        )
Example #9
    def parse_requires(self, config: Dict[str, TOML_TYPES]) -> List[ComparableRequirement]:
        """
        Parse the :pep:`requires <518#build-system-table>` key.

        :param config: The unparsed TOML config for the :pep:`build-system table <518#build-system-table>`.
        """  # noqa: RST399

        parsed_dependencies = set()
        key_path = [self.table_name, "requires"]

        self.assert_sequence_not_str(config["requires"], key_path)

        for idx, keyword in enumerate(config["requires"]):
            self.assert_indexed_type(keyword, str, key_path, idx=idx)
            parsed_dependencies.add(ComparableRequirement(keyword))

        return sorted(combine_requirements(parsed_dependencies))
Example #10
    def make(self) -> str:
        """
		Make the recipe.

		:returns: The ``meta.yaml`` recipe as a string.
		"""

        # find the download URL
        sdist_url = self.get_sdist_url()

        runtime_requirements = self.get_runtime_requirements()
        host_requirements = sorted(set(combine_requirements(
            runtime_requirements,
            *self.config["requires"],
            normalize_func=str,
        )))

        project_license = license_lookup.get(self.config["license-key"],
                                             self.config["license-key"])

        environment = Environment(loader=BaseLoader(), undefined=StrictUndefined)  # nosec: B701
        template = environment.from_string(importlib_resources.read_text("mkrecipe", "recipe_template.ymlt"))
        config = {k.replace('-', '_'): v for k, v in self.config.items()}

        return template.render(
            sdist_url=sdist_url,
            host_requirements=host_requirements,
            runtime_requirements=runtime_requirements,
            conda_full_description=self.make_conda_description(),
            url_lines=list(self.get_urls()),
            all_maintainers=sorted(self.get_maintainers()),
            project_license=project_license,
            **config,
        )
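
The rendering in make() and make_for_wheel() follows the standard Jinja2 from_string pattern. A minimal sketch; the template text below is illustrative, not the real recipe_template.ymlt:

# StrictUndefined makes a missing template variable raise an error
# instead of silently rendering as an empty string.
from jinja2 import BaseLoader, Environment, StrictUndefined

environment = Environment(loader=BaseLoader(), undefined=StrictUndefined)  # nosec: B701
template = environment.from_string(
    "requirements:\n{% for req in requires %}  - {{ req }}\n{% endfor %}")

print(template.render(requires=["setuptools", "wheel"]))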
Example #11
def load_toml(filename: PathLike) -> ConfigDict:
	"""
	Load the ``pyproject-devenv`` configuration mapping from the given TOML file.

	:param filename:
	"""

	filename = PathPlus(filename)

	devenv_config = _DevenvConfig.load(filename, set_defaults=True)

	if devenv_config.project is None:
		raise BadConfigError(f"The '[project]' table was not found in {filename.as_posix()!r}")

	dynamic = set(devenv_config.project["dynamic"])
	project_dir = filename.parent

	if "dependencies" in dynamic:
		if (project_dir / "requirements.txt").is_file():
			dependencies = read_requirements(project_dir / "requirements.txt", include_invalid=True)[0]
			devenv_config.project["dependencies"] = sorted(combine_requirements(dependencies))
		else:
			raise BadConfigError(
					"'project.dependencies' was listed as a dynamic field "
					"but no 'requirements.txt' file was found."
					)

	if devenv_config.build_system is None:
		build_dependencies = None
	else:
		build_dependencies = devenv_config.build_system["requires"]

	return {
			"name": devenv_config.project["name"],
			"dependencies": devenv_config.project["dependencies"],
			"optional_dependencies": devenv_config.project["optional-dependencies"],
			"build_dependencies": build_dependencies,
			}
Example #12
    def parse_dependencies(self, config: Dict[str, TOML_TYPES]) -> List[ComparableRequirement]:
        """
        Parse the :pep621:`dependencies` key, giving the dependencies of the project.

        * **Format**: :toml:`Array` of :pep:`508` strings
        * **Core Metadata**: :core-meta:`Requires-Dist`

        Each string MUST be formatted as a valid :pep:`508` string.

        :bold-title:`Example:`

        .. code-block:: TOML

            [project]
            dependencies = [
                "httpx",
                "gidgethub[httpx]>4.0.0",
                "django>2.1; os_name != 'nt'",
                "django>2.0; os_name == 'nt'"
            ]

        :param config: The unparsed TOML config for the :pep621:`project table <table-name>`.
        """

        parsed_dependencies = set()

        key_path = [self.table_name, "dependencies"]

        self.assert_sequence_not_str(config["dependencies"], key_path)

        for idx, keyword in enumerate(config["dependencies"]):
            self.assert_indexed_type(keyword, str, key_path, idx=idx)
            parsed_dependencies.add(ComparableRequirement(keyword))

        return sorted(combine_requirements(parsed_dependencies))
Example #13
def make_pyproject(repo_path: pathlib.Path, templates: Environment) -> List[str]:
    """
	Create the ``pyproject.toml`` file for :pep:`517`.

	:param repo_path: Path to the repository root.
	:param templates:
	"""

    pyproject_file = PathPlus(repo_path / "pyproject.toml")

    data: DefaultDict[str, Any]

    if pyproject_file.is_file():
        data = DefaultDict(dom_toml.load(pyproject_file))
    else:
        data = DefaultDict()

    data.set_default("build-system", {})
    build_backend = "setuptools.build_meta"

    build_requirements_ = {
        "setuptools>=40.6.0",
        "wheel>=0.34.2",
        "whey",
        "repo-helper",
        *templates.globals["tox_build_requirements"],
        *data["build-system"].get("requires", []),
    }

    build_requirements = sorted(
        combine_requirements(ComparableRequirement(req) for req in build_requirements_))

    if templates.globals["use_whey"]:
        for old_dep in ["setuptools", "wheel"]:
            if old_dep in build_requirements:
                build_requirements.remove(old_dep)  # type: ignore

    if templates.globals["use_whey"]:
        build_backend = "whey"
    elif "whey" in build_requirements:
        build_requirements.remove("whey")  # type: ignore

    if "repo-helper" in build_requirements:
        build_requirements.remove("repo-helper")  # type: ignore

    data["build-system"]["requires"] = list(map(str, build_requirements))
    data["build-system"]["build-backend"] = build_backend

    data["project"] = DefaultDict(data.get("project", {}))
    data["project"]["name"] = templates.globals["pypi_name"]
    data["project"]["version"] = templates.globals["version"]
    data["project"]["description"] = templates.globals["short_desc"]
    data["project"]["readme"] = "README.rst"
    data["project"]["keywords"] = sorted(templates.globals["keywords"])
    data["project"]["dynamic"] = [
        "requires-python", "classifiers", "dependencies"
    ]
    data["project"]["authors"] = [{
        "name": templates.globals["author"],
        "email": templates.globals["email"]
    }]
    data["project"]["license"] = {"file": "LICENSE"}

    if templates.globals["requires_python"] is not None:
        data["project"]["dynamic"].pop(0)
        data["project"][
            "requires-python"] = f">={templates.globals['requires_python']}"

    url = "https://github.com/{username}/{repo_name}".format_map(
        templates.globals)
    data["project"]["urls"] = {
        "Homepage":
        url,
        "Issue Tracker":
        "https://github.com/{username}/{repo_name}/issues".format_map(
            templates.globals),
        "Source Code":
        url,
    }

    if templates.globals["enable_docs"]:
        data["project"]["urls"]["Documentation"] = templates.globals[
            "docs_url"]

    # extras-require

    data["project"]["optional-dependencies"] = {}

    for extra, dependencies in templates.globals["extras_require"].items():
        data["project"]["optional-dependencies"][extra] = list(
            map(str, dependencies))

    if not data["project"]["optional-dependencies"]:
        del data["project"]["optional-dependencies"]

    # entry-points

    if templates.globals["console_scripts"]:
        data["project"]["scripts"] = dict(
            split_entry_point(e) for e in templates.globals["console_scripts"])

    data["project"]["entry-points"] = {}

    for group, entry_points in templates.globals["entry_points"].items():
        data["project"]["entry-points"][group] = dict(
            split_entry_point(e) for e in entry_points)

    if not data["project"]["entry-points"]:
        del data["project"]["entry-points"]

    # tool
    data.set_default("tool", {})

    # tool.mkrecipe
    if templates.globals["enable_conda"]:
        data["tool"].setdefault("mkrecipe", {})
        data["tool"]["mkrecipe"]["conda-channels"] = templates.globals[
            "conda_channels"]

        if templates.globals["conda_extras"] in (["none"], ["all"]):
            data["tool"]["mkrecipe"]["extras"] = templates.globals[
                "conda_extras"][0]
        else:
            data["tool"]["mkrecipe"]["extras"] = templates.globals[
                "conda_extras"]
    else:
        if "mkrecipe" in data["tool"]:
            del data["tool"]["mkrecipe"]

    # tool.whey
    data["tool"].setdefault("whey", {})

    data["tool"]["whey"]["base-classifiers"] = templates.globals["classifiers"]

    python_versions = set()
    python_implementations = set()

    for py_version in templates.globals["python_versions"]:
        py_version = str(py_version)

        if pre_release_re.match(py_version):
            continue

        pypy_version_m = _pypy_version_re.match(py_version)

        if py_version.startswith('3'):
            python_versions.add(py_version)
            python_implementations.add("CPython")

        elif pypy_version_m:
            python_implementations.add("PyPy")
            python_versions.add(f"3.{pypy_version_m.group(1)}")

    data["tool"]["whey"]["python-versions"] = natsorted(python_versions)
    data["tool"]["whey"]["python-implementations"] = sorted(
        python_implementations)

    data["tool"]["whey"]["platforms"] = templates.globals["platforms"]

    license_ = templates.globals["license"]
    data["tool"]["whey"]["license-key"] = {
        v: k
        for k, v in license_lookup.items()
    }.get(license_, license_)

    if templates.globals["source_dir"]:
        raise NotImplementedError(
            "Whey does not support custom source directories")

    elif templates.globals["import_name"] != templates.globals["pypi_name"]:
        if templates.globals["stubs_package"]:
            data["tool"]["whey"]["package"] = "{import_name}-stubs".format_map(
                templates.globals)
        else:
            data["tool"]["whey"]["package"] = posixpath.join(
                # templates.globals["source_dir"],
                templates.globals["import_name"].split('.', 1)[0], )

    if templates.globals["manifest_additional"]:
        data["tool"]["whey"]["additional-files"] = templates.globals[
            "manifest_additional"]
    elif "additional-files" in data["tool"]["whey"]:
        del data["tool"]["whey"]["additional-files"]

    if not templates.globals["enable_tests"] and not templates.globals[
            "stubs_package"]:
        data["tool"]["importcheck"] = data["tool"].get("importcheck", {})

    if templates.globals["enable_docs"]:
        data["tool"]["sphinx-pyproject"] = make_sphinx_config_dict(templates)
    else:
        data["tool"].pop("sphinx-pyproject", None)

    # [tool.mypy]
    # This is added regardless of the supported mypy version.
    # It isn't removed from setup.cfg unless the version is 0.901 or above
    data["tool"].setdefault("mypy", {})

    data["tool"]["mypy"].update(_get_mypy_config(templates.globals))

    if templates.globals["mypy_plugins"]:
        data["tool"]["mypy"]["plugins"] = templates.globals["mypy_plugins"]

    # [tool.dependency-dash]
    data["tool"].setdefault("dependency-dash", {})
    data["tool"]["dependency-dash"]["requirements.txt"] = {"order": 10}

    if templates.globals["enable_tests"]:
        data["tool"]["dependency-dash"]["tests/requirements.txt"] = {
            "order": 20,
            "include": False,
        }

    if templates.globals["enable_docs"]:
        data["tool"]["dependency-dash"]["doc-source/requirements.txt"] = {
            "order": 30,
            "include": False,
        }

    # [tool.snippet-fmt]
    data["tool"].setdefault("snippet-fmt", {})
    data["tool"]["snippet-fmt"].setdefault("languages", {})
    data["tool"]["snippet-fmt"].setdefault("directives", ["code-block"])

    data["tool"]["snippet-fmt"]["languages"]["python"] = {"reformat": True}
    data["tool"]["snippet-fmt"]["languages"]["TOML"] = {"reformat": True}
    data["tool"]["snippet-fmt"]["languages"]["ini"] = {}
    data["tool"]["snippet-fmt"]["languages"]["json"] = {}

    if not data["tool"]:
        del data["tool"]

    # TODO: managed message
    dom_toml.dump(data, pyproject_file, encoder=dom_toml.TomlEncoder)

    return [pyproject_file.name]
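
At its core, make_pyproject is a load-modify-dump round trip through dom_toml. A cut-down sketch, assuming dom_toml and domdf_python_tools are installed; the tables written here are a stand-in for the real output:

import dom_toml
from domdf_python_tools.paths import PathPlus

pyproject_file = PathPlus("pyproject.toml")

# Start from the existing file if there is one, as make_pyproject does.
data = dom_toml.load(pyproject_file) if pyproject_file.is_file() else {}
data.setdefault("build-system", {})
data["build-system"]["requires"] = ["setuptools>=40.6.0", "wheel>=0.34.2"]
data["build-system"]["build-backend"] = "setuptools.build_meta"

dom_toml.dump(data, pyproject_file, encoder=dom_toml.TomlEncoder)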
Example #14
def classifiers(
    add: bool,
    status: Optional[int] = None,
    library: Optional[bool] = None,
):
    """
	Suggest trove classifiers based on repository metadata.
	"""

    # stdlib
    import sys

    # 3rd party
    from consolekit.input import choice, confirm
    from domdf_python_tools.paths import PathPlus
    from natsort import natsorted
    from shippinglabel.classifiers import classifiers_from_requirements
    from shippinglabel.requirements import combine_requirements, read_requirements

    # this package
    from repo_helper.core import RepoHelper

    rh = RepoHelper(PathPlus.cwd())
    rh.load_settings()
    config = rh.templates.globals
    suggested_classifiers = set()
    pkg_dir = rh.target_repo / config["import_name"]

    for language in detect_languages(pkg_dir):
        suggested_classifiers.add(f"Programming Language :: {language}")

    # If not a tty, assume default options are False
    if not sys.stdout.isatty():
        if add is None:
            add = False
        if library is None:
            library = False

    if status is None and sys.stdout.isatty():
        click.echo("What is the Development Status of this project?")
        status = choice(text="Status",
                        options=development_status_options,
                        start_index=1) + 1

    if status is not None:
        status_string = f"Development Status :: {status} - {development_status_options[status - 1]}"
        suggested_classifiers.add(status_string)

    if library is None:
        library = confirm("Is this a library for developers?")

    if library:
        suggested_classifiers.add(
            "Topic :: Software Development :: Libraries :: Python Modules")
        suggested_classifiers.add("Intended Audience :: Developers")

    lib_requirements = combine_requirements(
        read_requirements(rh.target_repo / "requirements.txt")[0])

    suggested_classifiers.update(
        classifiers_from_requirements(lib_requirements))

    # file_content = dedent(
    # 		f"""\
    # # Remove any classifiers you don't think are relevant.
    # # Lines starting with a # will be discarded.
    # """
    # 		)
    # file_content += "\n".join(natsorted(suggested_classifiers))
    #
    # def remove_invalid_entries(line):
    # 	line = line.strip()
    # 	if not line:
    # 		return False
    # 	elif line.startswith("#"):
    # 		return False
    # 	else:
    # 		return True
    #
    # suggested_classifiers = set(
    # 		filter(remove_invalid_entries, (click.edit(file_content) or file_content).splitlines())
    # 		)

    if not suggested_classifiers:
        if sys.stdout.isatty():
            click.echo("Sorry, I've nothing to suggest 😢")

        sys.exit(1)

    if sys.stdout.isatty():
        click.echo(
            "Based on what you've told me I think the following classifiers are appropriate:"
        )
        for classifier in natsorted(suggested_classifiers):
            click.echo(f" - {classifier}")
    else:
        for classifier in natsorted(suggested_classifiers):
            click.echo(classifier)

    if add is None:
        add = confirm(
            "Do you want to add these to the 'repo_helper.yml' file?")

    if add:

        # this package
        from repo_helper.configuration import YamlEditor

        yaml = YamlEditor()
        yaml.update_key(rh.target_repo / "repo_helper.yml",
                        "classifiers",
                        suggested_classifiers,
                        sort=True)
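
The classifier suggestions above come partly from classifiers_from_requirements, which maps well-known dependencies to trove classifiers. A small sketch; the exact classifiers emitted depend on shippinglabel's mapping:

from shippinglabel.classifiers import classifiers_from_requirements
from shippinglabel.requirements import ComparableRequirement

for classifier in classifiers_from_requirements([ComparableRequirement("pytest")]):
    print(classifier)  # e.g. 'Framework :: Pytest'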
Example #15
def parse_extra_requirements_file(requirements: str) -> List[ComparableRequirement]:
	requirements_list = [x for x in requirements.split('\n') if x]
	requirements_set = set(map(ComparableRequirement, requirements_list))
	return sorted(combine_requirements(requirements_set))
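
Usage sketch for parse_extra_requirements_file, with illustrative input; duplicate entries are merged by combine_requirements:

text = "pytest>=6.0\npytest-cov\npytest<8.0\n"

for req in parse_extra_requirements_file(text):
    print(req)  # pytest<8.0,>=6.0 then pytest-cov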
Example #16
def stubs(add: Optional[bool] = None,
          force_tty: bool = False,
          no_pager: bool = False):
    """
	Suggest :pep:`561` type stubs.
	"""

    # stdlib
    import shutil
    import sys
    from itertools import chain

    # 3rd party
    import tabulate
    from apeye import URL
    from apeye.requests_url import TrailingRequestsURL
    from consolekit.input import confirm
    from domdf_python_tools.paths import PathPlus
    from domdf_python_tools.stringlist import StringList
    from shippinglabel import normalize
    from shippinglabel.pypi import PYPI_API
    from shippinglabel.requirements import combine_requirements, read_requirements

    # this package
    from repo_helper.core import RepoHelper

    rh = RepoHelper(PathPlus.cwd())
    rh.load_settings()
    config = rh.templates.globals

    requirements_files = [rh.target_repo / "requirements.txt"]

    if config["enable_tests"]:
        requirements_files.append(rh.target_repo / config["tests_dir"] /
                                  "requirements.txt")

    requirements_files.extend(
        (rh.target_repo / config["import_name"]).iterchildren("**/requirements.txt"))

    all_requirements = set(
        chain.from_iterable(
            read_requirements(file, include_invalid=True)[0]
            for file in requirements_files))

    stubs_file = rh.target_repo / "stubs.txt"

    if stubs_file.is_file():
        existing_stubs, stub_comments, invalid_stubs = read_requirements(
            stubs_file, include_invalid=True)
    else:
        existing_stubs = set()
        stub_comments, invalid_stubs = [], []

    suggestions = {}

    for requirement in all_requirements:
        if normalize(requirement.name) in {"typing-extensions"}:
            continue

        types_url = TrailingRequestsURL(
            PYPI_API / f"types-{requirement.name.lower()}" / "json/")
        stubs_url = TrailingRequestsURL(
            PYPI_API / f"{requirement.name.lower()}-stubs" / "json/")

        response = stubs_url.head()
        if response.status_code == 404:
            # No stubs found for -stubs
            response = types_url.head()
            if response.status_code == 404:
                # No stubs found for types-
                continue
            else:
                response_url = URL(response.url)
                suggestions[str(requirement)] = response_url.parent.name
                # print(requirement, response.url)
        else:
            response_url = URL(response.url)
            suggestions[str(requirement)] = response_url.parent.name
            # print(requirement, response.url)

    if not suggestions:
        if sys.stdout.isatty() or force_tty:
            click.echo("No stubs to suggest.")
        sys.exit(1)

    if sys.stdout.isatty() or force_tty:

        table = StringList([
            "Suggestions",
            "-----------",
            tabulate.tabulate(suggestions.items(),
                              headers=["Requirement", "Stubs"]),
        ])
        table.blankline(ensure_single=True)

        if no_pager or len(table) <= shutil.get_terminal_size().lines:
            click.echo('\n'.join(table))
        else:
            click.echo_via_pager('\n'.join(table))

        if add is None:
            add = confirm("Do you want to add these to the 'stubs.txt' file?")

        if add:
            new_stubs = sorted(
                combine_requirements(*existing_stubs, *suggestions.values()))

            stubs_file.write_lines([
                *stub_comments,
                *invalid_stubs,
                *map(str, new_stubs),
            ])

    else:
        for stub in suggestions.values():
            click.echo(stub)

    sys.exit(0)
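
The stub lookup above boils down to probing the PyPI JSON API and treating a 404 as "no such stub package". A rough equivalent using plain requests; the package name is illustrative:

import requests

name = "attrs"
for candidate in (f"{name}-stubs", f"types-{name}"):
    response = requests.head(f"https://pypi.org/pypi/{candidate}/json", allow_redirects=True)
    if response.status_code == 200:
        print(f"Found stubs for {name}: {candidate}")
        break
else:
    print(f"No stubs found for {name}")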
Example #17
    def parse_optional_dependencies(
        self,
        config: Dict[str, TOML_TYPES],
    ) -> Dict[str, List[ComparableRequirement]]:
        """
		Parse the :pep621:`optional-dependencies` table, giving the optional dependencies of the project.

		* **Format**: :toml:`Table` with values of :toml:`arrays <Array>` of :pep:`508` strings
		* **Core Metadata**: :core-meta:`Requires-Dist` and :core-meta:`Provides-Extra`

		.. raw:: html

			</br>

		* The keys specify an extra, and must be valid Python identifiers.
		* The values are arrays of strings, which must be valid :pep:`508` strings.

		:bold-title:`Example:`

		.. code-block:: TOML

			[project.optional-dependencies]
			test = [
			  "pytest < 5.0.0",
			  "pytest-cov[all]"
			]

		:param config: The unparsed TOML config for the :pep621:`project table <table-name>`.
		"""

        parsed_optional_dependencies: Dict[str, Set[ComparableRequirement]] = {}

        err_template = (
            f"Invalid type for 'project.optional-dependencies{{idx_string}}': "
            f"expected {dict!r}, got {{actual_type!r}}")

        optional_dependencies: Mapping[str, Any] = config["optional-dependencies"]

        if not isinstance(optional_dependencies, dict):
            raise TypeError(err_template.format(idx_string='', actual_type=type(optional_dependencies)))

        for extra, dependencies in optional_dependencies.items():
            if not extra.isidentifier():
                raise TypeError(
                    f"Invalid extra name {extra!r}: must be a valid Python identifier"
                )

            self.assert_sequence_not_str(
                dependencies, path=["project", "optional-dependencies", extra])

            parsed_optional_dependencies[extra] = set()

            for idx, dep in enumerate(dependencies):
                if isinstance(dep, str):
                    parsed_optional_dependencies[extra].add(
                        ComparableRequirement(dep))
                else:
                    raise TypeError(
                        err_template.format(idx_string=f'.{extra}[{idx}]',
                                            actual_type=type(dep)))

        return {
            e: sorted(combine_requirements(d))
            for e, d in parsed_optional_dependencies.items()
        }
Example #18
def requirements(
    no_pager: bool = False,
    depth: int = -1,
    concise: bool = False,
    no_venv: bool = False,
):
    """
	Lists the requirements of this library, and their dependencies.
	"""

    # stdlib
    import re
    import shutil

    # 3rd party
    from domdf_python_tools.compat import importlib_metadata
    from domdf_python_tools.iterative import make_tree
    from domdf_python_tools.paths import PathPlus, in_directory
    from domdf_python_tools.stringlist import StringList
    from packaging.requirements import Requirement
    from shippinglabel.requirements import (ComparableRequirement,
                                            combine_requirements,
                                            list_requirements,
                                            read_requirements)

    # this package
    from repo_helper.core import RepoHelper

    rh = RepoHelper(PathPlus.cwd())
    rh.load_settings(allow_unknown_keys=True)

    with in_directory(rh.target_repo):

        buf = StringList([
            f"{rh.templates.globals['pypi_name']}=={rh.templates.globals['version']}"
        ])
        raw_requirements = sorted(read_requirements("requirements.txt")[0])
        tree: List[Union[str, List[str], List[Union[str, List]]]] = []
        venv_dir = (rh.target_repo / "venv")

        if venv_dir.is_dir() and not no_venv:
            # Use virtualenv as it exists
            search_path = []

            for directory in (venv_dir / "lib").glob("python3.*"):
                search_path.append(str(directory / "site-packages"))

            importlib_metadata.DistributionFinder.Context.path = search_path  # type: ignore

        if concise:
            concise_requirements = []

            def flatten(iterable: Iterable[Union[Requirement, Iterable]]):
                for item in iterable:
                    if isinstance(item, str):
                        yield item
                    else:
                        yield from flatten(item)  # type: ignore

            for requirement in raw_requirements:
                concise_requirements.append(requirement)
                # TODO: remove "extra == " marker
                for req in flatten(list_requirements(str(requirement), depth=depth - 1)):
                    concise_requirements.append(ComparableRequirement(re.sub('; extra == ".*"', '', req)))

            concise_requirements = sorted(
                set(combine_requirements(concise_requirements)))
            tree = list(map(str, concise_requirements))

        else:
            for requirement in raw_requirements:
                tree.append(str(requirement))
                deps = list(list_requirements(str(requirement), depth=depth - 1))
                if deps:
                    tree.append(deps)

        buf.extend(make_tree(tree))

        if shutil.get_terminal_size().lines >= len(buf):
            # Don't use the pager if the output has fewer lines than the terminal height
            no_pager = True

        if no_pager:
            click.echo(str(buf))
        else:
            click.echo_via_pager(str(buf))
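
The tree display is built from list_requirements, which yields a nested structure of requirement strings for an installed distribution, rendered by make_tree with box-drawing characters. A minimal sketch; "sphinx" stands in for any installed package:

from domdf_python_tools.iterative import make_tree
from shippinglabel.requirements import list_requirements

tree = list(list_requirements("sphinx", depth=1))
for line in make_tree(tree):
    print(line)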