Example #1
    def merge_requirements(self) -> List[str]:
        current_requirements_, comments, invalid_lines = read_requirements(
            self.req_file, include_invalid=True)

        current_requirements = list(current_requirements_)
        current_requirements.append(ComparableRequirement("sphinx>=3.0.3"))

        for line in invalid_lines:
            if line.startswith("git+"):
                if line == "git+https://github.com/sphinx-toolbox/html-section.git":
                    continue
                comments.append(line)
            else:
                warnings.warn(f"Ignored invalid requirement {line!r}")

        other_themes = list(self.theme_versions.keys())
        theme_name = normalize(self._globals["sphinx_html_theme"])
        if theme_name in other_themes:
            other_themes.remove(theme_name)

        for req in current_requirements:
            req.name = normalize(req.name)
            # if req.name not in self.get_target_requirement_names() and req.name not in self.theme_versions.keys():
            if req.name not in other_themes:
                if req.name in {"sphinx-autodoc-typehints", "autodocsumm"}:
                    continue
                else:
                    self.target_requirements.add(req)

        self.target_requirements = set(
            combine_requirements(self.target_requirements))

        return comments
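
The merge above relies on ``combine_requirements`` from ``shippinglabel.requirements`` folding duplicate names into a single requirement with a merged specifier set. A minimal sketch of that behaviour (package names are illustrative only):

from shippinglabel.requirements import ComparableRequirement, combine_requirements

reqs = {
    ComparableRequirement("sphinx>=3.0.3"),
    ComparableRequirement("sphinx<5"),
    ComparableRequirement("autodocsumm"),
}

# Duplicate names collapse into one entry with the specifiers combined,
# e.g. 'autodocsumm' and 'sphinx<5,>=3.0.3' (specifier order may vary).
for req in sorted(combine_requirements(reqs)):
    print(req)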
Example #2
    @staticmethod
    def parse_name(config: Dict[str, TOML_TYPES]) -> str:
        """
		Parse the :pep621:`name` key, giving the name of the project.

		* **Format**: :toml:`String`
		* **Core Metadata**: :core-meta:`Name`

		This key is required, and must be defined statically.

		Tools SHOULD normalize this name, as specified by :pep:`503`,
		as soon as it is read for internal consistency.

		:bold-title:`Example:`

		.. code-block:: TOML

			[project]
			name = "spam"

		:param config: The unparsed TOML config for the :pep621:`project table <table-name>`.
		"""

        normalized_name = _NormalisedName(normalize(config["name"]))
        normalized_name.unnormalized = config["name"]

        # https://packaging.python.org/specifications/core-metadata/#name
        if not name_re.match(normalized_name):
            raise BadConfigError("The value for 'project.name' is invalid.")

        return normalized_name
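
The normalization mentioned in the docstring is the :pep:`503` rule: runs of ``-``, ``_`` and ``.`` collapse to a single ``-`` and the result is lower-cased. A self-contained sketch (the ``normalize`` helper used throughout these examples is assumed to behave equivalently):

import re


def pep503_normalize(name: str) -> str:
    # PEP 503: collapse runs of '-', '_' and '.' to a single '-', then lower-case.
    return re.sub(r"[-_.]+", "-", name).lower()


assert pep503_normalize("Sphinx_AutoDoc.typehints") == "sphinx-autodoc-typehints"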
Example #3
    def compile_target_requirements(self) -> None:

        theme_name = normalize(self._globals["sphinx_html_theme"])
        pypi_name = normalize(self._globals["pypi_name"])

        for name, specifier in self.theme_versions.items():
            if normalize(name) == theme_name:
                self.target_requirements.add(
                    ComparableRequirement(f"{name}{specifier}"))
                break
        else:
            self.target_requirements.add(ComparableRequirement(theme_name))

        for name, specifier in self.my_sphinx_extensions.items():
            if name != pypi_name:
                self.target_requirements.add(
                    ComparableRequirement(f"{name}{specifier}"))
Example #4
def requirement(requirement: str, file: Optional[str] = None) -> int:
    """
	Add a requirement.
	"""

    # 3rd party
    import click
    from consolekit.utils import abort
    from domdf_python_tools.paths import PathPlus, traverse_to_file
    from domdf_python_tools.stringlist import StringList
    from packaging.requirements import InvalidRequirement
    from packaging.specifiers import SpecifierSet
    from shippinglabel import normalize, normalize_keep_dot
    from shippinglabel.pypi import PYPI_API
    from shippinglabel.requirements import ComparableRequirement, combine_requirements, read_requirements

    repo_dir: PathPlus = traverse_to_file(PathPlus.cwd(), "repo_helper.yml",
                                          "git_helper.yml")

    if file is None:
        requirements_file = repo_dir / "requirements.txt"

        if not requirements_file.is_file():
            raise abort("'requirements.txt' not found.")

    else:
        requirements_file = PathPlus(file)

        if not requirements_file.is_file():
            raise abort(f"'{file}' not found.")

    try:
        req = ComparableRequirement(requirement)
    except InvalidRequirement as e:
        raise BadRequirement(requirement, e)

    response = (PYPI_API / req.name / "json/").get()
    if response.status_code != 200:
        raise click.BadParameter(f"No such project {req.name}")
    else:
        req.name = normalize(response.json()["info"]["name"])
        if not req.specifier:
            req.specifier = SpecifierSet(
                f">={response.json()['info']['version']}")

        click.echo(f"Adding requirement '{req}'")

    requirements, comments, invalid_lines = read_requirements(
        req_file=requirements_file,
        include_invalid=True,
        normalize_func=normalize_keep_dot,
    )

    requirements.add(req)

    buf = StringList([*comments, *invalid_lines])
    buf.extend(str(req) for req in sorted(combine_requirements(requirements)))
    requirements_file.write_lines(buf)

    return 0
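
``PYPI_API / req.name / "json/"`` appears to resolve to PyPI's JSON API, which is where the canonical project name and latest version above come from. A rough equivalent using ``requests`` directly (error handling trimmed; the helper name is made up for illustration):

import requests


def pypi_latest_version(project: str) -> str:
    # The PyPI JSON API exposes project metadata at /pypi/<project>/json.
    response = requests.get(f"https://pypi.org/pypi/{project}/json", timeout=10)
    response.raise_for_status()
    return response.json()["info"]["version"]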
Example #5
    def parse_entry_points(
            self, config: Dict[str, TOML_TYPES]) -> Dict[str, Dict[str, str]]:
        """
		Parse the :pep621:`entry-points` table.

		**Format**: :toml:`Table` of :toml:`tables <table>`, with keys and values of :toml:`strings <string>`

		Each sub-table's name is an entry point group.

		* Users MUST NOT create nested sub-tables but instead keep the entry point groups to only one level deep.
		* Users MUST NOT create sub-tables for ``console_scripts`` or ``gui_scripts``.
		  Use ``[project.scripts]`` and ``[project.gui-scripts]`` instead.

		See the `entry point specification`_ for more details.

		.. _entry point specification: https://packaging.python.org/specifications/entry-points/

		:bold-title:`Example:`

		.. code-block:: TOML

			[project.entry-points."spam.magical"]
			tomatoes = "spam:main_tomatoes"

			# pytest plugins refer to a module, so there is no ':obj'
			[project.entry-points.pytest11]
			nbval = "nbval.plugin"

		:param config: The unparsed TOML config for the :pep621:`project table <table-name>`.

		:rtype:

		.. latex:clearpage::
		"""

        entry_points = config["entry-points"]

        self.assert_type(entry_points, dict, ["project", "entry-points"])

        for group, sub_table in entry_points.items():

            self.assert_value_type(sub_table, dict,
                                   ["project", "entry-points", group])

            if normalize(group) == "console-scripts":
                name = construct_path(["project", "entry-points"])
                suggested_name = construct_path(["project", "scripts"])
                raise BadConfigError(
                    f"{name!r} may not contain a {group!r} sub-table. Use {suggested_name!r} instead."
                )
            elif normalize(group) == "gui-scripts":
                name = construct_path(["project", "entry-points"])
                suggested_name = construct_path(["project", "gui-scripts"])
                raise BadConfigError(
                    f"{name!r} may not contain a {group!r} sub-table. Use {suggested_name!r} instead."
                )

            for name, func in sub_table.items():
                self.assert_value_type(
                    func, str, ["project", "entry-points", group, name])

        return entry_points
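
A stripped-down version of the same validation, operating on a plain dict instead of the parser class (the function name and error messages here are illustrative, not part of the original API):

from typing import Dict


def check_entry_points(entry_points: Dict[str, Dict[str, str]]) -> None:
    # The console/gui script groups have dedicated tables and are rejected here.
    reserved = {"console-scripts": "project.scripts", "gui-scripts": "project.gui-scripts"}

    for group, sub_table in entry_points.items():
        normalized = group.lower().replace("_", "-")  # simplified normalisation
        if normalized in reserved:
            raise ValueError(
                f"'project.entry-points' may not contain a {group!r} sub-table. "
                f"Use {reserved[normalized]!r} instead.")
        for name, func in sub_table.items():
            if not isinstance(func, str):
                raise TypeError(f"Entry point {group}.{name} must be a string.")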
Example #6
def stubs(add: Optional[bool] = None,
          force_tty: bool = False,
          no_pager: bool = False):
    """
	Suggest :pep:`561` type stubs.
	"""

    # stdlib
    import shutil
    import sys
    from itertools import chain

    # 3rd party
    import click
    import tabulate
    from apeye import URL
    from apeye.requests_url import TrailingRequestsURL
    from consolekit.input import confirm
    from domdf_python_tools.paths import PathPlus
    from domdf_python_tools.stringlist import StringList
    from shippinglabel import normalize
    from shippinglabel.pypi import PYPI_API
    from shippinglabel.requirements import combine_requirements, read_requirements

    # this package
    from repo_helper.core import RepoHelper

    rh = RepoHelper(PathPlus.cwd())
    rh.load_settings()
    config = rh.templates.globals

    requirements_files = [rh.target_repo / "requirements.txt"]

    if config["enable_tests"]:
        requirements_files.append(rh.target_repo / config["tests_dir"] /
                                  "requirements.txt")

    requirements_files.extend(
        (rh.target_repo /
         config["import_name"]).iterchildren("**/requirements.txt"))

    all_requirements = set(
        chain.from_iterable(
            read_requirements(file, include_invalid=True)[0]
            for file in requirements_files))

    stubs_file = rh.target_repo / "stubs.txt"

    if stubs_file.is_file():
        existing_stubs, stub_comments, invalid_stubs = read_requirements(
            stubs_file, include_invalid=True)
    else:
        existing_stubs = set()
        stub_comments, invalid_stubs = [], []

    suggestions = {}

    for requirement in all_requirements:
        if normalize(requirement.name) in {"typing-extensions"}:
            continue

        types_url = TrailingRequestsURL(
            PYPI_API / f"types-{requirement.name.lower()}" / "json/")
        stubs_url = TrailingRequestsURL(
            PYPI_API / f"{requirement.name.lower()}-stubs" / "json/")

        response = stubs_url.head()
        if response.status_code == 404:
            # No stubs found for -stubs
            response = types_url.head()
            if response.status_code == 404:
                # No stubs found for types-
                continue
            else:
                response_url = URL(response.url)
                suggestions[str(requirement)] = response_url.parent.name
                # print(requirement, response.url)
        else:
            response_url = URL(response.url)
            suggestions[str(requirement)] = response_url.parent.name
            # print(requirement, response.url)

    if not suggestions:
        if sys.stdout.isatty() or force_tty:
            click.echo("No stubs to suggest.")
        sys.exit(1)

    if sys.stdout.isatty() or force_tty:

        table = StringList([
            "Suggestions",
            "-----------",
            tabulate.tabulate(suggestions.items(),
                              headers=["Requirement", "Stubs"]),
        ])
        table.blankline(ensure_single=True)

        if no_pager or len(table) <= shutil.get_terminal_size().lines:
            click.echo('\n'.join(table))
        else:
            click.echo_via_pager('\n'.join(table))

        if add is None:
            add = confirm("Do you want to add these to the 'stubs.txt' file?")

        if add:
            new_stubs = sorted(
                combine_requirements(*existing_stubs, *suggestions.values()))

            stubs_file.write_lines([
                *stub_comments,
                *invalid_stubs,
                *map(str, new_stubs),
            ])

    else:
        for stub in suggestions.values():
            click.echo(stub)

    sys.exit(0)
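
The lookup above follows the two common naming conventions for :pep:`561` stub-only distributions, ``<name>-stubs`` and ``types-<name>``. A minimal standalone check using ``requests`` (the helper name is illustrative):

from typing import Optional

import requests


def find_stub_package(project: str) -> Optional[str]:
    # Try '<project>-stubs' first, then 'types-<project>', mirroring the search above.
    for candidate in (f"{project.lower()}-stubs", f"types-{project.lower()}"):
        response = requests.head(f"https://pypi.org/pypi/{candidate}/json", allow_redirects=True)
        if response.status_code == 200:
            return candidate
    return None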
Example #7
    def load(
        cls: Type[_PP],
        filename: PathLike,
        set_defaults: bool = False,
    ) -> _PP:
        """
		Load the ``pyproject.toml`` configuration mapping from the given file.

		:param filename:
		:param set_defaults: If :py:obj:`True`, passes ``set_defaults=True`` to
			the :meth:`parse() <dom_toml.parser.AbstractConfigParser.parse>` method on
			:attr:`~.build_system_table_parser` and :attr:`~.project_table_parser`.
		"""

        filename = PathPlus(filename)

        project_dir = filename.parent
        config = dom_toml.load(filename)

        keys = set(config.keys())

        build_system_table: Optional[BuildSystemDict] = None
        project_table: Optional[ProjectDict] = None
        tool_table: Dict[str, Dict[str, Any]] = {}

        with in_directory(project_dir):
            if "build-system" in config:
                build_system_table = cls.build_system_table_parser.parse(
                    config["build-system"], set_defaults=set_defaults)
                keys.remove("build-system")

            if "project" in config:
                project_table = cls.project_table_parser.parse(
                    config["project"], set_defaults=set_defaults)
                keys.remove("project")

            if "tool" in config:
                tool_table = config["tool"]
                keys.remove("tool")

                for tool_name, tool_subtable in tool_table.items():
                    if tool_name in cls.tool_parsers:
                        tool_table[tool_name] = cls.tool_parsers[
                            tool_name].parse(tool_subtable)

        if keys:
            allowed_top_level = ("build-system", "project", "tool")

            for top_level_key in sorted(keys):
                if top_level_key in allowed_top_level:
                    continue

                if normalize(top_level_key) in allowed_top_level:
                    raise BadConfigError(
                        f"Unexpected top-level key {top_level_key!r}. "
                        f"Did you mean {normalize(top_level_key)!r}?", )

                raise BadConfigError(
                    f"Unexpected top-level key {top_level_key!r}. "
                    f"Only {word_join(allowed_top_level, use_repr=True)} are allowed.",
                )

        return cls(
            build_system=build_system_table,
            project=project_table,
            tool=tool_table,
        )
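
Only the ``build-system``, ``project`` and ``tool`` tables are accepted at the top level; the key check at the end enforces that and suggests the normalized spelling when a key merely differs in case or separators. A hedged usage sketch, assuming the enclosing class is ``PyProject`` from ``pyproject-parser``:

from pyproject_parser import PyProject

# Parses [build-system], [project] and any registered [tool.*] sub-tables.
# An unexpected top-level key such as [Project] raises BadConfigError with a
# "Did you mean 'project'?" hint.
config = PyProject.load("pyproject.toml")
print(config.project["name"] if config.project else "no [project] table")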
Example #8
    def normalise_underscore(name: str) -> str:
        return normalize(name.strip()).replace('-', '_')
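
A self-contained equivalent of the helper above, assuming ``normalize`` implements the :pep:`503` rule shown earlier:

import re


def normalise_underscore(name: str) -> str:
    # PEP 503 normalise, then swap '-' for '_' to get a module-style name.
    return re.sub(r"[-_.]+", "-", name.strip()).lower().replace('-', '_')


assert normalise_underscore("  My.Package-Name ") == "my_package_name"
assert normalise_underscore("repo_helper") == "repo_helper"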