Example No. 1
class Black(PythonToolBase):
    options_scope = "black"
    name = "Black"
    help = "The Black Python code formatter (https://black.readthedocs.io/)."

    default_version = "black==22.1.0"
    default_main = ConsoleScript("black")

    register_interpreter_constraints = True
    default_interpreter_constraints = ["CPython>=3.7,<4"]

    register_lockfile = True
    default_lockfile_resource = ("pants.backend.python.lint.black", "black.lock")
    default_lockfile_path = "src/python/pants/backend/python/lint/black/black.lock"
    default_lockfile_url = git_url(default_lockfile_path)
    default_extra_requirements = ['typing-extensions>=3.10.0.0; python_version < "3.10"']

    skip = SkipOption("fmt", "lint")
    args = ArgsListOption(example="--target-version=py37 --quiet")
    export = ExportToolOption()
    config = FileOption(
        "--config",
        default=None,
        advanced=True,
        help=lambda cls: softwrap(
            f"""
            Path to a TOML config file understood by Black
            (https://github.com/psf/black#configuration-format).

            Setting this option will disable `[{cls.options_scope}].config_discovery`. Use
            this option if the config is located in a non-standard location.
            """
        ),
    )
    config_discovery = BoolOption(
        "--config-discovery",
        default=True,
        advanced=True,
        help=lambda cls: softwrap(
            f"""
            If true, Pants will include any relevant pyproject.toml config files during runs.

            Use `[{cls.options_scope}].config` instead if your config is in a
            non-standard location.
            """
        ),
    )

    def config_request(self, dirs: Iterable[str]) -> ConfigFilesRequest:
        # Refer to https://black.readthedocs.io/en/stable/usage_and_configuration/the_basics.html#where-black-looks-for-the-file
        # for how Black discovers config.
        candidates = {os.path.join(d, "pyproject.toml"): b"[tool.black]" for d in ("", *dirs)}
        return ConfigFilesRequest(
            specified=self.config,
            specified_option_name=f"[{self.options_scope}].config",
            discovery=self.config_discovery,
            check_content=candidates,
        )
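
To make the content-based discovery in Black's `config_request` concrete, here is a minimal standalone sketch (plain Python, not Pants' `ConfigFilesRequest` machinery) of how candidate `pyproject.toml` files could be checked for a `[tool.black]` marker; the directory names in the usage line are hypothetical.

import os
from pathlib import Path


def discover_black_configs(dirs, marker=b"[tool.black]"):
    """Return candidate pyproject.toml paths that exist and contain the marker."""
    candidates = [os.path.join(d, "pyproject.toml") for d in ("", *dirs)]
    return [c for c in candidates if Path(c).is_file() and marker in Path(c).read_bytes()]


# Hypothetical usage: check the build root plus two source directories.
print(discover_black_configs(["src/python", "tests/python"]))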
Example No. 2
class Hadolint(TemplatedExternalTool):
    options_scope = "hadolint"
    name = "Hadolint"
    help = "A linter for Dockerfiles."

    default_version = "v2.10.0"
    default_known_versions = [
        "v2.10.0|macos_x86_64|59f0523069a857ae918b8ac0774230013f7bcc00c1ea28119c2311353120867a|2514960",
        "v2.10.0|macos_arm64 |59f0523069a857ae918b8ac0774230013f7bcc00c1ea28119c2311353120867a|2514960",  # same as mac x86
        "v2.10.0|linux_x86_64|8ee6ff537341681f9e91bae2d5da451b15c575691e33980893732d866d3cefc4|2301804",
        "v2.10.0|linux_arm64 |b53d5ab10707a585c9e72375d51b7357522300b5329cfa3f91e482687176e128|27954520",
    ]
    default_url_template = (
        "https://github.com/hadolint/hadolint/releases/download/{version}/hadolint-{platform}"
    )
    default_url_platform_mapping = {
        "macos_arm64": "Darwin-x86_64",
        "macos_x86_64": "Darwin-x86_64",
        "linux_arm64": "Linux-arm64",
        "linux_x86_64": "Linux-x86_64",
    }

    skip = SkipOption("lint")
    args = ArgsListOption(example="--format json")
    config = FileOption(
        "--config",
        default=None,
        advanced=True,
        help=lambda cls: softwrap(f"""
            Path to a YAML config file understood by Hadolint
            (https://github.com/hadolint/hadolint#configure).

            Setting this option will disable `[{cls.options_scope}].config_discovery`. Use
            this option if the config is located in a non-standard location.
            """),
    )
    config_discovery = BoolOption(
        "--config-discovery",
        default=True,
        advanced=True,
        help=lambda cls: softwrap(f"""
            If true, Pants will include all relevant config files during runs
            (`.hadolint.yaml` and `.hadolint.yml`).

            Use `[{cls.options_scope}].config` instead if your config is in a
            non-standard location.
            """),
    )

    def config_request(self) -> ConfigFilesRequest:
        # Refer to https://github.com/hadolint/hadolint#configure for how config files are
        # discovered.
        return ConfigFilesRequest(
            specified=self.config,
            specified_option_name=f"[{self.options_scope}].config",
            discovery=self.config_discovery,
            check_existence=[".hadolint.yaml", ".hadolint.yml"],
        )
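
As a rough illustration of how the pipe-separated `default_known_versions` entries and the URL template above fit together (a hypothetical helper, not the actual `TemplatedExternalTool` machinery):

HADOLINT_URL_TEMPLATE = (
    "https://github.com/hadolint/hadolint/releases/download/{version}/hadolint-{platform}"
)
HADOLINT_PLATFORM_MAPPING = {
    "macos_arm64": "Darwin-x86_64",  # no native arm64 release, so the x86_64 binary is reused
    "macos_x86_64": "Darwin-x86_64",
    "linux_arm64": "Linux-arm64",
    "linux_x86_64": "Linux-x86_64",
}


def parse_known_version(entry):
    """Split a 'version|platform|sha256|size' entry into its four fields."""
    version, platform, sha256, size = (field.strip() for field in entry.split("|"))
    return version, platform, sha256, int(size)


def download_info(entry):
    """Return the download URL plus the expected SHA-256 and byte length."""
    version, platform, sha256, size = parse_known_version(entry)
    url = HADOLINT_URL_TEMPLATE.format(version=version, platform=HADOLINT_PLATFORM_MAPPING[platform])
    return url, sha256, size


print(download_info(
    "v2.10.0|linux_x86_64|8ee6ff537341681f9e91bae2d5da451b15c575691e33980893732d866d3cefc4|2301804"
))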
Example No. 3
class Bandit(PythonToolBase):
    options_scope = "bandit"
    name = "Bandit"
    help = "A tool for finding security issues in Python code (https://bandit.readthedocs.io)."

    # When upgrading, check if Bandit has started using PEP 517 (a `pyproject.toml` file). If so,
    # remove `setuptools` from `default_extra_requirements`.
    default_version = "bandit>=1.7.0,<1.8"
    default_extra_requirements = [
        "setuptools",
        # GitPython 3.1.20 was yanked because it breaks Python 3.8+, but Poetry's lockfile
        # generation still tries to use it. Upgrade this to the newest version once released or
        # when switching away from Poetry.
        "GitPython==3.1.18",
    ]
    default_main = ConsoleScript("bandit")

    register_lockfile = True
    default_lockfile_resource = ("pants.backend.python.lint.bandit", "bandit.lock")
    default_lockfile_path = "src/python/pants/backend/python/lint/bandit/bandit.lock"
    default_lockfile_url = git_url(default_lockfile_path)

    skip = SkipOption("lint")
    args = ArgsListOption(example="--skip B101,B308 --confidence")
    config = FileOption(
        "--config",
        default=None,
        advanced=True,
        help=(
            "Path to a Bandit YAML config file "
            "(https://bandit.readthedocs.io/en/latest/config.html)."
        ),
    )

    @property
    def config_request(self) -> ConfigFilesRequest:
        # Refer to https://bandit.readthedocs.io/en/latest/config.html. Note that there are no
        # default locations for Bandit config files.
        return ConfigFilesRequest(
            specified=self.config, specified_option_name=f"{self.options_scope}.config"
        )
Example No. 4
class TwineSubsystem(PythonToolBase):
    options_scope = "twine"
    name = "Twine"
    help = "The utility for publishing Python distributions to PyPI and other Python repositories."

    default_version = "twine>=3.7.1,<3.8"
    default_main = ConsoleScript("twine")

    # This explicit dependency resolves a weird behavior in poetry, where it would include a sys
    # platform constraint on "Windows" when this was included transitively from the twine
    # requirements.
    # See: https://github.com/pantsbuild/pants/pull/13594#issuecomment-968154931
    default_extra_requirements = ["colorama>=0.4.3"]

    register_interpreter_constraints = True
    default_interpreter_constraints = ["CPython>=3.7,<4"]

    register_lockfile = True
    default_lockfile_resource = ("pants.backend.python.subsystems",
                                 "twine.lock")
    default_lockfile_path = "src/python/pants/backend/python/subsystems/twine.lock"
    default_lockfile_url = git_url(default_lockfile_path)

    skip = SkipOption("publish")
    args = ArgsListOption(example="--skip-existing")
    config = FileOption(
        "--config",
        default=None,
        advanced=True,
        help=lambda cls:
        ("Path to a .pypirc config file to use. "
         "(https://packaging.python.org/specifications/pypirc/)\n\n"
         f"Setting this option will disable `[{cls.options_scope}].config_discovery`. Use "
         "this option if the config is located in a non-standard location."),
    )
    config_discovery = BoolOption(
        "--config-discovery",
        default=True,
        advanced=True,
        help=lambda cls:
        ("If true, Pants will include all relevant config files during runs "
         "(`.pypirc`).\n\n"
         f"Use `[{cls.options_scope}].config` instead if your config is in a "
         "non-standard location."),
    )
    ca_certs_path = StrOption(
        "--ca-certs-path",
        advanced=True,
        default="<inherit>",
        help=
        ("Path to a file containing PEM-format CA certificates used for verifying secure "
         "connections when publishing python distributions.\n\n"
         'Uses the value from `[GLOBAL].ca_certs_path` by default. Set to `"<none>"` to '
         "not use the default CA certificate."),
    )

    def config_request(self) -> ConfigFilesRequest:
        # Refer to https://twine.readthedocs.io/en/latest/#configuration for how config files are
        # discovered.
        return ConfigFilesRequest(
            specified=self.config,
            specified_option_name=f"[{self.options_scope}].config",
            discovery=self.config_discovery,
            check_existence=[".pypirc"],
        )

    def ca_certs_digest_request(
            self, default_ca_certs_path: str | None) -> CreateDigest | None:
        ca_certs_path: str | None = self.ca_certs_path
        if ca_certs_path == "<inherit>":
            ca_certs_path = default_ca_certs_path
        if not ca_certs_path or ca_certs_path == "<none>":
            return None

        # The certs file will typically not be in the repo, so we can't digest it via a PathGlobs.
        # Instead we manually create a FileContent for it.
        ca_certs_content = Path(ca_certs_path).read_bytes()
        chrooted_ca_certs_path = os.path.basename(ca_certs_path)
        return CreateDigest((FileContent(chrooted_ca_certs_path,
                                         ca_certs_content), ))
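
The `<inherit>` and `<none>` sentinels accepted by `[twine].ca_certs_path` resolve as in this small standalone restatement of the first half of `ca_certs_digest_request`; the file paths are purely hypothetical.

from __future__ import annotations


def resolve_ca_certs_path(option_value: str | None, global_default: str | None) -> str | None:
    """'<inherit>' falls back to [GLOBAL].ca_certs_path; '<none>' or an empty value disables custom CA certs."""
    path = option_value
    if path == "<inherit>":
        path = global_default
    if not path or path == "<none>":
        return None
    return path


assert resolve_ca_certs_path("<inherit>", "/etc/ssl/certs/ca-bundle.crt") == "/etc/ssl/certs/ca-bundle.crt"
assert resolve_ca_certs_path("<none>", "/etc/ssl/certs/ca-bundle.crt") is None
assert resolve_ca_certs_path("/custom/ca.pem", None) == "/custom/ca.pem"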
Example No. 5
class MyPy(PythonToolBase):
    options_scope = "mypy"
    name = "MyPy"
    help = "The MyPy Python type checker (http://mypy-lang.org/)."

    default_version = "mypy==0.910"
    default_main = ConsoleScript("mypy")

    # See `mypy/rules.py`. We only use these default constraints in some situations.
    register_interpreter_constraints = True
    default_interpreter_constraints = ["CPython>=3.7,<4"]

    register_lockfile = True
    default_lockfile_resource = ("pants.backend.python.typecheck.mypy",
                                 "mypy.lock")
    default_lockfile_path = "src/python/pants/backend/python/typecheck/mypy/mypy.lock"
    default_lockfile_url = git_url(default_lockfile_path)
    uses_requirements_from_source_plugins = True

    skip = SkipOption("check")
    args = ArgsListOption(example="--python-version 3.7 --disallow-any-expr")
    config = FileOption(
        "--config",
        default=None,
        advanced=True,
        help=lambda cls:
        ("Path to a config file understood by MyPy "
         "(https://mypy.readthedocs.io/en/stable/config_file.html).\n\n"
         f"Setting this option will disable `[{cls.options_scope}].config_discovery`. Use "
         f"this option if the config is located in a non-standard location."),
    )
    config_discovery = BoolOption(
        "--config-discovery",
        default=True,
        advanced=True,
        help=lambda cls:
        ("If true, Pants will include any relevant config files during "
         "runs (`mypy.ini`, `.mypy.ini`, `setup.cfg`, and `pyproject.toml`)."
         f"\n\nUse `[{cls.options_scope}].config` instead if your config is in a "
         f"non-standard location."),
    )
    _source_plugins = TargetListOption(
        "--source-plugins",
        advanced=True,
        help=
        ("An optional list of `python_sources` target addresses to load first-party "
         "plugins.\n\n"
         "You must also set `plugins = path.to.module` in your `mypy.ini`, and "
         "set the `[mypy].config` option in your `pants.toml`.\n\n"
         "To instead load third-party plugins, set the option `[mypy].extra_requirements` "
         "and set the `plugins` option in `mypy.ini`.\n\n"
         "Tip: it's often helpful to define a dedicated 'resolve' via "
         "`[python].resolves` for your MyPy plugins such as 'mypy-plugins' "
         "so that the third-party requirements used by your plugin, like `mypy`, do not "
         "mix with the rest of your project. Read that option's help message for more info "
         "on resolves."),
    )
    extra_type_stubs = StrListOption(
        "--extra-type-stubs",
        advanced=True,
        help=
        ("Extra type stub requirements to install when running MyPy.\n\n"
         "Normally, type stubs can be installed as typical requirements, such as putting "
         "them in `requirements.txt` or using a `python_requirement` target.\n\n"
         "Alternatively, you can use this option so that the dependencies are solely "
         "used when running MyPy and are not runtime dependencies.\n\n"
         "Expects a list of pip-style requirement strings, like "
         "`['types-requests==2.25.9']`."),
    )

    @property
    def config_request(self) -> ConfigFilesRequest:
        # Refer to https://mypy.readthedocs.io/en/stable/config_file.html.
        return ConfigFilesRequest(
            specified=self.config,
            specified_option_name=f"{self.options_scope}.config",
            discovery=self.config_discovery,
            check_existence=["mypy.ini", ".mypy.ini"],
            check_content={
                "setup.cfg": b"[mypy",
                "pyproject.toml": b"[tool.mypy"
            },
        )

    @property
    def source_plugins(self) -> UnparsedAddressInputs:
        return UnparsedAddressInputs(self._source_plugins, owning_address=None)

    def check_and_warn_if_python_version_configured(
            self, config: FileContent | None) -> bool:
        """Determine if we can dynamically set `--python-version` and warn if not."""
        configured = []
        if config and b"python_version" in config.content:
            configured.append(
                f"`python_version` in {config.path} (which is used because of either config "
                "discovery or the `[mypy].config` option)")
        if "--py2" in self.args:
            configured.append("`--py2` in the `--mypy-args` option")
        if any(arg.startswith("--python-version") for arg in self.args):
            configured.append("`--python-version` in the `--mypy-args` option")
        if configured:
            formatted_configured = " and you set ".join(configured)
            logger.warning(
                f"You set {formatted_configured}. Normally, Pants would automatically set this "
                "for you based on your code's interpreter constraints "
                f"({doc_url('python-interpreter-compatibility')}). Instead, it will "
                "use what you set.\n\n"
                "(Automatically setting the option allows Pants to partition your targets by their "
                "constraints, so that, for example, you can run MyPy on Python 2-only code and "
                "Python 3-only code at the same time. This feature may no longer work.)"
            )
        return bool(configured)
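
The warning above refers to Pants deriving `--python-version` from your interpreter constraints. As a very rough sketch of that idea (not Pants' actual partitioning logic), the lowest compatible version from a known universe could be computed like this; it assumes the third-party `packaging` distribution is available.

from __future__ import annotations

import re

from packaging.specifiers import SpecifierSet

INTERPRETER_UNIVERSE = ["2.7", "3.5", "3.6", "3.7", "3.8", "3.9", "3.10", "3.11"]


def minimum_python_version(constraints: list[str]) -> str | None:
    """Return the lowest universe version compatible with any constraint string (constraints are ORed)."""
    compatible = set()
    for constraint in constraints:
        # Drop a leading implementation name such as "CPython" or "PyPy", keeping only the version part.
        specifier = SpecifierSet(re.sub(r"^[A-Za-z]+", "", constraint).strip())
        compatible.update(v for v in INTERPRETER_UNIVERSE if specifier.contains(v))
    if not compatible:
        return None
    return min(compatible, key=lambda v: tuple(int(part) for part in v.split(".")))


print(minimum_python_version(["CPython>=3.7,<4"]))  # -> 3.7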
Example No. 6
class Pylint(PythonToolBase):
    options_scope = "pylint"
    name = "Pylint"
    help = "The Pylint linter for Python code (https://www.pylint.org/)."

    default_version = "pylint>=2.11.0,<2.12"
    default_main = ConsoleScript("pylint")

    register_lockfile = True
    default_lockfile_resource = ("pants.backend.python.lint.pylint",
                                 "pylint.lock")
    default_lockfile_path = "src/python/pants/backend/python/lint/pylint/pylint.lock"
    default_lockfile_url = git_url(default_lockfile_path)
    uses_requirements_from_source_plugins = True

    skip = SkipOption("lint")
    args = ArgsListOption(
        example="--ignore=foo.py,bar.py --disable=C0330,W0311")
    config = FileOption(
        "--config",
        default=None,
        advanced=True,
        help=lambda cls:
        ("Path to a config file understood by Pylint "
         "(http://pylint.pycqa.org/en/latest/user_guide/run.html#command-line-options).\n\n"
         f"Setting this option will disable `[{cls.options_scope}].config_discovery`. Use "
         f"this option if the config is located in a non-standard location."),
    )
    config_discovery = BoolOption(
        "--config-discovery",
        default=True,
        advanced=True,
        help=lambda cls:
        ("If true, Pants will include any relevant config files during "
         "runs (`.pylintrc`, `pylintrc`, `pyproject.toml`, and `setup.cfg`)."
         f"\n\nUse `[{cls.options_scope}].config` instead if your config is in a "
         f"non-standard location."),
    )
    _source_plugins = TargetListOption(
        "--source-plugins",
        advanced=True,
        help=
        ("An optional list of `python_sources` target addresses to load first-party "
         "plugins.\n\nYou must set the plugin's parent directory as a source root. For "
         "example, if your plugin is at `build-support/pylint/custom_plugin.py`, add "
         "'build-support/pylint' to `[source].root_patterns` in `pants.toml`. This is "
         "necessary for Pants to know how to tell Pylint to discover your plugin. See "
         f"{doc_url('source-roots')}\n\n"
         f"You must also set `load-plugins=$module_name` in your Pylint config file.\n\n"
         "While your plugin's code can depend on other first-party code and third-party "
         "requirements, all first-party dependencies of the plugin must live in the same "
         "directory or a subdirectory.\n\n"
         "To instead load third-party plugins, set the "
         "option `[pylint].extra_requirements` and set the `load-plugins` option in your "
         "Pylint config.\n\n"
         "Tip: it's often helpful to define a dedicated 'resolve' via "
         "`[python].resolves` for your Pylint plugins such as 'pylint-plugins' "
         "so that the third-party requirements used by your plugin, like `pylint`, do not "
         "mix with the rest of your project. Read that option's help message for more info "
         "on resolves."),
    )

    def config_request(self, dirs: Iterable[str]) -> ConfigFilesRequest:
        # Refer to http://pylint.pycqa.org/en/latest/user_guide/run.html#command-line-options for
        # how config files are discovered.
        return ConfigFilesRequest(
            specified=self.config,
            specified_option_name=f"[{self.options_scope}].config",
            discovery=self.config_discovery,
            check_existence=[
                ".pylintrc", *(os.path.join(d, "pylintrc") for d in ("", *dirs))
            ],
            check_content={
                "pyproject.toml": b"[tool.pylint]",
                "setup.cfg": b"[pylint."
            },
        )

    @property
    def source_plugins(self) -> UnparsedAddressInputs:
        return UnparsedAddressInputs(self._source_plugins, owning_address=None)
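
For a concrete sense of what `config_request` checks, the `check_existence` list above expands like this (standalone sketch; the directories are hypothetical):

import os


def pylint_existence_candidates(dirs):
    """Reproduce the check_existence list built in config_request above, for illustration only."""
    return [".pylintrc", *(os.path.join(d, "pylintrc") for d in ("", *dirs))]


print(pylint_existence_candidates(["src/app", "src/lib"]))
# -> ['.pylintrc', 'pylintrc', 'src/app/pylintrc', 'src/lib/pylintrc']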
Example No. 7
class Flake8(PythonToolBase):
    options_scope = "flake8"
    name = "Flake8"
    help = "The Flake8 Python linter (https://flake8.pycqa.org/)."

    default_version = "flake8>=3.9.2,<4.0"
    default_main = ConsoleScript("flake8")

    register_lockfile = True
    default_lockfile_resource = ("pants.backend.python.lint.flake8", "flake8.lock")
    default_lockfile_path = "src/python/pants/backend/python/lint/flake8/flake8.lock"
    default_lockfile_url = git_url(default_lockfile_path)

    skip = SkipOption("lint")
    args = ArgsListOption(example="--ignore E123,W456 --enable-extensions H111")
    config = FileOption(
        "--config",
        default=None,
        advanced=True,
        help=lambda cls: (
            "Path to an INI config file understood by Flake8 "
            "(https://flake8.pycqa.org/en/latest/user/configuration.html).\n\n"
            f"Setting this option will disable `[{cls.options_scope}].config_discovery`. Use "
            f"this option if the config is located in a non-standard location."
        ),
    )
    config_discovery = BoolOption(
        "--config-discovery",
        default=True,
        advanced=True,
        help=lambda cls: (
            "If true, Pants will include any relevant config files during "
            "runs (`.flake8`, `flake8`, `setup.cfg`, and `tox.ini`)."
            f"\n\nUse `[{cls.options_scope}].config` instead if your config is in a "
            f"non-standard location."
        ),
    )
    _source_plugins = TargetListOption(
        "--source-plugins",
        advanced=True,
        help=(
            "An optional list of `python_sources` target addresses to load first-party "
            "plugins.\n\nYou must set the plugin's parent directory as a source root. For "
            "example, if your plugin is at `build-support/flake8/custom_plugin.py`, add "
            "'build-support/flake8' to `[source].root_patterns` in `pants.toml`. This is "
            "necessary for Pants to know how to tell Flake8 to discover your plugin. See "
            f"{doc_url('source-roots')}\n\nYou must also set `[flake8:local-plugins]` in "
            "your Flake8 config file. "
            "For example:\n\n"
            "```\n"
            "[flake8:local-plugins]\n"
            "    extension =\n"
            "        CUSTOMCODE = custom_plugin:MyChecker\n"
            "```\n\n"
            "While your plugin's code can depend on other first-party code and third-party "
            "requirements, all first-party dependencies of the plugin must live in the same "
            "directory or a subdirectory.\n\n"
            "To instead load third-party plugins, set the option "
            "`[flake8].extra_requirements`.\n\n"
            "Tip: it's often helpful to define a dedicated 'resolve' via "
            "`[python].resolves` for your Flake8 plugins such as 'flake8-plugins' "
            "so that the third-party requirements used by your plugin, like `flake8`, do not "
            "mix with the rest of your project. Read that option's help message for more info "
            "on resolves."
        ),
    )

    @property
    def config_request(self) -> ConfigFilesRequest:
        # See https://flake8.pycqa.org/en/latest/user/configuration.html#configuration-locations
        # for how Flake8 discovers config files.
        return ConfigFilesRequest(
            specified=self.config,
            specified_option_name=f"[{self.options_scope}].config",
            discovery=self.config_discovery,
            check_existence=["flake8", ".flake8"],
            check_content={"setup.cfg": b"[flake8]", "tox.ini": b"[flake8]"},
        )

    @property
    def source_plugins(self) -> UnparsedAddressInputs:
        return UnparsedAddressInputs(self._source_plugins, owning_address=None)
Example No. 8
class PythonSetup(Subsystem):
    options_scope = "python"
    help = "Options for Pants's Python backend."

    default_interpreter_constraints = ["CPython>=3.7,<4"]
    default_interpreter_universe = ["2.7", "3.5", "3.6", "3.7", "3.8", "3.9", "3.10", "3.11"]

    interpreter_constraints = StrListOption(
        "--interpreter-constraints",
        default=default_interpreter_constraints,
        help=softwrap(
            """
            The Python interpreters your codebase is compatible with.

            These constraints are used as the default value for the `interpreter_constraints`
            field of Python targets.

            Specify with requirement syntax, e.g. 'CPython>=2.7,<3' (A CPython interpreter with
            version >=2.7 AND version <3) or 'PyPy' (A pypy interpreter of any version). Multiple
            constraint strings will be ORed together.
            """
        ),
        advanced=True,
        metavar="<requirement>",
    )
    interpreter_universe = StrListOption(
        "--interpreter-versions-universe",
        default=default_interpreter_universe,
        help=softwrap(
            f"""
            All known Python major/minor interpreter versions that may be used by either
            your code or tools used by your code.

            This is used by Pants to robustly handle interpreter constraints, such as knowing
            when generating lockfiles which Python versions to check if your code is using.

            This does not control which interpreter your code will use. Instead, to set your
            interpreter constraints, update `[python].interpreter_constraints`, the
            `interpreter_constraints` field, and relevant tool options like
            `[isort].interpreter_constraints` to tell Pants which interpreters your code
            actually uses. See {doc_url('python-interpreter-compatibility')}.

            All elements must be the minor and major Python version, e.g. '2.7' or '3.10'. Do
            not include the patch version.
            """
        ),
        advanced=True,
    )
    enable_resolves = BoolOption(
        "--enable-resolves",
        default=False,
        help=softwrap(
            f"""
            Set to true to enable lockfiles for user code. See `[python].resolves` for an
            explanation of this feature.

            Warning: the `generate-lockfiles` goal does not yet work if you have local
            requirements, regardless of using Pex vs. Poetry for the lockfile generator.
            Support is coming in a future Pants release. In the meantime, the workaround is to host
            the files in a custom repository with `[python-repos]`
            ({doc_url('python-third-party-dependencies#custom-repositories')}).

            You may also run into issues generating lockfiles when using Poetry as the generator,
            rather than Pex. See the option `[python].lockfile_generator` for more
            information.

            This option is mutually exclusive with `[python].requirement_constraints`. We strongly
            recommend using this option because it:

              1. Uses `--hash` to validate that all downloaded files are expected, which reduces\
                the risk of supply chain attacks.
              2. Enforces that all transitive dependencies are in the lockfile, whereas\
                constraints allow you to leave off dependencies. This ensures your build is more\
                stable and reduces the risk of supply chain attacks.
              3. Allows you to have multiple lockfiles in your repository.
            """
        ),
        advanced=True,
        mutually_exclusive_group="lockfile",
    )
    resolves = DictOption[str](
        "--resolves",
        default={"python-default": "3rdparty/python/default.lock"},
        help=softwrap(
            f"""
            A mapping of logical names to lockfile paths used in your project.

            Many organizations only need a single resolve for their whole project, which is
            a good default and often the simplest thing to do. However, you may need multiple
            resolves, such as if you use two conflicting versions of a requirement in
            your repository.

            If you only need a single resolve, run `{bin_name()} generate-lockfiles` to
            generate the lockfile.

            If you need multiple resolves:

              1. Via this option, define multiple resolve names and their lockfile paths.\
                The names should be meaningful to your repository, such as `data-science` or\
                `pants-plugins`.
              2. Set the default with `[python].default_resolve`.
              3. Update your `python_requirement` targets with the `resolve` field to declare which\
                resolve they should be available in. They default to `[python].default_resolve`,\
                so you only need to update targets that you want in non-default resolves.\
                (Often you'll set this via the `python_requirements` or `poetry_requirements`\
                target generators)
              4. Run `{bin_name()} generate-lockfiles` to generate the lockfiles. If the results\
                aren't what you'd expect, adjust the prior step.
              5. Update any targets like `python_source` / `python_sources`,\
                `python_test` / `python_tests`, and `pex_binary` which need to set a non-default\
                resolve with the `resolve` field.

            If a target can work with multiple resolves, you can either use the `parametrize`
            mechanism or manually create a distinct target per resolve. See {doc_url("targets")}
            for information about `parametrize`.

            For example:

                python_sources(
                    resolve=parametrize("data-science", "web-app"),
                )

            You can name the lockfile paths what you would like; Pants does not expect a
            certain file extension or location.

            Only applies if `[python].enable_resolves` is true.
            """
        ),
        advanced=True,
    )
    default_resolve = StrOption(
        "--default-resolve",
        default="python-default",
        help=softwrap(
            """
            The default value used for the `resolve` field.

            The name must be defined as a resolve in `[python].resolves`.
            """
        ),
        advanced=True,
    )
    _resolves_to_interpreter_constraints = DictOption["list[str]"](
        "--resolves-to-interpreter-constraints",
        help=softwrap(
            """
            Override the interpreter constraints to use when generating a resolve's lockfile
            with the `generate-lockfiles` goal.

            By default, each resolve from `[python].resolves` will use your
            global interpreter constraints set in `[python].interpreter_constraints`. With
            this option, you can override each resolve to use certain interpreter
            constraints, such as `{'data-science': ['==3.8.*']}`.

            Warning: this does NOT impact the interpreter constraints used by targets within the
            resolve, which is instead set by the option `[python].interpreter_constraints` and the
            `interpreter_constraints` field. It only impacts how the lockfile is generated.

            Pants will validate that the interpreter constraints of your code using a
            resolve are compatible with that resolve's own constraints. For example, if your
            code is set to use ['==3.9.*'] via the `interpreter_constraints` field, but it's
            using a resolve whose interpreter constraints are set to ['==3.7.*'], then
            Pants will error explaining the incompatibility.

            The keys must be defined as resolves in `[python].resolves`.
            """
        ),
        advanced=True,
    )
    invalid_lockfile_behavior = EnumOption(
        "--invalid-lockfile-behavior",
        default=InvalidLockfileBehavior.error,
        help=softwrap(
            """
            The behavior when a lockfile has requirements or interpreter constraints that are
            not compatible with what the current build is using.

            We recommend keeping the default of `error` for CI builds.

            Note that `warn` will still expect a Pants lockfile header, it only won't error if
            the lockfile is stale and should be regenerated. Use `ignore` to avoid needing a
            lockfile header at all, e.g. if you are manually managing lockfiles rather than
            using the `generate-lockfiles` goal.
            """
        ),
        advanced=True,
    )
    _lockfile_generator = EnumOption(
        "--lockfile-generator",
        default=LockfileGenerator.PEX,
        help=softwrap(
            f"""
            Whether to use Pex or Poetry with the `generate-lockfiles` goal.

            Poetry does not support these features:

              1) `[python-repos]` for custom indexes/cheeseshops.
              2) VCS (Git) requirements.
              3) `[GLOBAL].ca_certs_path`.

            If you use any of these features, you should use Pex.

            Several users have also had issues with how Poetry's lockfile generation handles
            environment markers for transitive dependencies; certain dependencies end up with
            nonsensical environment markers which cause the dependency to not be installed, then
            for Pants/Pex to complain the dependency is missing, even though it's in the
            lockfile. There is a workaround: for `[python].resolves`, manually create a
            `python_requirement` target for the problematic transitive dependencies so that they
            are seen as direct requirements, rather than transitive. For tool lockfiles, add the
            problematic transitive dependency to `[tool].extra_requirements`, e.g.
            `[isort].extra_requirements`. Then, regenerate the lockfile(s) with the
            `generate-lockfiles` goal. Alternatively, use Pex for generation.

            Finally, installing from a Poetry-generated lockfile is slower than installing from a
            Pex lockfile. When using a Pex lockfile, Pants will only install the subset needed
            for the current task.

            However, Pex lockfile generation is a new feature. Given how vast the Python packaging
            ecosystem is, it is possible you may experience edge cases / bugs we haven't yet
            covered. Bug reports are appreciated!
            https://github.com/pantsbuild/pants/issues/new/choose

            Note that while Pex generates locks in a proprietary JSON format, you can use the
            `{bin_name()} export` goal for Pants to create a virtual environment for
            interoperability with tools like IDEs.
            """
        ),
        advanced=True,
    )
    resolves_generate_lockfiles = BoolOption(
        "--resolves-generate-lockfiles",
        default=True,
        help=softwrap(
            """
            If False, Pants will not attempt to generate lockfiles for `[python].resolves` when
            running the `generate-lockfiles` goal.

            This is intended to allow you to manually generate lockfiles as a workaround for the
            issues described in the `[python].lockfile_generator` option, if you are not yet ready
            to use Pex.

            If you set this to False, Pants will not attempt to validate the metadata headers
            for your user lockfiles. This is useful so that you can keep
            `[python].invalid_lockfile_behavior` to `error` or `warn` if you'd like so that tool
            lockfiles continue to be validated, while user lockfiles are skipped.
            """
        ),
        advanced=True,
    )
    run_against_entire_lockfile = BoolOption(
        "--run-against-entire-lockfile",
        default=False,
        help=softwrap(
            """
            If enabled, when running binaries, tests, and repls, Pants will use the entire
            lockfile instead of just the relevant subset.

            We generally do not recommend this if `[python].lockfile_generator` is set to `"pex"`
            thanks to performance enhancements we've made. When using Pex lockfiles, you should
            get similar performance to using this option but without the downsides mentioned below.

            Otherwise, if not using Pex lockfiles, this option can improve
            performance and reduce cache size. But it has two consequences: 1) All cached test
            results will be invalidated if any requirement in the lockfile changes, rather
            than just those that depend on the changed requirement. 2) Requirements unneeded
            by a test/run/repl will be present on the sys.path, which might in rare cases
            cause their behavior to change.

            This option does not affect packaging deployable artifacts, such as
            PEX files, wheels and cloud functions, which will still use just the exact
            subset of requirements needed.
            """
        ),
        advanced=True,
    )
    requirement_constraints = FileOption(
        "--requirement-constraints",
        default=None,
        help=softwrap(
            """
            When resolving third-party requirements for your own code (vs. tools you run),
            use this constraints file to determine which versions to use.

            Mutually exclusive with `[python].enable_resolves`, which we generally recommend as an
            improvement over constraints files.

            See https://pip.pypa.io/en/stable/user_guide/#constraints-files for more
            information on the format of constraint files and how constraints are applied in
            Pex and pip.

            This only applies when resolving user requirements, rather than tools you run
            like Black and Pytest. To constrain tools, set `[tool].lockfile`, e.g.
            `[black].lockfile`.
            """
        ),
        advanced=True,
        mutually_exclusive_group="lockfile",
    )
    resolve_all_constraints = BoolOption(
        "--resolve-all-constraints",
        default=True,
        help=softwrap(
            """
            (Only relevant when using `[python].requirement_constraints.`) If enabled, when
            resolving requirements, Pants will first resolve your entire
            constraints file as a single global resolve. Then, if the code uses a subset of
            your constraints file, Pants will extract the relevant requirements from that
            global resolve so that only what's actually needed gets used. If disabled, Pants
            will not use a global resolve and will resolve each subset of your requirements
            independently.

            Usually this option should be enabled because it can result in far fewer resolves.
            """
        ),
        advanced=True,
    )
    no_binary = StrListOption(
        "--no-binary",
        help=softwrap(
            """
            Do not use binary packages (i.e., wheels) for these 3rdparty projects.

            Also accepts `:all:` to disable all binary packages.

            Note that some packages are tricky to compile and may fail to install when this option
            is used on them. See https://pip.pypa.io/en/stable/cli/pip_install/#install-no-binary
            for details.

            Note: Only takes effect if you use Pex lockfiles. Set
            `[python].lockfile_generator = "pex"` and run the `generate-lockfiles` goal.
            """
        ),
    )
    only_binary = StrListOption(
        "--only-binary",
        help=softwrap(
            """
            Do not use source packages (i.e., sdists) for these 3rdparty projects.

            Also accepts `:all:` to disable all source packages.

            Packages without binary distributions will fail to install when this option is used on
            them. See https://pip.pypa.io/en/stable/cli/pip_install/#install-only-binary for
            details.

            Note: Only takes effect if you use Pex lockfiles. Set
            `[python].lockfile_generator = "pex"` and run the `generate-lockfiles` goal.
            """
        ),
    )
    resolver_manylinux = StrOption(
        "--resolver-manylinux",
        default="manylinux2014",
        help=softwrap(
            """
            Whether to allow resolution of manylinux wheels when resolving requirements for
            foreign linux platforms. The value should be a manylinux platform upper bound,
            e.g.: 'manylinux2010', or else the string 'no' to disallow.
            """
        ),
        advanced=True,
    )

    tailor_source_targets = BoolOption(
        "--tailor-source-targets",
        default=True,
        help=softwrap(
            """
            If true, add `python_sources`, `python_tests`, and `python_test_utils` targets with
            the `tailor` goal."""
        ),
        advanced=True,
    )
    tailor_ignore_solitary_init_files = BoolOption(
        "--tailor-ignore-solitary-init-files",
        default=True,
        help=softwrap(
            """
            If true, don't add `python_sources` targets for solitary `__init__.py` files with the
            `tailor` goal.

            Solitary `__init__.py` files usually exist as import scaffolding rather than true
            library code, so it can be noisy to add BUILD files.

            Set to false if you commonly have packages containing real code in
            `__init__.py` without other `.py` files in the package.
            """
        ),
        advanced=True,
    )
    tailor_requirements_targets = BoolOption(
        "--tailor-requirements-targets",
        default=True,
        help=softwrap(
            """
            If true, add `python_requirements`, `poetry_requirements`, and `pipenv_requirements`
            target generators with the `tailor` goal.

            `python_requirements` targets are added for any file that matches the pattern
            `*requirements*.txt`. You will need to manually add `python_requirements` for different
            file names like `reqs.txt`.

            `poetry_requirements` targets are added for `pyproject.toml` files with `[tool.poetry`
            in them.
            """
        ),
        advanced=True,
    )
    tailor_pex_binary_targets = BoolOption(
        "--tailor-pex-binary-targets",
        default=True,
        help=softwrap(
            """
            If true, add `pex_binary` targets for Python files named `__main__.py` or with a
            `__main__` clause with the `tailor` goal.
            """
        ),
        advanced=True,
    )

    macos_big_sur_compatibility = BoolOption(
        "--macos-big-sur-compatibility",
        default=False,
        help=softwrap(
            """
            If set, and if running on MacOS Big Sur, use macosx_10_16 as the platform
            when building wheels. Otherwise, the default of macosx_11_0 will be used.
            This may be required for pip to be able to install the resulting distribution
            on Big Sur.
            """
        ),
        advanced=True,
    )

    @property
    def generate_lockfiles_with_pex(self) -> bool:
        """Else, generate with Poetry."""
        return self._lockfile_generator == LockfileGenerator.PEX

    @memoized_property
    def resolves_to_interpreter_constraints(self) -> dict[str, tuple[str, ...]]:
        result = {}
        for resolve, ics in self._resolves_to_interpreter_constraints.items():
            if resolve not in self.resolves:
                raise KeyError(
                    softwrap(
                        f"""
                        Unrecognized resolve name in the option
                        `[python].resolves_to_interpreter_constraints`: {resolve}. Each
                        key must be one of the keys in `[python].resolves`:
                        {sorted(self.resolves.keys())}
                        """
                    )
                )
            result[resolve] = tuple(ics)
        return result

    def resolve_all_constraints_was_set_explicitly(self) -> bool:
        return not self.options.is_default("resolve_all_constraints")

    @property
    def manylinux(self) -> str | None:
        manylinux = cast(Optional[str], self.resolver_manylinux)
        if manylinux is None or manylinux.lower() in ("false", "no", "none"):
            return None
        return manylinux

    @property
    def manylinux_pex_args(self) -> Iterator[str]:
        if self.manylinux:
            yield "--manylinux"
            yield self.manylinux
        else:
            yield "--no-manylinux"

    @property
    def scratch_dir(self):
        return os.path.join(self.options.pants_workdir, *self.options_scope.split("."))

    def compatibility_or_constraints(self, compatibility: Iterable[str] | None) -> tuple[str, ...]:
        """Return either the given `compatibility` field or the global interpreter constraints.

        If interpreter constraints are supplied by the CLI flag, return those only.
        """
        if self.options.is_flagged("interpreter_constraints"):
            return self.interpreter_constraints
        return tuple(compatibility or self.interpreter_constraints)

    def compatibilities_or_constraints(
        self, compatibilities: Iterable[Iterable[str] | None]
    ) -> tuple[str, ...]:
        return tuple(
            constraint
            for compatibility in compatibilities
            for constraint in self.compatibility_or_constraints(compatibility)
        )
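
As a companion to the multi-resolve workflow described in the `[python].resolves` help above, a BUILD file following steps 3 and 5 might look roughly like the sketch below. The target names, requirement pin, and resolve names are entirely hypothetical, and the exact field set depends on your Pants version.

# Hypothetical BUILD file: pin a requirement to a non-default resolve (step 3)
# and let a source target participate in two resolves via parametrize (step 5).
python_requirement(
    name="pandas",
    requirements=["pandas>=1.4,<1.5"],
    resolve="data-science",
)

python_sources(
    name="lib",
    resolve=parametrize("data-science", "web-app"),
)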
Example No. 9
class CoverageSubsystem(PythonToolBase):
    options_scope = "coverage-py"
    help = "Configuration for Python test coverage measurement."

    default_version = "coverage[toml]>=5.5,<5.6"
    default_main = ConsoleScript("coverage")

    register_interpreter_constraints = True
    default_interpreter_constraints = ["CPython>=3.7,<4"]

    register_lockfile = True
    default_lockfile_resource = ("pants.backend.python.subsystems",
                                 "coverage_py.lock")
    default_lockfile_path = "src/python/pants/backend/python/subsystems/coverage_py.lock"
    default_lockfile_url = git_url(default_lockfile_path)

    filter = StrListOption(
        "--filter",
        help=softwrap("""
            A list of Python modules or filesystem paths to use in the coverage report, e.g.
            `['helloworld_test', 'helloworld/util/dirutil']`.

            Both modules and directory paths are recursive: any submodules or child paths,
            respectively, will be included.

            If you leave this off, the coverage report will include every file
            in the transitive closure of the address/file arguments; for example, `test ::`
            will include every Python file in your project, whereas
            `test project/app_test.py` will include `app_test.py` and any of its transitive
            dependencies.
            """),
    )
    reports = EnumListOption(
        "--report",
        default=[CoverageReportType.CONSOLE],
        help="Which coverage report type(s) to emit.",
    )
    _output_dir = StrOption(
        "--output-dir",
        default=str(PurePath("{distdir}", "coverage", "python")),
        advanced=True,
        help=
        "Path to write the Pytest Coverage report to. Must be relative to the build root.",
    )
    config = FileOption(
        "--config",
        default=None,
        advanced=True,
        help=lambda cls: softwrap(f"""
            Path to an INI or TOML config file understood by coverage.py
            (https://coverage.readthedocs.io/en/stable/config.html).

            Setting this option will disable `[{cls.options_scope}].config_discovery`. Use
            this option if the config is located in a non-standard location.
            """),
    )
    config_discovery = BoolOption(
        "--config-discovery",
        default=True,
        advanced=True,
        help=lambda cls: softwrap(f"""
            If true, Pants will include any relevant config files during runs
            (`.coveragerc`, `setup.cfg`, `tox.ini`, and `pyproject.toml`).

            Use `[{cls.options_scope}].config` instead if your config is in a
            non-standard location.
            """),
    )
    global_report = BoolOption(
        "--global-report",
        default=False,
        help=softwrap("""
            If true, Pants will generate a global coverage report.

            The global report will include all Python source files in the workspace and not just
            those depended on by the tests that were run.
            """),
    )
    fail_under = FloatOption(
        "--fail-under",
        default=None,
        help=softwrap("""
            Fail if the total combined coverage percentage for all tests is less than this
            number.

            Use this instead of setting fail_under in a coverage.py config file,
            as the config will apply to each test separately, while you typically want this
            to apply to the combined coverage for all tests run.

            Note that you must generate at least one (non-raw) coverage report for this
            check to trigger.

            Note also that if you specify a non-integral value, you must
            also set [report] precision properly in the coverage.py config file to make use
            of the decimal places. See https://coverage.readthedocs.io/en/latest/config.html.
            """),
    )

    def output_dir(self, distdir: DistDir) -> PurePath:
        return PurePath(self._output_dir.format(distdir=distdir.relpath))

    @property
    def config_request(self) -> ConfigFilesRequest:
        # Refer to https://coverage.readthedocs.io/en/stable/config.html.
        return ConfigFilesRequest(
            specified=self.config,
            specified_option_name=f"[{self.options_scope}].config",
            discovery=self.config_discovery,
            check_existence=[".coveragerc"],
            check_content={
                "setup.cfg": b"[coverage:",
                "tox.ini": b"[coverage:",
                "pyproject.toml": b"[tool.coverage",
            },
        )
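
For reference, the `{distdir}` placeholder in `[coverage-py].output_dir` is filled in by `output_dir()` above with plain `str.format`; a standalone illustration (using a hypothetical `dist` directory) looks like this.

from pathlib import PurePath

default_output_dir = str(PurePath("{distdir}", "coverage", "python"))
print(PurePath(default_output_dir.format(distdir="dist")))  # -> dist/coverage/python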
Example No. 10
class PythonSetup(Subsystem):
    options_scope = "python"
    help = "Options for Pants's Python backend."

    default_interpreter_constraints = ["CPython>=3.7,<4"]
    default_interpreter_universe = [
        "2.7", "3.5", "3.6", "3.7", "3.8", "3.9", "3.10", "3.11"
    ]

    interpreter_constraints = StrListOption(
        "--interpreter-constraints",
        default=default_interpreter_constraints,
        help=
        ("The Python interpreters your codebase is compatible with.\n\nSpecify with "
         "requirement syntax, e.g. 'CPython>=2.7,<3' (A CPython interpreter with version "
         ">=2.7 AND version <3) or 'PyPy' (A pypy interpreter of any version). Multiple "
         "constraint strings will be ORed together.\n\nThese constraints are used as the "
         "default value for the `interpreter_constraints` field of Python targets."
         ),
        advanced=True,
        metavar="<requirement>",
    )
    interpreter_universe = StrListOption(
        "--interpreter-versions-universe",
        default=default_interpreter_universe,
        help=
        ("All known Python major/minor interpreter versions that may be used by either "
         "your code or tools used by your code.\n\n"
         "This is used by Pants to robustly handle interpreter constraints, such as knowing "
         "when generating lockfiles which Python versions to check if your code is "
         "using.\n\n"
         "This does not control which interpreter your code will use. Instead, to set your "
         "interpreter constraints, update `[python].interpreter_constraints`, the "
         "`interpreter_constraints` field, and relevant tool options like "
         "`[isort].interpreter_constraints` to tell Pants which interpreters your code "
         f"actually uses. See {doc_url('python-interpreter-compatibility')}.\n\n"
         "All elements must be the minor and major Python version, e.g. '2.7' or '3.10'. Do "
         "not include the patch version.\n\n"),
        advanced=True,
    )
    requirement_constraints = FileOption(
        "--requirement-constraints",
        default=None,
        help=
        ("When resolving third-party requirements for your own code (vs. tools you run), "
         "use this constraints file to determine which versions to use.\n\n"
         "This only applies when resolving user requirements, rather than tools you run "
         "like Black and Pytest. To constrain tools, set `[tool].lockfile`, e.g. "
         "`[black].lockfile`.\n\n"
         "See https://pip.pypa.io/en/stable/user_guide/#constraints-files for more "
         "information on the format of constraint files and how constraints are applied in "
         "Pex and pip.\n\n"
         "Mutually exclusive with `[python].enable_resolves`."),
        advanced=True,
        mutually_exclusive_group="lockfile",
    )
    resolve_all_constraints = BoolOption(
        "--resolve-all-constraints",
        default=True,
        help=
        ("If enabled, when resolving requirements, Pants will first resolve your entire "
         "constraints file as a single global resolve. Then, if the code uses a subset of "
         "your constraints file, Pants will extract the relevant requirements from that "
         "global resolve so that only what's actually needed gets used. If disabled, Pants "
         "will not use a global resolve and will resolve each subset of your requirements "
         "independently."
         "\n\nUsually this option should be enabled because it can result in far fewer "
         "resolves."
         "\n\nRequires [python].requirement_constraints to be set."),
        advanced=True,
    )
    enable_resolves = BoolOption(
        "--enable-resolves",
        default=False,
        help=
        ("Set to true to enable the multiple resolves mechanism. See "
         "`[python].resolves` for an explanation of this feature.\n\n"
         "Warning: the `generate-lockfiles` goal does not yet work if you have VCS (Git) "
         "requirements and local requirements. Support is coming in a future Pants release. You "
         "can still use multiple resolves, but you must manually generate your lockfiles rather "
         "than using the `generate-lockfiles` goal, e.g. by running `pip freeze`. Specifically, "
         "set up `[python].resolves` to point to your manually generated lockfile paths, and "
         "then set `[python].resolves_generate_lockfiles = false` in `pants.toml`.\n\n"
         "You may also run into issues generating lockfiles when using Poetry as the generator, "
         "rather than Pex. See the option `[python].lockfile_generator` for more "
         "information.\n\n"
         "The resolves feature offers three major benefits compared to "
         "`[python].requirement_constraints`:\n\n"
         "  1. Uses `--hash` to validate that all downloaded files are expected, which "
         "reduces the risk of supply chain attacks.\n"
         "  2. Enforces that all transitive dependencies are in the lockfile, whereas "
         "constraints allow you to leave off dependencies. This ensures your build is more "
         "stable and reduces the risk of supply chain attacks.\n"
         "  3. Allows you to have multiple resolves in your repository.\n\n"
         "Mutually exclusive with `[python].requirement_constraints`."),
        advanced=True,
        mutually_exclusive_group="lockfile",
    )
    resolves = DictOption[str](
        "--resolves",
        default={
            "python-default": "3rdparty/python/default.lock"
        },
        help=
        ("A mapping of logical names to lockfile paths used in your project.\n\n"
         "Many organizations only need a single resolve for their whole project, which is "
         "a good default and the simplest thing to do. However, you may need multiple "
         "resolves, such as if you use two conflicting versions of a requirement in "
         "your repository.\n\n"
         "For now, Pants only has first-class support for disjoint resolves, meaning that "
         "you cannot ergonomically set a `python_requirement` or `python_source` target, "
         "for example, to work with multiple resolves. Practically, this means that you "
         "cannot yet ergonomically reuse common code, such as util files, across projects "
         "using different resolves. Support for overlapping resolves is coming in Pants 2.11 "
         "through a new 'parametrization' feature.\n\n"
         f"If you only need a single resolve, run `{bin_name()} generate-lockfiles` to "
         "generate the lockfile.\n\n"
         "If you need multiple resolves:\n\n"
         "  1. Via this option, define multiple resolve "
         "names and their lockfile paths. The names should be meaningful to your "
         "repository, such as `data-science` or `pants-plugins`.\n"
         "  2. Set the default with `[python].default_resolve`.\n"
         "  3. Update your `python_requirement` targets with the "
         "`resolve` field to declare which resolve they should "
         "be available in. They default to `[python].default_resolve`, so you "
         "only need to update targets that you want in non-default resolves. "
         "(Often you'll set this via the `python_requirements` or `poetry_requirements` "
         "target generators)\n"
         f"  4. Run `{bin_name()} generate-lockfiles` to generate the lockfiles. If the results "
         "aren't what you'd expect, adjust the prior step.\n"
         "  5. Update any targets like `python_source` / `python_sources`, "
         "`python_test` / `python_tests`, and `pex_binary` which need to set a non-default "
         "resolve with the `resolve` field.\n\n"
         "You can name the lockfile paths what you would like; Pants does not expect a "
         "certain file extension or location.\n\n"
         "Only applies if `[python].enable_resolves` is true."),
        advanced=True,
    )
    default_resolve = StrOption(
        "--default-resolve",
        default="python-default",
        help=("The default value used for the `resolve` field.\n\n"
              "The name must be defined as a resolve in `[python].resolves`."),
        advanced=True,
    )
    _resolves_to_interpreter_constraints = DictOption["list[str]"](
        "--resolves-to-interpreter-constraints",
        help=
        ("Override the interpreter constraints to use when generating a resolve's lockfile "
         "with the `generate-lockfiles` goal.\n\n"
         "By default, each resolve from `[python].resolves` will use your "
         "global interpreter constraints set in `[python].interpreter_constraints`. With "
         "this option, you can override each resolve to use certain interpreter "
         "constraints, such as `{'data-science': ['==3.8.*']}`.\n\n"
         "Pants will validate that the interpreter constraints of your code using a "
         "resolve are compatible with that resolve's own constraints. For example, if your "
         "code is set to use ['==3.9.*'] via the `interpreter_constraints` field, but it's "
         "also using a resolve whose interpreter constraints are set to ['==3.7.*'], then "
         "Pants will error explaining the incompatibility.\n\n"
         "The keys must be defined as resolves in `[python].resolves`."),
        advanced=True,
    )
    invalid_lockfile_behavior = EnumOption(
        "--invalid-lockfile-behavior",
        default=InvalidLockfileBehavior.error,
        help=
        ("The behavior when a lockfile has requirements or interpreter constraints that are "
         "not compatible with what the current build is using.\n\n"
         "We recommend keeping the default of `error` for CI builds.\n\n"
         "Note that `warn` will still expect a Pants lockfile header, it only won't error if "
         "the lockfile is stale and should be regenerated. Use `ignore` to avoid needing a "
         "lockfile header at all, e.g. if you are manually managing lockfiles rather than "
         "using the `generate-lockfiles` goal."),
        advanced=True,
    )
    _lockfile_generator = EnumOption(
        "--lockfile-generator",
        default=LockfileGenerator.POETRY,
        help=
        ("Whether to use Pex or Poetry with the `generate-lockfiles` goal.\n\n"
         "Poetry does not work with `[python-repos]` for custom indexes/cheeseshops. If you use "
         "this feature, you should use Pex.\n\n"
         "Several users have also had issues with how Poetry's lockfile generation handles "
         "environment markers for transitive dependencies; certain dependencies end up with "
         "nonsensical environment markers which cause the dependency to not be installed, then "
         "for Pants/Pex to complain the dependency is missing, even though it's in the "
         "lockfile. There is a workaround: for `[python].resolves`, manually create a "
         "`python_requirement` target for the problematic transitive dependencies so that they "
         "are seen as direct requirements, rather than transitive. For tool lockfiles, add the "
         "problematic transitive dependency to `[tool].extra_requirements`, e.g. "
         "`[isort].extra_requirements`. Then, regenerate the lockfile(s) with the "
         "`generate-lockfiles` goal. Alternatively, use Pex for generation.\n\n"
         "Finally, installing from a Poetry-generated lockfile is slower than installing from a "
         "Pex lockfile.\n\n"
         "However, Pex lockfile generation is a new feature. Given how vast the Python packaging "
         "ecosystem is, it is possible you may experience edge cases / bugs we haven't yet "
         "covered. Bug reports are appreciated! "
         "https://github.com/pantsbuild/pants/issues/new/choose\n\n"
         "Note that while Pex generates locks in a proprietary JSON format, you can use the "
         f"`{bin_name()} export` goal for Pants to create a virtual environment for "
         f"interoperability with tools like IDEs."),
    )
    resolves_generate_lockfiles = BoolOption(
        "--resolves-generate-lockfiles",
        default=True,
        help=
        ("If False, Pants will not attempt to generate lockfiles for `[python].resolves` when "
         "running the `generate-lockfiles` goal.\n\n"
         "This is intended to allow you to manually generate lockfiles as a workaround for the "
         "issues described in the `[python].enable_resolves` option.\n\n"
         "If you set this to False, Pants will not attempt to validate the metadata headers "
         "for your user lockfiles. This is useful so that you can keep "
         "`[python].invalid_lockfile_behavior` to `error` or `warn` if you'd like so that tool "
         "lockfiles continue to be validated, while user lockfiles are skipped."
         ),
        advanced=True,
    )
    run_against_entire_lockfile = BoolOption(
        "--run-against-entire-lockfile",
        default=False,
        help=
        ("If enabled, when running binaries, tests, and repls, Pants will use the entire "
         "lockfile/constraints file instead of just the relevant subset. This can improve "
         "performance and reduce cache size, but has two consequences: 1) All cached test "
         "results will be invalidated if any requirement in the lockfile changes, rather "
         "than just those that depend on the changed requirement. 2) Requirements unneeded "
         "by a test/run/repl will be present on the sys.path, which might in rare cases "
         "cause their behavior to change.\n\n"
         "This option does not affect packaging deployable artifacts, such as "
         "PEX files, wheels and cloud functions, which will still use just the exact "
         "subset of requirements needed."),
        advanced=True,
    )
    resolver_manylinux = StrOption(
        "--resolver-manylinux",
        default="manylinux2014",
        help=
        "Whether to allow resolution of manylinux wheels when resolving requirements for "
        "foreign linux platforms. The value should be a manylinux platform upper bound, "
        "e.g.: 'manylinux2010', or else the string 'no' to disallow.",
        advanced=True,
    )
    tailor_ignore_solitary_init_files = BoolOption(
        "--tailor-ignore-solitary-init-files",
        default=True,
        help=
        "Don't tailor `python_sources` targets for solitary `__init__.py` files, as "
        "those usually exist as import scaffolding rather than true library code.\n\n"
        "Set to False if you commonly have packages containing real code in "
        "`__init__.py` and there are no other .py files in the package.",
        advanced=True,
    )
    tailor_requirements_targets = BoolOption(
        "--tailor-requirements-targets",
        default=True,
        help="Tailor python_requirements() targets for requirements files.",
        advanced=True,
    )
    tailor_pex_binary_targets = BoolOption(
        "--tailor-pex-binary-targets",
        default=True,
        help="Tailor pex_binary() targets for Python entry point files.",
        advanced=True,
    )
    macos_big_sur_compatibility = BoolOption(
        "--macos-big-sur-compatibility",
        default=False,
        help=
        "If set, and if running on MacOS Big Sur, use macosx_10_16 as the platform "
        "when building wheels. Otherwise, the default of macosx_11_0 will be used. "
        "This may be required for pip to be able to install the resulting distribution "
        "on Big Sur.",
    )

    @property
    def generate_lockfiles_with_pex(self) -> bool:
        """Else, generate with Poetry."""
        if self.options.is_default("lockfile_generator"):
            warn_or_error(
                "2.12.0.dev0",
                "`[python].lockfile_generator` defaulting to 'poetry'",
                softwrap(f"""
                    In Pants 2.12, Pants will default to using Pex to generate lockfiles
                    with the `generate-lockfiles` goal, rather than Poetry. Run
                    `{bin_name()} help-advanced python` for more information on the benefits and
                    possible issues with switching to Pex.

                    To keep using Poetry, set `[python].lockfile_generator = 'poetry'` in
                    pants.toml. To try Pex, set to 'pex'.

                    Note that you can incrementally switch to Pex lockfiles if you want to reduce
                    risk while migrating. The option `[python].lockfile_generator` only impacts
                    how Pants generates new lockfiles; you can continue to use
                    requirements.txt-style lockfiles (i.e. those generated by Poetry) even if
                    new lockfiles are generated in Pex's JSON format. For example, you can run
                    `{bin_name()} --python-lockfile-generator=pex generate-lockfiles
                    --resolve=isort` to only regenerate the isort lockfile.
                    """),
            )

        return self._lockfile_generator == LockfileGenerator.PEX

    @memoized_property
    def resolves_to_interpreter_constraints(
            self) -> dict[str, tuple[str, ...]]:
        result = {}
        for resolve, ics in self._resolves_to_interpreter_constraints.items():
            if resolve not in self.resolves:
                raise KeyError(
                    "Unrecognized resolve name in the option "
                    f"`[python].resolves_to_interpreter_constraints`: {resolve}. Each "
                    "key must be one of the keys in `[python].resolves`: "
                    f"{sorted(self.resolves.keys())}")
            result[resolve] = tuple(ics)
        return result

    def resolve_all_constraints_was_set_explicitly(self) -> bool:
        return not self.options.is_default("resolve_all_constraints")

    @property
    def manylinux(self) -> str | None:
        manylinux = cast(Optional[str], self.resolver_manylinux)
        if manylinux is None or manylinux.lower() in ("false", "no", "none"):
            return None
        return manylinux

    @property
    def manylinux_pex_args(self) -> Iterator[str]:
        if self.manylinux:
            yield "--manylinux"
            yield self.manylinux
        else:
            yield "--no-manylinux"

    @property
    def scratch_dir(self):
        return os.path.join(self.options.pants_workdir,
                            *self.options_scope.split("."))

    def compatibility_or_constraints(
            self, compatibility: Iterable[str] | None) -> tuple[str, ...]:
        """Return either the given `compatibility` field or the global interpreter constraints.

        If interpreter constraints are supplied by the CLI flag, return those only.
        """
        if self.options.is_flagged("interpreter_constraints"):
            return self.interpreter_constraints
        return tuple(compatibility or self.interpreter_constraints)

    def compatibilities_or_constraints(
            self, compatibilities: Iterable[Iterable[str] | None]
    ) -> tuple[str, ...]:
        return tuple(
            constraint for compatibility in compatibilities
            for constraint in self.compatibility_or_constraints(compatibility))
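
The properties at the end of this example mostly normalize raw option values. As an editor's illustration (not Pants code; the resolve names and lockfile paths below are made up), this standalone sketch mirrors what the `resolves_to_interpreter_constraints` property above does: reject unknown resolve names and freeze each constraint list into a tuple.

def normalize_ics(
    resolves: dict[str, str],
    resolves_to_ics: dict[str, list[str]],
) -> dict[str, tuple[str, ...]]:
    # Mirrors the validation in `resolves_to_interpreter_constraints` above (illustration only).
    result = {}
    for resolve, ics in resolves_to_ics.items():
        if resolve not in resolves:
            raise KeyError(
                f"Unrecognized resolve name: {resolve}. "
                f"Each key must be one of {sorted(resolves)}"
            )
        # Tuples keep the memoized mapping immutable.
        result[resolve] = tuple(ics)
    return result


resolves = {
    "python-default": "3rdparty/python/default.lock",
    "data-science": "3rdparty/python/data_science.lock",
}
print(normalize_ics(resolves, {"data-science": ["==3.8.*"]}))
# {'data-science': ('==3.8.*',)}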
Example #11
class Yapf(PythonToolBase):
    options_scope = "yapf"
    name = "yapf"
    help = "A formatter for Python files (https://github.com/google/yapf)."

    default_version = "yapf==0.32.0"
    default_extra_requirements = ["toml"]
    default_main = ConsoleScript("yapf")

    register_interpreter_constraints = True
    default_interpreter_constraints = ["CPython>=3.7,<4"]

    register_lockfile = True
    default_lockfile_resource = ("pants.backend.python.lint.yapf", "yapf.lock")
    default_lockfile_path = "src/python/pants/backend/python/lint/yapf/yapf.lock"
    default_lockfile_url = git_url(default_lockfile_path)

    skip = SkipOption("fmt", "lint")
    args = ArgsListOption(
        example="--no-local-style",
        extra_help=softwrap(
            """
            Certain arguments, specifically `--recursive`, `--in-place`, and
            `--parallel`, will be ignored because Pants takes care of finding
            all the relevant files and running the formatting in parallel.
            """
        ),
    )
    export = ExportToolOption()
    config = FileOption(
        "--config",
        default=None,
        advanced=True,
        help=lambda cls: softwrap(
            f"""
            Path to style file understood by yapf
            (https://github.com/google/yapf#formatting-style/).

            Setting this option will disable `[{cls.options_scope}].config_discovery`. Use
            this option if the config is located in a non-standard location.
            """
        ),
    )
    config_discovery = BoolOption(
        "--config-discovery",
        default=True,
        advanced=True,
        help=lambda cls: softwrap(
            f"""
            If true, Pants will include any relevant config files during
            runs (`.style.yapf`, `pyproject.toml`, and `setup.cfg`).

            Use `[{cls.options_scope}].config` instead if your config is in a
            non-standard location.
            """
        ),
    )

    def config_request(self, dirs: Iterable[str]) -> ConfigFilesRequest:
        # Refer to https://github.com/google/yapf#formatting-style.
        check_existence = []
        check_content = {}
        for d in ("", *dirs):
            check_existence.append(os.path.join(d, ".yapfignore"))
            check_content.update(
                {
                    os.path.join(d, "pyproject.toml"): b"[tool.yapf",
                    os.path.join(d, "setup.cfg"): b"[yapf]",
                    os.path.join(d, ".style.yapf"): b"[style]",
                }
            )

        return ConfigFilesRequest(
            specified=self.config,
            specified_option_name=f"[{self.options_scope}].config",
            discovery=self.config_discovery,
            check_existence=check_existence,
            check_content=check_content,
        )
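
To make the discovery above concrete, the following standalone sketch (editor's illustration; `src/app` is a hypothetical directory) re-runs the candidate-building loop from `config_request` outside of Pants and prints what it would ask to check:

import os

dirs = ["src/app"]  # hypothetical input
check_existence = []
check_content = {}
for d in ("", *dirs):
    check_existence.append(os.path.join(d, ".yapfignore"))
    check_content.update(
        {
            os.path.join(d, "pyproject.toml"): b"[tool.yapf",
            os.path.join(d, "setup.cfg"): b"[yapf]",
            os.path.join(d, ".style.yapf"): b"[style]",
        }
    )

print(check_existence)
# ['.yapfignore', 'src/app/.yapfignore']
print(sorted(check_content))
# ['.style.yapf', 'pyproject.toml', 'setup.cfg',
#  'src/app/.style.yapf', 'src/app/pyproject.toml', 'src/app/setup.cfg']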
Example #12
class Hadolint(TemplatedExternalTool):
    options_scope = "hadolint"
    name = "Hadolint"
    help = "A linter for Dockerfiles."

    default_version = "v2.8.0"
    # TODO: https://github.com/hadolint/hadolint/issues/411 tracks building and releasing
    #  hadolint for Linux ARM64.
    default_known_versions = [
        "v2.8.0|macos_x86_64|27985f257a216ecab06a16e643e8cb0123e7145b5d526cfcb4ce7a31fe99f357|2428944",
        "v2.8.0|macos_arm64 |27985f257a216ecab06a16e643e8cb0123e7145b5d526cfcb4ce7a31fe99f357|2428944",  # same as mac x86
        "v2.8.0|linux_x86_64|9dfc155139a1e1e9b3b28f3de9907736b9dfe7cead1c3a0ae7ff0158f3191674|5895708",
    ]
    default_url_template = (
        "https://github.com/hadolint/hadolint/releases/download/{version}/hadolint-{platform}"
    )
    default_url_platform_mapping = {
        "macos_arm64": "Darwin-x86_64",
        "macos_x86_64": "Darwin-x86_64",
        "linux_x86_64": "Linux-x86_64",
    }

    skip = SkipOption("lint")
    args = ArgsListOption(example="--format json")
    config = FileOption(
        "--config",
        default=None,
        advanced=True,
        help=lambda cls: softwrap(
            f"""
            Path to a YAML config file understood by Hadolint
            (https://github.com/hadolint/hadolint#configure).

            Setting this option will disable `[{cls.options_scope}].config_discovery`. Use
            this option if the config is located in a non-standard location.
            """
        ),
    )
    config_discovery = BoolOption(
        "--config-discovery",
        default=True,
        advanced=True,
        help=lambda cls: softwrap(
            f"""
            If true, Pants will include all relevant config files during runs
            (`.hadolint.yaml` and `.hadolint.yml`).

            Use `[{cls.options_scope}].config` instead if your config is in a
            non-standard location.
            """
        ),
    )

    def config_request(self) -> ConfigFilesRequest:
        # Refer to https://github.com/hadolint/hadolint#configure for how config files are
        # discovered.
        return ConfigFilesRequest(
            specified=self.config,
            specified_option_name=f"[{self.options_scope}].config",
            discovery=self.config_discovery,
            check_existence=[".hadolint.yaml", ".hadolint.yml"],
        )
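
The `default_url_template` and `default_url_platform_mapping` fields above combine into the actual download URL for each platform. Below is a minimal sketch of that substitution (editor's illustration, not the actual external-tool machinery):

URL_TEMPLATE = "https://github.com/hadolint/hadolint/releases/download/{version}/hadolint-{platform}"
PLATFORM_MAPPING = {
    "macos_arm64": "Darwin-x86_64",
    "macos_x86_64": "Darwin-x86_64",
    "linux_x86_64": "Linux-x86_64",
}


def download_url(version: str, pants_platform: str) -> str:
    # The {platform} placeholder receives the *mapped* name, not the Pants platform name.
    return URL_TEMPLATE.format(version=version, platform=PLATFORM_MAPPING[pants_platform])


print(download_url("v2.8.0", "linux_x86_64"))
# https://github.com/hadolint/hadolint/releases/download/v2.8.0/hadolint-Linux-x86_64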
Example #13
    class MySubsystem(Subsystem):
        def __init__(self):
            pass

        str_opt = StrOption("--opt", default="", help="")
        optional_str_opt = StrOption("--opt", default=None, help="")
        int_opt = IntOption("--opt", default=0, help="")
        optional_int_opt = IntOption("--opt", default=None, help="")
        float_opt = FloatOption("--opt", default=1.0, help="")
        optional_float_opt = FloatOption("--opt", default=None, help="")
        bool_opt = BoolOption("--opt", default=True, help="")
        optional_bool_opt = BoolOption("--opt", default=None, help="")
        target_opt = TargetOption("--opt", default="", help="")
        optional_target_opt = TargetOption("--opt", default=None, help="")
        dir_opt = DirOption("--opt", default="", help="")
        optional_dir_opt = DirOption("--opt", default=None, help="")
        file_opt = FileOption("--opt", default="", help="")
        optional_file_opt = FileOption("--opt", default=None, help="")
        shellstr_opt = ShellStrOption("--opt", default="", help="")
        optional_shellstr_opt = ShellStrOption("--opt", default=None, help="")
        memorysize_opt = MemorySizeOption("--opt", default=1, help="")
        optional_memorysize_opt = MemorySizeOption("--opt", default=None, help="")

        # List opts
        str_list_opt = StrListOption("--opt", help="")
        int_list_opt = IntListOption("--opt", help="")
        float_list_opt = FloatListOption("--opt", help="")
        bool_list_opt = BoolListOption("--opt", help="")
        target_list_opt = TargetListOption("--opt", help="")
        dir_list_opt = DirListOption("--opt", help="")
        file_list_opt = FileListOption("--opt", help="")
        shellstr_list_opt = ShellStrListOption("--opt", help="")
        memorysize_list_opt = MemorySizeListOption("--opt", help="")
        # And just test one dynamic default
        dyn_str_list_opt = StrListOption("--opt", default=lambda cls: cls.default, help="")

        # Enum opts
        enum_opt = EnumOption("--opt", default=MyEnum.Val1, help="")
        optional_enum_opt = EnumOption("--opt", enum_type=MyEnum, default=None, help="")
        dyn_enum_opt = EnumOption(
            "--opt", enum_type=MyEnum, default=lambda cls: cls.default, help=""
        )
        # mypy correctly complains about not matching any possibilities
        enum_opt_bad = EnumOption("--opt", default=None, help="")  # type: ignore[call-overload]
        enum_list_opt1 = EnumListOption("--opt", default=[MyEnum.Val1], help="")
        enum_list_opt2 = EnumListOption("--opt", enum_type=MyEnum, help="")
        dyn_enum_list_opt = EnumListOption(
            "--opt", enum_type=MyEnum, default=lambda cls: cls.default_list, help=""
        )
        # mypy correctly complains about needing a type annotation
        enum_list_bad_opt = EnumListOption("--opt", default=[], help="")  # type: ignore[var-annotated]

        # Dict opts
        dict_opt1 = DictOption[str]("--opt", help="")
        dict_opt2 = DictOption[Any]("--opt", default=dict(key="val"), help="")
        # mypy correctly complains about needing a type annotation
        dict_opt3 = DictOption("--opt", help="")  # type: ignore[var-annotated]
        dict_opt4 = DictOption("--opt", default={"key": "val"}, help="")
        dict_opt5 = DictOption("--opt", default=dict(key="val"), help="")
        dict_opt6 = DictOption("--opt", default=dict(key=1), help="")
        dict_opt7 = DictOption("--opt", default=dict(key1=1, key2="str"), help="")
        dyn_dict_opt = DictOption[str]("--opt", default=lambda cls: cls.default, help="")

        # Specialized Opts
        skip_opt = SkipOption("fmt")
        args_opt = ArgsListOption(example="--whatever")
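
The class above is essentially a catalogue of typed option declarations: `default=None` turns the attribute into an optional, list options infer their element type, and `EnumOption`/`DictOption` carry their payload type. The sketch below is an editor's illustration of that general descriptor-typing idea in plain Python; it is not the Pants implementation, and `Opt`/`Demo` are made-up names.

from __future__ import annotations

from typing import Generic, Optional, TypeVar, overload

T = TypeVar("T")


class Opt(Generic[T]):
    """Toy typed option descriptor (illustration only)."""

    @overload
    def __init__(self: Opt[str], *, default: str, help: str) -> None: ...
    @overload
    def __init__(self: Opt[Optional[str]], *, default: None, help: str) -> None: ...

    def __init__(self, *, default, help):
        self.default = default

    def __get__(self, obj, objtype=None) -> T:
        # A real option system would return the parsed value here;
        # the toy version just returns the declared default.
        return self.default


class Demo:
    name = Opt(default="anon", help="")  # type checkers see `str`
    nick = Opt(default=None, help="")    # type checkers see `str | None`


print(Demo().name, Demo().nick)  # anon None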