Example #1
0
class JvmSubsystem(Subsystem):
    """Registers the `[jvm]` options scope: JDK selection and dependency resolves."""

    options_scope = "jvm"
    help = (
        "Options for general JVM functionality.\n\n"
        " JDK strings will be passed directly to Coursier's `--jvm` parameter."
        " Run `cs java --available` to see a list of available JVM versions on your platform.\n\n"
        " If the string 'system' is passed, Coursier's `--system-jvm` option will be used"
        " instead, but note that this can lead to inconsistent behavior since the JVM version"
        " will be whatever happens to be found first on the system's PATH.")

    # JDK used for Pants' own internal JVM tooling, distinct from the user-code `jdk` below.
    tool_jdk = StrOption(
        "--tool-jdk",
        default="adopt:1.11",
        help=
        ("The JDK to use when building and running Pants' internal JVM support code and other "
         "non-compiler tools. See `jvm` help for supported values."),
        advanced=True,
    )
    # JDK used for user code; same value syntax as `tool_jdk` (passed to Coursier's `--jvm`).
    jdk = StrOption(
        "--jdk",
        default="adopt:1.11",
        help=
        ("The JDK to use.\n\n"
         " This string will be passed directly to Coursier's `--jvm` parameter."
         " Run `cs java --available` to see a list of available JVM versions on your platform.\n\n"
         " If the string 'system' is passed, Coursier's `--system-jvm` option will be used"
         " instead, but note that this can lead to inconsistent behavior since the JVM version"
         " will be whatever happens to be found first on the system's PATH."),
        advanced=True,
    )
    # Maps a resolve name to its lockfile path.
    resolves = DictOption(
        "--resolves",
        default={"jvm-default": "3rdparty/jvm/default.lock"},
        # TODO: expand help message
        help=
        "A dictionary mapping resolve names to the path of their lockfile.",
    )
    # Must name a key of `resolves` (per the help text below).
    default_resolve = StrOption(
        "--default-resolve",
        default="jvm-default",
        help=
        ("The default value used for the `resolve` and `compatible_resolves` fields.\n\n"
         "The name must be defined as a resolve in `[jvm].resolves`."),
    )
    debug_args = StrListOption(
        "--debug-args",
        help=
        ("Extra JVM arguments to use when running tests in debug mode.\n\n"
         "For example, if you want to attach a remote debugger, use something like "
         "['-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=5005']"
         ),
    )
Example #2
0
class JvmToolBase(Subsystem):
    """Base class for subsystems that configure a set of artifact requirements for a JVM tool."""

    # Default version of the tool. (Subclasses may set.)
    default_version: ClassVar[str | None] = None

    # Default artifacts for the tool in GROUP:NAME format. The `--version` value will be used for the
    # artifact version if it has not been specified for a particular requirement. (Subclasses must set.)
    default_artifacts: ClassVar[tuple[str, ...]]

    # Default resource for the tool's lockfile. (Subclasses must set.)
    default_lockfile_resource: ClassVar[tuple[str, str]]

    # URL shown in the `--lockfile` help text below. (Subclasses may set.)
    default_lockfile_url: ClassVar[str | None] = None

    # NOTE: `default` and `help` are callables taking the concrete subclass, so the
    # ClassVar defaults above and the subclass's `options_scope` are resolved lazily,
    # per tool, rather than at this class's definition time.
    version = StrOption(
        "--version",
        advanced=True,
        default=lambda cls: cls.default_version,
        help=lambda cls:
        ("Version string for the tool. This is available for substitution in the "
         f"`[{cls.options_scope}].artifacts` option by including the string "
         "`{version}`."),
    )
    artifacts = StrListOption(
        "--artifacts",
        advanced=True,
        default=lambda cls: list(cls.default_artifacts),
        help=lambda cls:
        ("Artifact requirements for this tool using specified as either the address of a `jvm_artifact` "
         "target or, alternatively, as a colon-separated Maven coordinates (e.g., group:name:version). "
         "For Maven coordinates, the string `{version}` version will be substituted with the value of the "
         f"`[{cls.options_scope}].version` option."),
    )
    lockfile = StrOption(
        "--lockfile",
        default=DEFAULT_TOOL_LOCKFILE,
        advanced=True,
        help=lambda cls:
        ("Path to a lockfile used for installing the tool.\n\n"
         f"Set to the string `{DEFAULT_TOOL_LOCKFILE}` to use a lockfile provided by "
         "Pants, so long as you have not changed the `--version` option. "
         f"See {cls.default_lockfile_url} for the default lockfile contents.\n\n"
         "To use a custom lockfile, set this option to a file path relative to the "
         f"build root, then run `{bin_name()} jvm-generate-lockfiles "
         f"--resolve={cls.options_scope}`.\n\n"),
    )

    @property
    def artifact_inputs(self) -> tuple[str, ...]:
        """Return `artifacts` with the literal `{version}` placeholder in each entry
        replaced by the value of the `version` option."""
        return tuple(s.format(version=self.version) for s in self.artifacts)
Example #3
0
class PathsSubsystem(Outputting, GoalSubsystem):
    """Goal options for listing dependency paths between two addresses."""

    name = "paths"
    help = "List the paths between two addresses."

    # Attribute names carry a `path_` prefix because the flag names would not be
    # valid Python identifiers (`from` is a keyword).
    path_from = StrOption(
        "--from",
        default=None,
        help="The path starting address",
    )

    path_to = StrOption(
        "--to",
        default=None,
        help="The path end address",
    )
class GenerateLockfilesSubsystem(GoalSubsystem):
    """Goal options for generating third-party dependency lockfiles."""

    name = "generate-lockfiles"
    # NOTE(review): the goal help mentions only Python, but `--resolve`'s help below
    # also covers tool lockfiles generally — confirm the wording is intentional.
    help = "Generate lockfiles for Python third-party dependencies."

    @classmethod
    def activated(cls, union_membership: UnionMembership) -> bool:
        # Expose the goal only when at least one backend registered either a
        # tool-lockfile sentinel or known user resolve names.
        return (GenerateToolLockfileSentinel in union_membership
                or KnownUserResolveNamesRequest in union_membership)

    resolve_names = StrListOption(
        "--resolve",
        advanced=False,
        help=
        ("Only generate lockfiles for the specified resolve(s).\n\n"
         "Resolves are the logical names for the different lockfiles used in your project. "
         "For your own code's dependencies, these come from the option "
         "`[python].resolves`. For tool lockfiles, resolve "
         "names are the options scope for that tool such as `black`, `pytest`, and "
         "`mypy-protobuf`.\n\n"
         f"For example, you can run `{bin_name()} generate-lockfiles --resolve=black "
         "--resolve=pytest --resolve=data-science` to only generate lockfiles for those "
         "two tools and your resolve named `data-science`.\n\n"
         "If you specify an invalid resolve name, like 'fake', Pants will output all "
         "possible values.\n\n"
         "If not specified, Pants will generate lockfiles for all resolves."),
    )
    custom_command = StrOption(
        "--custom-command",
        advanced=True,
        default=None,
        help=
        ("If set, lockfile headers will say to run this command to regenerate the lockfile, "
         f"rather than running `{bin_name()} generate-lockfiles --resolve=<name>` like normal."
         ),
    )
Example #5
0
class AnonymousTelemetry(Subsystem):
    """Options scope `[anonymous-telemetry]` controlling opt-in anonymous stats."""

    options_scope = "anonymous-telemetry"
    help = "Options related to sending anonymous stats to the Pants project, to aid development."

    # Off by default: telemetry is strictly opt-in.
    enabled = BoolOption(
        "--enabled",
        default=False,
        help=(
            f"Whether to send anonymous telemetry to the Pants project.\nTelemetry is sent "
            f"asynchronously, with silent failure, and does not impact build times or "
            f"outcomes.\n{_telemetry_docs_referral}."
        ),
        advanced=True,
    )
    # `_telemetry_docs_referral` is a module-level string defined elsewhere in this file.
    repo_id = StrOption(
        "--repo-id",
        default=None,
        help=(
            f"An anonymized ID representing this repo.\nFor private repos, you likely want the "
            f"ID to not be derived from, or algorithmically convertible to, anything "
            f"identifying the repo.\nFor public repos the ID may be visible in that repo's "
            f"config file, so anonymity of the repo is not guaranteed (although user anonymity "
            f"is always guaranteed).\n{_telemetry_docs_referral}."
        ),
        advanced=True,
    )
Example #6
0
class WorkunitsLoggerOptions(Subsystem):
    """Options for an example plugin that writes workunit records to a file."""

    options_scope = "workunit-logger"
    help = "Example plugin that logs workunits to a file."

    # Destination path for the workunit log; nothing is logged when unset.
    dest = StrOption(
        "--dest",
        default=None,
        help="A filename to log workunits to.",
    )
Example #7
0
class TestSubsystem(GoalSubsystem):
    """Goal options for the `test` goal."""

    name = "test"
    help = "Run tests."

    # Prevent this class from being detected by pytest as a test class.
    __test__ = False

    @classmethod
    def activated(cls, union_membership: UnionMembership) -> bool:
        # Expose the goal only when some backend registered a test implementation.
        return TestFieldSet in union_membership

    debug = BoolOption(
        "--debug",
        default=False,
        help=
        ("Run tests sequentially in an interactive process. This is necessary, for "
         "example, when you add breakpoints to your code."),
    )
    force = BoolOption(
        "--force",
        default=False,
        help=
        "Force the tests to run, even if they could be satisfied from cache.",
    )
    # `ShowOutput` is an enum defined elsewhere; FAILED shows output for failing tests only.
    output = EnumOption(
        "--output",
        default=ShowOutput.FAILED,
        help="Show stdout/stderr for these tests.",
    )
    use_coverage = BoolOption(
        "--use-coverage",
        default=False,
        help="Generate a coverage report if the test runner supports it.",
    )
    open_coverage = BoolOption(
        "--open-coverage",
        default=False,
        help=
        ("If a coverage report file is generated, open it on the local system if the "
         "system supports this."),
    )
    xml_dir = StrOption(
        "--xml-dir",
        metavar="<DIR>",
        default=None,
        advanced=True,
        help=
        ("Specifying a directory causes Junit XML result files to be emitted under "
         "that dir for each test run that supports producing them."),
    )
    extra_env_vars = StrListOption(
        "--extra-env-vars",
        help=
        ("Additional environment variables to include in test processes. "
         "Entries are strings in the form `ENV_VAR=value` to use explicitly; or just "
         "`ENV_VAR` to copy the value of a variable in Pants's own environment."
         ),
    )
Example #8
0
class TemplatedExternalTool(ExternalTool):
    """Extends the ExternalTool to allow url templating for custom/self-hosted source.

    In addition to ExternalTool functionalities, it is needed to set, e.g.:

    default_url_template = "https://tool.url/{version}/{platform}-mytool.zip"
    default_url_platform_mapping = {
        "macos_x86_64": "osx_intel",
        "macos_arm64": "osx_arm",
        "linux_x86_64": "linux",
    }

    The platform mapping dict is optional.
    """

    # (Subclasses must set.)
    default_url_template: str
    # (Subclasses may set; optional per the docstring above.)
    default_url_platform_mapping: dict[str, str] | None = None

    url_template = StrOption(
        "--url-template",
        default=lambda cls: cls.default_url_template,
        advanced=True,
        help=softwrap(f"""
            URL to download the tool, either as a single binary file or a compressed file
            (e.g. zip file). You can change this to point to your own hosted file, e.g. to
            work with proxies or for access via the filesystem through a `file:$abspath` URL (e.g.
            `file:/this/is/absolute`, possibly by
            [templating the buildroot in a config file]({doc_url('options#config-file-entries')})).

            Use `{{version}}` to have the value from --version substituted, and `{{platform}}` to
            have a value from --url-platform-mapping substituted in, depending on the
            current platform. For example,
            https://github.com/.../protoc-{{version}}-{{platform}}.zip.
            """),
    )

    url_platform_mapping = DictOption[str](
        "--url-platform-mapping",
        default=lambda cls: cls.default_url_platform_mapping,
        advanced=True,
        help=softwrap("""
            A dictionary mapping platforms to strings to be used when generating the URL
            to download the tool.

            In --url-template, anytime the `{platform}` string is used, Pants will determine the
            current platform, and substitute `{platform}` with the respective value from your dictionary.

            For example, if you define `{"macos_x86_64": "apple-darwin", "linux_x86_64": "unknown-linux"}`,
            and run Pants on Linux with an intel architecture, then `{platform}` will be substituted
            in the --url-template option with unknown-linux.
            """),
    )

    def generate_url(self, plat: Platform) -> str:
        """Render the download URL for `plat` from the url template.

        An unmapped platform substitutes the empty string for `{platform}`.
        NOTE(review): assumes `url_platform_mapping` is always a dict even when
        `default_url_platform_mapping` is None — presumably DictOption coerces None
        to `{}`; confirm. `self.version` comes from the ExternalTool base class.
        """
        platform = self.url_platform_mapping.get(plat.value, "")
        return self.url_template.format(version=self.version,
                                        platform=platform)
Example #9
0
class Scalac(Subsystem):
    """Options scope `[scalac]` for the Scala compiler and its plugins."""

    options_scope = "scalac"
    name = "scalac"
    help = "The Scala compiler."

    # Lockfile shipped with Pants for the default global scalac plugins, as a repo path...
    default_plugins_lockfile_path = (
        "src/python/pants/backend/scala/subsystems/scalac_plugins.default.lockfile.txt"
    )
    # ...and the same lockfile addressed as a (package, filename) resource.
    default_plugins_lockfile_resource = (
        "pants.backend.scala.subsystems",
        "scalac_plugins.default.lockfile.txt",
    )

    args = ArgsListOption(example="-encoding UTF-8")
    # Deprecated (see removal_version/removal_hint): superseded by `--scalac-plugins-for-resolve`.
    plugins_global = StrListOption(
        "--plugins-global",
        help=
        ("A list of addresses of `scalac_plugin` targets which should be used for "
         "compilation of all Scala targets in a build.\n\nIf you set this, you must also "
         "set `[scalac].plugins_global_lockfile`."),
        advanced=True,
        removal_version="2.12.0.dev0",
        removal_hint=
        "Use `--scalac-plugins-for-resolve` instead to use user resolves",
    )

    # TODO: see if we can use an actual list mechanism? If not, this seems like an OK option
    default_plugins = DictOption[str](
        "--plugins-for-resolve",
        help=
        ("A dictionary, whose keys are the names of each JVM resolve that requires default "
         "`scalac` plugins, and the value is a comma-separated string consisting of scalac plugin "
         "names. Each specified plugin must have a corresponding `scalac_plugin` target that specifies "
         "that name in either its `plugin_name` field or is the same as its target name."
         ),
    )

    # Deprecated together with `plugins_global` above.
    plugins_global_lockfile = StrOption(
        "--plugins-global-lockfile",
        default=DEFAULT_TOOL_LOCKFILE,
        help=
        ("The filename of the lockfile for global plugins. You must set this option to a "
         "file path, e.g. '3rdparty/jvm/global_scalac_plugins.lock', if you set "
         "`[scalac].plugins_global`."),
        advanced=True,
        removal_version="2.12.0.dev0",
        removal_hint=
        "Use `--scalac-plugins-for-resolve` instead, which will add plugin dependencies to JVM user resolves.",
    )

    def parsed_default_plugins(self) -> dict[str, list[str]]:
        """Split each comma-separated `--plugins-for-resolve` value into a list of
        plugin names, stripping surrounding whitespace from each name."""
        return {
            key: [i.strip() for i in value.split(",")]
            for key, value in self.default_plugins.items()
        }
Example #10
0
class ExplorerBuiltinGoal(BuiltinGoal):
    """Built-in goal that starts the Explorer Web UI server."""

    name = "experimental-explorer"
    help = "Run the Pants Explorer Web UI server."
    address = StrOption("--address", default="localhost", help="Server address to bind to.")
    port = IntOption("--port", default=8000, help="Server port to bind to.")

    def run(
        self,
        build_config: BuildConfiguration,
        graph_session: GraphSession,
        options: Options,
        specs: Specs,
        union_membership: UnionMembership,
    ) -> ExitCode:
        """Start the explorer server and block in its run loop.

        Returns 127 when no backend has registered an `ExplorerServerRequest`
        implementation; otherwise returns whatever the server's run loop returns.
        """
        # Pick the first registered server implementation. The `for ... else` form
        # runs the error branch only when the union has no members at all.
        for server_request_type in union_membership.get(ExplorerServerRequest):
            logger.info(f"Using {server_request_type.__name__} to create the explorer server.")
            break
        else:
            logger.error(
                softwrap(
                    """
                    There is no Explorer backend server implementation registered.

                    Activate a backend/plugin that registers an implementation for the
                    `ExplorerServerRequest` union to fix this issue.
                    """
                )
            )
            return 127

        all_help_info = HelpInfoExtracter.get_all_help_info(
            options,
            union_membership,
            graph_session.goal_consumed_subsystem_scopes,
            RegisteredTargetTypes.create(build_config.target_types),
            build_config,
        )
        request_state = RequestState(
            all_help_info=all_help_info,
            build_configuration=build_config,
            scheduler_session=graph_session.scheduler_session,
        )
        server_request = server_request_type(
            address=self.address,
            port=self.port,
            request_state=request_state,
        )
        # NOTE(review): the result is used directly as an ExplorerServer below —
        # confirm `product_request` returns the product itself here rather than a
        # sequence of products.
        server = request_state.product_request(
            ExplorerServer,
            (server_request,),
            poll=True,
            timeout=90,
        )
        return server.run()
Example #11
0
class PublishSubsystem(GoalSubsystem):
    """Goal options for the `publish` goal."""

    name = "publish"
    help = "Publish deliverables (assets, distributions, images, etc)."

    @classmethod
    def activated(cls, union_membership: UnionMembership) -> bool:
        # Publishing requires both a packaging and a publishing implementation.
        return PackageFieldSet in union_membership and PublishFieldSet in union_membership

    output = StrOption(
        "--output",
        default=None,
        help="Filename for JSON structured publish information.",
    )
Example #12
0
class Changed(Subsystem):
    """Options scope `[changed]` for Git-based change detection."""

    options_scope = "changed"
    help = (
        "Tell Pants to detect what files and targets have changed from Git.\n\n"
        f"See {doc_url('advanced-target-selection')}."
    )

    # Changes since a given ref (open-ended range).
    since = StrOption(
        "--since",
        default=None,
        help="Calculate changes since this Git spec (commit range/SHA/ref).",
    )
    # Changes contained within a given spec (closed range).
    diffspec = StrOption(
        "--diffspec",
        default=None,
        help="Calculate changes contained within a given Git spec (commit range/SHA/ref).",
    )
    # `DependeesOption` is an enum defined elsewhere (NONE/direct/transitive).
    dependees = EnumOption(
        "--dependees",
        default=DependeesOption.NONE,
        help="Include direct or transitive dependees of changed targets.",
    )
Example #13
0
class HelmSubsystem(TemplatedExternalTool):
    """Options scope `[helm]` configuring the Helm CLI download and behavior."""

    options_scope = "helm"
    help = "The Helm command line (https://helm.sh)"

    default_version = "3.8.0"
    # Each entry appears to encode `version|platform|sha256|size` — TODO confirm
    # against ExternalTool's known-versions format.
    default_known_versions = [
        "3.8.0|linux_arm64 |23e08035dc0106fe4e0bd85800fd795b2b9ecd9f32187aa16c49b0a917105161|12324642",
        "3.8.0|linux_x86_64|8408c91e846c5b9ba15eb6b1a5a79fc22dd4d33ac6ea63388e5698d1b2320c8b|13626774",
        "3.8.0|macos_arm64 |751348f1a4a876ffe089fd68df6aea310fd05fe3b163ab76aa62632e327122f3|14078604",
        "3.8.0|macos_x86_64|532ddd6213891084873e5c2dcafa577f425ca662a6594a3389e288fc48dc2089|14318316",
    ]
    default_url_template = "https://get.helm.sh/helm-v{version}-{platform}.tar.gz"
    default_url_platform_mapping = {
        "linux_arm64": "linux-arm64",
        "linux_x86_64": "linux-amd64",
        "macos_arm64": "darwin-arm64",
        "macos_x86_64": "darwin-amd64",
    }

    # Raw registries config (`registries_help` is defined elsewhere); parsed and
    # exposed via `remotes()` below.
    _registries = DictOption[Any]("--registries",
                                  help=registries_help,
                                  fromfile=True)
    lint_strict = BoolOption("--lint-strict",
                             default=False,
                             help="Enables strict linting of Helm charts")
    default_registry_repository = StrOption(
        "--default-registry-repository",
        default=None,
        help=softwrap("""
            Default location where to push Helm charts in the available registries
            when no specific one has been given.

            If no registry repository is given, charts will be pushed to the root of
            the OCI registry.
            """),
    )
    tailor = BoolOption(
        "--tailor",
        default=True,
        help="If true, add `helm_chart` targets with the `tailor` goal.",
        advanced=True,
    )

    def generate_exe(self, plat: Platform) -> str:
        """Return the path of the `helm` binary inside the extracted archive.

        NOTE(review): this reads `default_url_platform_mapping` directly rather than
        the user-overridable `--url-platform-mapping` option — confirm that is
        intentional (a custom mapping would not affect the exe path).
        """
        mapped_plat = self.default_url_platform_mapping[plat.value]
        bin_path = os.path.join(mapped_plat, "helm")
        return bin_path

    @memoized_method
    def remotes(self) -> HelmRemotes:
        """Parse `--registries` into a `HelmRemotes` instance (memoized)."""
        return HelmRemotes.from_dict(self._registries)
Example #14
0
class GolangSubsystem(Subsystem):
    """Options scope `[golang]` for locating and invoking the Go toolchain."""

    options_scope = "golang"
    help = "Options for Golang support."

    # Private: consumers should use `go_search_paths()` which expands `<PATH>`.
    _go_search_paths = StrListOption(
        "--go-search-paths",
        default=["<PATH>"],
        help=
        ("A list of paths to search for Go.\n\n"
         "Specify absolute paths to directories with the `go` binary, e.g. `/usr/bin`. "
         "Earlier entries will be searched first.\n\n"
         "The special string '<PATH>' will expand to the contents of the PATH env var."
         ),
    )
    # TODO(#13005): Support multiple Go versions in a project?
    expected_version = StrOption(
        "--expected-version",
        default="1.17",
        help=
        ("The Go version you are using, such as `1.17`.\n\n"
         "Pants will only use Go distributions from `--go-search-paths` that have the "
         "expected version, and it will error if none are found.\n\n"
         "Do not include the patch version."),
    )
    # Private: consumers should use `env_vars_to_pass_to_subprocesses` below.
    _subprocess_env_vars = StrListOption(
        "--subprocess-env-vars",
        default=["LANG", "LC_CTYPE", "LC_ALL", "PATH"],
        help=
        ("Environment variables to set when invoking the `go` tool. "
         "Entries are either strings in the form `ENV_VAR=value` to set an explicit value; "
         "or just `ENV_VAR` to copy the value from Pants's own environment."),
        advanced=True,
    )

    def go_search_paths(self, env: Environment) -> tuple[str, ...]:
        """Expand `--go-search-paths` into concrete directories.

        The sentinel entry `<PATH>` expands to the entries of the PATH env var;
        duplicates are dropped with first occurrence winning (OrderedSet).
        """
        def iter_path_entries():
            for entry in self._go_search_paths:
                if entry == "<PATH>":
                    path = env.get("PATH")
                    if path:
                        yield from path.split(os.pathsep)
                else:
                    yield entry

        return tuple(OrderedSet(iter_path_entries()))

    @property
    def env_vars_to_pass_to_subprocesses(self) -> tuple[str, ...]:
        """Sorted, de-duplicated `--subprocess-env-vars` entries."""
        return tuple(sorted(set(self._subprocess_env_vars)))
    # NOTE(review): this class is nested inside GolangSubsystem at class-body level,
    # which looks like an accidental inclusion — it reads like a test fixture that
    # exercises every StrOption keyword. Confirm it belongs at this indentation.
    class MySubsystem(Subsystem):
        def __init__(self):
            # Replaces the options container with a plain namespace and does not call
            # super().__init__(), so Subsystem's own initialization is skipped —
            # presumably deliberate for a fixture; verify against Subsystem.__init__.
            self.options = SimpleNamespace()

        # Exercises the full StrOption keyword surface (deprecation, fromfile,
        # mutual exclusion, daemon, fingerprinting, etc.).
        prop = StrOption(
            "--opt",
            default=None,
            help="",
            advanced=True,
            daemon=True,
            default_help_repr="Help!",
            fingerprint=False,
            fromfile=True,
            metavar="META",
            mutually_exclusive_group="group",
            removal_hint="it's purple",
            removal_version="99.9.9",
        )
Example #16
0
class ReplSubsystem(GoalSubsystem):
    """Goal options for the `repl` goal."""

    name = "repl"
    help = "Open a REPL with the specified code loadable."

    @classmethod
    def activated(cls, union_membership: UnionMembership) -> bool:
        # Expose the goal only when some backend registered a REPL implementation.
        return ReplImplementation in union_membership

    shell = StrOption(
        "--shell",
        default=None,
        help="Override the automatically-detected REPL program for the target(s) specified.",
    )
    restartable = BoolOption(
        "--restartable",
        default=False,
        help="True if the REPL should be restarted if its inputs have changed.",
    )
Example #17
0
class LineOriented(Outputting):
    """Mixin adding a `--sep` option and a helper for line-oriented goal output."""

    sep = StrOption(
        "--sep",
        default="\\n",
        metavar="<separator>",
        help="String to use to separate lines in line-oriented output.",
    )

    @final
    @contextmanager
    def line_oriented(self,
                      console: "Console") -> Iterator[Callable[[str], None]]:
        """Given a Console, yields a function for printing lines to stdout or a file.

        The passed options instance will generally be the `Goal.Options` of an `Outputting` `Goal`.
        """
        # The option value arrives with backslash escapes still literal (e.g. the two
        # characters "\n"), so decode them into real control characters before use.
        separator = self.sep.encode().decode("unicode_escape")
        with self.output_sink(console) as sink:

            def write_line(msg: str) -> None:
                print(msg, file=sink, end=separator)

            yield write_line
Example #18
0
class Outputting:
    """A mixin for Goal that adds options to support output-related context managers.

    Allows output to go to a file or to stdout.

    Useful for goals whose purpose is to emit output to the end user (as distinct from incidental logging to stderr).
    """

    output_file = StrOption(
        "--output-file",
        default=None,
        metavar="<path>",
        help=
        "Output the goal's stdout to this file. If unspecified, outputs to stdout.",
    )

    @final
    @contextmanager
    def output(self, console: "Console") -> Iterator[Callable[[str], None]]:
        """Given a Console, yields a function for writing data to stdout, or a file.

        The passed options instance will generally be the `Goal.Options` of an `Outputting` `Goal`.
        """
        with self.output_sink(console) as output_sink:
            yield lambda msg: output_sink.write(
                msg)  # type: ignore[no-any-return]

    @final
    @contextmanager
    def output_sink(self, console: "Console") -> Iterator:
        """Yield the destination stream: the `--output-file` file if set, else the console's stdout.

        The sink is flushed on exit. The file handle (when one is opened) is managed
        by a `with` block, so it is closed even if flushing raises; the console's
        stdout is flushed but never closed.
        """
        if self.output_file:
            # Fix: the previous implementation opened the file without a context
            # manager, so an exception from flush() in the finally clause would skip
            # close() and leak the handle.
            with open(self.output_file, "w") as stdout_file:
                try:
                    yield stdout_file
                finally:
                    stdout_file.flush()
        else:
            try:
                yield console.stdout
            finally:
                console.stdout.flush()
Example #19
0
class ApacheThriftSubsystem(Subsystem):
    """Options scope `[apache-thrift]` for locating the Thrift IDL compiler."""

    options_scope = "apache-thrift"
    help = "Apache Thrift IDL compiler (https://thrift.apache.org/)."

    # Private: consumers should use `thrift_search_paths()` which expands `<PATH>`.
    _thrift_search_paths = StrListOption(
        "--thrift-search-paths",
        default=["<PATH>"],
        help=softwrap("""
            A list of paths to search for Thrift.

            Specify absolute paths to directories with the `thrift` binary, e.g. `/usr/bin`.
            Earlier entries will be searched first.

            The special string '<PATH>' will expand to the contents of the PATH env var.
            """),
    )
    expected_version = StrOption(
        "--expected-version",
        default="0.15",
        help=softwrap("""
            The major/minor version of Apache Thrift that  you are using, such as `0.15`.

            Pants will only use Thrift binaries from `--thrift-search-paths` that have the
            expected version, and it will error if none are found.

            Do not include the patch version.
            """),
    )

    def thrift_search_paths(self, env: Environment) -> tuple[str, ...]:
        """Expand `--thrift-search-paths` into concrete directories.

        The sentinel entry `<PATH>` expands to the entries of the PATH env var;
        duplicates are dropped with first occurrence winning (OrderedSet).
        """
        def iter_path_entries():
            for entry in self._thrift_search_paths:
                if entry == "<PATH>":
                    path = env.get("PATH")
                    if path:
                        yield from path.split(os.pathsep)
                else:
                    yield entry

        return tuple(OrderedSet(iter_path_entries()))
Example #20
0
class JvmSubsystem(Subsystem):
    """Registers the `[jvm]` options scope: JDK selection, resolves, and JVM process args."""

    options_scope = "jvm"
    help = softwrap("""
        Options for general JVM functionality.

        JDK strings will be passed directly to Coursier's `--jvm` parameter.
        Run `cs java --available` to see a list of available JVM versions on your platform.

        If the string 'system' is passed, Coursier's `--system-jvm` option will be used
        instead, but note that this can lead to inconsistent behavior since the JVM version
        will be whatever happens to be found first on the system's PATH.
        """)

    # JDK used for Pants' own internal JVM tooling, distinct from the user-code `jdk` below.
    tool_jdk = StrOption(
        "--tool-jdk",
        default="temurin:1.11",
        help=softwrap("""
            The JDK to use when building and running Pants' internal JVM support code and other
            non-compiler tools. See `jvm` help for supported values.
            """),
        advanced=True,
    )
    # JDK used for user code; same value syntax as `tool_jdk` (passed to Coursier's `--jvm`).
    jdk = StrOption(
        "--jdk",
        default="temurin:1.11",
        help=softwrap("""
            The JDK to use.

            This string will be passed directly to Coursier's `--jvm` parameter.
            Run `cs java --available` to see a list of available JVM versions on your platform.

            If the string 'system' is passed, Coursier's `--system-jvm` option will be used
            instead, but note that this can lead to inconsistent behavior since the JVM version
            will be whatever happens to be found first on the system's PATH.
            """),
        advanced=True,
    )
    # NOTE(review): still uses the concatenated-string help style while the sibling
    # options here use softwrap — consider unifying once the TODO below is addressed.
    resolves = DictOption(
        "--resolves",
        default={"jvm-default": "3rdparty/jvm/default.lock"},
        # TODO: expand help message
        help=
        "A dictionary mapping resolve names to the path of their lockfile.",
    )
    # Must name a key of `resolves` (per the help text below).
    default_resolve = StrOption(
        "--default-resolve",
        default="jvm-default",
        help=softwrap("""
            The default value used for the `resolve` and `compatible_resolves` fields.

            The name must be defined as a resolve in `[jvm].resolves`.
            """),
    )
    debug_args = StrListOption(
        "--debug-args",
        help=softwrap("""
            Extra JVM arguments to use when running tests in debug mode.

            For example, if you want to attach a remote debugger, use something like
            ['-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=5005']
            """),
    )
    global_options = StrListOption(
        "--global-options",
        help=softwrap("""
            List of JVM options to pass to all JVM processes.

            Options set here will be used by any JVM processes required by Pants, with
            the exception of heap memory settings like `-Xmx`, which need to be set
            using `[GLOBAL].process_total_child_memory_usage` and `[GLOBAL].process_per_child_memory_usage`.
            """),
        advanced=True,
    )
Example #21
0
class PyTest(PythonToolBase):
    options_scope = "pytest"
    name = "Pytest"
    help = "The pytest Python test framework (https://docs.pytest.org/)."

    # This should be compatible with requirements.txt, although it can be more precise.
    # TODO: To fix this, we should allow using a `target_option` referring to a
    #  `python_requirement` to override the version.
    # Pytest 7.1.0 introduced a significant bug that is apparently not fixed as of 7.1.1 (the most
    # recent release at the time of writing). see https://github.com/pantsbuild/pants/issues/14990.
    # TODO: Once this issue is fixed, loosen this to allow the version to float above the bad ones.
    #  E.g., as default_version = "pytest>=7,<8,!=7.1.0,!=7.1.1"
    default_version = "pytest==7.0.1"
    # pytest-cov is needed for coverage collection; see validate_pytest_cov_included below.
    default_extra_requirements = ["pytest-cov>=2.12,!=2.12.1,<3.1"]

    default_main = ConsoleScript("pytest")

    register_lockfile = True
    default_lockfile_resource = ("pants.backend.python.subsystems",
                                 "pytest.lock")
    default_lockfile_path = "src/python/pants/backend/python/subsystems/pytest.lock"
    default_lockfile_url = git_url(default_lockfile_path)

    # passthrough=True: extra command-line args are forwarded to the pytest invocation.
    args = ArgsListOption(example="-k test_foo --quiet", passthrough=True)
    timeouts_enabled = BoolOption(
        "--timeouts",
        default=True,
        help=softwrap("""
            Enable test target timeouts. If timeouts are enabled then test targets with a
            timeout= parameter set on their target will time out after the given number of
            seconds if not completed. If no timeout is set, then either the default timeout
            is used or no timeout is configured.
            """),
    )
    timeout_default = IntOption(
        "--timeout-default",
        default=None,
        advanced=True,
        help=softwrap("""
            The default timeout (in seconds) for a test target if the `timeout` field is not
            set on the target.
            """),
    )
    timeout_maximum = IntOption(
        "--timeout-maximum",
        default=None,
        advanced=True,
        help=
        "The maximum timeout (in seconds) that may be used on a `python_tests` target.",
    )
    junit_family = StrOption(
        "--junit-family",
        default="xunit2",
        advanced=True,
        help=softwrap("""
            The format of generated junit XML files. See
            https://docs.pytest.org/en/latest/reference.html#confval-junit_family.
            """),
    )
    execution_slot_var = StrOption(
        "--execution-slot-var",
        default=None,
        advanced=True,
        help=softwrap("""
            If a non-empty string, the process execution slot id (an integer) will be exposed
            to tests under this environment variable name.
            """),
    )
    config_discovery = BoolOption(
        "--config-discovery",
        default=True,
        advanced=True,
        help=softwrap("""
            If true, Pants will include all relevant Pytest config files (e.g. `pytest.ini`)
            during runs. See
            https://docs.pytest.org/en/stable/customize.html#finding-the-rootdir for where
            config files should be located for Pytest to discover them.
            """),
    )

    export = ExportToolOption()

    @property
    def all_requirements(self) -> tuple[str, ...]:
        # The tool requirement itself plus any user-configured extras (e.g. plugins).
        return (self.version, *self.extra_requirements)

    def config_request(self, dirs: Iterable[str]) -> ConfigFilesRequest:
        # Refer to https://docs.pytest.org/en/stable/customize.html#finding-the-rootdir for how
        # config files are discovered.
        check_existence: list[str] = []
        check_content: dict[str, bytes] = {}
        # Check the build root ("") plus each requested directory; `pytest.ini` counts by
        # mere existence, the others only when they contain the relevant config section.
        for d in ("", *dirs):
            check_existence.append(os.path.join(d, "pytest.ini"))
            check_content[os.path.join(
                d, "pyproject.toml")] = b"[tool.pytest.ini_options]"
            check_content[os.path.join(d, "tox.ini")] = b"[pytest]"
            check_content[os.path.join(d, "setup.cfg")] = b"[tool:pytest]"

        return ConfigFilesRequest(
            discovery=self.config_discovery,
            check_existence=check_existence,
            check_content=check_content,
        )

    @memoized_method
    def validate_pytest_cov_included(self) -> None:
        """Raise ValueError unless `pytest-cov` appears in `extra_requirements`.

        Memoized so the validation (and its error) only runs once per instance.
        """
        for s in self.extra_requirements:
            try:
                req = PipRequirement.parse(s).project_name
            except Exception as e:
                raise ValueError(
                    f"Invalid requirement '{s}' in `[pytest].extra_requirements`: {e}"
                )
            # Compare canonicalized names so e.g. `pytest_cov`/`Pytest-Cov` also match.
            if canonicalize_project_name(req) == "pytest-cov":
                return

        raise ValueError(
            softwrap(f"""
                You set `[test].use_coverage`, but `[pytest].extra_requirements` is missing
                `pytest-cov`, which is needed to collect coverage data.

                This happens when overriding the `extra_requirements` option. Please either explicitly
                add back `pytest-cov` or use `extra_requirements.add` to keep Pants's default, rather than
                overriding it. Run `{bin_name()} help-advanced pytest` to see the default version of
                `pytest-cov` and see {doc_url('options#list-values')} for more on adding vs.
                overriding list options.
                """))
Example #22
0
class PythonToolBase(PythonToolRequirementsBase):
    """Base class for subsystems that configure a python tool to be invoked out-of-process."""

    # Subclasses must set.
    default_main: ClassVar[MainSpecification]

    # The defaults below are lambdas so that each concrete subclass's `default_main`
    # is consulted at option-registration time: a ConsoleScript default pre-fills
    # `--console-script`, an EntryPoint default pre-fills `--entry-point`.
    console_script = StrOption(
        "--console-script",
        advanced=True,
        default=lambda cls:
        (cls.default_main.spec
         if isinstance(cls.default_main, ConsoleScript) else None),
        help=softwrap("""
            The console script for the tool. Using this option is generally preferable to
            (and mutually exclusive with) specifying an --entry-point since console script
            names have a higher expectation of staying stable across releases of the tool.
            Usually, you will not want to change this from the default.
            """),
    )
    entry_point = StrOption(
        "--entry-point",
        advanced=True,
        default=lambda cls:
        (cls.default_main.spec
         if isinstance(cls.default_main, EntryPoint) else None),
        help=softwrap("""
            The entry point for the tool. Generally you only want to use this option if the
            tool does not offer a --console-script (which this option is mutually exclusive
            with). Usually, you will not want to change this from the default.
            """),
    )

    @property
    def main(self) -> MainSpecification:
        """Resolve the tool's main, honoring user overrides of either option.

        An option counts as "set" when its value differs from the default; setting
        both `--console-script` and `--entry-point` is an error since they are
        mutually exclusive.
        """
        is_default_console_script = self.options.is_default("console_script")
        is_default_entry_point = self.options.is_default("entry_point")
        if not is_default_console_script and not is_default_entry_point:
            raise OptionsError(
                softwrap(f"""
                    Both [{self.options_scope}].console-script={self.console_script} and
                    [{self.options_scope}].entry-point={self.entry_point} are configured
                    but these options are mutually exclusive. Please pick one.
                    """))
        if not is_default_console_script:
            assert self.console_script is not None
            return ConsoleScript(self.console_script)
        if not is_default_entry_point:
            assert self.entry_point is not None
            return EntryPoint.parse(self.entry_point)
        # Neither option was overridden: fall back to the subclass-declared default.
        return self.default_main

    def to_pex_request(
        self,
        *,
        interpreter_constraints: InterpreterConstraints | None = None,
        extra_requirements: Iterable[str] = (),
        main: MainSpecification | None = None,
        sources: Digest | None = None,
    ) -> PexRequest:
        # Delegate to the base class, defaulting `main` to this tool's resolved main.
        return super().to_pex_request(
            interpreter_constraints=interpreter_constraints,
            extra_requirements=extra_requirements,
            main=main or self.main,
            sources=sources,
        )
Example #23
0
class DockerOptions(Subsystem):
    options_scope = "docker"
    help = "Options for interacting with Docker."

    _registries = DictOption[Any](
        "--registries",
        help=softwrap("""
            Configure Docker registries. The schema for a registry entry is as follows:

                {
                    "registry-alias": {
                        "address": "registry-domain:port",
                        "default": bool,
                    },
                    ...
                }

            If no registries are provided in a `docker_image` target, then all default
            addresses will be used, if any.

            The `docker_image.registries` may be provided with a list of registry addresses
            and registry aliases prefixed with `@` to be used instead of the defaults.

            A configured registry is marked as default either by setting `default = true`
            or with an alias of `"default"`.
            """),
        fromfile=True,
    )
    default_repository = StrOption(
        "--default-repository",
        # Fixed typo in user-facing help: "placeheolders" -> "placeholders".
        help=(softwrap(f"""
                Configure the default repository name used in the Docker image tag.

                The value is formatted and may reference these variables (in addition to the normal
                placeholders derived from the Dockerfile and build args etc):

                {bullet_list(["name", "directory", "parent_directory"])}

                Example: `--default-repository="{{directory}}/{{name}}"`.

                The `name` variable is the `docker_image`'s target name, `directory` and
                `parent_directory` are the name of the directory in which the BUILD file is for the
                target, and its parent directory respectively.

                Use the `repository` field to set this value directly on a `docker_image` target.

                Any registries or tags are added to the image name as required, and should
                not be part of the repository name.
                """)),
        default="{name}",
    )
    default_context_root = WorkspacePathOption(
        "--default-context-root",
        default="",
        help=softwrap("""
            Provide a default Docker build context root path for `docker_image` targets that
            does not specify their own `context_root` field.

            The context root is relative to the build root by default, but may be prefixed
            with `./` to be relative to the directory of the BUILD file of the `docker_image`.

            Examples:

                --default-context-root=src/docker
                --default-context-root=./relative_to_the_build_file
            """),
    )
    _build_args = ShellStrListOption(
        "--build-args",
        help=softwrap(f"""
            Global build arguments (for Docker `--build-arg` options) to use for all
            `docker build` invocations.

            Entries are either strings in the form `ARG_NAME=value` to set an explicit value;
            or just `ARG_NAME` to copy the value from Pants's own environment.

            Example:

                [{options_scope}]
                build_args = ["VAR1=value", "VAR2"]


            Use the `extra_build_args` field on a `docker_image` target for additional
            image specific build arguments.
            """),
    )
    build_target_stage = StrOption(
        "--build-target-stage",
        default=None,
        help=softwrap("""
            Global default value for `target_stage` on `docker_image` targets, overriding
            the field value on the targets, if there is a matching stage in the `Dockerfile`.

            This is useful to provide from the command line, to specify the target stage to
            build for at execution time.
            """),
    )
    build_verbose = BoolOption(
        "--build-verbose",
        default=False,
        help=
        "Whether to log the Docker output to the console. If false, only the image ID is logged.",
    )
    _env_vars = ShellStrListOption(
        "--env-vars",
        help=softwrap("""
            Environment variables to set for `docker` invocations.

            Entries are either strings in the form `ENV_VAR=value` to set an explicit value;
            or just `ENV_VAR` to copy the value from Pants's own environment.
            """),
        advanced=True,
    )
    run_args = ShellStrListOption(
        "--run-args",
        # NOTE: this default is computed once at import time, based on whether
        # stdout is then connected to a TTY.
        default=["--interactive", "--tty"] if sys.stdout.isatty() else [],
        help=softwrap(f"""
            Additional arguments to use for `docker run` invocations.

            Example:

                $ {bin_name()} run --{options_scope}-run-args="-p 127.0.0.1:80:8080/tcp\
                    --name demo" src/example:image -- [image entrypoint args]

            To provide the top-level options to the `docker` client, use
            `[{options_scope}].env_vars` to configure the
            [Environment variables]({doc_links['docker_env_vars']}) as appropriate.

            The arguments for the image entrypoint may be passed on the command line after a
            double dash (`--`), or using the `--run-args` option.

            Defaults to `--interactive --tty` when stdout is connected to a terminal.
            """),
    )
    _executable_search_paths = StrListOption(
        "--executable-search-paths",
        default=["<PATH>"],
        help=softwrap("""
            The PATH value that will be used to find the Docker client and any tools required.

            The special string `"<PATH>"` will expand to the contents of the PATH env var.
            """),
        advanced=True,
        metavar="<binary-paths>",
    )
    _tools = StrListOption(
        "--tools",
        default=[],
        help=softwrap("""
            List any additional executable tools required for Docker to work. The paths to
            these tools will be included in the PATH used in the execution sandbox, so that
            they may be used by the Docker client.
            """),
        advanced=True,
    )

    # The three properties below expose the raw list options deduplicated and sorted.

    @property
    def build_args(self) -> tuple[str, ...]:
        return tuple(sorted(set(self._build_args)))

    @property
    def env_vars(self) -> tuple[str, ...]:
        return tuple(sorted(set(self._env_vars)))

    @property
    def tools(self) -> tuple[str, ...]:
        return tuple(sorted(set(self._tools)))

    @memoized_method
    def registries(self) -> DockerRegistries:
        """Parse the raw `--registries` dict (memoized per instance)."""
        return DockerRegistries.from_dict(self._registries)

    @memoized_method
    def executable_search_path(self, env: Environment) -> tuple[str, ...]:
        """Expand `--executable-search-paths`, resolving the `<PATH>` sentinel.

        `<PATH>` is replaced by the entries of the PATH env var; order is preserved
        and duplicates are dropped via OrderedSet.
        """
        def iter_path_entries():
            for entry in self._executable_search_paths:
                if entry == "<PATH>":
                    path = env.get("PATH")
                    if path:
                        yield from path.split(os.pathsep)
                else:
                    yield entry

        return tuple(OrderedSet(iter_path_entries()))
Example #24
0
class PythonSetup(Subsystem):
    options_scope = "python"
    help = "Options for Pants's Python backend."

    # Class-level defaults referenced by the option registrations below.
    default_interpreter_constraints = ["CPython>=3.7,<4"]
    default_interpreter_universe = [
        "2.7", "3.5", "3.6", "3.7", "3.8", "3.9", "3.10", "3.11"
    ]

    interpreter_constraints = StrListOption(
        "--interpreter-constraints",
        default=default_interpreter_constraints,
        help=
        ("The Python interpreters your codebase is compatible with.\n\nSpecify with "
         "requirement syntax, e.g. 'CPython>=2.7,<3' (A CPython interpreter with version "
         ">=2.7 AND version <3) or 'PyPy' (A pypy interpreter of any version). Multiple "
         "constraint strings will be ORed together.\n\nThese constraints are used as the "
         "default value for the `interpreter_constraints` field of Python targets."
         ),
        advanced=True,
        metavar="<requirement>",
    )
    interpreter_universe = StrListOption(
        "--interpreter-versions-universe",
        default=default_interpreter_universe,
        help=
        ("All known Python major/minor interpreter versions that may be used by either "
         "your code or tools used by your code.\n\n"
         "This is used by Pants to robustly handle interpreter constraints, such as knowing "
         "when generating lockfiles which Python versions to check if your code is "
         "using.\n\n"
         "This does not control which interpreter your code will use. Instead, to set your "
         "interpreter constraints, update `[python].interpreter_constraints`, the "
         "`interpreter_constraints` field, and relevant tool options like "
         "`[isort].interpreter_constraints` to tell Pants which interpreters your code "
         f"actually uses. See {doc_url('python-interpreter-compatibility')}.\n\n"
         "All elements must be the minor and major Python version, e.g. '2.7' or '3.10'. Do "
         "not include the patch version.\n\n"),
        advanced=True,
    )
    requirement_constraints = FileOption(
        "--requirement-constraints",
        default=None,
        help=
        ("When resolving third-party requirements for your own code (vs. tools you run), "
         "use this constraints file to determine which versions to use.\n\n"
         "This only applies when resolving user requirements, rather than tools you run "
         "like Black and Pytest. To constrain tools, set `[tool].lockfile`, e.g. "
         "`[black].lockfile`.\n\n"
         "See https://pip.pypa.io/en/stable/user_guide/#constraints-files for more "
         "information on the format of constraint files and how constraints are applied in "
         "Pex and pip.\n\n"
         "Mutually exclusive with `[python].enable_resolves`."),
        advanced=True,
        mutually_exclusive_group="lockfile",
    )
    resolve_all_constraints = BoolOption(
        "--resolve-all-constraints",
        default=True,
        help=
        ("If enabled, when resolving requirements, Pants will first resolve your entire "
         "constraints file as a single global resolve. Then, if the code uses a subset of "
         "your constraints file, Pants will extract the relevant requirements from that "
         "global resolve so that only what's actually needed gets used. If disabled, Pants "
         "will not use a global resolve and will resolve each subset of your requirements "
         "independently."
         "\n\nUsually this option should be enabled because it can result in far fewer "
         "resolves."
         "\n\nRequires [python].requirement_constraints to be set."),
        advanced=True,
    )
    enable_resolves = BoolOption(
        "--enable-resolves",
        default=False,
        help=
        ("Set to true to enable the multiple resolves mechanism. See "
         "`[python].resolves` for an explanation of this feature.\n\n"
         "Warning: the `generate-lockfiles` goal does not yet work if you have VCS (Git) "
         "requirements and local requirements. Support is coming in a future Pants release. You "
         "can still use multiple resolves, but you must manually generate your lockfiles rather "
         "than using the `generate-lockfiles` goal, e.g. by running `pip freeze`. Specifically, "
         "set up `[python].resolves` to point to your manually generated lockfile paths, and "
         "then set `[python].resolves_generate_lockfiles = false` in `pants.toml`.\n\n"
         "You may also run into issues generating lockfiles when using Poetry as the generator, "
         "rather than Pex. See the option `[python].lockfile_generator` for more "
         "information.\n\n"
         "The resolves feature offers three major benefits compared to "
         "`[python].requirement_constraints`:\n\n"
         "  1. Uses `--hash` to validate that all downloaded files are expected, which "
         "reduces the risk of supply chain attacks.\n"
         "  2. Enforces that all transitive dependencies are in the lockfile, whereas "
         "constraints allow you to leave off dependencies. This ensures your build is more "
         "stable and reduces the risk of supply chain attacks.\n"
         "  3. Allows you to have multiple resolves in your repository.\n\n"
         "Mutually exclusive with `[python].requirement_constraints`."),
        advanced=True,
        mutually_exclusive_group="lockfile",
    )
    resolves = DictOption[str](
        "--resolves",
        default={
            "python-default": "3rdparty/python/default.lock"
        },
        help=
        ("A mapping of logical names to lockfile paths used in your project.\n\n"
         "Many organizations only need a single resolve for their whole project, which is "
         "a good default and the simplest thing to do. However, you may need multiple "
         "resolves, such as if you use two conflicting versions of a requirement in "
         "your repository.\n\n"
         "For now, Pants only has first-class support for disjoint resolves, meaning that "
         "you cannot ergonomically set a `python_requirement` or `python_source` target, "
         "for example, to work with multiple resolves. Practically, this means that you "
         "cannot yet ergonomically reuse common code, such as util files, across projects "
         "using different resolves. Support for overlapping resolves is coming in Pants 2.11 "
         "through a new 'parametrization' feature.\n\n"
         f"If you only need a single resolve, run `{bin_name()} generate-lockfiles` to "
         "generate the lockfile.\n\n"
         "If you need multiple resolves:\n\n"
         "  1. Via this option, define multiple resolve "
         "names and their lockfile paths. The names should be meaningful to your "
         "repository, such as `data-science` or `pants-plugins`.\n"
         "  2. Set the default with `[python].default_resolve`.\n"
         "  3. Update your `python_requirement` targets with the "
         "`resolve` field to declare which resolve they should "
         "be available in. They default to `[python].default_resolve`, so you "
         "only need to update targets that you want in non-default resolves. "
         "(Often you'll set this via the `python_requirements` or `poetry_requirements` "
         "target generators)\n"
         f"  4. Run `{bin_name()} generate-lockfiles` to generate the lockfiles. If the results "
         "aren't what you'd expect, adjust the prior step.\n"
         "  5. Update any targets like `python_source` / `python_sources`, "
         "`python_test` / `python_tests`, and `pex_binary` which need to set a non-default "
         "resolve with the `resolve` field.\n\n"
         "You can name the lockfile paths what you would like; Pants does not expect a "
         "certain file extension or location.\n\n"
         "Only applies if `[python].enable_resolves` is true."),
        advanced=True,
    )
    default_resolve = StrOption(
        "--default-resolve",
        default="python-default",
        help=("The default value used for the `resolve` field.\n\n"
              "The name must be defined as a resolve in `[python].resolves`."),
        advanced=True,
    )
    _resolves_to_interpreter_constraints = DictOption["list[str]"](
        "--resolves-to-interpreter-constraints",
        help=
        ("Override the interpreter constraints to use when generating a resolve's lockfile "
         "with the `generate-lockfiles` goal.\n\n"
         "By default, each resolve from `[python].resolves` will use your "
         "global interpreter constraints set in `[python].interpreter_constraints`. With "
         "this option, you can override each resolve to use certain interpreter "
         "constraints, such as `{'data-science': ['==3.8.*']}`.\n\n"
         "Pants will validate that the interpreter constraints of your code using a "
         "resolve are compatible with that resolve's own constraints. For example, if your "
         "code is set to use ['==3.9.*'] via the `interpreter_constraints` field, but it's "
         "also using a resolve whose interpreter constraints are set to ['==3.7.*'], then "
         "Pants will error explaining the incompatibility.\n\n"
         "The keys must be defined as resolves in `[python].resolves`."),
        advanced=True,
    )
    invalid_lockfile_behavior = EnumOption(
        "--invalid-lockfile-behavior",
        default=InvalidLockfileBehavior.error,
        help=
        ("The behavior when a lockfile has requirements or interpreter constraints that are "
         "not compatible with what the current build is using.\n\n"
         "We recommend keeping the default of `error` for CI builds.\n\n"
         "Note that `warn` will still expect a Pants lockfile header, it only won't error if "
         "the lockfile is stale and should be regenerated. Use `ignore` to avoid needing a "
         "lockfile header at all, e.g. if you are manually managing lockfiles rather than "
         "using the `generate-lockfiles` goal."),
        advanced=True,
    )
    _lockfile_generator = EnumOption(
        "--lockfile-generator",
        default=LockfileGenerator.POETRY,
        help=
        ("Whether to use Pex or Poetry with the `generate-lockfiles` goal.\n\n"
         "Poetry does not work with `[python-repos]` for custom indexes/cheeseshops. If you use "
         "this feature, you should use Pex.\n\n"
         "Several users have also had issues with how Poetry's lockfile generation handles "
         "environment markers for transitive dependencies; certain dependencies end up with "
         "nonsensical environment markers which cause the dependency to not be installed, then "
         "for Pants/Pex to complain the dependency is missing, even though it's in the "
         "lockfile. There is a workaround: for `[python].resolves`, manually create a "
         "`python_requirement` target for the problematic transitive dependencies so that they "
         "are seen as direct requirements, rather than transitive. For tool lockfiles, add the "
         "problematic transitive dependency to `[tool].extra_requirements`, e.g. "
         "`[isort].extra_requirements`. Then, regenerate the lockfile(s) with the "
         "`generate-lockfiles` goal. Alternatively, use Pex for generation.\n\n"
         "Finally, installing from a Poetry-generated lockfile is slower than installing from a "
         "Pex lockfile.\n\n"
         "However, Pex lockfile generation is a new feature. Given how vast the Python packaging "
         "ecosystem is, it is possible you may experience edge cases / bugs we haven't yet "
         "covered. Bug reports are appreciated! "
         "https://github.com/pantsbuild/pants/issues/new/choose\n\n"
         "Note that while Pex generates locks in a proprietary JSON format, you can use the "
         f"`{bin_name()} export` goal for Pants to create a virtual environment for "
         f"interoperability with tools like IDEs."),
    )
    resolves_generate_lockfiles = BoolOption(
        "--resolves-generate-lockfiles",
        default=True,
        help=
        ("If False, Pants will not attempt to generate lockfiles for `[python].resolves` when "
         "running the `generate-lockfiles` goal.\n\n"
         "This is intended to allow you to manually generate lockfiles as a workaround for the "
         "issues described in the `[python].enable_resolves` option.\n\n"
         "If you set this to False, Pants will not attempt to validate the metadata headers "
         "for your user lockfiles. This is useful so that you can keep "
         "`[python].invalid_lockfile_behavior` to `error` or `warn` if you'd like so that tool "
         "lockfiles continue to be validated, while user lockfiles are skipped."
         ),
        advanced=True,
    )
    run_against_entire_lockfile = BoolOption(
        "--run-against-entire-lockfile",
        default=False,
        help=
        ("If enabled, when running binaries, tests, and repls, Pants will use the entire "
         "lockfile/constraints file instead of just the relevant subset. This can improve "
         "performance and reduce cache size, but has two consequences: 1) All cached test "
         "results will be invalidated if any requirement in the lockfile changes, rather "
         "than just those that depend on the changed requirement. 2) Requirements unneeded "
         "by a test/run/repl will be present on the sys.path, which might in rare cases "
         "cause their behavior to change.\n\n"
         "This option does not affect packaging deployable artifacts, such as "
         "PEX files, wheels and cloud functions, which will still use just the exact "
         "subset of requirements needed."),
        advanced=True,
    )
    resolver_manylinux = StrOption(
        "--resolver-manylinux",
        default="manylinux2014",
        help=
        "Whether to allow resolution of manylinux wheels when resolving requirements for "
        "foreign linux platforms. The value should be a manylinux platform upper bound, "
        "e.g.: 'manylinux2010', or else the string 'no' to disallow.",
        advanced=True,
    )
    tailor_ignore_solitary_init_files = BoolOption(
        "--tailor-ignore-solitary-init-files",
        default=True,
        help=
        "Don't tailor `python_sources` targets for solitary `__init__.py` files, as "
        "those usually exist as import scaffolding rather than true library code.\n\n"
        "Set to False if you commonly have packages containing real code in "
        "`__init__.py` and there are no other .py files in the package.",
        advanced=True,
    )
    tailor_requirements_targets = BoolOption(
        "--tailor-requirements-targets",
        default=True,
        help="Tailor python_requirements() targets for requirements files.",
        advanced=True,
    )
    tailor_pex_binary_targets = BoolOption(
        "--tailor-pex-binary-targets",
        default=True,
        help="Tailor pex_binary() targets for Python entry point files.",
        advanced=True,
    )
    macos_big_sur_compatibility = BoolOption(
        "--macos-big-sur-compatibility",
        default=False,
        help=
        "If set, and if running on MacOS Big Sur, use macosx_10_16 as the platform "
        "when building wheels. Otherwise, the default of macosx_11_0 will be used. "
        "This may be required for pip to be able to install the resulting distribution "
        "on Big Sur.",
    )

    @property
    def generate_lockfiles_with_pex(self) -> bool:
        """Else, generate with Poetry."""
        # Deprecation: warn when the generator is left at its (Poetry) default,
        # since the default is slated to change to Pex in 2.12.
        if self.options.is_default("lockfile_generator"):
            warn_or_error(
                "2.12.0.dev0",
                "`[python].lockfile_generator` defaulting to 'poetry'",
                softwrap(f"""
                    In Pants 2.12, Pants will default to using Pex to generate lockfiles
                    with the `generate-lockfiles` goal, rather than Poetry. Run
                    `{bin_name()} help-advanced python` for more information on the benefits and
                    possible issues with switching to Pex.

                    To keep using Poetry, set `[python].lockfile_generator = 'poetry'` in
                    pants.toml. To try Pex, set to 'pex'.

                    Note that you can incrementally switch to Pex lockfiles if you want to reduce
                    risk while migrating. The option `[python].lockfile_generator` only impacts
                    how Pants generates new lockfiles; you can continue to use
                    requirements.txt-style lockfiles (i.e. those generated by Poetry) even if
                    new lockfiles are generated in Pex's JSON format. For example, you can run
                    `{bin_name()} --python-lockfile-generator=pex generate-lockfiles
                    --resolve=isort` to only regenerate the isort lockfile.
                    """),
            )

        return self._lockfile_generator == LockfileGenerator.PEX

    @memoized_property
    def resolves_to_interpreter_constraints(
            self) -> dict[str, tuple[str, ...]]:
        """Validated copy of `--resolves-to-interpreter-constraints`.

        Raises KeyError on resolve names not present in `[python].resolves`;
        constraint lists are frozen as tuples.
        """
        result = {}
        for resolve, ics in self._resolves_to_interpreter_constraints.items():
            if resolve not in self.resolves:
                raise KeyError(
                    "Unrecognized resolve name in the option "
                    f"`[python].resolves_to_interpreter_constraints`: {resolve}. Each "
                    "key must be one of the keys in `[python].resolves`: "
                    f"{sorted(self.resolves.keys())}")
            result[resolve] = tuple(ics)
        return result

    def resolve_all_constraints_was_set_explicitly(self) -> bool:
        return not self.options.is_default("resolve_all_constraints")

    @property
    def manylinux(self) -> str | None:
        # "false"/"no"/"none" (case-insensitive) and None all disable manylinux wheels.
        manylinux = cast(Optional[str], self.resolver_manylinux)
        if manylinux is None or manylinux.lower() in ("false", "no", "none"):
            return None
        return manylinux

    @property
    def manylinux_pex_args(self) -> Iterator[str]:
        # Translate the option into the corresponding Pex CLI flags.
        if self.manylinux:
            yield "--manylinux"
            yield self.manylinux
        else:
            yield "--no-manylinux"

    @property
    def scratch_dir(self):
        # Per-scope working directory under the Pants workdir.
        return os.path.join(self.options.pants_workdir,
                            *self.options_scope.split("."))

    def compatibility_or_constraints(
            self, compatibility: Iterable[str] | None) -> tuple[str, ...]:
        """Return either the given `compatibility` field or the global interpreter constraints.

        If interpreter constraints are supplied by the CLI flag, return those only.
        """
        if self.options.is_flagged("interpreter_constraints"):
            return self.interpreter_constraints
        return tuple(compatibility or self.interpreter_constraints)

    def compatibilities_or_constraints(
            self, compatibilities: Iterable[Iterable[str] | None]
    ) -> tuple[str, ...]:
        # Flatten the per-target compatibilities through compatibility_or_constraints.
        return tuple(
            constraint for compatibility in compatibilities
            for constraint in self.compatibility_or_constraints(compatibility))
Example #25
0
class PythonToolRequirementsBase(Subsystem):
    """Base class for subsystems that configure a set of requirements for a python tool."""

    # Subclasses must set.
    default_version: ClassVar[str]
    # Subclasses do not need to override.
    default_extra_requirements: ClassVar[Sequence[str]] = []

    default_interpreter_constraints: ClassVar[Sequence[str]] = []
    register_interpreter_constraints: ClassVar[bool] = False

    # If this tool does not mix with user requirements (e.g. Flake8 and Isort, but not Pylint and
    # Pytest), you should set this to True.
    #
    # You also need to subclass `GenerateToolLockfileSentinel` and create a rule that goes from
    # it -> GeneratePythonLockfile by calling `GeneratePythonLockfile.from_python_tool()`.
    # Register the UnionRule.
    register_lockfile: ClassVar[bool] = False
    default_lockfile_resource: ClassVar[tuple[str, str] | None] = None
    default_lockfile_url: ClassVar[str | None] = None
    uses_requirements_from_source_plugins: ClassVar[bool] = False

    version = StrOption(
        "--version",
        advanced=True,
        # Lazily defaulted so each subclass's `default_version` is picked up.
        default=lambda cls: cls.default_version,
        help="Requirement string for the tool.",
    )
    extra_requirements = StrListOption(
        "--extra-requirements",
        advanced=True,
        default=lambda cls: cls.default_extra_requirements,
        help=
        "Any additional requirement strings to use with the tool. This is useful if the "
        "tool allows you to install plugins or if you need to constrain a dependency to "
        "a certain version.",
    )
    _interpreter_constraints = StrListOption(
        "--interpreter-constraints",
        # Only registered when the subclass opts in via `register_interpreter_constraints`.
        register_if=lambda cls: cls.register_interpreter_constraints,
        advanced=True,
        default=lambda cls: cls.default_interpreter_constraints,
        help="Python interpreter constraints for this tool.",
    )

    _lockfile = StrOption(
        "--lockfile",
        # Only registered when the subclass opts in via `register_lockfile`.
        register_if=lambda cls: cls.register_lockfile,
        default=DEFAULT_TOOL_LOCKFILE,
        advanced=True,
        help=lambda cls: softwrap(f"""
            Path to a lockfile used for installing the tool.

            Set to the string `{DEFAULT_TOOL_LOCKFILE}` to use a lockfile provided by
            Pants, so long as you have not changed the `--version` and
            `--extra-requirements` options, and the tool's interpreter constraints are
            compatible with the default. Pants will error or warn if the lockfile is not
            compatible (controlled by `[python].invalid_lockfile_behavior`). See
            {cls.default_lockfile_url} for the default lockfile contents.

            Set to the string `{NO_TOOL_LOCKFILE}` to opt out of using a lockfile. We
            do not recommend this, though, as lockfiles are essential for reproducible builds.

            To use a custom lockfile, set this option to a file path relative to the
            build root, then run `{bin_name()} generate-lockfiles --resolve={cls.options_scope}`.

            As explained at {doc_url('python-third-party-dependencies')}, lockfile generation
            via `generate-lockfiles` does not always work and you may want to manually generate
            the lockfile. You will want to set `[python].invalid_lockfile_behavior = 'ignore'` so
            that Pants does not complain about missing lockfile headers.
            """),
    )

    def __init__(self, *args, **kwargs):
        """Validate the subclass's class-level configuration before normal initialization.

        Raises:
            ValueError: if `default_interpreter_constraints` is set without
                `register_interpreter_constraints`, or if `register_lockfile` is set without
                both `default_lockfile_resource` and `default_lockfile_url`.
        """
        if self.default_interpreter_constraints and not self.register_interpreter_constraints:
            raise ValueError(
                softwrap(f"""
                    `default_interpreter_constraints` are configured for `{self.options_scope}`, but
                    `register_interpreter_constraints` is not set to `True`, so the
                    `--interpreter-constraints` option will not be registered. Did you mean to set
                    this?
                    """))

        if self.register_lockfile and (not self.default_lockfile_resource
                                       or not self.default_lockfile_url):
            raise ValueError(
                softwrap(f"""
                    The class property `default_lockfile_resource` and `default_lockfile_url`
                    must be set if `register_lockfile` is set. See `{self.options_scope}`.
                    """))

        super().__init__(*args, **kwargs)

    @property
    def all_requirements(self) -> tuple[str, ...]:
        """All the raw requirement strings to install the tool.

        This may not include transitive dependencies: these are top-level requirements.
        """
        return (self.version, *self.extra_requirements)

    def pex_requirements(
        self,
        *,
        extra_requirements: Iterable[str] = (),
    ) -> PexRequirements | EntireLockfile:
        """The requirements to be used when installing the tool.

        If the tool supports lockfiles, the returned type will install from the lockfile rather than
        `all_requirements`.
        """

        requirements = (*self.all_requirements, *extra_requirements)

        if not self.uses_lockfile:
            return PexRequirements(requirements)

        # The digest lets Pants detect when the lockfile no longer matches the requested
        # requirements (see `[python].invalid_lockfile_behavior`).
        hex_digest = calculate_invalidation_digest(requirements)

        lockfile: ToolDefaultLockfile | ToolCustomLockfile
        if self.lockfile == DEFAULT_TOOL_LOCKFILE:
            # Guaranteed non-None by the `__init__` validation when `register_lockfile` is set.
            assert self.default_lockfile_resource is not None
            lockfile = ToolDefaultLockfile(
                file_content=FileContent(
                    f"{self.options_scope}_default.lock",
                    # NOTE(review): `importlib.resources.read_binary` is deprecated in
                    # Python 3.11+ in favor of `importlib.resources.files(...)`.
                    importlib.resources.read_binary(
                        *self.default_lockfile_resource),
                ),
                lockfile_hex_digest=hex_digest,
                resolve_name=self.options_scope,
                uses_project_interpreter_constraints=(
                    not self.register_interpreter_constraints),
                uses_source_plugins=self.uses_requirements_from_source_plugins,
            )
        else:
            lockfile = ToolCustomLockfile(
                file_path=self.lockfile,
                file_path_description_of_origin=
                f"the option `[{self.options_scope}].lockfile`",
                lockfile_hex_digest=hex_digest,
                resolve_name=self.options_scope,
                uses_project_interpreter_constraints=(
                    not self.register_interpreter_constraints),
                uses_source_plugins=self.uses_requirements_from_source_plugins,
            )
        return EntireLockfile(lockfile,
                              complete_req_strings=tuple(requirements))

    @property
    def lockfile(self) -> str:
        """The path to a lockfile, or one of the special sentinel strings.

        May be the sentinel `NO_TOOL_LOCKFILE` or `DEFAULT_TOOL_LOCKFILE` rather than a
        real path. This assumes you have set the class property `register_lockfile = True`.
        """
        # NB: the original "docstring" here was an f-string, which Python does not store as
        # `__doc__`; it was a throwaway expression evaluated on every access. Now a real docstring.
        return self._lockfile

    @property
    def uses_lockfile(self) -> bool:
        """Return true if the tool is installed from a lockfile.

        Note that this lockfile may be the default lockfile Pants distributes.
        """
        return self.register_lockfile and self.lockfile != NO_TOOL_LOCKFILE

    @property
    def uses_custom_lockfile(self) -> bool:
        """Return true if the tool is installed from a custom lockfile the user sets up."""
        return self.register_lockfile and self.lockfile not in (
            NO_TOOL_LOCKFILE,
            DEFAULT_TOOL_LOCKFILE,
        )

    @property
    def interpreter_constraints(self) -> InterpreterConstraints:
        """The interpreter constraints to use when installing and running the tool.

        This assumes you have set the class property `register_interpreter_constraints = True`.
        """
        return InterpreterConstraints(self._interpreter_constraints)

    def to_pex_request(
        self,
        *,
        interpreter_constraints: InterpreterConstraints | None = None,
        extra_requirements: Iterable[str] = (),
        main: MainSpecification | None = None,
        sources: Digest | None = None,
    ) -> PexRequest:
        """Build a `PexRequest` for an internal-only PEX that installs this tool.

        Args:
            interpreter_constraints: override for the tool's own constraints.
            extra_requirements: additional requirement strings beyond `all_requirements`.
            main: optional entry point for the PEX.
            sources: optional digest of sources to include.
        """
        return PexRequest(
            output_filename=f"{self.options_scope.replace('-', '_')}.pex",
            internal_only=True,
            requirements=self.pex_requirements(
                extra_requirements=extra_requirements),
            interpreter_constraints=interpreter_constraints
            or self.interpreter_constraints,
            main=main,
            sources=sources,
        )
Exemple #26
0
class TwineSubsystem(PythonToolBase):
    # Subsystem wiring Twine (the PyPI upload tool) into the `publish` goal.
    options_scope = "twine"
    name = "Twine"
    help = "The utility for publishing Python distributions to PyPi and other Python repositories."

    default_version = "twine>=3.7.1,<3.8"
    default_main = ConsoleScript("twine")

    # This explicit dependency resolves a weird behavior in poetry, where it would include a sys
    # platform constraint on "Windows" when this was included transitively from the twine
    # requirements.
    # See: https://github.com/pantsbuild/pants/pull/13594#issuecomment-968154931
    default_extra_requirements = ["colorama>=0.4.3"]

    register_interpreter_constraints = True
    default_interpreter_constraints = ["CPython>=3.7,<4"]

    # Ship a default lockfile bundled as package data; the URL points at the same file in
    # the Pants repo for display in `--lockfile` help.
    register_lockfile = True
    default_lockfile_resource = ("pants.backend.python.subsystems",
                                 "twine.lock")
    default_lockfile_path = "src/python/pants/backend/python/subsystems/twine.lock"
    default_lockfile_url = git_url(default_lockfile_path)

    skip = SkipOption("publish")
    args = ArgsListOption(example="--skip-existing")
    config = FileOption(
        "--config",
        default=None,
        advanced=True,
        help=lambda cls:
        ("Path to a .pypirc config file to use. "
         "(https://packaging.python.org/specifications/pypirc/)\n\n"
         f"Setting this option will disable `[{cls.options_scope}].config_discovery`. Use "
         "this option if the config is located in a non-standard location."),
    )
    config_discovery = BoolOption(
        "--config-discovery",
        default=True,
        advanced=True,
        help=lambda cls:
        ("If true, Pants will include all relevant config files during runs "
         "(`.pypirc`).\n\n"
         f"Use `[{cls.options_scope}].config` instead if your config is in a "
         "non-standard location."),
    )
    ca_certs_path = StrOption(
        "--ca-certs-path",
        advanced=True,
        default="<inherit>",
        help=
        ("Path to a file containing PEM-format CA certificates used for verifying secure "
         "connections when publishing python distributions.\n\n"
         'Uses the value from `[GLOBAL].ca_certs_path` by default. Set to `"<none>"` to '
         "not use the default CA certificate."),
    )

    def config_request(self) -> ConfigFilesRequest:
        """Build the request used to locate a `.pypirc` config file for Twine."""
        # Refer to https://twine.readthedocs.io/en/latest/#configuration for how config files are
        # discovered.
        return ConfigFilesRequest(
            specified=self.config,
            specified_option_name=f"[{self.options_scope}].config",
            discovery=self.config_discovery,
            check_existence=[".pypirc"],
        )

    def ca_certs_digest_request(
            self, default_ca_certs_path: str | None) -> CreateDigest | None:
        """Return a `CreateDigest` for the configured CA certs file, or None when disabled.

        `"<inherit>"` falls back to `default_ca_certs_path` (i.e. `[GLOBAL].ca_certs_path`);
        `"<none>"` or an empty path disables cert bundling entirely.
        """
        ca_certs_path: str | None = self.ca_certs_path
        if ca_certs_path == "<inherit>":
            ca_certs_path = default_ca_certs_path
        if not ca_certs_path or ca_certs_path == "<none>":
            return None

        # The certs file will typically not be in the repo, so we can't digest it via a PathGlobs.
        # Instead we manually create a FileContent for it.
        ca_certs_content = Path(ca_certs_path).read_bytes()
        chrooted_ca_certs_path = os.path.basename(ca_certs_path)
        return CreateDigest((FileContent(chrooted_ca_certs_path,
                                         ca_certs_content), ))
Exemple #27
0
class CoverageSubsystem(PythonToolBase):
    # Subsystem wiring coverage.py into the `test` goal for coverage measurement.
    options_scope = "coverage-py"
    help = "Configuration for Python test coverage measurement."

    default_version = "coverage[toml]>=5.5,<5.6"
    default_main = ConsoleScript("coverage")

    register_interpreter_constraints = True
    default_interpreter_constraints = ["CPython>=3.7,<4"]

    # Ship a default lockfile bundled as package data; the URL points at the same file in
    # the Pants repo for display in `--lockfile` help.
    register_lockfile = True
    default_lockfile_resource = ("pants.backend.python.subsystems",
                                 "coverage_py.lock")
    default_lockfile_path = "src/python/pants/backend/python/subsystems/coverage_py.lock"
    default_lockfile_url = git_url(default_lockfile_path)

    filter = StrListOption(
        "--filter",
        # Fixed: the example previously had an unbalanced backtick/bracket
        # (`['helloworld_test', 'helloworld/util/dirutil'].`), garbling the rendered help.
        help=softwrap("""
            A list of Python modules or filesystem paths to use in the coverage report, e.g.
            `['helloworld_test', 'helloworld/util/dirutil']`.

            Both modules and directory paths are recursive: any submodules or child paths,
            respectively, will be included.

            If you leave this off, the coverage report will include every file
            in the transitive closure of the address/file arguments; for example, `test ::`
            will include every Python file in your project, whereas
            `test project/app_test.py` will include `app_test.py` and any of its transitive
            dependencies.
            """),
    )
    reports = EnumListOption(
        "--report",
        default=[CoverageReportType.CONSOLE],
        help="Which coverage report type(s) to emit.",
    )
    _output_dir = StrOption(
        "--output-dir",
        # `{distdir}` is substituted at lookup time by the `output_dir` method below.
        default=str(PurePath("{distdir}", "coverage", "python")),
        advanced=True,
        help=
        "Path to write the Pytest Coverage report to. Must be relative to the build root.",
    )
    config = FileOption(
        "--config",
        default=None,
        advanced=True,
        help=lambda cls: softwrap(f"""
            Path to an INI or TOML config file understood by coverage.py
            (https://coverage.readthedocs.io/en/stable/config.html).

            Setting this option will disable `[{cls.options_scope}].config_discovery`. Use
            this option if the config is located in a non-standard location.
            """),
    )
    config_discovery = BoolOption(
        "--config-discovery",
        default=True,
        advanced=True,
        help=lambda cls: softwrap(f"""
            If true, Pants will include any relevant config files during runs
            (`.coveragerc`, `setup.cfg`, `tox.ini`, and `pyproject.toml`).

            Use `[{cls.options_scope}].config` instead if your config is in a
            non-standard location.
            """),
    )
    global_report = BoolOption(
        "--global-report",
        default=False,
        help=softwrap("""
            If true, Pants will generate a global coverage report.

            The global report will include all Python source files in the workspace and not just
            those depended on by the tests that were run.
            """),
    )
    fail_under = FloatOption(
        "--fail-under",
        default=None,
        help=softwrap("""
            Fail if the total combined coverage percentage for all tests is less than this
            number.

            Use this instead of setting fail_under in a coverage.py config file,
            as the config will apply to each test separately, while you typically want this
            to apply to the combined coverage for all tests run.

            Note that you must generate at least one (non-raw) coverage report for this
            check to trigger.

            Note also that if you specify a non-integral value, you must
            also set [report] precision properly in the coverage.py config file to make use
            of the decimal places. See https://coverage.readthedocs.io/en/latest/config.html.
            """),
    )

    def output_dir(self, distdir: DistDir) -> PurePath:
        """Resolve `--output-dir`, expanding the `{distdir}` placeholder."""
        return PurePath(self._output_dir.format(distdir=distdir.relpath))

    @property
    def config_request(self) -> ConfigFilesRequest:
        """Build the request used to locate a coverage.py config file."""
        # Refer to https://coverage.readthedocs.io/en/stable/config.html.
        return ConfigFilesRequest(
            specified=self.config,
            specified_option_name=f"[{self.options_scope}].config",
            discovery=self.config_discovery,
            check_existence=[".coveragerc"],
            # Only treat these shared config files as relevant if they actually contain a
            # coverage section. Fixed: the tox.ini probe was `b"[coverage:]"`, which can
            # never match a real section header like `[coverage:run]`.
            check_content={
                "setup.cfg": b"[coverage:",
                "tox.ini": b"[coverage:",
                "pyproject.toml": b"[tool.coverage",
            },
        )
Exemple #28
0
class TailorSubsystem(GoalSubsystem):
    name = "tailor"
    help = "Auto-generate BUILD file targets for new source files."

    @classmethod
    def activated(cls, union_membership: UnionMembership) -> bool:
        # The goal is only active when at least one backend registered a
        # `PutativeTargetsRequest` union member.
        return PutativeTargetsRequest in union_membership

    check = BoolOption(
        "--check",
        default=False,
        help=(
            "Do not write changes to disk, only write back what would change. Return code "
            "0 means there would be no changes, and 1 means that there would be. "
        ),
    )
    build_file_name = StrOption(
        "--build-file-name",
        default="BUILD",
        help=(
            "The name to use for generated BUILD files.\n\n"
            "This must be compatible with `[GLOBAL].build_patterns`."
        ),
        advanced=True,
    )
    build_file_header = StrOption(
        "--build-file-header",
        default=None,
        help="A header, e.g., a copyright notice, to add to the content of created BUILD files.",
        advanced=True,
    )
    build_file_indent = StrOption(
        "--build-file-indent",
        default="    ",
        help="The indent to use when auto-editing BUILD files.",
        advanced=True,
    )
    _alias_mapping = DictOption[str](
        "--alias-mapping",
        help=(
            "A mapping from standard target type to custom type to use instead. The custom "
            "type can be a custom target type or a macro that offers compatible functionality "
            f"to the one it replaces (see {doc_url('macros')})."
        ),
        advanced=True,
    )
    ignore_paths = StrListOption(
        "--ignore-paths",
        # Fixed: the example previously read `['project/BUILD, 'ignore_me/**']`, with a
        # missing closing quote that left the rendered help unbalanced.
        help=(
            "Do not edit or create BUILD files at these paths.\n\n"
            "Can use literal file names and/or globs, e.g. "
            "`['project/BUILD', 'ignore_me/**']`.\n\n"
            "This augments the option `[GLOBAL].build_ignore`, which tells Pants to also not "
            "_read_ BUILD files at certain paths. In contrast, this option only tells Pants to "
            "not edit/create BUILD files at the specified paths."
        ),
        advanced=True,
    )
    _ignore_adding_targets = StrListOption(
        "--ignore-adding-targets",
        help=(
            "Do not add these target definitions.\n\n"
            "Expects a list of target addresses that would normally be added by `tailor`, "
            "e.g. `['project:tgt']`. To find these names, you can run `tailor --check`, then "
            "combine the BUILD file path with the target's name. For example, if `tailor` "
            "would add the target `bin` to `project/BUILD`, then the address would be "
            "`project:bin`. If the BUILD file is at the root of your repository, use `//` for "
            "the path, e.g. `//:bin`.\n\n"
            "Does not work with macros."
        ),
        advanced=True,
    )

    @property
    def ignore_adding_targets(self) -> set[str]:
        """`--ignore-adding-targets` as a set, for O(1) membership checks."""
        return set(self._ignore_adding_targets)

    def alias_for(self, standard_type: str) -> str | None:
        """Return the user-configured alias for `standard_type`, or None if unmapped."""
        # The get() could return None, but casting to str | None errors.
        # This cast suffices to avoid typecheck errors.
        return cast(str, self._alias_mapping.get(standard_type))

    def validate_build_file_name(self, build_file_patterns: tuple[str, ...]) -> None:
        """Check that the specified BUILD file name works with the repository's BUILD file
        patterns.

        Raises:
            ValueError: if `--build-file-name` would not be matched by any of the
                repository's `[GLOBAL].build_patterns`, i.e. generated files would be ignored.
        """
        filespec = Filespec(includes=list(build_file_patterns))
        if not bool(matches_filespec(filespec, paths=[self.build_file_name])):
            raise ValueError(
                f"The option `[{self.options_scope}].build_file_name` is set to "
                f"`{self.build_file_name}`, which is not compatible with "
                f"`[GLOBAL].build_patterns`: {sorted(build_file_patterns)}. This means that "
                "generated BUILD files would be ignored.\n\n"
                "To fix, please update the options so that they are compatible."
            )

    def filter_by_ignores(
        self, putative_targets: Iterable[PutativeTarget], build_file_ignores: tuple[str, ...]
    ) -> Iterator[PutativeTarget]:
        """Yield only the putative targets not excluded by path ignores or target ignores."""
        ignore_paths_filespec = Filespec(includes=[*self.ignore_paths, *build_file_ignores])
        for ptgt in putative_targets:
            is_ignored_file = bool(
                matches_filespec(
                    ignore_paths_filespec,
                    paths=[os.path.join(ptgt.path, self.build_file_name)],
                )
            )
            if is_ignored_file:
                continue
            if ptgt.addressable:
                # Note that `tailor` can only generate explicit targets, so we don't need to
                # worry about generated address syntax (`#`) or file address syntax.
                address = f"{ptgt.path or '//'}:{ptgt.name}"
                if address in self.ignore_adding_targets:
                    continue
            yield ptgt
Exemple #29
0
class TestSubsystem(GoalSubsystem):
    name = "test"
    help = "Run tests."

    # Prevent this class from being detected by pytest as a test class.
    __test__ = False

    @classmethod
    def activated(cls, union_membership: UnionMembership) -> bool:
        # The goal is only active when at least one backend registered a `TestFieldSet`.
        return TestFieldSet in union_membership

    debug = BoolOption(
        "--debug",
        default=False,
        help=softwrap("""
            Run tests sequentially in an interactive process. This is necessary, for
            example, when you add breakpoints to your code.
            """),
    )
    force = BoolOption(
        "--force",
        default=False,
        help=
        "Force the tests to run, even if they could be satisfied from cache.",
    )
    output = EnumOption(
        "--output",
        default=ShowOutput.FAILED,
        help="Show stdout/stderr for these tests.",
    )
    use_coverage = BoolOption(
        "--use-coverage",
        default=False,
        help="Generate a coverage report if the test runner supports it.",
    )
    open_coverage = BoolOption(
        "--open-coverage",
        default=False,
        help=softwrap("""
            If a coverage report file is generated, open it on the local system if the
            system supports this.
            """),
    )
    report = BoolOption("--report",
                        default=False,
                        advanced=True,
                        help="Write test reports to --report-dir.")
    # `{distdir}` is substituted at lookup time by the `report_dir` method below.
    default_report_path = str(PurePath("{distdir}", "test", "reports"))
    _report_dir = StrOption(
        "--report-dir",
        default=default_report_path,
        advanced=True,
        help=
        "Path to write test reports to. Must be relative to the build root.",
    )
    extra_env_vars = StrListOption(
        "--extra-env-vars",
        help=softwrap("""
            Additional environment variables to include in test processes.
            Entries are strings in the form `ENV_VAR=value` to use explicitly; or just
            `ENV_VAR` to copy the value of a variable in Pants's own environment.
            """),
    )
    shard = StrOption(
        "--shard",
        default="",
        help=softwrap("""
            A shard specification of the form "k/N", where N is a positive integer and k is a
            non-negative integer less than N.

            If set, the request input targets will be deterministically partitioned into N disjoint
            subsets of roughly equal size, and only the k'th subset will be used, with all others
            discarded.

            Useful for splitting large numbers of test files across multiple machines in CI.
            For example, you can run three shards with --shard=0/3, --shard=1/3, --shard=2/3.

            Note that the shards are roughly equal in size as measured by number of files.
            No attempt is made to consider the size of different files, the time they have
            taken to run in the past, or other such sophisticated measures.
            """),
    )

    def report_dir(self, distdir: DistDir) -> PurePath:
        """Resolve `--report-dir`, expanding the `{distdir}` placeholder."""
        return PurePath(self._report_dir.format(distdir=distdir.relpath))
Exemple #30
0
class PythonSetup(Subsystem):
    options_scope = "python"
    help = "Options for Pants's Python backend."

    # Fallback defaults used by the two interpreter-related options declared below.
    default_interpreter_constraints = ["CPython>=3.7,<4"]
    default_interpreter_universe = ["2.7", "3.5", "3.6", "3.7", "3.8", "3.9", "3.10", "3.11"]

    # Default interpreter constraints applied to Python targets (requirement syntax).
    interpreter_constraints = StrListOption(
        "--interpreter-constraints",
        default=default_interpreter_constraints,
        help=softwrap(
            """
            The Python interpreters your codebase is compatible with.

            These constraints are used as the default value for the `interpreter_constraints`
            field of Python targets.

            Specify with requirement syntax, e.g. 'CPython>=2.7,<3' (A CPython interpreter with
            version >=2.7 AND version <3) or 'PyPy' (A pypy interpreter of any version). Multiple
            constraint strings will be ORed together.
            """
        ),
        advanced=True,
        metavar="<requirement>",
    )
    # All major/minor interpreter versions Pants should consider when reasoning
    # about constraints (e.g. during lockfile generation); not a constraint itself.
    interpreter_universe = StrListOption(
        "--interpreter-versions-universe",
        default=default_interpreter_universe,
        help=softwrap(
            f"""
            All known Python major/minor interpreter versions that may be used by either
            your code or tools used by your code.

            This is used by Pants to robustly handle interpreter constraints, such as knowing
            when generating lockfiles which Python versions to check if your code is using.

            This does not control which interpreter your code will use. Instead, to set your
            interpreter constraints, update `[python].interpreter_constraints`, the
            `interpreter_constraints` field, and relevant tool options like
            `[isort].interpreter_constraints` to tell Pants which interpreters your code
            actually uses. See {doc_url('python-interpreter-compatibility')}.

            All elements must be the minor and major Python version, e.g. '2.7' or '3.10'. Do
            not include the patch version.
            """
        ),
        advanced=True,
    )
    # Master switch for user-code lockfiles; mutually exclusive with
    # `requirement_constraints` (both declare mutually_exclusive_group="lockfile").
    enable_resolves = BoolOption(
        "--enable-resolves",
        default=False,
        help=softwrap(
            f"""
            Set to true to enable lockfiles for user code. See `[python].resolves` for an
            explanation of this feature.

            Warning: the `generate-lockfiles` goal does not yet work if you have local
            requirements, regardless of using Pex vs. Poetry for the lockfile generator.
            Support is coming in a future Pants release. In the meantime, the workaround is to host
            the files in a custom repository with `[python-repos]`
            ({doc_url('python-third-party-dependencies#custom-repositories')}).

            You may also run into issues generating lockfiles when using Poetry as the generator,
            rather than Pex. See the option `[python].lockfile_generator` for more
            information.

            This option is mutually exclusive with `[python].requirement_constraints`. We strongly
            recommend using this option because it:

              1. Uses `--hash` to validate that all downloaded files are expected, which reduces\
                the risk of supply chain attacks.
              2. Enforces that all transitive dependencies are in the lockfile, whereas\
                constraints allow you to leave off dependencies. This ensures your build is more\
                stable and reduces the risk of supply chain attacks.
              3. Allows you to have multiple lockfiles in your repository.
            """
        ),
        advanced=True,
        mutually_exclusive_group="lockfile",
    )
    # Mapping of resolve name -> lockfile path; only used when `enable_resolves` is true.
    resolves = DictOption[str](
        "--resolves",
        default={"python-default": "3rdparty/python/default.lock"},
        help=softwrap(
            f"""
            A mapping of logical names to lockfile paths used in your project.

            Many organizations only need a single resolve for their whole project, which is
            a good default and often the simplest thing to do. However, you may need multiple
            resolves, such as if you use two conflicting versions of a requirement in
            your repository.

            If you only need a single resolve, run `{bin_name()} generate-lockfiles` to
            generate the lockfile.

            If you need multiple resolves:

              1. Via this option, define multiple resolve names and their lockfile paths.\
                The names should be meaningful to your repository, such as `data-science` or\
                `pants-plugins`.
              2. Set the default with `[python].default_resolve`.
              3. Update your `python_requirement` targets with the `resolve` field to declare which\
                resolve they should be available in. They default to `[python].default_resolve`,\
                so you only need to update targets that you want in non-default resolves.\
                (Often you'll set this via the `python_requirements` or `poetry_requirements`\
                target generators)
              4. Run `{bin_name()} generate-lockfiles` to generate the lockfiles. If the results\
                aren't what you'd expect, adjust the prior step.
              5. Update any targets like `python_source` / `python_sources`,\
                `python_test` / `python_tests`, and `pex_binary` which need to set a non-default\
                resolve with the `resolve` field.

            If a target can work with multiple resolves, you can either use the `parametrize`
            mechanism or manually create a distinct target per resolve. See {doc_url("targets")}
            for information about `parametrize`.

            For example:

                python_sources(
                    resolve=parametrize("data-science", "web-app"),
                )

            You can name the lockfile paths what you would like; Pants does not expect a
            certain file extension or location.

            Only applies if `[python].enable_resolves` is true.
            """
        ),
        advanced=True,
    )
    # Must name a key of `resolves`; used when a target omits its `resolve` field.
    default_resolve = StrOption(
        "--default-resolve",
        default="python-default",
        help=softwrap(
            """
            The default value used for the `resolve` field.

            The name must be defined as a resolve in `[python].resolves`.
            """
        ),
        advanced=True,
    )
    # Private: read via the validating `resolves_to_interpreter_constraints` property.
    # NOTE: fixed a broken option reference in the help text — it previously read
    # "`[python.interpreter_constraints`" (missing closing bracket).
    _resolves_to_interpreter_constraints = DictOption["list[str]"](
        "--resolves-to-interpreter-constraints",
        help=softwrap(
            """
            Override the interpreter constraints to use when generating a resolve's lockfile
            with the `generate-lockfiles` goal.

            By default, each resolve from `[python].resolves` will use your
            global interpreter constraints set in `[python].interpreter_constraints`. With
            this option, you can override each resolve to use certain interpreter
            constraints, such as `{'data-science': ['==3.8.*']}`.

            Warning: this does NOT impact the interpreter constraints used by targets within the
            resolve, which is instead set by the option `[python].interpreter_constraints` and the
            `interpreter_constraints` field. It only impacts how the lockfile is generated.

            Pants will validate that the interpreter constraints of your code using a
            resolve are compatible with that resolve's own constraints. For example, if your
            code is set to use ['==3.9.*'] via the `interpreter_constraints` field, but it's
            using a resolve whose interpreter constraints are set to ['==3.7.*'], then
            Pants will error explaining the incompatibility.

            The keys must be defined as resolves in `[python].resolves`.
            """
        ),
        advanced=True,
    )
    # What to do when a lockfile's metadata doesn't match the current build.
    invalid_lockfile_behavior = EnumOption(
        "--invalid-lockfile-behavior",
        default=InvalidLockfileBehavior.error,
        help=softwrap(
            """
            The behavior when a lockfile has requirements or interpreter constraints that are
            not compatible with what the current build is using.

            We recommend keeping the default of `error` for CI builds.

            Note that `warn` will still expect a Pants lockfile header, it only won't error if
            the lockfile is stale and should be regenerated. Use `ignore` to avoid needing a
            lockfile header at all, e.g. if you are manually managing lockfiles rather than
            using the `generate-lockfiles` goal.
            """
        ),
        advanced=True,
    )
    # Private: read via the `generate_lockfiles_with_pex` property.
    _lockfile_generator = EnumOption(
        "--lockfile-generator",
        default=LockfileGenerator.PEX,
        help=softwrap(
            f"""
            Whether to use Pex or Poetry with the `generate-lockfiles` goal.

            Poetry does not support these features:

              1) `[python-repos]` for custom indexes/cheeseshops.
              2) VCS (Git) requirements.
              3) `[GLOBAL].ca_certs_path`.

            If you use any of these features, you should use Pex.

            Several users have also had issues with how Poetry's lockfile generation handles
            environment markers for transitive dependencies; certain dependencies end up with
            nonsensical environment markers which cause the dependency to not be installed, then
            for Pants/Pex to complain the dependency is missing, even though it's in the
            lockfile. There is a workaround: for `[python].resolves`, manually create a
            `python_requirement` target for the problematic transitive dependencies so that they
            are seen as direct requirements, rather than transitive. For tool lockfiles, add the
            problematic transitive dependency to `[tool].extra_requirements`, e.g.
            `[isort].extra_requirements`. Then, regenerate the lockfile(s) with the
            `generate-lockfiles` goal. Alternatively, use Pex for generation.

            Finally, installing from a Poetry-generated lockfile is slower than installing from a
            Pex lockfile. When using a Pex lockfile, Pants will only install the subset needed
            for the current task.

            However, Pex lockfile generation is a new feature. Given how vast the Python packaging
            ecosystem is, it is possible you may experience edge cases / bugs we haven't yet
            covered. Bug reports are appreciated!
            https://github.com/pantsbuild/pants/issues/new/choose

            Note that while Pex generates locks in a proprietary JSON format, you can use the
            `{bin_name()} export` goal for Pants to create a virtual environment for
            interoperability with tools like IDEs.
            """
        ),
        advanced=True,
    )
    # Escape hatch: skip generating/validating user-resolve lockfiles entirely.
    resolves_generate_lockfiles = BoolOption(
        "--resolves-generate-lockfiles",
        default=True,
        help=softwrap(
            """
            If False, Pants will not attempt to generate lockfiles for `[python].resolves` when
            running the `generate-lockfiles` goal.

            This is intended to allow you to manually generate lockfiles as a workaround for the
            issues described in the `[python].lockfile_generator` option, if you are not yet ready
            to use Pex.

            If you set this to False, Pants will not attempt to validate the metadata headers
            for your user lockfiles. This is useful so that you can keep
            `[python].invalid_lockfile_behavior` to `error` or `warn` if you'd like so that tool
            lockfiles continue to be validated, while user lockfiles are skipped.
            """
        ),
        advanced=True,
    )
    # Trade cache granularity for fewer resolves: use the whole lockfile for run/test/repl.
    run_against_entire_lockfile = BoolOption(
        "--run-against-entire-lockfile",
        default=False,
        help=softwrap(
            """
            If enabled, when running binaries, tests, and repls, Pants will use the entire
            lockfile file instead of just the relevant subset.

            We generally do not recommend this if `[python].lockfile_generator` is set to `"pex"`
            thanks to performance enhancements we've made. When using Pex lockfiles, you should
            get similar performance to using this option but without the downsides mentioned below.

            Otherwise, if not using Pex lockfiles, this option can improve
            performance and reduce cache size. But it has two consequences: 1) All cached test
            results will be invalidated if any requirement in the lockfile changes, rather
            than just those that depend on the changed requirement. 2) Requirements unneeded
            by a test/run/repl will be present on the sys.path, which might in rare cases
            cause their behavior to change.

            This option does not affect packaging deployable artifacts, such as
            PEX files, wheels and cloud functions, which will still use just the exact
            subset of requirements needed.
            """
        ),
        advanced=True,
    )
    # Legacy alternative to resolves: a pip constraints file (same exclusivity group).
    requirement_constraints = FileOption(
        "--requirement-constraints",
        default=None,
        help=softwrap(
            """
            When resolving third-party requirements for your own code (vs. tools you run),
            use this constraints file to determine which versions to use.

            Mutually exclusive with `[python].enable_resolves`, which we generally recommend as an
            improvement over constraints file.

            See https://pip.pypa.io/en/stable/user_guide/#constraints-files for more
            information on the format of constraint files and how constraints are applied in
            Pex and pip.

            This only applies when resolving user requirements, rather than tools you run
            like Black and Pytest. To constrain tools, set `[tool].lockfile`, e.g.
            `[black].lockfile`.
            """
        ),
        advanced=True,
        mutually_exclusive_group="lockfile",
    )
    # Only relevant with `requirement_constraints`; see also
    # `resolve_all_constraints_was_set_explicitly()` below.
    resolve_all_constraints = BoolOption(
        "--resolve-all-constraints",
        default=True,
        help=softwrap(
            """
            (Only relevant when using `[python].requirement_constraints.`) If enabled, when
            resolving requirements, Pants will first resolve your entire
            constraints file as a single global resolve. Then, if the code uses a subset of
            your constraints file, Pants will extract the relevant requirements from that
            global resolve so that only what's actually needed gets used. If disabled, Pants
            will not use a global resolve and will resolve each subset of your requirements
            independently.

            Usually this option should be enabled because it can result in far fewer resolves.
            """
        ),
        advanced=True,
    )
    # Projects for which wheels must not be used (`:all:` to ban all wheels).
    no_binary = StrListOption(
        "--no-binary",
        help=softwrap(
            """
            Do not use binary packages (i.e., wheels) for these 3rdparty projects.

            Also accepts `:all:` to disable all binary packages.

            Note that some packages are tricky to compile and may fail to install when this option
            is used on them. See https://pip.pypa.io/en/stable/cli/pip_install/#install-no-binary
            for details.

            Note: Only takes effect if you use Pex lockfiles. Set
            `[python].lockfile_generator = "pex"` and run the `generate-lockfiles` goal.
            """
        ),
    )
    # Projects for which sdists must not be used (`:all:` to ban all sdists).
    only_binary = StrListOption(
        "--only-binary",
        help=softwrap(
            """
            Do not use source packages (i.e., sdists) for these 3rdparty projects.

            Also accepts `:all:` to disable all source packages.

            Packages without binary distributions will fail to install when this option is used on
            them. See https://pip.pypa.io/en/stable/cli/pip_install/#install-only-binary for
            details.

            Note: Only takes effect if you use Pex lockfiles. Set
            `[python].lockfile_generator = "pex"` and run the `generate-lockfiles` goal.
            """
        ),
    )
    # Raw setting; normalized ('false'/'no'/'none' -> None) by the `manylinux` property.
    resolver_manylinux = StrOption(
        "--resolver-manylinux",
        default="manylinux2014",
        help=softwrap(
            """
            Whether to allow resolution of manylinux wheels when resolving requirements for
            foreign linux platforms. The value should be a manylinux platform upper bound,
            e.g.: 'manylinux2010', or else the string 'no' to disallow.
            """
        ),
        advanced=True,
    )

    # ---- `tailor` goal behavior toggles ----
    tailor_source_targets = BoolOption(
        "--tailor-source-targets",
        default=True,
        help=softwrap(
            """
            If true, add `python_sources`, `python_tests`, and `python_test_utils` targets with
            the `tailor` goal."""
        ),
        advanced=True,
    )
    tailor_ignore_solitary_init_files = BoolOption(
        "--tailor-ignore-solitary-init-files",
        default=True,
        help=softwrap(
            """
            If true, don't add `python_sources` targets for solitary `__init__.py` files with the
            `tailor` goal.

            Solitary `__init__.py` files usually exist as import scaffolding rather than true
            library code, so it can be noisy to add BUILD files.

            Set to false if you commonly have packages containing real code in
            `__init__.py` without other `.py` files in the package.
            """
        ),
        advanced=True,
    )
    tailor_requirements_targets = BoolOption(
        "--tailor-requirements-targets",
        default=True,
        help=softwrap(
            """
            If true, add `python_requirements`, `poetry_requirements`, and `pipenv_requirements`
            target generators with the `tailor` goal.

            `python_requirements` targets are added for any file that matches the pattern
            `*requirements*.txt`. You will need to manually add `python_requirements` for different
            file names like `reqs.txt`.

            `poetry_requirements` targets are added for `pyproject.toml` files with `[tool.poetry`
            in them.
            """
        ),
        advanced=True,
    )
    tailor_pex_binary_targets = BoolOption(
        "--tailor-pex-binary-targets",
        default=True,
        help=softwrap(
            """
            If true, add `pex_binary` targets for Python files named `__main__.py` or with a
            `__main__` clause with the `tailor` goal.
            """
        ),
        advanced=True,
    )

    # Wheel-building platform compatibility workaround for MacOS Big Sur.
    macos_big_sur_compatibility = BoolOption(
        "--macos-big-sur-compatibility",
        default=False,
        help=softwrap(
            """
            If set, and if running on MacOS Big Sur, use macosx_10_16 as the platform
            when building wheels. Otherwise, the default of macosx_11_0 will be used.
            This may be required for pip to be able to install the resulting distribution
            on Big Sur.
            """
        ),
        advanced=True,
    )

    @property
    def generate_lockfiles_with_pex(self) -> bool:
        """True when `[python].lockfile_generator` is Pex; when False, Poetry is used."""
        return self._lockfile_generator == LockfileGenerator.PEX

    @memoized_property
    def resolves_to_interpreter_constraints(self) -> dict[str, tuple[str, ...]]:
        """Map each resolve name to its lockfile-generation interpreter constraints.

        Validates `[python].resolves_to_interpreter_constraints`: every key must be a
        resolve declared in `[python].resolves`, otherwise a KeyError is raised.
        """
        mapping: dict[str, tuple[str, ...]] = {}
        for resolve, ics in self._resolves_to_interpreter_constraints.items():
            if resolve in self.resolves:
                mapping[resolve] = tuple(ics)
                continue
            raise KeyError(
                softwrap(
                    f"""
                    Unrecognized resolve name in the option
                    `[python].resolves_to_interpreter_constraints`: {resolve}. Each
                    key must be one of the keys in `[python].resolves`:
                    {sorted(self.resolves.keys())}
                    """
                )
            )
        return mapping

    def resolve_all_constraints_was_set_explicitly(self) -> bool:
        """Whether the user explicitly configured `[python].resolve_all_constraints`."""
        is_default = self.options.is_default("resolve_all_constraints")
        return not is_default

    @property
    def manylinux(self) -> str | None:
        """The manylinux platform upper bound, or None when manylinux wheels are disallowed.

        Normalizes `[python].resolver_manylinux`: 'false'/'no'/'none' (any case) and a
        missing value all mean "disallow".
        """
        raw = cast(Optional[str], self.resolver_manylinux)
        if raw is None:
            return None
        return None if raw.lower() in ("false", "no", "none") else raw

    @property
    def manylinux_pex_args(self) -> Iterator[str]:
        """Yield the Pex CLI flags corresponding to the `manylinux` setting."""
        platform = self.manylinux
        if not platform:
            yield "--no-manylinux"
            return
        yield "--manylinux"
        yield platform

    @property
    def scratch_dir(self):
        """Workdir subdirectory for this subsystem, derived from its options scope."""
        # e.g. options_scope "python" -> <pants_workdir>/python
        scope_parts = self.options_scope.split(".")
        return os.path.join(self.options.pants_workdir, *scope_parts)

    def compatibility_or_constraints(self, compatibility: Iterable[str] | None) -> tuple[str, ...]:
        """Return either the given `compatibility` field or the global interpreter constraints.

        If interpreter constraints are supplied by the CLI flag, return those only.
        """
        if self.options.is_flagged("interpreter_constraints"):
            return self.interpreter_constraints
        # An empty/None field falls back to the global constraints.
        chosen = compatibility or self.interpreter_constraints
        return tuple(chosen)

    def compatibilities_or_constraints(
        self, compatibilities: Iterable[Iterable[str] | None]
    ) -> tuple[str, ...]:
        """Flatten per-target compatibility fields into one constraints tuple.

        Each entry is resolved via `compatibility_or_constraints`, preserving order.
        """
        collected: list[str] = []
        for compatibility in compatibilities:
            collected.extend(self.compatibility_or_constraints(compatibility))
        return tuple(collected)