class FilterSubsystem(LineOriented, GoalSubsystem):
    """Options for the `filter` goal, which selects input targets matching given criteria."""

    name = "filter"
    help = (
        "Filter the input targets based on various criteria.\n\nMost of the filtering options "
        "below are comma-separated lists of filtering criteria, with an implied logical OR between "
        "them, so that a target passes the filter if it matches any of the criteria in the list. "
        "A '-' prefix inverts the sense of the entire comma-separated list, so that a target "
        "passes the filter only if it matches none of the criteria in the list.\n\nEach of the "
        "filtering options may be specified multiple times, with an implied logical AND between "
        "them.")

    # Filter by target alias, e.g. `resources`.
    target_type = StrListOption(
        "--target-type",
        metavar="[+-]type1,type2,...",
        help="Filter on these target types, e.g. `resources` or `python_sources`.",
    )
    # Filter by whether a target is declared in a BUILD file or is file-level.
    granularity = EnumOption(
        "--granularity",
        default=TargetGranularity.all_targets,
        help=(
            "Filter to rendering only targets declared in BUILD files, only file-level "
            "targets, or all targets."
        ),
    )
    # Filter by regexes matched against the target's address.
    address_regex = StrListOption(
        "--address-regex",
        metavar="[+-]regex1,regex2,...",
        help="Filter on target addresses matching these regexes.",
    )
    # Filter by regexes matched against the target's tags.
    tag_regex = StrListOption(
        "--tag-regex",
        metavar="[+-]regex1,regex2,...",
        help="Filter on targets with tags matching these regexes.",
    )
class PythonNativeCode(Subsystem):
    """Settings used when Python builds native code (e.g. resolving distributions)."""

    options_scope = "python-native-code"
    help = "Options for building native code using Python, e.g. when resolving distributions."

    # TODO(#7735): move the --cpp-flags and --ld-flags to a general subprocess support subsystem.
    cpp_flags = StrListOption(
        "--cpp-flags",
        # Default is seeded from Pants's own environment at class-definition time.
        default=safe_shlex_split(os.environ.get("CPPFLAGS", "")),
        help="Override the `CPPFLAGS` environment variable for any forked subprocesses.",
        advanced=True,
    )
    ld_flags = StrListOption(
        "--ld-flags",
        default=safe_shlex_split(os.environ.get("LDFLAGS", "")),
        help="Override the `LDFLAGS` environment variable for any forked subprocesses.",
        advanced=True,
    )

    @property
    def environment_dict(self) -> Dict[str, str]:
        """Env vars to set for forked subprocesses, with flag lists shell-joined."""
        flag_lists = {"CPPFLAGS": self.cpp_flags, "LDFLAGS": self.ld_flags}
        return {var: safe_shlex_join(flags) for var, flags in flag_lists.items()}
class PythonBootstrapSubsystem(Subsystem):
    """Locates Python interpreters for Pants's internal use (not your code's interpreters)."""

    options_scope = "python-bootstrap"
    help = softwrap("""
        Options used to locate Python interpreters used by all Pants backends.

        This subsystem controls where and how Pants will locate Python, but beyond that it does
        not control which Python interpreter versions are actually used for your code: see the
        `python` subsystem for that.
        """)

    search_path = StrListOption(
        "--search-path",
        default=["<PYENV>", "<PATH>"],
        # Fixed typo in user-facing help: "interpeters" -> "interpreters".
        help=softwrap("""
            A list of paths to search for Python interpreters.

            Which interpreters are actually used from these paths is context-specific:
            the Python backend selects interpreters using options on the `python` subsystem,
            in particular, the `[python].interpreter_constraints` option.

            You can specify absolute paths to interpreter binaries
            and/or to directories containing interpreter binaries. The order of entries does
            not matter.

            The following special strings are supported:

            * `<PATH>`, the contents of the PATH env var
            * `<ASDF>`, all Python versions currently configured by ASDF \
                `(asdf shell, ${HOME}/.tool-versions)`, with a fallback to all installed versions
            * `<ASDF_LOCAL>`, the ASDF interpreter with the version in BUILD_ROOT/.tool-versions
            * `<PYENV>`, all Python versions under $(pyenv root)/versions
            * `<PYENV_LOCAL>`, the Pyenv interpreter with the version in BUILD_ROOT/.python-version
            * `<PEXRC>`, paths in the PEX_PYTHON_PATH variable in /etc/pexrc or ~/.pexrc
            """),
        advanced=True,
        metavar="<binary-paths>",
    )
    names = StrListOption(
        "--names",
        default=["python", "python3"],
        help=softwrap("""
            The names of Python binaries to search for. See the `--search-path` option to
            influence where interpreters are searched for.

            This does not impact which Python interpreter is used to run your code, only what
            is used to run internal tools.
            """),
        advanced=True,
        metavar="<python-binary-names>",
    )
class GolangSubsystem(Subsystem):
    """Configuration for locating and invoking the Go toolchain."""

    options_scope = "golang"
    help = "Options for Golang support."

    _go_search_paths = StrListOption(
        "--go-search-paths",
        default=["<PATH>"],
        help=(
            "A list of paths to search for Go.\n\n"
            "Specify absolute paths to directories with the `go` binary, e.g. `/usr/bin`. "
            "Earlier entries will be searched first.\n\n"
            "The special string '<PATH>' will expand to the contents of the PATH env var."
        ),
    )
    # TODO(#13005): Support multiple Go versions in a project?
    expected_version = StrOption(
        "--expected-version",
        default="1.17",
        help=(
            "The Go version you are using, such as `1.17`.\n\n"
            "Pants will only use Go distributions from `--go-search-paths` that have the "
            "expected version, and it will error if none are found.\n\n"
            "Do not include the patch version."
        ),
    )
    _subprocess_env_vars = StrListOption(
        "--subprocess-env-vars",
        default=["LANG", "LC_CTYPE", "LC_ALL", "PATH"],
        help=(
            "Environment variables to set when invoking the `go` tool. "
            "Entries are either strings in the form `ENV_VAR=value` to set an explicit value; "
            "or just `ENV_VAR` to copy the value from Pants's own environment."
        ),
        advanced=True,
    )

    def go_search_paths(self, env: Environment) -> tuple[str, ...]:
        """Expand the configured search paths, replacing `<PATH>` with the env var's entries.

        Duplicates are dropped while preserving first-seen order.
        """
        expanded: OrderedSet = OrderedSet()
        for raw_entry in self._go_search_paths:
            if raw_entry != "<PATH>":
                expanded.add(raw_entry)
                continue
            path_value = env.get("PATH")
            if path_value:
                expanded.update(path_value.split(os.pathsep))
        return tuple(expanded)

    @property
    def env_vars_to_pass_to_subprocesses(self) -> tuple[str, ...]:
        """De-duplicated, sorted env var specs for `go` invocations."""
        unique_vars = set(self._subprocess_env_vars)
        return tuple(sorted(unique_vars))
class LintSubsystem(GoalSubsystem):
    """Options for the `lint` goal."""
    name = "lint"
    help = "Run all linters and/or formatters in check mode."

    @classmethod
    def activated(cls, union_membership: UnionMembership) -> bool:
        # Active only if some backend registers a lint request type.
        return LintTargetsRequest in union_membership or LintFilesRequest in union_membership

    only = StrListOption(
        "--only",
        help=only_option_help("lint", "linter", "flake8", "shellcheck"),
    )
    skip_formatters = BoolOption(
        "--skip-formatters",
        default=False,
        help=softwrap(
            f"""
            If true, skip running all formatters in check-only mode.

            FYI: when running `{bin_name()} fmt lint ::`, there should be little performance
            benefit to using this flag. Pants will reuse the results from `fmt` when running
            `lint`.
            """
        ),
    )
    batch_size = IntOption(
        "--batch-size",
        advanced=True,
        default=128,
        help=style_batch_size_help(uppercase="Linter", lowercase="linter"),
    )
class ApacheThriftJavaSubsystem(Subsystem):
    """Java-specific codegen settings for the Apache Thrift generator."""

    options_scope = "java-thrift"
    help = "Options specific to generating Java from Thrift using the Apache Thrift generator"

    gen_options = StrListOption(
        "--options",
        help=softwrap("""
            Code generation options specific to the Java code generator to pass to the
            Apache `thrift` binary via the `-gen java` argument.
            See `thrift -help` for supported values.
            """),
    )
    _runtime_dependencies = TargetListOption(
        "--runtime-dependencies",
        help=softwrap("""
            A list of addresses to `jvm_artifact` targets for the runtime dependencies needed
            for generated Java code to work. For example, `['3rdparty/jvm:libthrift']`. These
            dependencies will be automatically added to every `thrift_source` target.

            At the very least, this option must be set to a `jvm_artifact` for the
            `org.apache.thrift:libthrift` runtime library.
            """),
    )

    @property
    def runtime_dependencies(self) -> UnparsedAddressInputs:
        """The configured runtime-dependency addresses, wrapped for later resolution."""
        raw_addresses = self._runtime_dependencies
        return UnparsedAddressInputs(raw_addresses, owning_address=None)
class ShellSetup(Subsystem):
    """General configuration for Pants's Shell support."""

    options_scope = "shell-setup"
    help = "Options for Pants's Shell support."

    _executable_search_path = StrListOption(
        "--executable-search-paths",
        default=["<PATH>"],
        help=(
            "The PATH value that will be used to find shells and to run certain processes "
            "like the shunit2 test runner.\n\n"
            'The special string "<PATH>" will expand to the contents of the PATH env var.'
        ),
        advanced=True,
        metavar="<binary-paths>",
    )
    dependency_inference = BoolOption(
        "--dependency-inference",
        default=True,
        help="Infer Shell dependencies on other Shell files by analyzing `source` statements.",
        advanced=True,
    )

    @memoized_method
    def executable_search_path(self, env: Environment) -> tuple[str, ...]:
        """Expand configured search paths, replacing `<PATH>` with the env var's entries.

        Duplicates are dropped while preserving first-seen order; result is memoized per env.
        """
        expanded: OrderedSet = OrderedSet()
        for raw_entry in self._executable_search_path:
            if raw_entry != "<PATH>":
                expanded.add(raw_entry)
                continue
            path_value = env.get("PATH")
            if path_value:
                expanded.update(path_value.split(os.pathsep))
        return tuple(expanded)
class ScalaPBSubsystem(JvmToolBase):
    """Tool subsystem for the ScalaPB protobuf-to-Scala compiler."""

    options_scope = "scalapb"
    help = "The ScalaPB protocol buffer compiler (https://scalapb.github.io/)."

    default_version = "0.11.6"
    default_artifacts = ("com.thesamet.scalapb:scalapbc_2.13:{version}",)
    default_lockfile_resource = (
        "pants.backend.codegen.protobuf.scala",
        "scalapbc.default.lockfile.txt",
    )
    default_lockfile_path = (
        "src/python/pants/backend/codegen/protobuf/scala/scalapbc.default.lockfile.txt"
    )
    default_lockfile_url = git_url(default_lockfile_path)

    _jvm_plugins = StrListOption(
        "--jvm-plugins",
        help=softwrap(
            """
            A list of JVM-based `protoc` plugins to invoke when generating Scala code from
            protobuf files. The format for each plugin specifier is `NAME=ARTIFACT` where NAME
            is the name of the plugin and ARTIFACT is either the address of a `jvm_artifact`
            target or the colon-separated Maven coordinate for the plugin's jar artifact.

            For example, to invoke the fs2-grpc protoc plugin, the following option would work:
            `--scalapb-jvm-plugins=fs2=org.typelevel:fs2-grpc-codegen_2.12:2.3.1`. (Note: you
            would also need to set --scalapb-runtime-dependencies appropriately to include the
            applicable runtime libraries for your chosen protoc plugins.)
            """
        ),
    )

    @property
    def jvm_plugins(self) -> tuple[PluginArtifactSpec, ...]:
        # Parse each `NAME=ARTIFACT` specifier into a structured spec.
        return tuple(PluginArtifactSpec.from_str(pa_str) for pa_str in self._jvm_plugins)
class GenerateLockfilesSubsystem(GoalSubsystem):
    """Options for the `generate-lockfiles` goal."""
    name = "generate-lockfiles"
    help = "Generate lockfiles for Python third-party dependencies."

    @classmethod
    def activated(cls, union_membership: UnionMembership) -> bool:
        # Active if any backend registers either tool lockfiles or user resolves.
        return (GenerateToolLockfileSentinel in union_membership
                or KnownUserResolveNamesRequest in union_membership)

    resolve_names = StrListOption(
        "--resolve",
        advanced=False,
        help=(
            "Only generate lockfiles for the specified resolve(s).\n\n"
            "Resolves are the logical names for the different lockfiles used in your project. "
            "For your own code's dependencies, these come from the option "
            "`[python].resolves`. For tool lockfiles, resolve "
            "names are the options scope for that tool such as `black`, `pytest`, and "
            "`mypy-protobuf`.\n\n"
            f"For example, you can run `{bin_name()} generate-lockfiles --resolve=black "
            "--resolve=pytest --resolve=data-science` to only generate lockfiles for those "
            "two tools and your resolve named `data-science`.\n\n"
            "If you specify an invalid resolve name, like 'fake', Pants will output all "
            "possible values.\n\n"
            "If not specified, Pants will generate lockfiles for all resolves."
        ),
    )
    custom_command = StrOption(
        "--custom-command",
        advanced=True,
        default=None,
        help=(
            "If set, lockfile headers will say to run this command to regenerate the lockfile, "
            f"rather than running `{bin_name()} generate-lockfiles --resolve=<name>` like normal."
        ),
    )
class ThriftPythonSubsystem(Subsystem):
    """Python-specific codegen settings for the Apache Thrift generator."""

    options_scope = "python-thrift"
    help = "Options specific to generating Python from Thrift using Apache Thrift"

    gen_options = StrListOption(
        "--options",
        # Fixed typo in user-facing help: "thift" -> "thrift".
        help=softwrap("""
            Code generation options specific to the Python code generator to pass to the
            Apache `thrift` binary via the `-gen py` argument.
            See `thrift -help` for supported values.
            """),
    )
    infer_runtime_dependency = BoolOption(
        "--infer-runtime-dependency",
        default=True,
        # Fixed duplicated target alias: "`thrift_source` / `thrift_source`"
        # -> "`thrift_source` / `thrift_sources`" (singular and plural generators).
        help=softwrap("""
            If True, will add a dependency on a `python_requirement` target exposing the
            `thrift` module (usually from the `thrift` requirement).

            If `[python].enable_resolves` is set, Pants will only infer dependencies on
            `python_requirement` targets that use the same resolve as the particular
            `thrift_source` / `thrift_sources` target uses, which is set via its
            `python_resolve` field.

            Unless this option is disabled, Pants will error if no relevant target is found
            or more than one is found which causes ambiguity.
            """),
        advanced=True,
    )
class PexRuntimeEnvironment(Subsystem):
    """Controls how Pants runs Pex subprocesses."""

    options_scope = "pex"
    help = "How Pants uses Pex to run Python subprocesses."

    # TODO(#9760): We'll want to deprecate this in favor of a global option which allows for a
    # per-process override.
    _executable_search_paths = StrListOption(
        "--executable-search-paths",
        default=["<PATH>"],
        help=softwrap("""
            The PATH value that will be used by the PEX subprocess and any subprocesses it
            spawns.

            The special string `"<PATH>"` will expand to the contents of the PATH env var.
            """),
        advanced=True,
        metavar="<binary-paths>",
    )
    _verbosity = IntOption(
        "--verbosity",
        default=0,
        help="Set the verbosity level of PEX logging, from 0 (no logging) up to 9 (max logging).",
        advanced=True,
    )
    venv_use_symlinks = BoolOption(
        "--venv-use-symlinks",
        default=False,
        help=softwrap("""
            When possible, use venvs whose site-packages directories are populated with symlinks.

            Enabling this can save space in the `--named-caches-dir` directory and lead to
            slightly faster execution times for Pants Python goals. Some distributions do not
            work with symlinked venvs though, so you may not be able to enable this
            optimization as a result.
            """),
        advanced=True,
    )

    @memoized_method
    def path(self, env: Environment) -> tuple[str, ...]:
        """Expand configured search paths, replacing `<PATH>` with the env var's entries.

        Duplicates are dropped while preserving first-seen order; result is memoized per env.
        """
        expanded: OrderedSet = OrderedSet()
        for raw_entry in self._executable_search_paths:
            if raw_entry != "<PATH>":
                expanded.add(raw_entry)
                continue
            path_value = env.get("PATH")
            if path_value:
                expanded.update(path_value.split(os.pathsep))
        return tuple(expanded)

    @property
    def verbosity(self) -> int:
        """The validated PEX logging level.

        Raises:
            ValueError: if the configured level falls outside 0-9 inclusive.
        """
        level = self._verbosity
        if not (0 <= level <= 9):
            raise ValueError("verbosity level must be between 0 and 9")
        return level
class TestSubsystem(GoalSubsystem):
    """Options for the `test` goal."""
    name = "test"
    help = "Run tests."

    # Prevent this class from being detected by pytest as a test class.
    __test__ = False

    @classmethod
    def activated(cls, union_membership: UnionMembership) -> bool:
        # Active only if some backend registers a `TestFieldSet`.
        return TestFieldSet in union_membership

    debug = BoolOption(
        "--debug",
        default=False,
        help=(
            "Run tests sequentially in an interactive process. This is necessary, for "
            "example, when you add breakpoints to your code."
        ),
    )
    force = BoolOption(
        "--force",
        default=False,
        help="Force the tests to run, even if they could be satisfied from cache.",
    )
    output = EnumOption(
        "--output",
        default=ShowOutput.FAILED,
        help="Show stdout/stderr for these tests.",
    )
    use_coverage = BoolOption(
        "--use-coverage",
        default=False,
        help="Generate a coverage report if the test runner supports it.",
    )
    open_coverage = BoolOption(
        "--open-coverage",
        default=False,
        help=(
            "If a coverage report file is generated, open it on the local system if the "
            "system supports this."
        ),
    )
    xml_dir = StrOption(
        "--xml-dir",
        metavar="<DIR>",
        default=None,
        advanced=True,
        help=(
            "Specifying a directory causes Junit XML result files to be emitted under "
            "that dir for each test run that supports producing them."
        ),
    )
    extra_env_vars = StrListOption(
        "--extra-env-vars",
        help=(
            "Additional environment variables to include in test processes. "
            "Entries are strings in the form `ENV_VAR=value` to use explicitly; or just "
            "`ENV_VAR` to copy the value of a variable in Pants's own environment."
        ),
    )
class SourceRootConfig(Subsystem):
    """Configuration for identifying the roots of source trees."""

    options_scope = "source"
    help = "Configuration for roots of source trees."

    # Built-in suffix patterns tried when the user does not configure their own.
    DEFAULT_ROOT_PATTERNS = [
        "/",
        "src",
        "src/python",
        "src/py",
        "src/thrift",
        "src/protobuf",
        "src/protos",
        "src/scala",
        "src/java",
    ]

    root_patterns = StrListOption(
        "--root-patterns",
        default=DEFAULT_ROOT_PATTERNS,
        help=(
            "A list of source root suffixes. A directory with this suffix will be considered "
            "a potential source root. E.g., `src/python` will match `<buildroot>/src/python`, "
            "`<buildroot>/project1/src/python` etc. Prepend a `/` to anchor the match at the "
            "buildroot. E.g., `/src/python` will match `<buildroot>/src/python` but not "
            "`<buildroot>/project1/src/python`. A `*` wildcard will match a single path segment, "
            "e.g., `src/*` will match `<buildroot>/src/python` and `<buildroot>/src/rust`. "
            "Use `/` to signify that the buildroot itself is a source root. "
            f"See {doc_url('source-roots')}."
        ),
        advanced=True,
        metavar='["pattern1", "pattern2", ...]',
    )
    marker_filenames = StrListOption(
        "--marker-filenames",
        help=(
            "The presence of a file of this name in a directory indicates that the directory "
            "is a source root. The content of the file doesn't matter, and may be empty. "
            "Useful when you can't or don't wish to centrally enumerate source roots via "
            "`root_patterns`."
        ),
        advanced=True,
        metavar="filename",
    )

    @memoized_method
    def get_pattern_matcher(self) -> SourceRootPatternMatcher:
        # Build (and cache) a matcher over the configured root patterns.
        return SourceRootPatternMatcher(self.root_patterns)
class Scalac(Subsystem):
    """Configuration for the `scalac` Scala compiler, including compiler plugins."""

    options_scope = "scalac"
    name = "scalac"
    help = "The Scala compiler."

    default_plugins_lockfile_path = (
        "src/python/pants/backend/scala/subsystems/scalac_plugins.default.lockfile.txt"
    )
    default_plugins_lockfile_resource = (
        "pants.backend.scala.subsystems",
        "scalac_plugins.default.lockfile.txt",
    )

    args = ArgsListOption(example="-encoding UTF-8")
    plugins_global = StrListOption(
        "--plugins-global",
        help=(
            "A list of addresses of `scalac_plugin` targets which should be used for "
            "compilation of all Scala targets in a build.\n\nIf you set this, you must also "
            "set `[scalac].plugins_global_lockfile`."
        ),
        advanced=True,
        removal_version="2.12.0.dev0",
        removal_hint="Use `--scalac-plugins-for-resolve` instead to use user resolves",
    )
    # TODO: see if we can use an actual list mechanism? If not, this seems like an OK option
    default_plugins = DictOption[str](
        "--plugins-for-resolve",
        help=(
            "A dictionary, whose keys are the names of each JVM resolve that requires default "
            "`scalac` plugins, and the value is a comma-separated string consisting of scalac plugin "
            "names. Each specified plugin must have a corresponding `scalac_plugin` target that specifies "
            "that name in either its `plugin_name` field or is the same as its target name."
        ),
    )
    plugins_global_lockfile = StrOption(
        "--plugins-global-lockfile",
        default=DEFAULT_TOOL_LOCKFILE,
        help=(
            "The filename of the lockfile for global plugins. You must set this option to a "
            "file path, e.g. '3rdparty/jvm/global_scalac_plugins.lock', if you set "
            "`[scalac].plugins_global`."
        ),
        advanced=True,
        removal_version="2.12.0.dev0",
        removal_hint="Use `--scalac-plugins-for-resolve` instead, which will add plugin dependencies to JVM user resolves.",
    )

    def parsed_default_plugins(self) -> dict[str, list[str]]:
        """Split each resolve's comma-separated plugin string into a list of plugin names."""
        parsed: dict[str, list[str]] = {}
        for resolve, plugin_csv in self.default_plugins.items():
            parsed[resolve] = [name.strip() for name in plugin_csv.split(",")]
        return parsed
class ApacheThriftJavaSubsystem(Subsystem):
    """Java-specific codegen settings for the Apache Thrift generator."""

    options_scope = "java-thrift"
    help = "Options specific to generating Java from Thrift using the Apache Thrift generator"

    gen_options = StrListOption(
        "--options",
        help=softwrap("""
            Code generation options specific to the Java code generator to pass to the
            Apache `thrift` binary via the `-gen java` argument.
            See `thrift -help` for supported values.
            """),
    )
class CheckSubsystem(GoalSubsystem):
    """Options for the `check` goal."""
    name = "check"
    help = "Run type checking or the lightest variant of compilation available for a language."

    @classmethod
    def activated(cls, union_membership: UnionMembership) -> bool:
        # Active only if some backend registers a `CheckRequest`.
        return CheckRequest in union_membership

    only = StrListOption(
        "--only",
        help=only_option_help("check", "checkers", "mypy", "javac"),
    )
class JvmSubsystem(Subsystem):
    """Global JVM configuration: JDK selection, resolves, and debug flags."""

    options_scope = "jvm"
    help = (
        "Options for general JVM functionality.\n\n"
        " JDK strings will be passed directly to Coursier's `--jvm` parameter."
        " Run `cs java --available` to see a list of available JVM versions on your platform.\n\n"
        " If the string 'system' is passed, Coursier's `--system-jvm` option will be used"
        " instead, but note that this can lead to inconsistent behavior since the JVM version"
        " will be whatever happens to be found first on the system's PATH.")

    # JDK used for Pants's own internal JVM tooling (not user code).
    tool_jdk = StrOption(
        "--tool-jdk",
        default="adopt:1.11",
        help=(
            "The JDK to use when building and running Pants' internal JVM support code and other "
            "non-compiler tools. See `jvm` help for supported values."
        ),
        advanced=True,
    )
    # JDK used for user code.
    jdk = StrOption(
        "--jdk",
        default="adopt:1.11",
        help=(
            "The JDK to use.\n\n"
            " This string will be passed directly to Coursier's `--jvm` parameter."
            " Run `cs java --available` to see a list of available JVM versions on your platform.\n\n"
            " If the string 'system' is passed, Coursier's `--system-jvm` option will be used"
            " instead, but note that this can lead to inconsistent behavior since the JVM version"
            " will be whatever happens to be found first on the system's PATH."
        ),
        advanced=True,
    )
    resolves = DictOption(
        "--resolves",
        default={"jvm-default": "3rdparty/jvm/default.lock"},
        # TODO: expand help message
        help="A dictionary mapping resolve names to the path of their lockfile.",
    )
    default_resolve = StrOption(
        "--default-resolve",
        default="jvm-default",
        help=(
            "The default value used for the `resolve` and `compatible_resolves` fields.\n\n"
            "The name must be defined as a resolve in `[jvm].resolves`."
        ),
    )
    debug_args = StrListOption(
        "--debug-args",
        help=(
            "Extra JVM arguments to use when running tests in debug mode.\n\n"
            "For example, if you want to attach a remote debugger, use something like "
            "['-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=5005']"
        ),
    )
class JvmToolBase(Subsystem):
    """Base class for subsystems that configure a set of artifact requirements for a JVM tool."""

    # Default version of the tool. (Subclasses may set.)
    default_version: ClassVar[str | None] = None
    # Default artifacts for the tool in GROUP:NAME format. The `--version` value will be used for the
    # artifact version if it has not been specified for a particular requirement. (Subclasses must set.)
    default_artifacts: ClassVar[tuple[str, ...]]
    # Default resource for the tool's lockfile. (Subclasses must set.)
    default_lockfile_resource: ClassVar[tuple[str, str]]
    default_lockfile_url: ClassVar[str | None] = None

    version = StrOption(
        "--version",
        advanced=True,
        default=lambda cls: cls.default_version,
        help=lambda cls: (
            "Version string for the tool. This is available for substitution in the "
            f"`[{cls.options_scope}].artifacts` option by including the string "
            "`{version}`."
        ),
    )
    artifacts = StrListOption(
        "--artifacts",
        advanced=True,
        default=lambda cls: list(cls.default_artifacts),
        help=lambda cls: (
            "Artifact requirements for this tool using specified as either the address of a `jvm_artifact` "
            "target or, alternatively, as a colon-separated Maven coordinates (e.g., group:name:version). "
            "For Maven coordinates, the string `{version}` version will be substituted with the value of the "
            f"`[{cls.options_scope}].version` option."
        ),
    )
    lockfile = StrOption(
        "--lockfile",
        default=DEFAULT_TOOL_LOCKFILE,
        advanced=True,
        # NOTE(review): removed a stray mojibake character that had been embedded in this
        # help string between the second and third sentences.
        help=lambda cls: (
            "Path to a lockfile used for installing the tool.\n\n"
            f"Set to the string `{DEFAULT_TOOL_LOCKFILE}` to use a lockfile provided by "
            "Pants, so long as you have not changed the `--version` option. "
            f"See {cls.default_lockfile_url} for the default lockfile contents.\n\n"
            "To use a custom lockfile, set this option to a file path relative to the "
            f"build root, then run `{bin_name()} jvm-generate-lockfiles "
            f"--resolve={cls.options_scope}`.\n\n"
        ),
    )

    @property
    def artifact_inputs(self) -> tuple[str, ...]:
        """The artifact requirement strings with `{version}` placeholders substituted."""
        return tuple(s.format(version=self.version) for s in self.artifacts)
class PythonRepos(Subsystem):
    """Where to look for third-party Python requirements (indexes and find-links repos)."""

    options_scope = "python-repos"
    help = softwrap("""
        External Python code repositories, such as PyPI.

        These options may be used to point to custom cheeseshops when resolving requirements.
        """)

    pypi_index = "https://pypi.org/simple/"

    repos = StrListOption(
        "--repos",
        help=softwrap("""
            URLs of code repositories to look for requirements. In Pip and Pex, this option
            corresponds to the `--find-links` option.
            """),
        advanced=True,
    )
    indexes = StrListOption(
        "--indexes",
        default=[pypi_index],
        help=softwrap("""
            URLs of code repository indexes to look for requirements. If set to an empty
            list, then Pex will use no indices (meaning it will not use PyPI). The values
            should be compliant with PEP 503.
            """),
        advanced=True,
    )

    @property
    def pex_args(self) -> Iterator[str]:
        """Yield the Pex CLI flags encoding the configured indexes and repos."""
        # NB: In setting `--no-pypi`, we rely on the default value of `--python-repos-indexes`
        # including PyPI, which will override `--no-pypi` and result in using PyPI in the default
        # case. Why set `--no-pypi`, then? We need to do this so that
        # `--python-repos-repos=['custom_url']` will only point to that index and not include PyPI.
        flags = ["--no-pypi"]
        flags.extend(f"--index={index}" for index in self.indexes)
        flags.extend(f"--repo={repo}" for repo in self.repos)
        yield from flags
class CoursierSubsystem(TemplatedExternalTool):
    """Downloadable Coursier binary: a dependency resolver for the Maven ecosystem."""

    options_scope = "coursier"
    name = "coursier"
    help = "A dependency resolver for the Maven ecosystem. (https://get-coursier.io/)"

    default_version = "v2.1.0-M5-18-gfebf9838c"
    # version|platform|sha256|size-in-bytes
    default_known_versions = [
        "v2.1.0-M5-18-gfebf9838c|linux_arm64 |d4ad15ba711228041ad8a46d848c83c8fbc421d7b01c415d8022074dd609760f|19264005",
        "v2.1.0-M5-18-gfebf9838c|linux_x86_64|3e1a1ad1010d5582e9e43c5a26b273b0147baee5ebd27d3ac1ab61964041c90b|19551533",
        "v2.1.0-M5-18-gfebf9838c|macos_arm64 |d13812c5a5ef4c9b3e25cc046d18addd09bacd149f95b20a14e4d2a73e358ecf|18826510",
        "v2.1.0-M5-18-gfebf9838c|macos_x86_64|d13812c5a5ef4c9b3e25cc046d18addd09bacd149f95b20a14e4d2a73e358ecf|18826510",
        "v2.0.16-169-g194ebc55c|linux_arm64 |da38c97d55967505b8454c20a90370c518044829398b9bce8b637d194d79abb3|18114472",
        "v2.0.16-169-g194ebc55c|linux_x86_64|4c61a634c4bd2773b4543fe0fc32210afd343692891121cddb447204b48672e8|18486946",
        "v2.0.16-169-g194ebc55c|macos_arm64 |15bce235d223ef1d022da30b67b4c64e9228d236b876c834b64e029bbe824c6f|17957182",
        "v2.0.16-169-g194ebc55c|macos_x86_64|15bce235d223ef1d022da30b67b4c64e9228d236b876c834b64e029bbe824c6f|17957182",
    ]
    default_url_template = (
        "https://github.com/coursier/coursier/releases/download/{version}/cs-{platform}.gz"
    )
    default_url_platform_mapping = {
        "macos_arm64": "x86_64-apple-darwin",
        "macos_x86_64": "x86_64-apple-darwin",
        "linux_arm64": "aarch64-pc-linux",
        "linux_x86_64": "x86_64-pc-linux",
    }

    repos = StrListOption(
        "--repos",
        default=[
            "https://maven-central.storage-download.googleapis.com/maven2",
            "https://repo1.maven.org/maven2",
        ],
        # Fixed typo ("specifed" -> "specified") and removed a stray mojibake character
        # that had been embedded at the end of this help string.
        help=softwrap("""
            Maven style repositories to resolve artifacts from.

            Coursier will resolve these repositories in the order in which they are specified,
            and re-ordering repositories will cause artifacts to be re-downloaded. This can
            result in artifacts in lockfiles becoming invalid.
            """),
    )

    def generate_exe(self, plat: Platform) -> str:
        """Return the path of the extracted `cs` binary inside the fetched archive.

        Was returning a corrupted literal placeholder; now strips the `.gz` suffix from
        the download's filename and returns a relative path to the resulting binary.
        """
        archive_filename = os.path.basename(self.generate_url(plat))
        filename = os.path.splitext(archive_filename)[0]
        return f"./{filename}"
class LintSubsystem(GoalSubsystem):
    """Options for the `lint` goal."""
    name = "lint"
    help = "Run all linters and/or formatters in check mode."

    @classmethod
    def activated(cls, union_membership: UnionMembership) -> bool:
        # Active only if some backend registers a lint request type.
        return LintTargetsRequest in union_membership or LintFilesRequest in union_membership

    only = StrListOption(
        "--only",
        help=only_option_help("lint", "linter", "flake8", "shellcheck"),
    )
    batch_size = IntOption(
        "--batch-size",
        advanced=True,
        default=128,
        help=style_batch_size_help(uppercase="Linter", lowercase="linter"),
    )
class FmtSubsystem(GoalSubsystem):
    """Options for the `fmt` goal."""
    name = "fmt"
    help = "Autoformat source code."

    @classmethod
    def activated(cls, union_membership: UnionMembership) -> bool:
        # Active only if some backend registers an `FmtRequest`.
        return FmtRequest in union_membership

    only = StrListOption(
        "--only",
        help=only_option_help("fmt", "formatter", "isort", "shfmt"),
    )
    batch_size = IntOption(
        "--batch-size",
        advanced=True,
        default=128,
        help=style_batch_size_help(uppercase="Formatter", lowercase="formatter"),
    )
class SubprocessEnvironment(Subsystem):
    """Environment variables passed through to forked subprocesses."""

    options_scope = "subprocess-environment"
    help = "Environment settings for forked subprocesses."

    _env_vars = StrListOption(
        "--env-vars",
        default=["LANG", "LC_CTYPE", "LC_ALL", "SSL_CERT_FILE", "SSL_CERT_DIR"],
        help=(
            "Environment variables to set for process invocations.\n\n"
            "Entries are either strings in the form `ENV_VAR=value` to set an explicit value; "
            "or just `ENV_VAR` to copy the value from Pants's own environment.\n\n"
            f"See {doc_url('options#addremove-semantics')} for how to add and remove Pants's "
            "default for this option."
        ),
        advanced=True,
    )

    @property
    def env_vars_to_pass_to_subprocesses(self) -> Tuple[str, ...]:
        """De-duplicated, sorted env var specs for subprocess invocations."""
        deduped = set(self._env_vars)
        return tuple(sorted(deduped))
class CoursierSubsystem(TemplatedExternalTool):
    """Downloadable Coursier binary: a dependency resolver for the Maven ecosystem."""

    options_scope = "coursier"
    name = "coursier"
    help = "A dependency resolver for the Maven ecosystem."

    default_version = "v2.0.16-169-g194ebc55c"
    # version|platform|sha256|size-in-bytes
    default_known_versions = [
        "v2.0.16-169-g194ebc55c|linux_arm64 |da38c97d55967505b8454c20a90370c518044829398b9bce8b637d194d79abb3|18114472",
        "v2.0.16-169-g194ebc55c|linux_x86_64|4c61a634c4bd2773b4543fe0fc32210afd343692891121cddb447204b48672e8|18486946",
        "v2.0.16-169-g194ebc55c|macos_arm64 |15bce235d223ef1d022da30b67b4c64e9228d236b876c834b64e029bbe824c6f|17957182",
        "v2.0.16-169-g194ebc55c|macos_x86_64|15bce235d223ef1d022da30b67b4c64e9228d236b876c834b64e029bbe824c6f|17957182",
    ]
    default_url_template = (
        "https://github.com/coursier/coursier/releases/download/{version}/cs-{platform}.gz"
    )
    default_url_platform_mapping = {
        "macos_arm64": "x86_64-apple-darwin",
        "macos_x86_64": "x86_64-apple-darwin",
        "linux_arm64": "aarch64-pc-linux",
        "linux_x86_64": "x86_64-pc-linux",
    }

    repos = StrListOption(
        "--repos",
        default=[
            "https://maven-central.storage-download.googleapis.com/maven2",
            "https://repo1.maven.org/maven2",
        ],
        # Fixed typo in user-facing help: "specifed" -> "specified".
        help=(
            "Maven style repositories to resolve artifacts from."
            "\n\n"
            "Coursier will resolve these repositories in the order in which they are "
            "specified, and re-ordering repositories will cause artifacts to be "
            "re-downloaded. This can result in artifacts in lockfiles becoming invalid."
        ),
    )

    def generate_exe(self, plat: Platform) -> str:
        """Return the path of the extracted `cs` binary inside the fetched archive.

        Was returning a corrupted literal placeholder; now strips the `.gz` suffix from
        the download's filename and returns a relative path to the resulting binary.
        """
        archive_filename = os.path.basename(self.generate_url(plat))
        filename = os.path.splitext(archive_filename)[0]
        return f"./{filename}"
class ApacheThriftSubsystem(Subsystem):
    """Locates the Apache Thrift IDL compiler and pins its expected version."""

    options_scope = "apache-thrift"
    help = "Apache Thrift IDL compiler (https://thrift.apache.org/)."

    _thrift_search_paths = StrListOption(
        "--thrift-search-paths",
        default=["<PATH>"],
        help=softwrap("""
            A list of paths to search for Thrift.

            Specify absolute paths to directories with the `thrift` binary, e.g. `/usr/bin`.
            Earlier entries will be searched first.

            The special string '<PATH>' will expand to the contents of the PATH env var.
            """),
    )
    expected_version = StrOption(
        "--expected-version",
        default="0.15",
        help=softwrap("""
            The major/minor version of Apache Thrift that you are using, such as `0.15`.

            Pants will only use Thrift binaries from `--thrift-search-paths` that have the
            expected version, and it will error if none are found.

            Do not include the patch version.
            """),
    )

    def thrift_search_paths(self, env: Environment) -> tuple[str, ...]:
        """Expand configured search paths, replacing `<PATH>` with the env var's entries.

        Duplicates are dropped while preserving first-seen order.
        """
        expanded: OrderedSet = OrderedSet()
        for raw_entry in self._thrift_search_paths:
            if raw_entry != "<PATH>":
                expanded.add(raw_entry)
                continue
            path_value = env.get("PATH")
            if path_value:
                expanded.update(path_value.split(os.pathsep))
        return tuple(expanded)
class TestSubsystem(GoalSubsystem):
    """Options for the `test` goal."""

    name = "test"
    help = "Run tests."

    # Prevent this class from being detected by pytest as a test class.
    __test__ = False

    @classmethod
    def activated(cls, union_membership: UnionMembership) -> bool:
        # The goal is only available when at least one backend has registered a
        # `TestFieldSet` union member (i.e. there is a test runner to dispatch to).
        return TestFieldSet in union_membership

    debug = BoolOption(
        "--debug",
        default=False,
        help=softwrap(
            """
            Run tests sequentially in an interactive process. This is necessary, for
            example, when you add breakpoints to your code.
            """
        ),
    )
    force = BoolOption(
        "--force",
        default=False,
        help="Force the tests to run, even if they could be satisfied from cache.",
    )
    output = EnumOption(
        "--output",
        default=ShowOutput.FAILED,
        help="Show stdout/stderr for these tests.",
    )
    use_coverage = BoolOption(
        "--use-coverage",
        default=False,
        help="Generate a coverage report if the test runner supports it.",
    )
    open_coverage = BoolOption(
        "--open-coverage",
        default=False,
        help=softwrap(
            """
            If a coverage report file is generated, open it on the local system if the
            system supports this.
            """
        ),
    )
    report = BoolOption(
        "--report", default=False, advanced=True, help="Write test reports to --report-dir."
    )
    # Template path; the `{distdir}` placeholder is substituted in `report_dir()` below.
    default_report_path = str(PurePath("{distdir}", "test", "reports"))
    _report_dir = StrOption(
        "--report-dir",
        default=default_report_path,
        advanced=True,
        help="Path to write test reports to. Must be relative to the build root.",
    )
    extra_env_vars = StrListOption(
        "--extra-env-vars",
        help=softwrap(
            """
            Additional environment variables to include in test processes.

            Entries are strings in the form `ENV_VAR=value` to use explicitly; or just
            `ENV_VAR` to copy the value of a variable in Pants's own environment.
            """
        ),
    )
    shard = StrOption(
        "--shard",
        default="",
        help=softwrap(
            """
            A shard specification of the form "k/N", where N is a positive integer and k is a
            non-negative integer less than N.

            If set, the request input targets will be deterministically partitioned into N
            disjoint subsets of roughly equal size, and only the k'th subset will be used,
            with all others discarded.

            Useful for splitting large numbers of test files across multiple machines in CI.
            For example, you can run three shards with --shard=0/3, --shard=1/3, --shard=2/3.

            Note that the shards are roughly equal in size as measured by number of files.
            No attempt is made to consider the size of different files, the time they have
            taken to run in the past, or other such sophisticated measures.
            """
        ),
    )

    def report_dir(self, distdir: DistDir) -> PurePath:
        # Substitute the `{distdir}` placeholder in the configured (or default) path.
        return PurePath(self._report_dir.format(distdir=distdir.relpath))
class PythonSetup(Subsystem):
    """Global configuration for Pants's Python backend: interpreters, resolves/lockfiles,
    constraints files, and `tailor` behavior."""

    options_scope = "python"
    help = "Options for Pants's Python backend."

    default_interpreter_constraints = ["CPython>=3.7,<4"]
    default_interpreter_universe = ["2.7", "3.5", "3.6", "3.7", "3.8", "3.9", "3.10", "3.11"]

    interpreter_constraints = StrListOption(
        "--interpreter-constraints",
        default=default_interpreter_constraints,
        help=softwrap(
            """
            The Python interpreters your codebase is compatible with.

            These constraints are used as the default value for the `interpreter_constraints`
            field of Python targets.

            Specify with requirement syntax, e.g. 'CPython>=2.7,<3' (A CPython interpreter with
            version >=2.7 AND version <3) or 'PyPy' (A pypy interpreter of any version). Multiple
            constraint strings will be ORed together.
            """
        ),
        advanced=True,
        metavar="<requirement>",
    )
    interpreter_universe = StrListOption(
        "--interpreter-versions-universe",
        default=default_interpreter_universe,
        help=softwrap(
            f"""
            All known Python major/minor interpreter versions that may be used by either
            your code or tools used by your code.

            This is used by Pants to robustly handle interpreter constraints, such as knowing
            when generating lockfiles which Python versions to check if your code is using.

            This does not control which interpreter your code will use. Instead, to set your
            interpreter constraints, update `[python].interpreter_constraints`, the
            `interpreter_constraints` field, and relevant tool options like
            `[isort].interpreter_constraints` to tell Pants which interpreters your code
            actually uses. See {doc_url('python-interpreter-compatibility')}.

            All elements must be the minor and major Python version, e.g. '2.7' or '3.10'. Do
            not include the patch version.
            """
        ),
        advanced=True,
    )
    enable_resolves = BoolOption(
        "--enable-resolves",
        default=False,
        help=softwrap(
            f"""
            Set to true to enable lockfiles for user code. See `[python].resolves` for an
            explanation of this feature.

            Warning: the `generate-lockfiles` goal does not yet work if you have local
            requirements, regardless of using Pex vs. Poetry for the lockfile generator.
            Support is coming in a future Pants release. In the meantime, the workaround is to
            host the files in a custom repository with `[python-repos]`
            ({doc_url('python-third-party-dependencies#custom-repositories')}).

            You may also run into issues generating lockfiles when using Poetry as the
            generator, rather than Pex. See the option `[python].lockfile_generator` for more
            information.

            This option is mutually exclusive with `[python].requirement_constraints`. We
            strongly recommend using this option because it:

              1. Uses `--hash` to validate that all downloaded files are expected, which reduces\
            the risk of supply chain attacks.
              2. Enforces that all transitive dependencies are in the lockfile, whereas\
            constraints allow you to leave off dependencies. This ensures your build is more\
            stable and reduces the risk of supply chain attacks.
              3. Allows you to have multiple lockfiles in your repository.
            """
        ),
        advanced=True,
        mutually_exclusive_group="lockfile",
    )
    resolves = DictOption[str](
        "--resolves",
        default={"python-default": "3rdparty/python/default.lock"},
        help=softwrap(
            f"""
            A mapping of logical names to lockfile paths used in your project.

            Many organizations only need a single resolve for their whole project, which is
            a good default and often the simplest thing to do. However, you may need multiple
            resolves, such as if you use two conflicting versions of a requirement in your
            repository.

            If you only need a single resolve, run `{bin_name()} generate-lockfiles` to
            generate the lockfile.

            If you need multiple resolves:

              1. Via this option, define multiple resolve names and their lockfile paths.\
            The names should be meaningful to your repository, such as `data-science` or\
            `pants-plugins`.
              2. Set the default with `[python].default_resolve`.
              3. Update your `python_requirement` targets with the `resolve` field to declare which\
            resolve they should be available in. They default to `[python].default_resolve`,\
            so you only need to update targets that you want in non-default resolves.\
            (Often you'll set this via the `python_requirements` or `poetry_requirements`\
            target generators)
              4. Run `{bin_name()} generate-lockfiles` to generate the lockfiles. If the results\
            aren't what you'd expect, adjust the prior step.
              5. Update any targets like `python_source` / `python_sources`,\
            `python_test` / `python_tests`, and `pex_binary` which need to set a non-default\
            resolve with the `resolve` field.

            If a target can work with multiple resolves, you can either use the `parametrize`
            mechanism or manually create a distinct target per resolve. See {doc_url("targets")}
            for information about `parametrize`.

            For example:

                python_sources(
                    resolve=parametrize("data-science", "web-app"),
                )

            You can name the lockfile paths what you would like; Pants does not expect a
            certain file extension or location.

            Only applies if `[python].enable_resolves` is true.
            """
        ),
        advanced=True,
    )
    default_resolve = StrOption(
        "--default-resolve",
        default="python-default",
        help=softwrap(
            """
            The default value used for the `resolve` field.

            The name must be defined as a resolve in `[python].resolves`.
            """
        ),
        advanced=True,
    )
    _resolves_to_interpreter_constraints = DictOption["list[str]"](
        "--resolves-to-interpreter-constraints",
        help=softwrap(
            """
            Override the interpreter constraints to use when generating a resolve's lockfile
            with the `generate-lockfiles` goal.

            By default, each resolve from `[python].resolves` will use your global interpreter
            constraints set in `[python].interpreter_constraints`. With this option, you can
            override each resolve to use certain interpreter constraints, such as
            `{'data-science': ['==3.8.*']}`.

            Warning: this does NOT impact the interpreter constraints used by targets within the
            resolve, which is instead set by the option `[python].interpreter_constraints` and the
            `interpreter_constraints` field. It only impacts how the lockfile is generated.

            Pants will validate that the interpreter constraints of your code using a
            resolve are compatible with that resolve's own constraints. For example, if your
            code is set to use ['==3.9.*'] via the `interpreter_constraints` field, but it's
            using a resolve whose interpreter constraints are set to ['==3.7.*'], then
            Pants will error explaining the incompatibility.

            The keys must be defined as resolves in `[python].resolves`.
            """
        ),
        advanced=True,
    )
    invalid_lockfile_behavior = EnumOption(
        "--invalid-lockfile-behavior",
        default=InvalidLockfileBehavior.error,
        help=softwrap(
            """
            The behavior when a lockfile has requirements or interpreter constraints that are
            not compatible with what the current build is using.

            We recommend keeping the default of `error` for CI builds.

            Note that `warn` will still expect a Pants lockfile header, it only won't error if
            the lockfile is stale and should be regenerated. Use `ignore` to avoid needing a
            lockfile header at all, e.g. if you are manually managing lockfiles rather than
            using the `generate-lockfiles` goal.
            """
        ),
        advanced=True,
    )
    _lockfile_generator = EnumOption(
        "--lockfile-generator",
        default=LockfileGenerator.PEX,
        help=softwrap(
            f"""
            Whether to use Pex or Poetry with the `generate-lockfiles` goal.

            Poetry does not support these features:

              1) `[python-repos]` for custom indexes/cheeseshops.
              2) VCS (Git) requirements.
              3) `[GLOBAL].ca_certs_path`.

            If you use any of these features, you should use Pex.

            Several users have also had issues with how Poetry's lockfile generation handles
            environment markers for transitive dependencies; certain dependencies end up with
            nonsensical environment markers which cause the dependency to not be installed, then
            for Pants/Pex to complain the dependency is missing, even though it's in the
            lockfile. There is a workaround: for `[python].resolves`, manually create a
            `python_requirement` target for the problematic transitive dependencies so that they
            are seen as direct requirements, rather than transitive. For tool lockfiles, add the
            problematic transitive dependency to `[tool].extra_requirements`, e.g.
            `[isort].extra_requirements`. Then, regenerate the lockfile(s) with the
            `generate-lockfiles` goal. Alternatively, use Pex for generation.

            Finally, installing from a Poetry-generated lockfile is slower than installing from a
            Pex lockfile. When using a Pex lockfile, Pants will only install the subset needed
            for the current task.

            However, Pex lockfile generation is a new feature. Given how vast the Python packaging
            ecosystem is, it is possible you may experience edge cases / bugs we haven't yet
            covered. Bug reports are appreciated!
            https://github.com/pantsbuild/pants/issues/new/choose

            Note that while Pex generates locks in a proprietary JSON format, you can use the
            `{bin_name()} export` goal for Pants to create a virtual environment for
            interoperability with tools like IDEs.
            """
        ),
        advanced=True,
    )
    resolves_generate_lockfiles = BoolOption(
        "--resolves-generate-lockfiles",
        default=True,
        help=softwrap(
            """
            If False, Pants will not attempt to generate lockfiles for `[python].resolves` when
            running the `generate-lockfiles` goal.

            This is intended to allow you to manually generate lockfiles as a workaround for the
            issues described in the `[python].lockfile_generator` option, if you are not yet ready
            to use Pex.

            If you set this to False, Pants will not attempt to validate the metadata headers
            for your user lockfiles. This is useful so that you can keep
            `[python].invalid_lockfile_behavior` to `error` or `warn` if you'd like so that tool
            lockfiles continue to be validated, while user lockfiles are skipped.
            """
        ),
        advanced=True,
    )
    run_against_entire_lockfile = BoolOption(
        "--run-against-entire-lockfile",
        default=False,
        help=softwrap(
            """
            If enabled, when running binaries, tests, and repls, Pants will use the entire
            lockfile file instead of just the relevant subset.

            We generally do not recommend this if `[python].lockfile_generator` is set to `"pex"`
            thanks to performance enhancements we've made. When using Pex lockfiles, you should
            get similar performance to using this option but without the downsides mentioned below.

            Otherwise, if not using Pex lockfiles, this option can improve performance and reduce
            cache size. But it has two consequences: 1) All cached test results will be invalidated
            if any requirement in the lockfile changes, rather than just those that depend on the
            changed requirement. 2) Requirements unneeded by a test/run/repl will be present on the
            sys.path, which might in rare cases cause their behavior to change.

            This option does not affect packaging deployable artifacts, such as
            PEX files, wheels and cloud functions, which will still use just the exact
            subset of requirements needed.
            """
        ),
        advanced=True,
    )
    requirement_constraints = FileOption(
        "--requirement-constraints",
        default=None,
        help=softwrap(
            """
            When resolving third-party requirements for your own code (vs. tools you run),
            use this constraints file to determine which versions to use.

            Mutually exclusive with `[python].enable_resolves`, which we generally recommend as an
            improvement over constraints file.

            See https://pip.pypa.io/en/stable/user_guide/#constraints-files for more
            information on the format of constraint files and how constraints are applied in
            Pex and pip.

            This only applies when resolving user requirements, rather than tools you run
            like Black and Pytest. To constrain tools, set `[tool].lockfile`, e.g.
            `[black].lockfile`.
            """
        ),
        advanced=True,
        mutually_exclusive_group="lockfile",
    )
    resolve_all_constraints = BoolOption(
        "--resolve-all-constraints",
        default=True,
        help=softwrap(
            """
            (Only relevant when using `[python].requirement_constraints.`) If enabled, when
            resolving requirements, Pants will first resolve your entire
            constraints file as a single global resolve. Then, if the code uses a subset of your
            constraints file, Pants will extract the relevant requirements from that global
            resolve so that only what's actually needed gets used. If disabled, Pants will not
            use a global resolve and will resolve each subset of your requirements independently.

            Usually this option should be enabled because it can result in far fewer resolves.
            """
        ),
        advanced=True,
    )
    no_binary = StrListOption(
        "--no-binary",
        help=softwrap(
            """
            Do not use binary packages (i.e., wheels) for these 3rdparty projects.

            Also accepts `:all:` to disable all binary packages.

            Note that some packages are tricky to compile and may fail to install when this
            option is used on them. See https://pip.pypa.io/en/stable/cli/pip_install/#install-no-binary
            for details.

            Note: Only takes effect if you use Pex lockfiles. Set
            `[python].lockfile_generator = "pex"` and run the `generate-lockfiles` goal.
            """
        ),
    )
    only_binary = StrListOption(
        "--only-binary",
        help=softwrap(
            """
            Do not use source packages (i.e., sdists) for these 3rdparty projects.

            Also accepts `:all:` to disable all source packages.

            Packages without binary distributions will fail to install when this option is used
            on them. See https://pip.pypa.io/en/stable/cli/pip_install/#install-only-binary for
            details.

            Note: Only takes effect if you use Pex lockfiles. Set
            `[python].lockfile_generator = "pex"` and run the `generate-lockfiles` goal.
            """
        ),
    )
    resolver_manylinux = StrOption(
        "--resolver-manylinux",
        default="manylinux2014",
        help=softwrap(
            """
            Whether to allow resolution of manylinux wheels when resolving requirements for
            foreign linux platforms. The value should be a manylinux platform upper bound,
            e.g.: 'manylinux2010', or else the string 'no' to disallow.
            """
        ),
        advanced=True,
    )
    tailor_source_targets = BoolOption(
        "--tailor-source-targets",
        default=True,
        help=softwrap(
            """
            If true, add `python_sources`, `python_tests`, and `python_test_utils` targets with
            the `tailor` goal."""
        ),
        advanced=True,
    )
    tailor_ignore_solitary_init_files = BoolOption(
        "--tailor-ignore-solitary-init-files",
        default=True,
        help=softwrap(
            """
            If true, don't add `python_sources` targets for solitary `__init__.py` files with the
            `tailor` goal.

            Solitary `__init__.py` files usually exist as import scaffolding rather than true
            library code, so it can be noisy to add BUILD files.

            Set to false if you commonly have packages containing real code in
            `__init__.py` without other `.py` files in the package.
            """
        ),
        advanced=True,
    )
    tailor_requirements_targets = BoolOption(
        "--tailor-requirements-targets",
        default=True,
        help=softwrap(
            """
            If true, add `python_requirements`, `poetry_requirements`, and `pipenv_requirements`
            target generators with the `tailor` goal.

            `python_requirements` targets are added for any file that matches the pattern
            `*requirements*.txt`. You will need to manually add `python_requirements` for different
            file names like `reqs.txt`.

            `poetry_requirements` targets are added for `pyproject.toml` files with `[tool.poetry`
            in them.
            """
        ),
        advanced=True,
    )
    tailor_pex_binary_targets = BoolOption(
        "--tailor-pex-binary-targets",
        default=True,
        help=softwrap(
            """
            If true, add `pex_binary` targets for Python files named `__main__.py` or with a
            `__main__` clause with the `tailor` goal.
            """
        ),
        advanced=True,
    )
    macos_big_sur_compatibility = BoolOption(
        "--macos-big-sur-compatibility",
        default=False,
        help=softwrap(
            """
            If set, and if running on MacOS Big Sur, use macosx_10_16 as the platform
            when building wheels. Otherwise, the default of macosx_11_0 will be used.
            This may be required for pip to be able to install the resulting distribution
            on Big Sur.
            """
        ),
        advanced=True,
    )

    @property
    def generate_lockfiles_with_pex(self) -> bool:
        """Else, generate with Poetry."""
        return self._lockfile_generator == LockfileGenerator.PEX

    @memoized_property
    def resolves_to_interpreter_constraints(self) -> dict[str, tuple[str, ...]]:
        """Validate and normalize the raw option into resolve -> constraint tuples.

        Raises:
            KeyError: if a key is not a resolve declared in `[python].resolves`.
        """
        result = {}
        for resolve, ics in self._resolves_to_interpreter_constraints.items():
            if resolve not in self.resolves:
                raise KeyError(
                    softwrap(
                        f"""
                        Unrecognized resolve name in the option
                        `[python].resolves_to_interpreter_constraints`: {resolve}. Each
                        key must be one of the keys in `[python].resolves`:
                        {sorted(self.resolves.keys())}
                        """
                    )
                )
            result[resolve] = tuple(ics)
        return result

    def resolve_all_constraints_was_set_explicitly(self) -> bool:
        # True when the user configured the option (anywhere), rather than the default.
        return not self.options.is_default("resolve_all_constraints")

    @property
    def manylinux(self) -> str | None:
        """The manylinux platform upper bound, or None when manylinux wheels are disallowed."""
        manylinux = cast(Optional[str], self.resolver_manylinux)
        # Accept several spellings of "disabled" for backwards compatibility.
        if manylinux is None or manylinux.lower() in ("false", "no", "none"):
            return None
        return manylinux

    @property
    def manylinux_pex_args(self) -> Iterator[str]:
        """Yield the Pex CLI args corresponding to the `manylinux` setting."""
        if self.manylinux:
            yield "--manylinux"
            yield self.manylinux
        else:
            yield "--no-manylinux"

    @property
    def scratch_dir(self):
        # A per-scope working directory under the Pants workdir.
        return os.path.join(self.options.pants_workdir, *self.options_scope.split("."))

    def compatibility_or_constraints(self, compatibility: Iterable[str] | None) -> tuple[str, ...]:
        """Return either the given `compatibility` field or the global interpreter constraints.

        If interpreter constraints are supplied by the CLI flag, return those only.
        """
        if self.options.is_flagged("interpreter_constraints"):
            return self.interpreter_constraints
        return tuple(compatibility or self.interpreter_constraints)

    def compatibilities_or_constraints(
        self, compatibilities: Iterable[Iterable[str] | None]
    ) -> tuple[str, ...]:
        """Flatten `compatibility_or_constraints` over many targets' fields."""
        return tuple(
            constraint
            for compatibility in compatibilities
            for constraint in self.compatibility_or_constraints(compatibility)
        )
class DockerOptions(Subsystem):
    """Options controlling how Pants invokes the Docker client."""

    options_scope = "docker"
    help = "Options for interacting with Docker."

    _registries = DictOption[Any](
        "--registries",
        help=softwrap(
            """
            Configure Docker registries. The schema for a registry entry is as follows:

                {
                    "registry-alias": {
                        "address": "registry-domain:port",
                        "default": bool,
                    },
                    ...
                }

            If no registries are provided in a `docker_image` target, then all default
            addresses will be used, if any.

            The `docker_image.registries` may be provided with a list of registry addresses
            and registry aliases prefixed with `@` to be used instead of the defaults.

            A configured registry is marked as default either by setting `default = true`
            or with an alias of `"default"`.
            """
        ),
        fromfile=True,
    )
    default_repository = StrOption(
        "--default-repository",
        help=softwrap(
            f"""
            Configure the default repository name used in the Docker image tag.

            The value is formatted and may reference these variables (in addition to the normal
            placeholders derived from the Dockerfile and build args etc):

            {bullet_list(["name", "directory", "parent_directory"])}

            Example: `--default-repository="{{directory}}/{{name}}"`.

            The `name` variable is the `docker_image`'s target name, `directory` and
            `parent_directory` are the name of the directory in which the BUILD file is for the
            target, and its parent directory respectively.

            Use the `repository` field to set this value directly on a `docker_image` target.

            Any registries or tags are added to the image name as required, and should
            not be part of the repository name.
            """
        ),
        default="{name}",
    )
    default_context_root = WorkspacePathOption(
        "--default-context-root",
        default="",
        help=softwrap(
            """
            Provide a default Docker build context root path for `docker_image` targets that
            does not specify their own `context_root` field.

            The context root is relative to the build root by default, but may be prefixed
            with `./` to be relative to the directory of the BUILD file of the `docker_image`.

            Examples:

                --default-context-root=src/docker
                --default-context-root=./relative_to_the_build_file
            """
        ),
    )
    _build_args = ShellStrListOption(
        "--build-args",
        help=softwrap(
            f"""
            Global build arguments (for Docker `--build-arg` options) to use for all
            `docker build` invocations.

            Entries are either strings in the form `ARG_NAME=value` to set an explicit value;
            or just `ARG_NAME` to copy the value from Pants's own environment.

            Example:

                [{options_scope}]
                build_args = ["VAR1=value", "VAR2"]

            Use the `extra_build_args` field on a `docker_image` target for additional
            image specific build arguments.
            """
        ),
    )
    build_target_stage = StrOption(
        "--build-target-stage",
        default=None,
        help=softwrap(
            """
            Global default value for `target_stage` on `docker_image` targets, overriding
            the field value on the targets, if there is a matching stage in the `Dockerfile`.

            This is useful to provide from the command line, to specify the target stage to
            build for at execution time.
            """
        ),
    )
    build_verbose = BoolOption(
        "--build-verbose",
        default=False,
        help="Whether to log the Docker output to the console. If false, only the image ID is logged.",
    )
    _env_vars = ShellStrListOption(
        "--env-vars",
        help=softwrap(
            """
            Environment variables to set for `docker` invocations.

            Entries are either strings in the form `ENV_VAR=value` to set an explicit value;
            or just `ENV_VAR` to copy the value from Pants's own environment.
            """
        ),
        advanced=True,
    )
    run_args = ShellStrListOption(
        "--run-args",
        # Attach an interactive TTY only when stdout actually is a terminal.
        default=["--interactive", "--tty"] if sys.stdout.isatty() else [],
        help=softwrap(
            f"""
            Additional arguments to use for `docker run` invocations.

            Example:

                $ {bin_name()} run --{options_scope}-run-args="-p 127.0.0.1:80:8080/tcp\
            --name demo" src/example:image -- [image entrypoint args]

            To provide the top-level options to the `docker` client, use
            `[{options_scope}].env_vars` to configure the
            [Environment variables]({doc_links['docker_env_vars']}) as appropriate.

            The arguments for the image entrypoint may be passed on the command line after a
            double dash (`--`), or using the `--run-args` option.

            Defaults to `--interactive --tty` when stdout is connected to a terminal.
            """
        ),
    )
    _executable_search_paths = StrListOption(
        "--executable-search-paths",
        default=["<PATH>"],
        help=softwrap(
            """
            The PATH value that will be used to find the Docker client and any tools required.

            The special string `"<PATH>"` will expand to the contents of the PATH env var.
            """
        ),
        advanced=True,
        metavar="<binary-paths>",
    )
    _tools = StrListOption(
        "--tools",
        default=[],
        help=softwrap(
            """
            List any additional executable tools required for Docker to work. The paths to
            these tools will be included in the PATH used in the execution sandbox, so that
            they may be used by the Docker client.
            """
        ),
        advanced=True,
    )

    @property
    def build_args(self) -> tuple[str, ...]:
        """The configured build args, deduplicated and sorted for a stable cache key."""
        return tuple(sorted(set(self._build_args)))

    @property
    def env_vars(self) -> tuple[str, ...]:
        """The configured env vars, deduplicated and sorted for a stable cache key."""
        return tuple(sorted(set(self._env_vars)))

    @property
    def tools(self) -> tuple[str, ...]:
        """The configured extra tools, deduplicated and sorted for a stable cache key."""
        return tuple(sorted(set(self._tools)))

    @memoized_method
    def registries(self) -> DockerRegistries:
        """Parse the raw `--registries` dict into a `DockerRegistries` instance (memoized)."""
        return DockerRegistries.from_dict(self._registries)

    @memoized_method
    def executable_search_path(self, env: Environment) -> tuple[str, ...]:
        """Expand `--executable-search-paths`, replacing `<PATH>` with the PATH env var entries."""
        def iter_path_entries():
            for entry in self._executable_search_paths:
                if entry == "<PATH>":
                    path = env.get("PATH")
                    if path:
                        yield from path.split(os.pathsep)
                else:
                    yield entry

        return tuple(OrderedSet(iter_path_entries()))
class TailorSubsystem(GoalSubsystem):
    """Options for the `tailor` goal, which auto-generates BUILD file targets."""

    name = "tailor"
    help = "Auto-generate BUILD file targets for new source files."

    @classmethod
    def activated(cls, union_membership: UnionMembership) -> bool:
        # Only usable when at least one backend can propose putative targets.
        return PutativeTargetsRequest in union_membership

    check = BoolOption(
        "--check",
        default=False,
        help=(
            "Do not write changes to disk, only write back what would change. Return code "
            "0 means there would be no changes, and 1 means that there would be. "
        ),
    )
    build_file_name = StrOption(
        "--build-file-name",
        default="BUILD",
        help=(
            "The name to use for generated BUILD files.\n\n"
            "This must be compatible with `[GLOBAL].build_patterns`."
        ),
        advanced=True,
    )
    build_file_header = StrOption(
        "--build-file-header",
        default=None,
        help="A header, e.g., a copyright notice, to add to the content of created BUILD files.",
        advanced=True,
    )
    build_file_indent = StrOption(
        "--build-file-indent",
        default="    ",
        help="The indent to use when auto-editing BUILD files.",
        advanced=True,
    )
    _alias_mapping = DictOption[str](
        "--alias-mapping",
        help=(
            "A mapping from standard target type to custom type to use instead. The custom "
            "type can be a custom target type or a macro that offers compatible functionality "
            f"to the one it replaces (see {doc_url('macros')})."
        ),
        advanced=True,
    )
    ignore_paths = StrListOption(
        "--ignore-paths",
        help=(
            "Do not edit or create BUILD files at these paths.\n\n"
            "Can use literal file names and/or globs, e.g. "
            # BUG FIX: the example previously read `['project/BUILD, 'ignore_me/**']`,
            # with the closing quote of the first entry misplaced.
            "`['project/BUILD', 'ignore_me/**']`.\n\n"
            "This augments the option `[GLOBAL].build_ignore`, which tells Pants to also not "
            "_read_ BUILD files at certain paths. In contrast, this option only tells Pants to "
            "not edit/create BUILD files at the specified paths."
        ),
        advanced=True,
    )
    _ignore_adding_targets = StrListOption(
        "--ignore-adding-targets",
        help=(
            "Do not add these target definitions.\n\n"
            "Expects a list of target addresses that would normally be added by `tailor`, "
            "e.g. `['project:tgt']`. To find these names, you can run `tailor --check`, then "
            "combine the BUILD file path with the target's name. For example, if `tailor` "
            "would add the target `bin` to `project/BUILD`, then the address would be "
            "`project:bin`. If the BUILD file is at the root of your repository, use `//` for "
            "the path, e.g. `//:bin`.\n\n"
            "Does not work with macros."
        ),
        advanced=True,
    )

    @property
    def ignore_adding_targets(self) -> set[str]:
        """The configured addresses to skip, as a set for O(1) membership tests."""
        return set(self._ignore_adding_targets)

    def alias_for(self, standard_type: str) -> str | None:
        """Return the configured custom alias for `standard_type`, or None if unmapped."""
        # The get() could return None, but casting to str | None errors.
        # This cast suffices to avoid typecheck errors.
        return cast(str, self._alias_mapping.get(standard_type))

    def validate_build_file_name(self, build_file_patterns: tuple[str, ...]) -> None:
        """Check that the specified BUILD file name works with the repository's BUILD file
        patterns.

        Raises:
            ValueError: if `[tailor].build_file_name` would be ignored by
                `[GLOBAL].build_patterns`.
        """
        filespec = Filespec(includes=list(build_file_patterns))
        if not bool(matches_filespec(filespec, paths=[self.build_file_name])):
            raise ValueError(
                f"The option `[{self.options_scope}].build_file_name` is set to "
                f"`{self.build_file_name}`, which is not compatible with "
                f"`[GLOBAL].build_patterns`: {sorted(build_file_patterns)}. This means that "
                "generated BUILD files would be ignored.\n\n"
                "To fix, please update the options so that they are compatible."
            )

    def filter_by_ignores(
        self, putative_targets: Iterable[PutativeTarget], build_file_ignores: tuple[str, ...]
    ) -> Iterator[PutativeTarget]:
        """Yield the putative targets not excluded by path ignores or address ignores."""
        ignore_paths_filespec = Filespec(includes=[*self.ignore_paths, *build_file_ignores])
        for ptgt in putative_targets:
            is_ignored_file = bool(
                matches_filespec(
                    ignore_paths_filespec,
                    paths=[os.path.join(ptgt.path, self.build_file_name)],
                )
            )
            if is_ignored_file:
                continue
            if ptgt.addressable:
                # Note that `tailor` can only generate explicit targets, so we don't need to
                # worry about generated address syntax (`#`) or file address syntax.
                address = f"{ptgt.path or '//'}:{ptgt.name}"
                if address in self.ignore_adding_targets:
                    continue
            yield ptgt
class MyPy(PythonToolBase):
    """Subsystem wiring the MyPy type checker into Pants's `check` goal."""

    options_scope = "mypy"
    name = "MyPy"
    help = "The MyPy Python type checker (http://mypy-lang.org/)."

    default_version = "mypy==0.910"
    default_main = ConsoleScript("mypy")

    # See `mypy/rules.py`. We only use these default constraints in some situations.
    register_interpreter_constraints = True
    default_interpreter_constraints = ["CPython>=3.7,<4"]

    register_lockfile = True
    default_lockfile_resource = ("pants.backend.python.typecheck.mypy", "mypy.lock")
    default_lockfile_path = "src/python/pants/backend/python/typecheck/mypy/mypy.lock"
    default_lockfile_url = git_url(default_lockfile_path)
    uses_requirements_from_source_plugins = True

    skip = SkipOption("check")
    args = ArgsListOption(example="--python-version 3.7 --disallow-any-expr")
    config = FileOption(
        "--config",
        default=None,
        advanced=True,
        help=lambda cls: (
            "Path to a config file understood by MyPy "
            "(https://mypy.readthedocs.io/en/stable/config_file.html).\n\n"
            f"Setting this option will disable `[{cls.options_scope}].config_discovery`. Use "
            f"this option if the config is located in a non-standard location."
        ),
    )
    config_discovery = BoolOption(
        "--config-discovery",
        default=True,
        advanced=True,
        # Keep this list in sync with `config_request` below, which also discovers
        # MyPy config in `pyproject.toml` (`[tool.mypy]`).
        help=lambda cls: (
            "If true, Pants will include any relevant config files during "
            "runs (`mypy.ini`, `.mypy.ini`, `setup.cfg`, and `pyproject.toml`)."
            f"\n\nUse `[{cls.options_scope}].config` instead if your config is in a "
            f"non-standard location."
        ),
    )
    _source_plugins = TargetListOption(
        "--source-plugins",
        advanced=True,
        help=(
            "An optional list of `python_sources` target addresses to load first-party "
            "plugins.\n\n"
            "You must also set `plugins = path.to.module` in your `mypy.ini`, and "
            "set the `[mypy].config` option in your `pants.toml`.\n\n"
            "To instead load third-party plugins, set the option `[mypy].extra_requirements` "
            "and set the `plugins` option in `mypy.ini`."
            # Fix: a "\n\n" separator was missing here, so the rendered help read
            # "...in `mypy.ini`.Tip: it's often helpful...".
            "\n\n"
            "Tip: it's often helpful to define a dedicated 'resolve' via "
            "`[python].resolves` for your MyPy plugins such as 'mypy-plugins' "
            "so that the third-party requirements used by your plugin, like `mypy`, do not "
            "mix with the rest of your project. Read that option's help message for more info "
            "on resolves."
        ),
    )
    extra_type_stubs = StrListOption(
        "--extra-type-stubs",
        advanced=True,
        help=(
            "Extra type stub requirements to install when running MyPy.\n\n"
            "Normally, type stubs can be installed as typical requirements, such as putting "
            "them in `requirements.txt` or using a `python_requirement` target."
            # Fix: a "\n\n" separator was missing here, so the rendered help read
            # "...`python_requirement` target.Alternatively, ...".
            "\n\n"
            "Alternatively, you can use this option so that the dependencies are solely "
            "used when running MyPy and are not runtime dependencies.\n\n"
            "Expects a list of pip-style requirement strings, like "
            "`['types-requests==2.25.9']`."
        ),
    )

    @property
    def config_request(self) -> ConfigFilesRequest:
        """Describe how to locate MyPy config: the explicit `--config` path, or
        discovery of the standard config files.

        Refer to https://mypy.readthedocs.io/en/stable/config_file.html.
        """
        return ConfigFilesRequest(
            specified=self.config,
            specified_option_name=f"{self.options_scope}.config",
            discovery=self.config_discovery,
            # Dedicated config files count by mere existence; shared files only
            # count if they actually contain a MyPy section.
            check_existence=["mypy.ini", ".mypy.ini"],
            check_content={"setup.cfg": b"[mypy", "pyproject.toml": b"[tool.mypy"},
        )

    @property
    def source_plugins(self) -> UnparsedAddressInputs:
        """The `--source-plugins` addresses, wrapped for later resolution."""
        return UnparsedAddressInputs(self._source_plugins, owning_address=None)

    def check_and_warn_if_python_version_configured(self, config: FileContent | None) -> bool:
        """Determine if we can dynamically set `--python-version` and warn if not.

        Returns True if the user pinned a Python version via the config file or
        `--mypy-args` (`--py2` / `--python-version`), in which case Pants will not
        set it from interpreter constraints, and a warning is logged.
        """
        configured = []
        if config and b"python_version" in config.content:
            configured.append(
                f"`python_version` in {config.path} (which is used because of either config "
                "discovery or the `[mypy].config` option)"
            )
        if "--py2" in self.args:
            configured.append("`--py2` in the `--mypy-args` option")
        if any(arg.startswith("--python-version") for arg in self.args):
            configured.append("`--python-version` in the `--mypy-args` option")
        if configured:
            formatted_configured = " and you set ".join(configured)
            logger.warning(
                f"You set {formatted_configured}. Normally, Pants would automatically set this "
                "for you based on your code's interpreter constraints "
                f"({doc_url('python-interpreter-compatibility')}). Instead, it will "
                "use what you set.\n\n"
                "(Automatically setting the option allows Pants to partition your targets by their "
                "constraints, so that, for example, you can run MyPy on Python 2-only code and "
                "Python 3-only code at the same time. This feature may no longer work.)"
            )
        return bool(configured)