Beispiel #1
0
    class MyBaseSubsystem(Subsystem):
        """Fixture subsystem exercising option-descriptor typing against concrete values."""

        def __init__(self):
            # Simulate already-parsed option values: each attribute set here backs one
            # of the option descriptors declared on the class body below.
            self.options = SimpleNamespace()
            self.options.enum_opt = MyEnum.Val2
            self.options.optional_enum_opt = MyEnum.Val2
            self.options.dyn_enum_opt = MyEnum.Val2
            self.options.enum_list_opt = [MyEnum.Val2]
            self.options.dyn_enum_list_opt = [MyEnum.Val2]
            self.options.defaultless_enum_list_opt = [MyEnum.Val2]
            self.options.dict_opt = {"key1": "val1"}

        # Enum option with a static default value.
        enum_prop = EnumOption("--enum-opt", default=MyEnum.Val1, help="")
        # Dynamic default/help: callables whose parameter (named `cls`) is presumably
        # the subsystem class — confirm against the option-parsing code.
        dyn_enum_prop = EnumOption(
            "--dyn-enum-opt",
            default=lambda cls: cls.dyn_default,
            enum_type=MyEnum,
            help=lambda cls: f"{cls.dyn_help}",
        )
        # With `default=None` the enum type cannot be inferred from the default,
        # so `enum_type` is passed explicitly.
        optional_enum_prop = EnumOption(
            "--optional-enum-opt", enum_type=MyEnum, default=None, help=""
        )
        enum_list_prop = EnumListOption("--enum-list-opt", default=[MyEnum.Val1], help="")
        dyn_enum_list_prop = EnumListOption(
            "--dyn-enum-list-opt",
            enum_type=MyEnum,
            default=lambda cls: cls.dyn_default_list,
            help=lambda cls: f"{cls.dyn_help}",
        )
        # No default at all: again `enum_type` must be given explicitly.
        defaultless_enum_list_prop = EnumListOption(
            "--defaultless-enum-list-opt", enum_type=MyEnum, help=""
        )
        dict_prop = DictOption[Any]("--dict-opt", help="")
Beispiel #2
0
class FilterSubsystem(LineOriented, GoalSubsystem):
    # Goal options for narrowing the input target set by type, address, and tags.
    name = "filter"
    help = (
        "Filter the input targets based on various criteria.\n\nMost of the filtering options "
        "below are comma-separated lists of filtering criteria, with an implied logical OR between "
        "them, so that a target passes the filter if it matches any of the criteria in the list. "
        "A '-' prefix inverts the sense of the entire comma-separated list, so that a target "
        "passes the filter only if it matches none of the criteria in the list.\n\nEach of the "
        "filtering options may be specified multiple times, with an implied logical AND between "
        "them."
    )

    # Each option takes `[+-]` prefixed, comma-separated criteria (see `help` above).
    target_type = StrListOption(
        "--target-type",
        help="Filter on these target types, e.g. `resources` or `python_sources`.",
        metavar="[+-]type1,type2,...",
    )
    granularity = EnumOption(
        "--granularity",
        help=(
            "Filter to rendering only targets declared in BUILD files, only file-level "
            "targets, or all targets."
        ),
        default=TargetGranularity.all_targets,
    )
    address_regex = StrListOption(
        "--address-regex",
        help="Filter on target addresses matching these regexes.",
        metavar="[+-]regex1,regex2,...",
    )
    tag_regex = StrListOption(
        "--tag-regex",
        help="Filter on targets with tags matching these regexes.",
        metavar="[+-]regex1,regex2,...",
    )
Beispiel #3
0
class RegexLintSubsystem(Subsystem):
    # Lints files by mapping path patterns to required content patterns.
    options_scope = "regex-lint"
    help = softwrap("""
        Lint your code using regex patterns, e.g. to check for copyright headers.

        To activate this with the `lint` goal, you must set `[regex-lint].config`.

        Unlike other linters, this can run on files not owned by targets, such as BUILD files.
        """)

    _config = DictOption[Any](
        "--config",
        # Fixed a typo in the schema example below: 'path_pattern1' was missing its
        # opening quote ("'name': path_pattern1',"), unlike the matching
        # 'content_pattern1' example further down.
        help=softwrap("""
            Config schema is as follows:

                ```
                {
                'required_matches': {
                    'path_pattern1': [content_pattern1, content_pattern2],
                    'path_pattern2': [content_pattern1, content_pattern3],
                    ...
                },
                'path_patterns': [
                    {
                    'name': 'path_pattern1',
                    'pattern': <path regex pattern>,
                    'inverted': True|False (defaults to False),
                    'content_encoding': <encoding> (defaults to utf8)
                    },
                    ...
                ],
                'content_patterns': [
                    {
                    'name': 'content_pattern1',
                    'pattern': <content regex pattern>,
                    'inverted': True|False (defaults to False)
                    }
                    ...
                ]
                }
                ```

            Meaning: if a file matches some path pattern, its content must match all the
            corresponding content patterns.

            It's often helpful to load this config from a JSON or YAML file. To do that, set
            `[regex-lint].config = '@path/to/config.yaml'`, for example.
            """),
        fromfile=True,
    )
    detail_level = EnumOption(
        "--detail-level",
        default=DetailLevel.nonmatching,
        help="How much detail to include in the result.",
    )

    @memoized_method
    def get_multi_matcher(self) -> MultiMatcher | None:
        """Build (and memoize) a MultiMatcher from the validated config.

        Returns None when no config was provided, i.e. the linter is inactive.
        """
        if not self._config:
            return None
        return MultiMatcher(ValidationConfig.from_dict(self._config))
Beispiel #4
0
class HelmUnitTestSubsystem(ExternalHelmPlugin):
    # Wrapper for the quintush/helm-unittest Helm plugin (BDD-style chart tests).
    options_scope = "helm-unittest"
    plugin_name = "unittest"
    help = "BDD styled unit test framework for Kubernetes Helm charts as a Helm plugin. (https://github.com/quintush/helm-unittest)"

    default_version = "0.2.8"
    # Entry format: version|platform|sha256|file size in bytes.
    # NOTE(review): the arm64 platform fields carry a trailing space ("linux_arm64 ",
    # "macos_arm64 ") — presumably tolerated by the known-versions parser; confirm.
    default_known_versions = [
        "0.2.8|linux_x86_64|d7c452559ad4406a1197435394fbcffe51198060de1aa9b4cb6feaf876776ba0|18299096",
        "0.2.8|linux_arm64 |c793e241b063f0540ad9b4acc0a02e5a101bd9daea5bdf4d8562e9b2337fedb2|16943867",
        "0.2.8|macos_x86_64|1dc95699320894bdebf055c4f4cc084c2cfa0133d3cb7fd6a4c0adca94df5c96|18161928",
        "0.2.8|macos_arm64 |436e3167c26f71258b96e32c2877b4f97c051064db941de097cf3db2fc861342|17621648",
    ]
    default_url_template = "https://github.com/quintush/helm-unittest/releases/download/v{version}/helm-unittest-{platform}-{version}.tgz"
    # Pants platform name -> release-artifact platform segment in the URL template.
    default_url_platform_mapping = {
        "linux_arm64": "linux-arm64",
        "linux_x86_64": "linux-amd64",
        "macos_arm64": "macos-arm64",
        "macos_x86_64": "macos-amd64",
    }

    output_type = EnumOption(
        "--output-type",
        help="Output type used for the test report",
        default=HelmUnitTestReportFormat.XUNIT,
    )

    def generate_exe(self, _: Platform) -> str:
        """Relative path of the plugin executable inside the unpacked archive."""
        return "./untt"
Beispiel #5
0
class TestSubsystem(GoalSubsystem):
    name = "test"
    help = "Run tests."

    # Prevent this class from being detected by pytest as a test class.
    __test__ = False

    @classmethod
    def activated(cls, union_membership: UnionMembership) -> bool:
        """The goal is available iff some backend registered a TestFieldSet."""
        return TestFieldSet in union_membership

    debug = BoolOption(
        "--debug",
        default=False,
        help=(
            "Run tests sequentially in an interactive process. This is necessary, for "
            "example, when you add breakpoints to your code."
        ),
    )
    force = BoolOption(
        "--force",
        default=False,
        help="Force the tests to run, even if they could be satisfied from cache.",
    )
    output = EnumOption(
        "--output",
        default=ShowOutput.FAILED,
        help="Show stdout/stderr for these tests.",
    )
    use_coverage = BoolOption(
        "--use-coverage",
        default=False,
        help="Generate a coverage report if the test runner supports it.",
    )
    open_coverage = BoolOption(
        "--open-coverage",
        default=False,
        help=(
            "If a coverage report file is generated, open it on the local system if the "
            "system supports this."
        ),
    )
    xml_dir = StrOption(
        "--xml-dir",
        metavar="<DIR>",
        default=None,
        advanced=True,
        help=(
            "Specifying a directory causes Junit XML result files to be emitted under "
            "that dir for each test run that supports producing them."
        ),
    )
    extra_env_vars = StrListOption(
        "--extra-env-vars",
        help=(
            "Additional environment variables to include in test processes. "
            "Entries are strings in the form `ENV_VAR=value` to use explicitly; or just "
            "`ENV_VAR` to copy the value of a variable in Pants's own environment."
        ),
    )
class UpdateBuildFilesSubsystem(GoalSubsystem):
    name = "update-build-files"
    help = (
        "Format and fix safe deprecations in BUILD files.\n\n"
        "This does not handle the full Pants upgrade. You must still manually change "
        "`pants_version` in `pants.toml` and you may need to manually address some deprecations. "
        f"See {doc_url('upgrade-tips')} for upgrade tips.\n\n"
        "This goal is run without arguments. It will run over all BUILD files in your "
        "project."
    )

    @classmethod
    def activated(cls, union_membership: UnionMembership) -> bool:
        """Available iff some backend can rewrite BUILD files."""
        return RewrittenBuildFileRequest in union_membership

    check = BoolOption(
        "--check",
        default=False,
        help=(
            "Do not write changes to disk, only write back what would change. Return code "
            "0 means there would be no changes, and 1 means that there would be. "
        ),
    )
    fmt = BoolOption(
        "--fmt",
        default=True,
        help=(
            "Format BUILD files using Black or Yapf.\n\n"
            "Set `[black].args` / `[yapf].args`, `[black].config` / `[yapf].config` , "
            "and `[black].config_discovery` / `[yapf].config_discovery` to change "
            "Black's or Yapf's behavior. Set "
            "`[black].interpreter_constraints` / `[yapf].interpreter_constraints` "
            "and `[python].interpreter_search_path` to change which interpreter is "
            "used to run the formatter."
        ),
    )
    formatter = EnumOption(
        "--formatter",
        default=Formatter.BLACK,
        help="Which formatter Pants should use to format BUILD files.",
    )
    fix_safe_deprecations = BoolOption(
        "--fix-safe-deprecations",
        default=True,
        help=(
            "Automatically fix deprecations, such as target type renames, that are safe "
            "because they do not change semantics."
        ),
    )
    fix_python_macros = BoolOption(
        "--fix-python-macros",
        default=False,
        help=(
            "Update references to targets generated from `python_requirements` and "
            "`poetry_requirements` from the old deprecated macro mechanism to the new target "
            f"generation mechanism described at {doc_url('targets#target-generation')}.\n\n"
        ),
    )
Beispiel #7
0
class UpdateBuildFilesSubsystem(GoalSubsystem):
    name = "update-build-files"
    help = softwrap(
        f"""
        Format and fix safe deprecations in BUILD files.

        This does not handle the full Pants upgrade. You must still manually change
        `pants_version` in `pants.toml` and you may need to manually address some deprecations.
        See {doc_url('upgrade-tips')} for upgrade tips.

        This goal is run without arguments. It will run over all BUILD files in your
        project.
        """
    )

    @classmethod
    def activated(cls, union_membership: UnionMembership) -> bool:
        """Available iff some backend can rewrite BUILD files."""
        return RewrittenBuildFileRequest in union_membership

    check = BoolOption(
        "--check",
        default=False,
        help=softwrap(
            """
            Do not write changes to disk, only write back what would change. Return code
            0 means there would be no changes, and 1 means that there would be.
            """
        ),
    )
    fmt = BoolOption(
        "--fmt",
        default=True,
        help=softwrap(
            """
            Format BUILD files using Black or Yapf.

            Set `[black].args` / `[yapf].args`, `[black].config` / `[yapf].config` ,
            and `[black].config_discovery` / `[yapf].config_discovery` to change
            Black's or Yapf's behavior. Set
            `[black].interpreter_constraints` / `[yapf].interpreter_constraints`
            and `[python].interpreter_search_path` to change which interpreter is
            used to run the formatter.
            """
        ),
    )
    formatter = EnumOption(
        "--formatter",
        default=Formatter.BLACK,
        help="Which formatter Pants should use to format BUILD files.",
    )
    fix_safe_deprecations = BoolOption(
        "--fix-safe-deprecations",
        default=True,
        help=softwrap(
            """
            Automatically fix deprecations, such as target type renames, that are safe
            because they do not change semantics.
            """
        ),
    )
Beispiel #8
0
class SetupPyGeneration(Subsystem):
    options_scope = "setup-py-generation"
    help = "Options to control how setup.py is generated from a `python_distribution` target."

    # Generating setup is the more aggressive thing to do, so we'd prefer that the default
    # be False. However that would break widespread existing usage, so we'll make that
    # change in a future deprecation cycle.
    generate_setup_default = BoolOption(
        "--generate-setup-default",
        default=True,
        help=softwrap("""
            The default value for the `generate_setup` field on `python_distribution` targets.
            Can be overridden per-target by setting that field explicitly. Set this to False
            if you mostly rely on handwritten setup files (setup.py, setup.cfg and similar).
            Leave as True if you mostly rely on Pants generating setup files for you.
            """),
    )

    first_party_dependency_version_scheme = EnumOption(
        "--first-party-dependency-version-scheme",
        default=FirstPartyDependencyVersionScheme.EXACT,
        help=softwrap("""
            What version to set in `install_requires` when a `python_distribution` depends on
            other `python_distribution`s. If `exact`, will use `==`. If `compatible`, will
            use `~=`. If `any`, will leave off the version. See
            https://www.python.org/dev/peps/pep-0440/#version-specifiers.
            """),
    )

    def first_party_dependency_version(self, version: str) -> str:
        """Return the version string (e.g. '~=4.0') for a first-party dependency.

        If the user specified to use "any" version, then this will return an empty string.
        """
        scheme = self.first_party_dependency_version_scheme
        if scheme == FirstPartyDependencyVersionScheme.ANY:
            # "any": omit the version specifier entirely.
            return ""
        if scheme == FirstPartyDependencyVersionScheme.EXACT:
            specifier = "=="
        else:
            specifier = "~="
        return f"{specifier}{version}"
Beispiel #9
0
class Changed(Subsystem):
    # Options for Git-based change detection.
    options_scope = "changed"
    help = f"Tell Pants to detect what files and targets have changed from Git.\n\nSee {doc_url('advanced-target-selection')}."

    since = StrOption(
        "--since",
        help="Calculate changes since this Git spec (commit range/SHA/ref).",
        default=None,
    )
    diffspec = StrOption(
        "--diffspec",
        help="Calculate changes contained within a given Git spec (commit range/SHA/ref).",
        default=None,
    )
    dependees = EnumOption(
        "--dependees",
        help="Include direct or transitive dependees of changed targets.",
        default=DependeesOption.NONE,
    )
Beispiel #10
0
class PythonInferSubsystem(Subsystem):
    options_scope = "python-infer"
    help = "Options controlling which dependencies will be inferred for Python targets."

    imports = BoolOption(
        "--imports",
        default=True,
        help=softwrap("""
            Infer a target's imported dependencies by parsing import statements from sources.

            To ignore a false positive, you can either put `# pants: no-infer-dep` on the line of
            the import or put `!{bad_address}` in the `dependencies` field of your target.
            """),
    )
    string_imports = BoolOption(
        "--string-imports",
        default=False,
        help=softwrap("""
            Infer a target's dependencies based on strings that look like dynamic
            dependencies, such as Django settings files expressing dependencies as strings.

            To ignore a false positive, you can either put `# pants: no-infer-dep` on the line of
            the string or put `!{bad_address}` in the `dependencies` field of your target.
            """),
    )
    string_imports_min_dots = IntOption(
        "--string-imports-min-dots",
        default=2,
        help=softwrap("""
            If --string-imports is True, treat valid-looking strings with at least this many
            dots in them as potential dynamic dependencies. E.g., `'foo.bar.Baz'` will be
            treated as a potential dependency if this option is set to 2 but not if set to 3.
            """),
    )
    assets = BoolOption(
        "--assets",
        default=False,
        help=softwrap("""
            Infer a target's asset dependencies based on strings that look like Posix
            filepaths, such as those given to `open` or `pkgutil.get_data`.

            To ignore a false positive, you can either put `# pants: no-infer-dep` on the line of
            the string or put `!{bad_address}` in the `dependencies` field of your target.
            """),
    )
    assets_min_slashes = IntOption(
        "--assets-min-slashes",
        default=1,
        help=softwrap("""
            If --assets is True, treat valid-looking strings with at least this many forward
            slash characters as potential assets. E.g. `'data/databases/prod.db'` will be
            treated as a potential candidate if this option is set to 2 but not to 3.
            """),
    )

    init_files = EnumOption(
        "--init-files",
        default=InitFilesInference.content_only,
        help=softwrap(f"""
            Infer a target's dependencies on any `__init__.py` files in the packages
            it is located in (recursively upward in the directory structure).

            Even if this is set to `never` or `content_only`, Pants will still always include any
            ancestor `__init__.py` files in the sandbox. Only, they will not be "proper"
            dependencies, e.g. they will not show up in `{bin_name()} dependencies` and their own
            dependencies will not be used.

            By default, Pants only adds a "proper" dependency if there is content in the
            `__init__.py` file. This makes sure that dependencies are added when likely necessary
            to build, while also avoiding adding unnecessary dependencies. While accurate, those
            unnecessary dependencies can complicate setting metadata like the
            `interpreter_constraints` and `resolve` fields.
            """),
    )
    # Deprecated predecessor of `init_files`; kept until its removal version.
    inits = BoolOption(
        "--inits",
        default=False,
        help=softwrap(f"""
            Infer a target's dependencies on any `__init__.py` files in the packages
            it is located in (recursively upward in the directory structure).

            Even if this is disabled, Pants will still include any ancestor `__init__.py` files,
            only they will not be 'proper' dependencies, e.g. they will not show up in
            `{bin_name()} dependencies` and their own dependencies will not be used.

            If you have empty `__init__.py` files, it's safe to leave this option off; otherwise,
            you should enable this option.
            """),
        removal_version="2.14.0.dev1",
        removal_hint=softwrap("""
            Use the more powerful option `[python-infer].init_files`. For identical
            behavior, set to 'always'. Otherwise, we recommend the default of `content_only`
            (simply delete the option `[python-infer].inits` to trigger the default).
            """),
    )

    conftests = BoolOption(
        "--conftests",
        default=True,
        help=softwrap("""
            Infer a test target's dependencies on any conftest.py files in the current
            directory and ancestor directories.
            """),
    )
    entry_points = BoolOption(
        "--entry-points",
        default=True,
        help=softwrap("""
            Infer dependencies on targets' entry points, e.g. `pex_binary`'s
            `entry_point` field, `python_awslambda`'s `handler` field and
            `python_distribution`'s `entry_points` field.
            """),
    )
    unowned_dependency_behavior = EnumOption(
        "--unowned-dependency-behavior",
        default=UnownedDependencyUsage.DoNothing,
        help=softwrap("""
            How to handle imports that don't have an inferrable owner.

            Usually when an import cannot be inferred, it represents an issue like Pants not being
            properly configured, e.g. targets not set up. Often, missing dependencies will result
            in confusing runtime errors like `ModuleNotFoundError`, so this option can be helpful
            to error more eagerly.

            To ignore any false positives, either add `# pants: no-infer-dep` to the line of the
            import or put the import inside a `try: except ImportError:` block.
            """),
    )
Beispiel #11
0
    class MySubsystem(Subsystem):
        """Fixture covering every option-descriptor flavor for option-typing tests.

        The exact expression forms (and the `# type: ignore` pragmas) are significant:
        several declarations exist precisely to pin down what mypy infers or rejects.
        """

        def __init__(self):
            pass

        # Scalar opts: each type in a concrete-default and an Optional (default=None) variant.
        str_opt = StrOption("--opt", default="", help="")
        optional_str_opt = StrOption("--opt", default=None, help="")
        int_opt = IntOption("--opt", default=0, help="")
        optional_int_opt = IntOption("--opt", default=None, help="")
        float_opt = FloatOption("--opt", default=1.0, help="")
        optional_float_opt = FloatOption("--opt", default=None, help="")
        bool_opt = BoolOption("--opt", default=True, help="")
        optional_bool_opt = BoolOption("--opt", default=None, help="")
        target_opt = TargetOption("--opt", default="", help="")
        optional_target_opt = TargetOption("--opt", default=None, help="")
        dir_opt = DirOption("--opt", default="", help="")
        optional_dir_opt = DirOption("--opt", default=None, help="")
        file_opt = FileOption("--opt", default="", help="")
        optional_file_opt = FileOption("--opt", default=None, help="")
        shellstr_opt = ShellStrOption("--opt", default="", help="")
        optional_shellstr_opt = ShellStrOption("--opt", default=None, help="")
        memorysize_opt = MemorySizeOption("--opt", default=1, help="")
        optional_memorysize_opt = MemorySizeOption("--opt", default=None, help="")

        # List opts
        str_list_opt = StrListOption("--opt", help="")
        int_list_opt = IntListOption("--opt", help="")
        float_list_opt = FloatListOption("--opt", help="")
        bool_list_opt = BoolListOption("--opt", help="")
        target_list_opt = TargetListOption("--opt", help="")
        dir_list_opt = DirListOption("--opt", help="")
        file_list_opt = FileListOption("--opt", help="")
        shellstr_list_opt = ShellStrListOption("--opt", help="")
        memorysize_list_opt = MemorySizeListOption("--opt", help="")
        # And just test one dynamic default
        dyn_str_list_opt = StrListOption("--opt", default=lambda cls: cls.default, help="")

        # Enum opts
        enum_opt = EnumOption("--opt", default=MyEnum.Val1, help="")
        optional_enum_opt = EnumOption("--opt", enum_type=MyEnum, default=None, help="")
        dyn_enum_opt = EnumOption(
            "--opt", enum_type=MyEnum, default=lambda cls: cls.default, help=""
        )
        # mypy correctly complains about not matching any possibilities
        enum_opt_bad = EnumOption("--opt", default=None, help="")  # type: ignore[call-overload]
        enum_list_opt1 = EnumListOption("--opt", default=[MyEnum.Val1], help="")
        enum_list_opt2 = EnumListOption("--opt", enum_type=MyEnum, help="")
        dyn_enum_list_opt = EnumListOption(
            "--opt", enum_type=MyEnum, default=lambda cls: cls.default_list, help=""
        )
        # mypy correctly complains about needing a type annotation
        enum_list_bad_opt = EnumListOption("--opt", default=[], help="")  # type: ignore[var-annotated]

        # Dict opts
        dict_opt1 = DictOption[str]("--opt", help="")
        dict_opt2 = DictOption[Any]("--opt", default=dict(key="val"), help="")
        # mypy correctly complains about needing a type annotation
        dict_opt3 = DictOption("--opt", help="")  # type: ignore[var-annotated]
        dict_opt4 = DictOption("--opt", default={"key": "val"}, help="")
        dict_opt5 = DictOption("--opt", default=dict(key="val"), help="")
        dict_opt6 = DictOption("--opt", default=dict(key=1), help="")
        dict_opt7 = DictOption("--opt", default=dict(key1=1, key2="str"), help="")
        dyn_dict_opt = DictOption[str]("--opt", default=lambda cls: cls.default, help="")

        # Specialized Opts
        skip_opt = SkipOption("fmt")
        args_opt = ArgsListOption(example="--whatever")
Beispiel #12
0
class TestSubsystem(GoalSubsystem):
    name = "test"
    help = "Run tests."

    # Prevent this class from being detected by pytest as a test class.
    __test__ = False

    @classmethod
    def activated(cls, union_membership: UnionMembership) -> bool:
        """The goal is available iff some backend registered a TestFieldSet."""
        return TestFieldSet in union_membership

    debug = BoolOption(
        "--debug",
        default=False,
        help=softwrap("""
            Run tests sequentially in an interactive process. This is necessary, for
            example, when you add breakpoints to your code.
            """),
    )
    force = BoolOption(
        "--force",
        default=False,
        help="Force the tests to run, even if they could be satisfied from cache.",
    )
    output = EnumOption(
        "--output",
        default=ShowOutput.FAILED,
        help="Show stdout/stderr for these tests.",
    )
    use_coverage = BoolOption(
        "--use-coverage",
        default=False,
        help="Generate a coverage report if the test runner supports it.",
    )
    open_coverage = BoolOption(
        "--open-coverage",
        default=False,
        help=softwrap("""
            If a coverage report file is generated, open it on the local system if the
            system supports this.
            """),
    )
    report = BoolOption(
        "--report",
        default=False,
        advanced=True,
        help="Write test reports to --report-dir.",
    )
    # Default report location; `{distdir}` is substituted in `report_dir()` below.
    default_report_path = str(PurePath("{distdir}", "test", "reports"))
    _report_dir = StrOption(
        "--report-dir",
        default=default_report_path,
        advanced=True,
        help="Path to write test reports to. Must be relative to the build root.",
    )
    extra_env_vars = StrListOption(
        "--extra-env-vars",
        help=softwrap("""
            Additional environment variables to include in test processes.
            Entries are strings in the form `ENV_VAR=value` to use explicitly; or just
            `ENV_VAR` to copy the value of a variable in Pants's own environment.
            """),
    )
    shard = StrOption(
        "--shard",
        default="",
        help=softwrap("""
            A shard specification of the form "k/N", where N is a positive integer and k is a
            non-negative integer less than N.

            If set, the request input targets will be deterministically partitioned into N disjoint
            subsets of roughly equal size, and only the k'th subset will be used, with all others
            discarded.

            Useful for splitting large numbers of test files across multiple machines in CI.
            For example, you can run three shards with --shard=0/3, --shard=1/3, --shard=2/3.

            Note that the shards are roughly equal in size as measured by number of files.
            No attempt is made to consider the size of different files, the time they have
            taken to run in the past, or other such sophisticated measures.
            """),
    )

    def report_dir(self, distdir: DistDir) -> PurePath:
        """Resolve the --report-dir template against the given dist dir."""
        resolved = self._report_dir.format(distdir=distdir.relpath)
        return PurePath(resolved)
Beispiel #13
0
class PythonSetup(Subsystem):
    """Global configuration for Pants's Python backend.

    Declares the `[python]` options scope (interpreter constraints, resolves /
    lockfiles, tailor behavior, wheel-resolution settings) plus accessor
    methods that validate or post-process the raw option values.
    """

    options_scope = "python"
    help = "Options for Pants's Python backend."

    # Fallback values used when the corresponding options are not set.
    default_interpreter_constraints = ["CPython>=3.7,<4"]
    default_interpreter_universe = ["2.7", "3.5", "3.6", "3.7", "3.8", "3.9", "3.10", "3.11"]

    interpreter_constraints = StrListOption(
        "--interpreter-constraints",
        default=default_interpreter_constraints,
        help=softwrap(
            """
            The Python interpreters your codebase is compatible with.

            These constraints are used as the default value for the `interpreter_constraints`
            field of Python targets.

            Specify with requirement syntax, e.g. 'CPython>=2.7,<3' (A CPython interpreter with
            version >=2.7 AND version <3) or 'PyPy' (A pypy interpreter of any version). Multiple
            constraint strings will be ORed together.
            """
        ),
        advanced=True,
        metavar="<requirement>",
    )
    interpreter_universe = StrListOption(
        "--interpreter-versions-universe",
        default=default_interpreter_universe,
        help=softwrap(
            f"""
            All known Python major/minor interpreter versions that may be used by either
            your code or tools used by your code.

            This is used by Pants to robustly handle interpreter constraints, such as knowing
            when generating lockfiles which Python versions to check if your code is using.

            This does not control which interpreter your code will use. Instead, to set your
            interpreter constraints, update `[python].interpreter_constraints`, the
            `interpreter_constraints` field, and relevant tool options like
            `[isort].interpreter_constraints` to tell Pants which interpreters your code
            actually uses. See {doc_url('python-interpreter-compatibility')}.

            All elements must be the minor and major Python version, e.g. '2.7' or '3.10'. Do
            not include the patch version.
            """
        ),
        advanced=True,
    )
    enable_resolves = BoolOption(
        "--enable-resolves",
        default=False,
        help=softwrap(
            f"""
            Set to true to enable lockfiles for user code. See `[python].resolves` for an
            explanation of this feature.

            Warning: the `generate-lockfiles` goal does not yet work if you have local
            requirements, regardless of using Pex vs. Poetry for the lockfile generator.
            Support is coming in a future Pants release. In the meantime, the workaround is to host
            the files in a custom repository with `[python-repos]`
            ({doc_url('python-third-party-dependencies#custom-repositories')}).

            You may also run into issues generating lockfiles when using Poetry as the generator,
            rather than Pex. See the option `[python].lockfile_generator` for more
            information.

            This option is mutually exclusive with `[python].requirement_constraints`. We strongly
            recommend using this option because it:

              1. Uses `--hash` to validate that all downloaded files are expected, which reduces\
                the risk of supply chain attacks.
              2. Enforces that all transitive dependencies are in the lockfile, whereas\
                constraints allow you to leave off dependencies. This ensures your build is more\
                stable and reduces the risk of supply chain attacks.
              3. Allows you to have multiple lockfiles in your repository.
            """
        ),
        advanced=True,
        mutually_exclusive_group="lockfile",
    )
    resolves = DictOption[str](
        "--resolves",
        default={"python-default": "3rdparty/python/default.lock"},
        help=softwrap(
            f"""
            A mapping of logical names to lockfile paths used in your project.

            Many organizations only need a single resolve for their whole project, which is
            a good default and often the simplest thing to do. However, you may need multiple
            resolves, such as if you use two conflicting versions of a requirement in
            your repository.

            If you only need a single resolve, run `{bin_name()} generate-lockfiles` to
            generate the lockfile.

            If you need multiple resolves:

              1. Via this option, define multiple resolve names and their lockfile paths.\
                The names should be meaningful to your repository, such as `data-science` or\
                `pants-plugins`.
              2. Set the default with `[python].default_resolve`.
              3. Update your `python_requirement` targets with the `resolve` field to declare which\
                resolve they should be available in. They default to `[python].default_resolve`,\
                so you only need to update targets that you want in non-default resolves.\
                (Often you'll set this via the `python_requirements` or `poetry_requirements`\
                target generators)
              4. Run `{bin_name()} generate-lockfiles` to generate the lockfiles. If the results\
                aren't what you'd expect, adjust the prior step.
              5. Update any targets like `python_source` / `python_sources`,\
                `python_test` / `python_tests`, and `pex_binary` which need to set a non-default\
                resolve with the `resolve` field.

            If a target can work with multiple resolves, you can either use the `parametrize`
            mechanism or manually create a distinct target per resolve. See {doc_url("targets")}
            for information about `parametrize`.

            For example:

                python_sources(
                    resolve=parametrize("data-science", "web-app"),
                )

            You can name the lockfile paths what you would like; Pants does not expect a
            certain file extension or location.

            Only applies if `[python].enable_resolves` is true.
            """
        ),
        advanced=True,
    )
    default_resolve = StrOption(
        "--default-resolve",
        default="python-default",
        help=softwrap(
            """
            The default value used for the `resolve` field.

            The name must be defined as a resolve in `[python].resolves`.
            """
        ),
        advanced=True,
    )
    # Private: read via the validating resolves_to_interpreter_constraints property.
    _resolves_to_interpreter_constraints = DictOption["list[str]"](
        "--resolves-to-interpreter-constraints",
        help=softwrap(
            """
            Override the interpreter constraints to use when generating a resolve's lockfile
            with the `generate-lockfiles` goal.

            By default, each resolve from `[python].resolves` will use your
            global interpreter constraints set in `[python].interpreter_constraints`. With
            this option, you can override each resolve to use certain interpreter
            constraints, such as `{'data-science': ['==3.8.*']}`.

            Warning: this does NOT impact the interpreter constraints used by targets within the
            resolve, which is instead set by the option `[python].interpreter_constraints` and the
            `interpreter_constraints` field. It only impacts how the lockfile is generated.

            Pants will validate that the interpreter constraints of your code using a
            resolve are compatible with that resolve's own constraints. For example, if your
            code is set to use ['==3.9.*'] via the `interpreter_constraints` field, but it's
            using a resolve whose interpreter constraints are set to ['==3.7.*'], then
            Pants will error explaining the incompatibility.

            The keys must be defined as resolves in `[python].resolves`.
            """
        ),
        advanced=True,
    )
    invalid_lockfile_behavior = EnumOption(
        "--invalid-lockfile-behavior",
        default=InvalidLockfileBehavior.error,
        help=softwrap(
            """
            The behavior when a lockfile has requirements or interpreter constraints that are
            not compatible with what the current build is using.

            We recommend keeping the default of `error` for CI builds.

            Note that `warn` will still expect a Pants lockfile header, it only won't error if
            the lockfile is stale and should be regenerated. Use `ignore` to avoid needing a
            lockfile header at all, e.g. if you are manually managing lockfiles rather than
            using the `generate-lockfiles` goal.
            """
        ),
        advanced=True,
    )
    # Private: read via the generate_lockfiles_with_pex property.
    _lockfile_generator = EnumOption(
        "--lockfile-generator",
        default=LockfileGenerator.PEX,
        help=softwrap(
            f"""
            Whether to use Pex or Poetry with the `generate-lockfiles` goal.

            Poetry does not support these features:

              1) `[python-repos]` for custom indexes/cheeseshops.
              2) VCS (Git) requirements.
              3) `[GLOBAL].ca_certs_path`.

            If you use any of these features, you should use Pex.

            Several users have also had issues with how Poetry's lockfile generation handles
            environment markers for transitive dependencies; certain dependencies end up with
            nonsensical environment markers which cause the dependency to not be installed, then
            for Pants/Pex to complain the dependency is missing, even though it's in the
            lockfile. There is a workaround: for `[python].resolves`, manually create a
            `python_requirement` target for the problematic transitive dependencies so that they
            are seen as direct requirements, rather than transitive. For tool lockfiles, add the
            problematic transitive dependency to `[tool].extra_requirements`, e.g.
            `[isort].extra_requirements`. Then, regenerate the lockfile(s) with the
            `generate-lockfiles` goal. Alternatively, use Pex for generation.

            Finally, installing from a Poetry-generated lockfile is slower than installing from a
            Pex lockfile. When using a Pex lockfile, Pants will only install the subset needed
            for the current task.

            However, Pex lockfile generation is a new feature. Given how vast the Python packaging
            ecosystem is, it is possible you may experience edge cases / bugs we haven't yet
            covered. Bug reports are appreciated!
            https://github.com/pantsbuild/pants/issues/new/choose

            Note that while Pex generates locks in a proprietary JSON format, you can use the
            `{bin_name()} export` goal for Pants to create a virtual environment for
            interoperability with tools like IDEs.
            """
        ),
        advanced=True,
    )
    resolves_generate_lockfiles = BoolOption(
        "--resolves-generate-lockfiles",
        default=True,
        help=softwrap(
            """
            If False, Pants will not attempt to generate lockfiles for `[python].resolves` when
            running the `generate-lockfiles` goal.

            This is intended to allow you to manually generate lockfiles as a workaround for the
            issues described in the `[python].lockfile_generator` option, if you are not yet ready
            to use Pex.

            If you set this to False, Pants will not attempt to validate the metadata headers
            for your user lockfiles. This is useful so that you can keep
            `[python].invalid_lockfile_behavior` to `error` or `warn` if you'd like so that tool
            lockfiles continue to be validated, while user lockfiles are skipped.
            """
        ),
        advanced=True,
    )
    run_against_entire_lockfile = BoolOption(
        "--run-against-entire-lockfile",
        default=False,
        help=softwrap(
            """
            If enabled, when running binaries, tests, and repls, Pants will use the entire
            lockfile file instead of just the relevant subset.

            We generally do not recommend this if `[python].lockfile_generator` is set to `"pex"`
            thanks to performance enhancements we've made. When using Pex lockfiles, you should
            get similar performance to using this option but without the downsides mentioned below.

            Otherwise, if not using Pex lockfiles, this option can improve
            performance and reduce cache size. But it has two consequences: 1) All cached test
            results will be invalidated if any requirement in the lockfile changes, rather
            than just those that depend on the changed requirement. 2) Requirements unneeded
            by a test/run/repl will be present on the sys.path, which might in rare cases
            cause their behavior to change.

            This option does not affect packaging deployable artifacts, such as
            PEX files, wheels and cloud functions, which will still use just the exact
            subset of requirements needed.
            """
        ),
        advanced=True,
    )
    requirement_constraints = FileOption(
        "--requirement-constraints",
        default=None,
        help=softwrap(
            """
            When resolving third-party requirements for your own code (vs. tools you run),
            use this constraints file to determine which versions to use.

            Mutually exclusive with `[python].enable_resolves`, which we generally recommend as an
            improvement over constraints file.

            See https://pip.pypa.io/en/stable/user_guide/#constraints-files for more
            information on the format of constraint files and how constraints are applied in
            Pex and pip.

            This only applies when resolving user requirements, rather than tools you run
            like Black and Pytest. To constrain tools, set `[tool].lockfile`, e.g.
            `[black].lockfile`.
            """
        ),
        advanced=True,
        mutually_exclusive_group="lockfile",
    )
    resolve_all_constraints = BoolOption(
        "--resolve-all-constraints",
        default=True,
        help=softwrap(
            """
            (Only relevant when using `[python].requirement_constraints.`) If enabled, when
            resolving requirements, Pants will first resolve your entire
            constraints file as a single global resolve. Then, if the code uses a subset of
            your constraints file, Pants will extract the relevant requirements from that
            global resolve so that only what's actually needed gets used. If disabled, Pants
            will not use a global resolve and will resolve each subset of your requirements
            independently.

            Usually this option should be enabled because it can result in far fewer resolves.
            """
        ),
        advanced=True,
    )
    no_binary = StrListOption(
        "--no-binary",
        help=softwrap(
            """
            Do not use binary packages (i.e., wheels) for these 3rdparty projects.

            Also accepts `:all:` to disable all binary packages.

            Note that some packages are tricky to compile and may fail to install when this option
            is used on them. See https://pip.pypa.io/en/stable/cli/pip_install/#install-no-binary
            for details.

            Note: Only takes effect if you use Pex lockfiles. Set
            `[python].lockfile_generator = "pex"` and run the `generate-lockfiles` goal.
            """
        ),
    )
    only_binary = StrListOption(
        "--only-binary",
        help=softwrap(
            """
            Do not use source packages (i.e., sdists) for these 3rdparty projects.

            Also accepts `:all:` to disable all source packages.

            Packages without binary distributions will fail to install when this option is used on
            them. See https://pip.pypa.io/en/stable/cli/pip_install/#install-only-binary for
            details.

            Note: Only takes effect if you use Pex lockfiles. Set
            `[python].lockfile_generator = "pex"` and run the `generate-lockfiles` goal.
            """
        ),
    )
    # Read via the `manylinux` property, which normalizes "no"/"false"/"none" to None.
    resolver_manylinux = StrOption(
        "--resolver-manylinux",
        default="manylinux2014",
        help=softwrap(
            """
            Whether to allow resolution of manylinux wheels when resolving requirements for
            foreign linux platforms. The value should be a manylinux platform upper bound,
            e.g.: 'manylinux2010', or else the string 'no' to disallow.
            """
        ),
        advanced=True,
    )

    tailor_source_targets = BoolOption(
        "--tailor-source-targets",
        default=True,
        help=softwrap(
            """
            If true, add `python_sources`, `python_tests`, and `python_test_utils` targets with
            the `tailor` goal."""
        ),
        advanced=True,
    )
    tailor_ignore_solitary_init_files = BoolOption(
        "--tailor-ignore-solitary-init-files",
        default=True,
        help=softwrap(
            """
            If true, don't add `python_sources` targets for solitary `__init__.py` files with the
            `tailor` goal.

            Solitary `__init__.py` files usually exist as import scaffolding rather than true
            library code, so it can be noisy to add BUILD files.

            Set to false if you commonly have packages containing real code in
            `__init__.py` without other `.py` files in the package.
            """
        ),
        advanced=True,
    )
    tailor_requirements_targets = BoolOption(
        "--tailor-requirements-targets",
        default=True,
        help=softwrap(
            """
            If true, add `python_requirements`, `poetry_requirements`, and `pipenv_requirements`
            target generators with the `tailor` goal.

            `python_requirements` targets are added for any file that matches the pattern
            `*requirements*.txt`. You will need to manually add `python_requirements` for different
            file names like `reqs.txt`.

            `poetry_requirements` targets are added for `pyproject.toml` files with `[tool.poetry`
            in them.
            """
        ),
        advanced=True,
    )
    tailor_pex_binary_targets = BoolOption(
        "--tailor-pex-binary-targets",
        default=True,
        help=softwrap(
            """
            If true, add `pex_binary` targets for Python files named `__main__.py` or with a
            `__main__` clause with the `tailor` goal.
            """
        ),
        advanced=True,
    )

    macos_big_sur_compatibility = BoolOption(
        "--macos-big-sur-compatibility",
        default=False,
        help=softwrap(
            """
            If set, and if running on MacOS Big Sur, use macosx_10_16 as the platform
            when building wheels. Otherwise, the default of macosx_11_0 will be used.
            This may be required for pip to be able to install the resulting distribution
            on Big Sur.
            """
        ),
        advanced=True,
    )

    @property
    def generate_lockfiles_with_pex(self) -> bool:
        """True if `generate-lockfiles` should use Pex; False means Poetry is used."""
        return self._lockfile_generator == LockfileGenerator.PEX

    @memoized_property
    def resolves_to_interpreter_constraints(self) -> dict[str, tuple[str, ...]]:
        """Validated view of `--resolves-to-interpreter-constraints`.

        Returns the mapping with constraint lists frozen into tuples.

        Raises:
            KeyError: if a key is not a resolve name declared in `[python].resolves`.
        """
        result = {}
        for resolve, ics in self._resolves_to_interpreter_constraints.items():
            if resolve not in self.resolves:
                raise KeyError(
                    softwrap(
                        f"""
                        Unrecognized resolve name in the option
                        `[python].resolves_to_interpreter_constraints`: {resolve}. Each
                        key must be one of the keys in `[python].resolves`:
                        {sorted(self.resolves.keys())}
                        """
                    )
                )
            result[resolve] = tuple(ics)
        return result

    def resolve_all_constraints_was_set_explicitly(self) -> bool:
        """True if the user set `--resolve-all-constraints` rather than using the default."""
        return not self.options.is_default("resolve_all_constraints")

    @property
    def manylinux(self) -> str | None:
        """The manylinux upper bound to use when resolving wheels, or None to disallow."""
        manylinux = cast(Optional[str], self.resolver_manylinux)
        # Treat "false"/"no"/"none" (case-insensitively) the same as unset.
        if manylinux is None or manylinux.lower() in ("false", "no", "none"):
            return None
        return manylinux

    @property
    def manylinux_pex_args(self) -> Iterator[str]:
        """Yield the Pex CLI args corresponding to the `manylinux` setting."""
        if self.manylinux:
            yield "--manylinux"
            yield self.manylinux
        else:
            yield "--no-manylinux"

    @property
    def scratch_dir(self):
        # Per-scope scratch directory under the Pants workdir, e.g. `<workdir>/python`.
        return os.path.join(self.options.pants_workdir, *self.options_scope.split("."))

    def compatibility_or_constraints(self, compatibility: Iterable[str] | None) -> tuple[str, ...]:
        """Return either the given `compatibility` field or the global interpreter constraints.

        If interpreter constraints are supplied by the CLI flag, return those only.
        """
        if self.options.is_flagged("interpreter_constraints"):
            return self.interpreter_constraints
        return tuple(compatibility or self.interpreter_constraints)

    def compatibilities_or_constraints(
        self, compatibilities: Iterable[Iterable[str] | None]
    ) -> tuple[str, ...]:
        """Flatten `compatibility_or_constraints` over multiple compatibility fields."""
        return tuple(
            constraint
            for compatibility in compatibilities
            for constraint in self.compatibility_or_constraints(compatibility)
        )
Beispiel #14
0
class PythonInferSubsystem(Subsystem):
    """Declares the `[python-infer]` options scope, which controls Python dependency inference."""

    options_scope = "python-infer"
    help = "Options controlling which dependencies will be inferred for Python targets."

    # Master switch for import-statement-based dependency inference.
    imports = BoolOption(
        "--imports",
        default=True,
        help=
        ("Infer a target's imported dependencies by parsing import statements from sources."
         ),
    )
    # NOTE(review): "{bad_address}" below is a literal placeholder shown in the help
    # text (the string is not an f-string) — presumably intentional; confirm.
    string_imports = BoolOption(
        "--string-imports",
        default=False,
        help=
        ("Infer a target's dependencies based on strings that look like dynamic "
         "dependencies, such as Django settings files expressing dependencies as strings. "
         "To ignore any false positives, put `!{bad_address}` in the `dependencies` field "
         "of your target."),
    )
    # Threshold filtering which strings count as dynamic-dependency candidates.
    string_imports_min_dots = IntOption(
        "--string-imports-min-dots",
        default=2,
        help=
        ("If --string-imports is True, treat valid-looking strings with at least this many "
         "dots in them as potential dynamic dependencies. E.g., `'foo.bar.Baz'` will be "
         "treated as a potential dependency if this option is set to 2 but not if set to 3."
         ),
    )
    # Switch for inferring asset (file) dependencies from path-like strings.
    assets = BoolOption(
        "--assets",
        default=False,
        help=
        ("Infer a target's asset dependencies based on strings that look like Posix "
         "filepaths, such as those given to `open` or `pkgutil.get_data`. To ignore any "
         "false positives, put `!{bad_address}` in the `dependencies` field of your target."
         ),
    )
    # Threshold filtering which strings count as asset-path candidates.
    assets_min_slashes = IntOption(
        "--assets-min-slashes",
        default=1,
        help=
        ("If --assets is True, treat valid-looking strings with at least this many forward "
         "slash characters as potential assets. E.g. `'data/databases/prod.db'` will be "
         "treated as a potential candidate if this option is set to 2 but not to 3."
         ),
    )
    # Whether ancestor `__init__.py` files become proper (visible) dependencies.
    inits = BoolOption(
        "--inits",
        default=False,
        help=
        ("Infer a target's dependencies on any `__init__.py` files in the packages "
         "it is located in (recursively upward in the directory structure).\n\nEven if this "
         "is disabled, Pants will still include any ancestor `__init__.py` files, only they "
         "will not be 'proper' dependencies, e.g. they will not show up in "
         f"`{bin_name()} dependencies` and their own dependencies will not be used.\n\nIf you "
         "have empty `__init__.py` files, it's safe to leave this option off; otherwise, "
         "you should enable this option."),
    )
    # Whether test targets automatically depend on ancestor conftest.py files.
    conftests = BoolOption(
        "--conftests",
        default=True,
        help=
        ("Infer a test target's dependencies on any conftest.py files in the current "
         "directory and ancestor directories."),
    )
    # Whether entry-point fields (pex_binary, python_awslambda, etc.) yield dependencies.
    entry_points = BoolOption(
        "--entry-points",
        default=True,
        help=(
            "Infer dependencies on targets' entry points, e.g. `pex_binary`'s "
            "`entry_point` field, `python_awslambda`'s `handler` field and "
            "`python_distribution`'s `entry_points` field."),
    )
    # Policy for imports that resolve to no owning target (ignore/warn/error).
    unowned_dependency_behavior = EnumOption(
        "--unowned-dependency-behavior",
        default=UnownedDependencyUsage.DoNothing,
        help=(
            "How to handle inferred dependencies that don't have any owner."),
    )
Beispiel #15
0
class PythonSetup(Subsystem):
    """Global configuration for Pants's Python backend (scope `[python]`)."""

    options_scope = "python"
    help = "Options for Pants's Python backend."

    # Defaults shared by the option declarations below; subclasses/plugins may read these.
    default_interpreter_constraints = ["CPython>=3.7,<4"]
    default_interpreter_universe = [
        "2.7", "3.5", "3.6", "3.7", "3.8", "3.9", "3.10", "3.11"
    ]

    # Default interpreter constraints applied to Python targets that don't set their own.
    interpreter_constraints = StrListOption(
        "--interpreter-constraints",
        default=default_interpreter_constraints,
        help=
        ("The Python interpreters your codebase is compatible with.\n\nSpecify with "
         "requirement syntax, e.g. 'CPython>=2.7,<3' (A CPython interpreter with version "
         ">=2.7 AND version <3) or 'PyPy' (A pypy interpreter of any version). Multiple "
         "constraint strings will be ORed together.\n\nThese constraints are used as the "
         "default value for the `interpreter_constraints` field of Python targets."
         ),
        advanced=True,
        metavar="<requirement>",
    )
    # Note: the flag name differs from the attribute name (`--interpreter-versions-universe`).
    interpreter_universe = StrListOption(
        "--interpreter-versions-universe",
        default=default_interpreter_universe,
        help=
        ("All known Python major/minor interpreter versions that may be used by either "
         "your code or tools used by your code.\n\n"
         "This is used by Pants to robustly handle interpreter constraints, such as knowing "
         "when generating lockfiles which Python versions to check if your code is "
         "using.\n\n"
         "This does not control which interpreter your code will use. Instead, to set your "
         "interpreter constraints, update `[python].interpreter_constraints`, the "
         "`interpreter_constraints` field, and relevant tool options like "
         "`[isort].interpreter_constraints` to tell Pants which interpreters your code "
         f"actually uses. See {doc_url('python-interpreter-compatibility')}.\n\n"
         "All elements must be the minor and major Python version, e.g. '2.7' or '3.10'. Do "
         "not include the patch version.\n\n"),
        advanced=True,
    )
    # Mutually exclusive with `enable_resolves` (both are in the "lockfile" exclusive group).
    requirement_constraints = FileOption(
        "--requirement-constraints",
        default=None,
        help=
        ("When resolving third-party requirements for your own code (vs. tools you run), "
         "use this constraints file to determine which versions to use.\n\n"
         "This only applies when resolving user requirements, rather than tools you run "
         "like Black and Pytest. To constrain tools, set `[tool].lockfile`, e.g. "
         "`[black].lockfile`.\n\n"
         "See https://pip.pypa.io/en/stable/user_guide/#constraints-files for more "
         "information on the format of constraint files and how constraints are applied in "
         "Pex and pip.\n\n"
         "Mutually exclusive with `[python].enable_resolves`."),
        advanced=True,
        mutually_exclusive_group="lockfile",
    )
    # Only meaningful when `requirement_constraints` is set (see help text).
    resolve_all_constraints = BoolOption(
        "--resolve-all-constraints",
        default=True,
        help=
        ("If enabled, when resolving requirements, Pants will first resolve your entire "
         "constraints file as a single global resolve. Then, if the code uses a subset of "
         "your constraints file, Pants will extract the relevant requirements from that "
         "global resolve so that only what's actually needed gets used. If disabled, Pants "
         "will not use a global resolve and will resolve each subset of your requirements "
         "independently."
         "\n\nUsually this option should be enabled because it can result in far fewer "
         "resolves."
         "\n\nRequires [python].requirement_constraints to be set."),
        advanced=True,
    )
    # Master switch for the multiple-resolves/lockfiles mechanism.
    enable_resolves = BoolOption(
        "--enable-resolves",
        default=False,
        help=
        ("Set to true to enable the multiple resolves mechanism. See "
         "`[python].resolves` for an explanation of this feature.\n\n"
         "Warning: the `generate-lockfiles` goal does not yet work if you have VCS (Git) "
         "requirements and local requirements. Support is coming in a future Pants release. You "
         "can still use multiple resolves, but you must manually generate your lockfiles rather "
         "than using the `generate-lockfiles` goal, e.g. by running `pip freeze`. Specifically, "
         "set up `[python].resolves` to point to your manually generated lockfile paths, and "
         "then set `[python].resolves_generate_lockfiles = false` in `pants.toml`.\n\n"
         "You may also run into issues generating lockfiles when using Poetry as the generator, "
         "rather than Pex. See the option `[python].lockfile_generator` for more "
         "information.\n\n"
         "The resolves feature offers three major benefits compared to "
         "`[python].requirement_constraints`:\n\n"
         "  1. Uses `--hash` to validate that all downloaded files are expected, which "
         "reduces the risk of supply chain attacks.\n"
         "  2. Enforces that all transitive dependencies are in the lockfile, whereas "
         "constraints allow you to leave off dependencies. This ensures your build is more "
         "stable and reduces the risk of supply chain attacks.\n"
         "  3. Allows you to have multiple resolves in your repository.\n\n"
         "Mutually exclusive with `[python].requirement_constraints`."),
        advanced=True,
        mutually_exclusive_group="lockfile",
    )
    # Mapping of resolve name -> lockfile path; only applies when `enable_resolves` is true.
    resolves = DictOption[str](
        "--resolves",
        default={
            "python-default": "3rdparty/python/default.lock"
        },
        help=
        ("A mapping of logical names to lockfile paths used in your project.\n\n"
         "Many organizations only need a single resolve for their whole project, which is "
         "a good default and the simplest thing to do. However, you may need multiple "
         "resolves, such as if you use two conflicting versions of a requirement in "
         "your repository.\n\n"
         "For now, Pants only has first-class support for disjoint resolves, meaning that "
         "you cannot ergonomically set a `python_requirement` or `python_source` target, "
         "for example, to work with multiple resolves. Practically, this means that you "
         "cannot yet ergonomically reuse common code, such as util files, across projects "
         "using different resolves. Support for overlapping resolves is coming in Pants 2.11 "
         "through a new 'parametrization' feature.\n\n"
         f"If you only need a single resolve, run `{bin_name()} generate-lockfiles` to "
         "generate the lockfile.\n\n"
         "If you need multiple resolves:\n\n"
         "  1. Via this option, define multiple resolve "
         "names and their lockfile paths. The names should be meaningful to your "
         "repository, such as `data-science` or `pants-plugins`.\n"
         "  2. Set the default with `[python].default_resolve`.\n"
         "  3. Update your `python_requirement` targets with the "
         "`resolve` field to declare which resolve they should "
         "be available in. They default to `[python].default_resolve`, so you "
         "only need to update targets that you want in non-default resolves. "
         "(Often you'll set this via the `python_requirements` or `poetry_requirements` "
         "target generators)\n"
         f"  4. Run `{bin_name()} generate-lockfiles` to generate the lockfiles. If the results "
         "aren't what you'd expect, adjust the prior step.\n"
         "  5. Update any targets like `python_source` / `python_sources`, "
         "`python_test` / `python_tests`, and `pex_binary` which need to set a non-default "
         "resolve with the `resolve` field.\n\n"
         "You can name the lockfile paths what you would like; Pants does not expect a "
         "certain file extension or location.\n\n"
         "Only applies if `[python].enable_resolves` is true."),
        advanced=True,
    )
    default_resolve = StrOption(
        "--default-resolve",
        default="python-default",
        help=("The default value used for the `resolve` field.\n\n"
              "The name must be defined as a resolve in `[python].resolves`."),
        advanced=True,
    )
    # Private raw option; validated/normalized by `resolves_to_interpreter_constraints` below.
    _resolves_to_interpreter_constraints = DictOption["list[str]"](
        "--resolves-to-interpreter-constraints",
        help=
        ("Override the interpreter constraints to use when generating a resolve's lockfile "
         "with the `generate-lockfiles` goal.\n\n"
         "By default, each resolve from `[python].resolves` will use your "
         "global interpreter constraints set in `[python].interpreter_constraints`. With "
         "this option, you can override each resolve to use certain interpreter "
         "constraints, such as `{'data-science': ['==3.8.*']}`.\n\n"
         "Pants will validate that the interpreter constraints of your code using a "
         "resolve are compatible with that resolve's own constraints. For example, if your "
         "code is set to use ['==3.9.*'] via the `interpreter_constraints` field, but it's "
         "also using a resolve whose interpreter constraints are set to ['==3.7.*'], then "
         "Pants will error explaining the incompatibility.\n\n"
         "The keys must be defined as resolves in `[python].resolves`."),
        advanced=True,
    )
    invalid_lockfile_behavior = EnumOption(
        "--invalid-lockfile-behavior",
        default=InvalidLockfileBehavior.error,
        help=
        ("The behavior when a lockfile has requirements or interpreter constraints that are "
         "not compatible with what the current build is using.\n\n"
         "We recommend keeping the default of `error` for CI builds.\n\n"
         "Note that `warn` will still expect a Pants lockfile header, it only won't error if "
         "the lockfile is stale and should be regenerated. Use `ignore` to avoid needing a "
         "lockfile header at all, e.g. if you are manually managing lockfiles rather than "
         "using the `generate-lockfiles` goal."),
        advanced=True,
    )
    # Private raw option; consumed via the `generate_lockfiles_with_pex` property, which
    # also emits a deprecation warning when the default (Poetry) is relied upon.
    _lockfile_generator = EnumOption(
        "--lockfile-generator",
        default=LockfileGenerator.POETRY,
        help=
        ("Whether to use Pex or Poetry with the `generate-lockfiles` goal.\n\n"
         "Poetry does not work with `[python-repos]` for custom indexes/cheeseshops. If you use "
         "this feature, you should use Pex.\n\n"
         "Several users have also had issues with how Poetry's lockfile generation handles "
         "environment markers for transitive dependencies; certain dependencies end up with "
         "nonsensical environment markers which cause the dependency to not be installed, then "
         "for Pants/Pex to complain the dependency is missing, even though it's in the "
         "lockfile. There is a workaround: for `[python].resolves`, manually create a "
         "`python_requirement` target for the problematic transitive dependencies so that they "
         "are seen as direct requirements, rather than transitive. For tool lockfiles, add the "
         "problematic transitive dependency to `[tool].extra_requirements`, e.g. "
         "`[isort].extra_requirements`. Then, regenerate the lockfile(s) with the "
         "`generate-lockfiles` goal. Alternatively, use Pex for generation.\n\n"
         "Finally, installing from a Poetry-generated lockfile is slower than installing from a "
         "Pex lockfile.\n\n"
         "However, Pex lockfile generation is a new feature. Given how vast the Python packaging "
         "ecosystem is, it is possible you may experience edge cases / bugs we haven't yet "
         "covered. Bug reports are appreciated! "
         "https://github.com/pantsbuild/pants/issues/new/choose\n\n"
         "Note that while Pex generates locks in a proprietary JSON format, you can use the "
         f"`{bin_name()} export` goal for Pants to create a virtual environment for "
         f"interoperability with tools like IDEs."),
    )
    resolves_generate_lockfiles = BoolOption(
        "--resolves-generate-lockfiles",
        default=True,
        help=
        ("If False, Pants will not attempt to generate lockfiles for `[python].resolves` when "
         "running the `generate-lockfiles` goal.\n\n"
         "This is intended to allow you to manually generate lockfiles as a workaround for the "
         "issues described in the `[python].enable_resolves` option.\n\n"
         "If you set this to False, Pants will not attempt to validate the metadata headers "
         "for your user lockfiles. This is useful so that you can keep "
         "`[python].invalid_lockfile_behavior` to `error` or `warn` if you'd like so that tool "
         "lockfiles continue to be validated, while user lockfiles are skipped."
         ),
        advanced=True,
    )
    run_against_entire_lockfile = BoolOption(
        "--run-against-entire-lockfile",
        default=False,
        help=
        ("If enabled, when running binaries, tests, and repls, Pants will use the entire "
         "lockfile/constraints file instead of just the relevant subset. This can improve "
         "performance and reduce cache size, but has two consequences: 1) All cached test "
         "results will be invalidated if any requirement in the lockfile changes, rather "
         "than just those that depend on the changed requirement. 2) Requirements unneeded "
         "by a test/run/repl will be present on the sys.path, which might in rare cases "
         "cause their behavior to change.\n\n"
         "This option does not affect packaging deployable artifacts, such as "
         "PEX files, wheels and cloud functions, which will still use just the exact "
         "subset of requirements needed."),
        advanced=True,
    )
    # Raw string option; normalized to `str | None` by the `manylinux` property below.
    resolver_manylinux = StrOption(
        "--resolver-manylinux",
        default="manylinux2014",
        help=
        "Whether to allow resolution of manylinux wheels when resolving requirements for "
        "foreign linux platforms. The value should be a manylinux platform upper bound, "
        "e.g.: 'manylinux2010', or else the string 'no' to disallow.",
        advanced=True,
    )
    tailor_ignore_solitary_init_files = BoolOption(
        "--tailor-ignore-solitary-init-files",
        default=True,
        help=
        "Don't tailor `python_sources` targets for solitary `__init__.py` files, as "
        "those usually exist as import scaffolding rather than true library code.\n\n"
        "Set to False if you commonly have packages containing real code in "
        "`__init__.py` and there are no other .py files in the package.",
        advanced=True,
    )
    tailor_requirements_targets = BoolOption(
        "--tailor-requirements-targets",
        default=True,
        help="Tailor python_requirements() targets for requirements files.",
        advanced=True,
    )
    tailor_pex_binary_targets = BoolOption(
        "--tailor-pex-binary-targets",
        default=True,
        help="Tailor pex_binary() targets for Python entry point files.",
        advanced=True,
    )
    macos_big_sur_compatibility = BoolOption(
        "--macos-big-sur-compatibility",
        default=False,
        help=
        "If set, and if running on MacOS Big Sur, use macosx_10_16 as the platform "
        "when building wheels. Otherwise, the default of macosx_11_0 will be used. "
        "This may be required for pip to be able to install the resulting distribution "
        "on Big Sur.",
    )

    @property
    def generate_lockfiles_with_pex(self) -> bool:
        """Whether `generate-lockfiles` should use Pex; else, generate with Poetry.

        Also emits a deprecation warning when the user has not explicitly chosen a
        generator, since the default will flip from Poetry to Pex in Pants 2.12.
        """
        if self.options.is_default("lockfile_generator"):
            warn_or_error(
                "2.12.0.dev0",
                "`[python].lockfile_generator` defaulting to 'poetry'",
                softwrap(f"""
                    In Pants 2.12, Pants will default to using Pex to generate lockfiles
                    with the `generate-lockfiles` goal, rather than Poetry. Run
                    `{bin_name()} help-advanced python` for more information on the benefits and
                    possible issues with switching to Pex.

                    To keep using Poetry, set `[python].lockfile_generator = 'poetry'` in
                    pants.toml. To try Pex, set to 'pex'.

                    Note that you can incrementally switch to Pex lockfiles if you want to reduce
                    risk while migrating. The option `[python].lockfile_generator` only impacts
                    how Pants generates new lockfiles; you can continue to use
                    requirements.txt-style lockfiles (i.e. those generated by Poetry) even if
                    new lockfiles are generated in Pex's JSON format. For example, you can run
                    `{bin_name()} --python-lockfile-generator=pex generate-lockfiles
                    --resolve=isort` to only regenerate the isort lockfile.
                    """),
            )

        return self._lockfile_generator == LockfileGenerator.PEX

    @memoized_property
    def resolves_to_interpreter_constraints(
            self) -> dict[str, tuple[str, ...]]:
        """Validated view of `--resolves-to-interpreter-constraints`.

        Raises:
            KeyError: if any key is not a resolve name defined in `[python].resolves`.
        """
        result = {}
        for resolve, ics in self._resolves_to_interpreter_constraints.items():
            if resolve not in self.resolves:
                raise KeyError(
                    "Unrecognized resolve name in the option "
                    f"`[python].resolves_to_interpreter_constraints`: {resolve}. Each "
                    "key must be one of the keys in `[python].resolves`: "
                    f"{sorted(self.resolves.keys())}")
            # Freeze each constraint list into a tuple so the memoized value is immutable.
            result[resolve] = tuple(ics)
        return result

    def resolve_all_constraints_was_set_explicitly(self) -> bool:
        """True if the user set `--resolve-all-constraints` rather than relying on the default."""
        return not self.options.is_default("resolve_all_constraints")

    @property
    def manylinux(self) -> str | None:
        """The manylinux upper bound to use, or None when manylinux wheels are disallowed."""
        manylinux = cast(Optional[str], self.resolver_manylinux)
        # Treat common "off" spellings as disabling manylinux resolution entirely.
        if manylinux is None or manylinux.lower() in ("false", "no", "none"):
            return None
        return manylinux

    @property
    def manylinux_pex_args(self) -> Iterator[str]:
        """Yield the Pex CLI args corresponding to the `manylinux` setting."""
        if self.manylinux:
            yield "--manylinux"
            yield self.manylinux
        else:
            yield "--no-manylinux"

    @property
    def scratch_dir(self) -> str:
        """A workdir subdirectory scoped to this subsystem (one path segment per scope part)."""
        return os.path.join(self.options.pants_workdir,
                            *self.options_scope.split("."))

    def compatibility_or_constraints(
            self, compatibility: Iterable[str] | None) -> tuple[str, ...]:
        """Return either the given `compatibility` field or the global interpreter constraints.

        If interpreter constraints are supplied by the CLI flag, return those only.
        """
        if self.options.is_flagged("interpreter_constraints"):
            return self.interpreter_constraints
        return tuple(compatibility or self.interpreter_constraints)

    def compatibilities_or_constraints(
            self, compatibilities: Iterable[Iterable[str] | None]
    ) -> tuple[str, ...]:
        """Flatten `compatibility_or_constraints` across multiple compatibility fields."""
        return tuple(
            constraint for compatibility in compatibilities
            for constraint in self.compatibility_or_constraints(compatibility))
Beispiel #16
0
class ExternalTool(Subsystem, metaclass=ABCMeta):
    """Configuration for an invocable tool that we download from an external source.

    Subclass this to configure a specific tool.


    Idiomatic use:

    class MyExternalTool(ExternalTool):
        options_scope = "my-external-tool"
        default_version = "1.2.3"
        default_known_versions = [
          "1.2.3|linux_arm64 |feed6789feed6789feed6789feed6789feed6789feed6789feed6789feed6789|112233",
          "1.2.3|linux_x86_64|cafebabacafebabacafebabacafebabacafebabacafebabacafebabacafebaba|878986",
          "1.2.3|macos_arm64 |deadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeef|222222",
          "1.2.3|macos_x86_64|1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd|333333",
        ]

        version_constraints = ">=1.2.3, <2.0"

        def generate_url(self, plat: Platform) -> str:
            ...

        def generate_exe(self, plat: Platform) -> str:
            return "./path-to/binary"

    @rule
    async def my_rule(my_external_tool: MyExternalTool) -> Foo:
        downloaded_tool = await Get(
            DownloadedExternalTool,
            ExternalToolRequest,
            my_external_tool.get_request(Platform.current)
        )
        ...
    """

    # The default values for --version and --known-versions, and the supported versions.
    # Subclasses must set appropriately.
    default_version: str
    default_known_versions: list[str]
    version_constraints: str | None = None

    def __init__(self, *args, **kwargs):
        # Validate the configured version against `version_constraints` eagerly, at
        # subsystem construction time, so misconfiguration surfaces early.
        super().__init__(*args, **kwargs)
        self.check_version_constraints()

    @classproperty
    def name(cls):
        """The name of the tool, for use in user-facing messages.

        Derived from the classname, but subclasses can override, e.g., with a classproperty.
        """
        return cls.__name__.lower()

    # `default` and `help` are callables taking the subclass, so each tool subclass
    # gets its own default version and tailored help text.
    version = StrOption(
        "--version",
        default=lambda cls: cls.default_version,
        advanced=True,
        help=lambda cls: f"Use this version of {cls.name}." +
        (f"\n\nSupported {cls.name} versions: {cls.version_constraints}"
         if cls.version_constraints else ""),
    )

    # Note that you can compute the length and sha256 conveniently with:
    #   `curl -L $URL | tee >(wc -c) >(shasum -a 256) >/dev/null`
    known_versions = StrListOption(
        "--known-versions",
        default=lambda cls: cls.default_known_versions,
        advanced=True,
        help=textwrap.dedent(f"""
        Known versions to verify downloads against.

        Each element is a pipe-separated string of `version|platform|sha256|length`, where:

            - `version` is the version string
            - `platform` is one of [{','.join(Platform.__members__.keys())}],
            - `sha256` is the 64-character hex representation of the expected sha256
            digest of the download file, as emitted by `shasum -a 256`
            - `length` is the expected length of the download file in bytes, as emitted by
            `wc -c`

        E.g., `3.1.2|macos_x86_64|6d0f18cd84b918c7b3edd0203e75569e0c7caecb1367bbbe409b44e28514f5be|42813`.

        Values are space-stripped, so pipes can be indented for readability if necessary.
        """),
    )

    use_unsupported_version = EnumOption(
        "--use-unsupported-version",
        advanced=True,
        help=lambda cls: textwrap.dedent(f"""
                What action to take in case the requested version of {cls.name} is not supported.

                Supported {cls.name} versions: {cls.version_constraints if cls.version_constraints else "unspecified"}
                """),
        default=UnsupportedVersionUsage.RaiseError,
    )

    @abstractmethod
    def generate_url(self, plat: Platform) -> str:
        """Returns the URL for the given version of the tool, runnable on the given os+arch.

        Implementations should raise ExternalToolError if they cannot resolve the arguments
        to a URL. The raised exception need not have a message - a sensible one will be generated.
        """

    def generate_exe(self, plat: Platform) -> str:
        """Returns the path to the tool executable.

        If the downloaded artifact is the executable itself, you can leave this unimplemented.

        If the downloaded artifact is an archive, this should be overridden to provide a
        relative path in the downloaded archive, e.g. `./bin/protoc`.
        """
        # Default: assume the last path segment of the download URL is the executable.
        return f"./{self.generate_url(plat).rsplit('/', 1)[-1]}"

    def get_request(self, plat: Platform) -> ExternalToolRequest:
        """Generate a request for this tool."""
        for known_version in self.known_versions:
            ver, plat_val, sha256, length = self.split_known_version_str(
                known_version)
            # Match on both platform and the currently-configured version.
            if plat.value == plat_val and ver == self.version:
                return self.get_request_for(plat_val, sha256, length)
        raise UnknownVersion(
            f"No known version of {self.name} {self.version} for {plat.value} found in "
            f"{self.known_versions}")

    @classmethod
    def split_known_version_str(
            cls, known_version: str) -> tuple[str, str, str, int]:
        """Parse one `version|platform|sha256|length` entry into its four components.

        Raises:
            ExternalToolError: if the entry does not split into exactly four fields.
        """
        try:
            ver, plat_val, sha256, length = (x.strip()
                                             for x in known_version.split("|"))
        except ValueError:
            raise ExternalToolError(
                f"Bad value for [{cls.options_scope}].known_versions: {known_version}"
            )
        # NOTE(review): a non-integer `length` field will raise ValueError here rather
        # than ExternalToolError — confirm whether that is intended.
        return ver, plat_val, sha256, int(length)

    def get_request_for(self, plat_val: str, sha256: str,
                        length: int) -> ExternalToolRequest:
        """Generate a request for this tool from the given info."""
        plat = Platform(plat_val)
        digest = FileDigest(fingerprint=sha256, serialized_bytes_length=length)
        try:
            url = self.generate_url(plat)
            exe = self.generate_exe(plat)
        except ExternalToolError as e:
            raise ExternalToolError(
                f"Couldn't find {self.name} version {self.version} on {plat.value}"
            ) from e
        return ExternalToolRequest(
            DownloadFile(url=url, expected_digest=digest), exe)

    def check_version_constraints(self) -> None:
        """Warn or raise if `version` falls outside `version_constraints` (no-op if unset)."""
        if not self.version_constraints:
            return None
        # Note that this is not a Python requirement. We're just hackily piggybacking off
        # pkg_resource.Requirement's ability to check version constraints.
        constraints = Requirement.parse(
            f"{self.name}{self.version_constraints}")
        if constraints.specifier.contains(
                self.version):  # type: ignore[attr-defined]
            # all ok
            return None

        msg = [
            f"The option [{self.options_scope}].version is set to {self.version}, which is not "
            f"compatible with what this release of Pants expects: {constraints}.",
            "Please update the version to a supported value, or consider using a different Pants",
            "release if you cannot change the version.",
        ]

        if self.use_unsupported_version is UnsupportedVersionUsage.LogWarning:
            msg.extend([
                "Alternatively, you can ignore this warning (at your own peril) by adding this",
                "to the GLOBAL section of pants.toml:",
                f'ignore_warnings = ["The option [{self.options_scope}].version is set to"].',
            ])
            logger.warning(" ".join(msg))
        elif self.use_unsupported_version is UnsupportedVersionUsage.RaiseError:
            msg.append(
                f"Alternatively, update [{self.options_scope}].use_unsupported_version to be "
                f"'warning'.")
            raise UnsupportedVersion(" ".join(msg))
Beispiel #17
0
class FilterSubsystem(LineOriented, GoalSubsystem):
    """Goal options for `filter`: select targets by type, address regex, tag regex,
    and granularity, combining the individual filters with logical AND."""

    name = "filter"
    help = softwrap(
        """
        Filter the input targets based on various criteria.

        Most of the filtering options below are comma-separated lists of filtering criteria, with
        an implied logical OR between them, so that a target passes the filter if it matches any of
        the criteria in the list. A '-' prefix inverts the sense of the entire comma-separated list,
        so that a target passes the filter only if it matches none of the criteria in the list.

        Each of the filtering options may be specified multiple times, with an implied logical AND
        between them.
        """
    )

    target_type = StrListOption(
        "--target-type",
        metavar="[+-]type1,type2,...",
        help="Filter on these target types, e.g. `resources` or `python_sources`.",
    )
    granularity = EnumOption(
        "--granularity",
        default=TargetGranularity.all_targets,
        help=softwrap(
            """
            Filter to rendering only targets declared in BUILD files, only file-level
            targets, or all targets.
            """
        ),
    )
    address_regex = StrListOption(
        "--address-regex",
        metavar="[+-]regex1,regex2,...",
        help="Filter on target addresses matching these regexes.",
    )
    tag_regex = StrListOption(
        "--tag-regex",
        metavar="[+-]regex1,regex2,...",
        help="Filter on targets with tags matching these regexes.",
    )

    def target_type_filters(
        self, registered_target_types: RegisteredTargetTypes
    ) -> list[TargetFilter]:
        """Build one filter per `--target-type` entry.

        Raises:
            UnrecognizedTargetTypeException: if an entry names no registered target type.
        """
        def outer_filter(target_alias: str) -> TargetFilter:
            if target_alias not in registered_target_types.aliases:
                raise UnrecognizedTargetTypeException(target_alias, registered_target_types)

            # Warn (but still filter) when the user referenced a deprecated alias.
            target_type = registered_target_types.aliases_to_types[target_alias]
            if target_type.deprecated_alias and target_alias == target_type.deprecated_alias:
                warn_deprecated_target_type(target_type)

            def inner_filter(tgt: Target) -> bool:
                # Match either the current alias or the target type's deprecated alias.
                return tgt.alias == target_alias or bool(
                    tgt.deprecated_alias and tgt.deprecated_alias == target_alias
                )

            return inner_filter

        return create_filters(self.target_type, outer_filter)

    def address_regex_filters(self) -> list[TargetFilter]:
        """Build one filter per `--address-regex` entry, matching against the address spec."""
        def outer_filter(address_regex: str) -> TargetFilter:
            regex = compile_regex(address_regex)
            return lambda tgt: bool(regex.search(tgt.address.spec))

        return create_filters(self.address_regex, outer_filter)

    def tag_regex_filters(self) -> list[TargetFilter]:
        """Build one filter per `--tag-regex` entry; a target passes if any of its tags match."""
        def outer_filter(tag_regex: str) -> TargetFilter:
            regex = compile_regex(tag_regex)
            return lambda tgt: any(bool(regex.search(tag)) for tag in tgt.get(Tags).value or ())

        return create_filters(self.tag_regex, outer_filter)

    def granularity_filter(self) -> TargetFilter:
        """Return the filter corresponding to the `--granularity` choice."""
        return match(
            self.granularity,
            {
                TargetGranularity.all_targets: lambda _: True,
                TargetGranularity.file_targets: lambda tgt: tgt.address.is_file_target,
                TargetGranularity.build_targets: lambda tgt: not tgt.address.is_file_target,
            },
        )

    def all_filters(self, registered_target_types: RegisteredTargetTypes) -> TargetFilter:
        """Combine every configured filter with logical AND into a single predicate."""
        return and_filters(
            [
                *self.target_type_filters(registered_target_types),
                *self.address_regex_filters(),
                *self.tag_regex_filters(),
                self.granularity_filter(),
            ]
        )

    def is_specified(self) -> bool:
        """Return true if any of the options are set."""
        # NOTE(review): `granularity` has a default and enum members are truthy, so this
        # final clause appears to make the result always true — confirm whether an
        # is-default check was intended here.
        return bool(self.target_type or self.address_regex or self.tag_regex or self.granularity)