예제 #1
0
class DebianSources(MultipleSourcesField):
    required = True
    help = softwrap("""
        Paths that will be included in the package to be produced such as Debian metadata files.
        You must include a DEBIAN/control file.

        Paths are relative to the BUILD file's directory and all paths must belong to the same parent directory.
        For example, `sources=['dir/**']` is valid, but `sources=['top_level_file.txt']`
        and `sources=['dir1/*', 'dir2/*']` are not.
        """)

    def validate_resolved_files(self, files: Sequence[str]) -> None:
        """Verify that every resolved file lives under one shared top-level directory."""
        super().validate_resolved_files(files)
        if not files:
            raise InvalidFieldException(
                softwrap(f"""
                    The `{self.alias}` field in target `{self.address}` must
                    resolve to at least one file.
                    """))

        # A path with a single component sits directly next to the BUILD file
        # rather than inside a subdirectory, which is not allowed here.
        files_outside_dirs = []
        for relpath in files:
            if len(PurePath(relpath).parts) == 1:
                files_outside_dirs.append(relpath)
        if files_outside_dirs:
            raise InvalidFieldException(
                softwrap(f"""
                    The `{self.alias}` field in target `{self.address}` must be paths to
                    files in a single sources directory. Individual files
                    were found: {files_outside_dirs}
                    """))

        # All remaining paths must agree on their first directory component.
        directory_prefixes = set()
        for relpath in files:
            directory_prefixes.add(PurePath(relpath).parts[0])
        if len(directory_prefixes) > 1:
            raise InvalidFieldException(
                softwrap(f"""
                    The `{self.alias}` field in target `{self.address}` must be paths to
                    files in a single sources directory. Multiple directories
                    were found: {directory_prefixes}
                    """))
예제 #2
0
class PyOxidizerDependenciesField(Dependencies):
    """The `dependencies` field for `pyoxidizer_binary` targets."""

    # A binary cannot be built without at least one wheel-producing
    # `python_distribution` dependency, so the field is mandatory.
    required = True
    # Allows `!!`-prefixed addresses to exclude dependencies transitively.
    supports_transitive_excludes = True
    help = softwrap(f"""
        The addresses of `python_distribution` target(s) to include in the binary, e.g.
        `['src/python/project:dist']`.

        The distribution(s) must generate at least one wheel file. For example, if using
        `{GenerateSetupField.alias}=True`, then make sure `{WheelField.alias}=True`. See
        {doc_url('python-distributions')}.

        Usually, you only need to specify a single `python_distribution`. However, if
        that distribution depends on another first-party distribution in your repository, you
        must specify that dependency too, otherwise PyOxidizer would try installing the
        distribution from PyPI. Note that a `python_distribution` target might depend on
        another `python_distribution` target even if it is not included in its own `dependencies`
        field, as explained at {doc_url('python-distributions')}; if code from one distribution
        imports code from another distribution, then there is a dependency and you must
        include both `python_distribution` targets in the `dependencies` field of this
        `pyoxidizer_binary` target.

        Target types other than `python_distribution` will be ignored.
        """)
예제 #3
0
def test_py_constraints(empty_pyproject_toml: PyProjectToml) -> None:
    """Poetry `python` constraints should render as PEP 508 marker suffixes."""

    def assert_py_constraints(py_req: str, suffix: str) -> None:
        # Build a minimal dict-style requirement and compare the full rendering.
        requirement = PyprojectAttr({"version": "1.2.3", "python": py_req})
        rendered = handle_dict_attr("foo", requirement, empty_pyproject_toml)
        assert rendered == f"foo ==1.2.3;{suffix}"

    assert_py_constraints("3.6", "(python_version == '3.6')")
    assert_py_constraints(
        "3.6 || 3.7",
        "((python_version == '3.6') or (python_version == '3.7'))")
    assert_py_constraints(
        ">3.6,!=3.7", "(python_version > '3.6' and python_version != '3.7')")
    assert_py_constraints(
        ">3.6 || 3.5,3.4",
        "((python_version > '3.6') or (python_version == '3.5' and python_version == '3.4'))",
    )
    assert_py_constraints(
        "~3.6 || ^3.7",
        softwrap("""
            ((python_version >= '3.6' and python_version< '3.7') or
            (python_version >= '3.7' and python_version< '4.0'))
            """),
    )
예제 #4
0
class CliOptions(Subsystem):
    """Subsystem exposing CLI-level behavior options, such as command aliases."""

    options_scope = "cli"
    help = "Options for configuring CLI behavior, such as command line aliases."

    # Maps an alias name to the command line string it expands to.
    alias = DictOption[str](
        "--alias",
        help=softwrap(f"""
            Register command line aliases.

            Example:

                [cli.alias]
                green = "fmt lint check"
                all-changed = "--changed-since=HEAD --changed-dependees=transitive"


            This would allow you to run `{bin_name()} green all-changed`, which is shorthand for
            `{bin_name()} fmt lint check --changed-since=HEAD --changed-dependees=transitive`.

            Notice: this option must be placed in a config file (e.g. `pants.toml` or `pantsrc`)
            to have any effect.
            """),
    )
예제 #5
0
class Shunit2TestTarget(Target):
    """Target type for a single shunit2 shell test file."""

    alias = "shunit2_test"
    # COMMON_TARGET_FIELDS plus shunit2-specific fields (source, deps, timeout,
    # skip flag, shell selection) and runtime package dependencies.
    core_fields = (
        *COMMON_TARGET_FIELDS,
        Shunit2TestSourceField,
        Shunit2TestDependenciesField,
        Shunit2TestTimeoutField,
        SkipShunit2TestsField,
        Shunit2ShellField,
        RuntimePackageDependenciesField,
    )
    help = softwrap(f"""
        A single test file for Bourne-based shell scripts using the shunit2 test framework.

        To use, add tests to your file per https://github.com/kward/shunit2/. Specify the shell
        to run with by either setting the field `{Shunit2ShellField.alias}` or including a
        shebang. To test the same file with multiple shells, create multiple `shunit2_tests`
        targets, one for each shell.

        Pants will automatically download the `shunit2` bash script and add
        `source ./shunit2` to your test for you. If you already have `source ./shunit2`,
        Pants will overwrite it to use the correct relative path.
        """)
예제 #6
0
class PyOxidizerTarget(Target):
    """Target type for a PyOxidizer-built single-file Python executable."""

    alias = "pyoxidizer_binary"
    # COMMON_TARGET_FIELDS plus PyOxidizer-specific configuration fields.
    core_fields = (
        *COMMON_TARGET_FIELDS,
        PyOxidizerOutputPathField,
        PyOxidizerConfigSourceField,
        PyOxidizerDependenciesField,
        PyOxidizerEntryPointField,
        PyOxidizerUnclassifiedResources,
    )
    help = softwrap(f"""
        A single-file Python executable with a Python interpreter embedded, built via PyOxidizer.

        To use this target, first create a `python_distribution` target with the code you want
        included in your binary, per {doc_url('python-distributions')}. Then add this
        `python_distribution` target to the `dependencies` field. See the `help` for
        `dependencies` for more information.

        You may optionally want to set the `{PyOxidizerEntryPointField.alias}` field. For
        advanced use cases, you can use a custom PyOxidizer config file, rather than what Pants
        generates, by setting the `{PyOxidizerConfigSourceField.alias}` field. You may also want
        to set `[pyoxidizer].args` to a value like `['--release']`.
        """)
예제 #7
0
    def __init__(self, **kwargs):
        """Capture and validate the keyword arguments destined for `setuptools.setup`.

        :param kwargs: Passed to `setuptools.setup
          <https://pythonhosted.org/setuptools/setuptools.html>`_.
        """
        if "name" not in kwargs:
            raise ValueError("`setup_py()` requires `name` to be specified.")
        name = kwargs["name"]
        if not isinstance(name, str):
            raise ValueError(
                softwrap(f"""
                    The `name` in `setup_py()` must be a string, but was {repr(name)} with type
                    {type(name)}.
                    """))

        if "entry_points" in kwargs:
            # setuptools accepts Dict[str, Dict[str, str]]; normalize the
            # Dict[str, List[str]] shorthand into that form up front.
            kwargs["entry_points"] = _normalize_entry_points(
                kwargs["entry_points"])

        self._name: str = name
        self._binaries = {}
        self._kw: Dict[str, Any] = kwargs
예제 #8
0
    def __init__(
        self, kwargs: Mapping[str, Any], *, address: Address, _allow_banned_keys: bool = False
    ) -> None:
        """Validate and snapshot the `provides` kwargs for `setup.py` generation."""
        super().__init__()
        # `name` and `version` are the minimum metadata setuptools requires.
        for required_key in ("name", "version"):
            if required_key not in kwargs:
                raise InvalidSetupPyArgs(
                    f"Missing a `{required_key}` kwarg in the `provides` field for {address}."
                )

        if not _allow_banned_keys:
            # Pants computes these values itself, so a user-supplied value would
            # either be ignored or conflict with the generated one.
            for arg in {
                "data_files",
                "install_requires",
                "namespace_packages",
                "package_data",
                "package_dir",
                "packages",
            }:
                if arg in kwargs:
                    raise ValueError(
                        softwrap(
                            f"""
                            {arg} cannot be set in the `provides` field for {address}, but it was
                            set to {kwargs[arg]}. Pants will dynamically set the value for you.
                            """
                        )
                    )

        # We serialize with `pickle` so that is hashable. We don't use `FrozenDict` because it
        # would require that all values are immutable, and we may have lists and dictionaries as
        # values. It's too difficult/clunky to convert those all, then to convert them back out of
        # `FrozenDict`. We don't use JSON because it does not preserve data types like `tuple`.
        self._pickled_bytes = pickle.dumps(dict(sorted(kwargs.items())), protocol=4)
예제 #9
0
def parse_single_dependency(
    proj_name: str,
    attributes: str | Mapping[str, str | Sequence] | Sequence[Mapping[str, str | Sequence]],
    pyproject_toml: PyProjectToml,
) -> Iterator[PipRequirement]:
    """Yield pip requirements for one Poetry dependency entry.

    Poetry allows a dependency's value to be a bare version string, a table of
    attributes, or a list of such tables; any other shape is rejected.
    """
    if isinstance(attributes, str):
        # Bare version string, e.g. `foo = "~1.1"`.
        version_req = parse_str_version(
            attributes,
            proj_name=proj_name,
            file_path=str(pyproject_toml.toml_relpath),
            extras_str="",
        )
        yield PipRequirement.parse(version_req)
    elif isinstance(attributes, dict):
        # Attribute table, e.g. `foo = {version = "~1.1"}`.
        requirement_string = handle_dict_attr(
            proj_name, cast(PyprojectAttr, attributes), pyproject_toml)
        if requirement_string:
            yield PipRequirement.parse(requirement_string)
    elif isinstance(attributes, list):
        # A list of attribute tables, e.g.
        # `foo = [{version = "1.1", python = "2.7"}, {version = "1.1", python = "2.7"}]`.
        for attr in attributes:
            requirement_string = handle_dict_attr(proj_name, attr, pyproject_toml)
            if requirement_string:
                yield PipRequirement.parse(requirement_string)
    else:
        raise AssertionError(
            softwrap(
                f"""
                Error: invalid Poetry requirement format. Expected type of requirement attributes to
                be string, dict, or list, but was of type {type(attributes).__name__}.
                """
            )
        )
예제 #10
0
async def infer_thrift_dependencies(
        request: InferThriftDependencies, thrift_mapping: ThriftMapping,
        thrift: ThriftSubsystem) -> InferredDependencies:
    """Infer dependencies of a thrift source by resolving its `import` paths.

    Each import path is looked up in the global thrift module mapping; unambiguous
    owners are added directly, while ambiguous ones are only added if the user's
    explicit `dependencies` field disambiguates them (otherwise a warning is issued).
    """
    # Respect the global opt-out for thrift dependency inference.
    if not thrift.dependency_inference:
        return InferredDependencies([])

    address = request.sources_field.address
    wrapped_tgt = await Get(
        WrappedTarget,
        WrappedTargetRequest(address, description_of_origin="<infallible>"))
    # Fetch the explicit dependencies and the parsed imports concurrently.
    explicitly_provided_deps, parsed_thrift = await MultiGet(
        Get(ExplicitlyProvidedDependencies,
            DependenciesRequest(wrapped_tgt.target[Dependencies])),
        Get(ParsedThrift, ParsedThriftRequest(request.sources_field)),
    )

    result: OrderedSet[Address] = OrderedSet()
    for import_path in parsed_thrift.imports:
        unambiguous = thrift_mapping.mapping.get(import_path)
        ambiguous = thrift_mapping.ambiguous_modules.get(import_path)
        if unambiguous:
            result.add(unambiguous)
        elif ambiguous:
            # Multiple targets own this import: warn unless the explicit
            # dependencies (includes/ignores) settle which one is intended.
            explicitly_provided_deps.maybe_warn_of_ambiguous_dependency_inference(
                ambiguous,
                address,
                import_reference="file",
                context=softwrap(f"""
                    The target {address} imports `{import_path}` in the file
                    {wrapped_tgt.target[ThriftSourceField].file_path}
                    """),
            )
            maybe_disambiguated = explicitly_provided_deps.disambiguated(
                ambiguous)
            if maybe_disambiguated:
                result.add(maybe_disambiguated)
    return InferredDependencies(sorted(result))
예제 #11
0
async def run_pyoxidizer_binary(field_set: PyOxidizerFieldSet) -> RunRequest:
    """Package the `pyoxidizer_binary` target and wrap its executable in a RunRequest."""

    def is_executable_binary(artifact_relpath: str | None) -> bool:
        """After packaging, the PyOxidizer plugin will place the executable in a location like this:
        dist/{project}/{target_name}/{platform arch}/{compilation mode}/install/{binary name}

        {binary name} will default to `target_name`, but can be modified with a custom PyOxidizer template.

        e.g. dist/helloworld/helloworld-bin/x86_64-apple-darwin/debug/install/helloworld-bin.

        PyOxidizer will place associated libraries in {...}/install/lib

        To determine if the artifact we iterate over is the one we want to execute, we check that
        the file's parent dir is "install". There should only be one of these files.
        """
        if not artifact_relpath:
            return False

        artifact_path = PurePath(artifact_relpath)
        return artifact_path.parent.name == "install"

    binary = await Get(BuiltPackage, PackageFieldSet, field_set)
    executable_binaries = [
        artifact for artifact in binary.artifacts
        if is_executable_binary(artifact.relpath)
    ]

    # Fix: the previous message claimed "more than one" binary, but this assert
    # also fires when ZERO binaries are found; report the actual count instead.
    assert len(executable_binaries) == 1, softwrap(f"""
        Expected exactly one executable binary in the `install` directory, but found
        {len(executable_binaries)}, which is a bug in the PyOxidizer plugin.
        Please file a bug report at https://github.com/pantsbuild/pants/issues/new.
        Enumerated executable binaries: {executable_binaries}
        """)

    artifact = executable_binaries[0]
    assert artifact.relpath is not None
    return RunRequest(digest=binary.digest,
                      args=(os.path.join("{chroot}", artifact.relpath), ))
예제 #12
0
class PythonAwsLambdaHandlerField(StringField, AsyncFieldMixin, SecondaryOwnerMixin):
    alias = "handler"
    required = True
    value: str
    help = softwrap(
        """
        Entry point to the AWS Lambda handler.

        You can specify a full module like 'path.to.module:handler_func' or use a shorthand to
        specify a file name, using the same syntax as the `sources` field, e.g.
        'lambda.py:handler_func'.

        You must use the file name shorthand for file arguments to work with this target.
        """
    )

    @classmethod
    def compute_value(cls, raw_value: Optional[str], address: Address) -> str:
        """Validate that the handler includes a `:function_name` suffix."""
        value = cast(str, super().compute_value(raw_value, address))
        if ":" in value:
            return value
        raise InvalidFieldException(
            softwrap(
                f"""
                The `{cls.alias}` field in target at {address} must end in the format
                `:my_handler_func`, but was {value}.
                """
            )
        )

    @property
    def filespec(self) -> Filespec:
        """Glob matching the handler file, when given in file-name shorthand."""
        file_path, _sep, _func = self.value.partition(":")
        # Module-style handlers (`path.to.module:func`) own no files.
        if file_path.endswith(".py"):
            return {"includes": [os.path.join(self.address.spec_path, file_path)]}
        return {"includes": []}
예제 #13
0
class Changed(Subsystem):
    """Options for Git-based change detection (the `--changed-*` flags)."""

    options_scope = "changed"
    help = softwrap(f"""
        Tell Pants to detect what files and targets have changed from Git.

        See {doc_url('advanced-target-selection')}.
        """)

    # Compare the working copy against this Git spec to find changed files.
    since = StrOption(
        "--since",
        default=None,
        help="Calculate changes since this Git spec (commit range/SHA/ref).",
    )
    # Restrict change detection to the commits contained in this Git spec.
    diffspec = StrOption(
        "--diffspec",
        default=None,
        help=
        "Calculate changes contained within a given Git spec (commit range/SHA/ref).",
    )
    # Optionally widen the changed set to targets that depend on the changes.
    dependees = EnumOption(
        "--dependees",
        default=DependeesOption.NONE,
        help="Include direct or transitive dependees of changed targets.",
    )
예제 #14
0
class RequirementsOverrideField(OverridesField):
    """Overrides field for `python_requirements`-generated targets."""

    # Fix: the example dict in the help text was invalid Python — two entries had
    # a stray `]` before the closing brace and the tuple key was missing a quote.
    help = softwrap("""
        Override the field values for generated `python_requirement` targets.

        Expects a dictionary of requirements to a dictionary for the
        overrides. You may either use a string for a single requirement,
        or a string tuple for multiple requirements. Each override is a dictionary of
        field names to the overridden value.

        For example:

            ```
            overrides={
                "django": {"dependencies": ["#setuptools"]},
                "ansicolors": {"description": "pretty colors"},
                ("ansicolors", "django"): {"tags": ["overridden"]},
            }
            ```

        Every overridden requirement is validated to be generated by this target.

        You can specify the same requirement in multiple keys, so long as you don't
        override the same field more than one time for the requirement.
        """)
예제 #15
0
 def __init__(
     self,
     req_strings: Iterable[str] = (),
     *,
     constraints_strings: Iterable[str] = (),
     from_superset: Pex | LoadedLockfile | None = None,
 ) -> None:
     """
     :param req_strings: The requirement strings to resolve.
     :param constraints_strings: Constraints strings to apply during the resolve.
     :param from_superset: An optional superset PEX or lockfile to resolve the req strings from.
     """
     # A lockfile can only serve as a resolve superset when it is in PEX's
     # native format; reject anything else up front.
     if isinstance(from_superset, LoadedLockfile) and not from_superset.is_pex_native:
         raise ValueError(
             softwrap(
                 f"""
                 The lockfile {from_superset.original_lockfile} was not in PEX's
                 native format, and so cannot be directly used as a superset.
                 """
             )
         )
     self.from_superset = from_superset
     # Sort for determinism, then freeze so the request is hashable.
     self.req_strings = FrozenOrderedSet(sorted(req_strings))
     self.constraints_strings = FrozenOrderedSet(sorted(constraints_strings))
예제 #16
0
class JvmResolveField(StringField, AsyncFieldMixin):
    alias = "resolve"
    required = False
    help = softwrap(
        """
        The resolve from `[jvm].resolves` to use when compiling this target.

        If not defined, will default to `[jvm].default_resolve`.
        """

        # TODO: Document expectations for dependencies once we validate that.
    )

    def normalized_value(self, jvm_subsystem: JvmSubsystem) -> str:
        """Get the value after applying the default and validating that the key is recognized."""
        resolve = self.value or jvm_subsystem.default_resolve
        if resolve in jvm_subsystem.resolves:
            return resolve
        # Unknown resolve name: surface the valid choices to the user.
        raise UnrecognizedResolveNamesError(
            [resolve],
            jvm_subsystem.resolves.keys(),
            description_of_origin=
            f"the field `{self.alias}` in the target {self.address}",
        )
예제 #17
0
class SubprocessEnvironment(Subsystem):
    """Subsystem controlling the environment passed to forked subprocesses."""

    options_scope = "subprocess-environment"
    help = "Environment settings for forked subprocesses."

    # Raw option value; consumers should use `env_vars_to_pass_to_subprocesses`.
    _env_vars = StrListOption(
        "--env-vars",
        default=["LANG", "LC_CTYPE", "LC_ALL", "SSL_CERT_FILE", "SSL_CERT_DIR"],
        help=softwrap(
            f"""
            Environment variables to set for process invocations.

            Entries are either strings in the form `ENV_VAR=value` to set an explicit value;
            or just `ENV_VAR` to copy the value from Pants's own environment.

            See {doc_url('options#addremove-semantics')} for how to add and remove Pants's
            default for this option.
            """
        ),
        advanced=True,
    )

    @property
    def env_vars_to_pass_to_subprocesses(self) -> Tuple[str, ...]:
        # Deduplicated and sorted for deterministic process cache keys.
        return tuple(sorted(set(self._env_vars)))
예제 #18
0
class KotlinSubsystem(Subsystem):
    options_scope = "kotlin"
    name = "kotlin"
    help = "The Kotlin programming language (https://kotlinlang.org/)."

    _version_for_resolve = DictOption[str](
        "--version-for-resolve",
        help=softwrap("""
            A dictionary mapping the name of a resolve to the Kotlin version to use for all Kotlin
            targets consuming that resolve.
            """),
    )
    tailor_source_targets = BoolOption(
        "--tailor-source-targets",
        default=True,
        help="If true, add `kotlin_sources` targets with the `tailor` goal.",
        advanced=True,
    )

    def version_for_resolve(self, resolve: str) -> str:
        """Return the Kotlin version configured for `resolve`, or the default."""
        # A missing (or falsy) mapping entry falls back to the default version.
        return self._version_for_resolve.get(resolve) or DEFAULT_KOTLIN_VERSION
예제 #19
0
class PythonRequirementsTargetGenerator(TargetGenerator):
    """Generates one `python_requirement` target per requirements.txt entry."""

    alias = "python_requirements"
    help = softwrap("""
        Generate a `python_requirement` for each entry in a requirements.txt-style file from the
        `source` field.

        This works with pip-style requirements files:
        https://pip.pypa.io/en/latest/reference/requirements-file-format/. However, pip options
        like `--hash` are (for now) ignored.

        Pants will not follow `-r reqs.txt` lines. Instead, add a dedicated `python_requirements`
        target generator for that additional requirements file.
        """)
    generated_target_cls = PythonRequirementTarget
    # Note that this does not have a `dependencies` field.
    core_fields = (
        *COMMON_TARGET_FIELDS,
        ModuleMappingField,
        TypeStubsModuleMappingField,
        PythonRequirementsSourceField,
        RequirementsOverrideField,
    )
    # Fields copied verbatim onto every generated target.
    copied_fields = COMMON_TARGET_FIELDS
    # Fields that move from the generator to the generated targets.
    moved_fields = (PythonRequirementResolveField, )
예제 #20
0
class GoogleJavaFormatSubsystem(JvmToolBase):
    """Tool subsystem for the google-java-format JVM formatter."""

    options_scope = "google-java-format"
    name = "Google Java Format"
    help = "Google Java Format (https://github.com/google/google-java-format)"

    # Default tool coordinates; `{version}` interpolates `default_version`.
    default_version = "1.13.0"
    default_artifacts = (
        "com.google.googlejavaformat:google-java-format:{version}", )
    default_lockfile_resource = (
        "pants.backend.java.lint.google_java_format",
        "google_java_format.default.lockfile.txt",
    )
    default_lockfile_path = "src/python/pants/backend/java/lint/google_java_format/google_java_format.default.lockfile.txt"
    default_lockfile_url = git_url(default_lockfile_path)

    skip = SkipOption("fmt", "lint")
    aosp = BoolOption(
        "--aosp",
        default=False,
        help=softwrap("""
            Use AOSP style instead of Google Style (4-space indentation).
            ("AOSP" is the Android Open Source Project.)
            """),
    )
예제 #21
0
def check_pants_wheels_present(check_dir: str | Path) -> None:
    """Verify that every Pants package has its prebuilt wheels in `check_dir`."""
    banner(f"Checking prebuilt wheels for {CONSTANTS.pants_unstable_version}")
    problems = []
    for package in PACKAGES:
        local_files = package.find_locally(
            version=CONSTANTS.pants_unstable_version, search_dir=check_dir)
        if not local_files:
            problems.append(package.name)
        elif is_cross_platform(local_files) and len(local_files) != 7:
            # Cross-platform packages must ship one wheel per interpreter/platform pair.
            listing = "\n    ".join(sorted(f.name for f in local_files))
            problems.append(
                softwrap(f"""
                    {package.name}. Expected 7 wheels ({{cp37m, cp38, cp39}} x
                    {{macosx-x86_64, linux-x86_64}} + cp39-macosx),
                    but found {len(local_files)}:\n    {listing}
                    """))
    if problems:
        formatted_missing = "\n  ".join(problems)
        die(f"Failed to find prebuilt wheels:\n  {formatted_missing}")
    green(
        f"All {len(PACKAGES)} pantsbuild.pants packages were fetched and are valid."
    )
예제 #22
0
 def __new__(
     cls,
     *,
     example: str,
     extra_help: str = "",
     tool_name: str | None = None,
     # This should be set when callers can alternatively use "--" followed by the arguments,
     # instead of having to provide "--[scope]-args='--arg1 --arg2'".
     passthrough: bool | None = None,
     flag_name: str = "--args",
 ):
     """Create an `--args`-style option with lazily rendered help text.

     The help is a callable because it interpolates the owning subsystem's
     `name` and `options_scope`, which are only known once the option is
     registered on a concrete subsystem class.

     :param example: Example arguments shown in the generated help text.
     :param extra_help: Extra help text appended after the boilerplate.
     :param tool_name: Overrides the subsystem's `name` in the help text.
     :param passthrough: If set, recorded so `--` passthrough args are allowed.
     :param flag_name: The flag to register; `--args` by default.
     """
     if extra_help:
         extra_help = "\n\n" + extra_help
     instance = super().__new__(
         cls,  # type: ignore[arg-type]
         flag_name,
         # Lambda defers rendering until the subsystem class is available.
         help=(lambda subsystem_cls: softwrap(f"""
                 Arguments to pass directly to {tool_name or subsystem_cls.name},
                 e.g. `--{subsystem_cls.options_scope}-args='{example}'`.{extra_help}
                 """)),
     )
     if passthrough is not None:
         instance._extra_kwargs["passthrough"] = passthrough
     return instance
예제 #23
0
async def resolve_address(address_input: AddressInput) -> Address:
    """Turn an `AddressInput` into an `Address`, based on what its path points at."""
    if not address_input.path_component:
        # No path given: treat it as an address in the root directory.
        is_file, is_dir = False, True
    else:
        # Ask the engine whether the path matches files and/or directories.
        paths = await Get(Paths,
                          PathGlobs(globs=(address_input.path_component, )))
        is_file, is_dir = bool(paths.files), bool(paths.dirs)

    if is_file:
        return address_input.file_to_address()
    if is_dir:
        return address_input.dir_to_address()

    # The path matched nothing on disk: report an unresolvable address.
    spec = address_input.path_component
    if address_input.target_component:
        spec += f":{address_input.target_component}"
    raise ResolveError(
        softwrap(f"""
            The file or directory '{address_input.path_component}' does not exist on disk in
            the workspace, so the address '{spec}' from {address_input.description_of_origin}
            cannot be resolved.
            """))
예제 #24
0
class DockerImageBuildSecretsOptionField(AsyncFieldMixin,
                                         DockerBuildOptionFieldMixin,
                                         DictStringToStringField):
    alias = "secrets"
    help = softwrap("""
        Secret files to expose to the build (only if BuildKit enabled).

        Secrets may use absolute paths, or paths relative to your build root, or the BUILD file
        if prefixed with `./`. The id should be valid as used by the Docker build `--secret`
        option. See [Docker secrets](https://docs.docker.com/engine/swarm/secrets/) for more
        information.

        Example:

            docker_image(
                secrets={
                    "mysecret": "/var/secrets/some-secret",
                    "repo-secret": "src/proj/secrets/some-secret",
                    "target-secret": "./secrets/some-secret",
                }
            )
        """)

    docker_build_option = "--secret"

    def option_values(self, **kwargs) -> Iterator[str]:
        """Yield one `id=...,src=...` value per configured secret."""
        # os.path.join() drops earlier components when it meets an absolute
        # path, so an absolute secret path ignores both the buildroot and the
        # spec_path; empty path parts are ignored as well.
        buildroot = get_buildroot()
        for secret_id, secret_path in (self.value or {}).items():
            # A `./` or `../` prefix anchors the path at the BUILD file's directory.
            relative_to_build_file = bool(re.match(r"\.{1,2}/", secret_path))
            full_path = os.path.join(
                buildroot,
                self.address.spec_path if relative_to_build_file else "",
                secret_path,
            )
            yield f"id={secret_id},src={os.path.normpath(full_path)}"
예제 #25
0
class ScroogeScalaSubsystem(Subsystem):
    """Scala-specific options for the Scrooge Thrift code generator."""

    options_scope = "scala-scrooge"
    help = "Scala-specific options for the Scrooge Thrift IDL compiler (https://twitter.github.io/scrooge/)."

    # Raw option value; consumers should use the `runtime_dependencies` property.
    _runtime_dependencies = TargetListOption(
        "--runtime-dependencies",
        help=softwrap(f"""
            A list of addresses to `jvm_artifact` targets for the runtime
            dependencies needed for generated Scala code to work. For example,
            `['3rdparty/jvm:scrooge-runtime']`. These dependencies will
            be automatically added to every `thrift_source` target. At the very least,
            this option must be set to a `jvm_artifact` for the
            `com.twitter:scrooge-runtime_SCALAVER:{ScroogeSubsystem.default_version}` runtime library.
            """),
    )

    @property
    def runtime_dependencies(self) -> UnparsedAddressInputs:
        # Wrap the raw address strings so the engine can resolve them later,
        # with a helpful origin description for error messages.
        return UnparsedAddressInputs(
            self._runtime_dependencies,
            owning_address=None,
            description_of_origin=
            f"the option `[{self.options_scope}].runtime_dependencies`",
        )
예제 #26
0
class PythonInferSubsystem(Subsystem):
    """Options controlling Python dependency inference."""

    options_scope = "python-infer"
    help = "Options controlling which dependencies will be inferred for Python targets."

    # Core inference: parse `import` statements from sources.
    imports = BoolOption(
        "--imports",
        default=True,
        help=softwrap(
            """
            Infer a target's imported dependencies by parsing import statements from sources.

            To ignore a false positive, you can either put `# pants: no-infer-dep` on the line of
            the import or put `!{bad_address}` in the `dependencies` field of your target.
            """
        ),
    )
    # Heuristic inference from module-path-looking string literals.
    string_imports = BoolOption(
        "--string-imports",
        default=False,
        help=softwrap(
            """
            Infer a target's dependencies based on strings that look like dynamic
            dependencies, such as Django settings files expressing dependencies as strings.

            To ignore a false positive, you can either put `# pants: no-infer-dep` on the line of
            the string or put `!{bad_address}` in the `dependencies` field of your target.
            """
        ),
    )
    string_imports_min_dots = IntOption(
        "--string-imports-min-dots",
        default=2,
        help=softwrap(
            """
            If --string-imports is True, treat valid-looking strings with at least this many
            dots in them as potential dynamic dependencies. E.g., `'foo.bar.Baz'` will be
            treated as a potential dependency if this option is set to 2 but not if set to 3.
            """
        ),
    )
    # Heuristic inference from filepath-looking string literals.
    assets = BoolOption(
        "--assets",
        default=False,
        help=softwrap(
            """
            Infer a target's asset dependencies based on strings that look like Posix
            filepaths, such as those given to `open` or `pkgutil.get_data`.

            To ignore a false positive, you can either put `# pants: no-infer-dep` on the line of
            the string or put `!{bad_address}` in the `dependencies` field of your target.
            """
        ),
    )
    assets_min_slashes = IntOption(
        "--assets-min-slashes",
        default=1,
        help=softwrap(
            """
            If --assets is True, treat valid-looking strings with at least this many forward
            slash characters as potential assets. E.g. `'data/databases/prod.db'` will be
            treated as a potential candidate if this option is set to 2 but not to 3.
            """
        ),
    )

    # How to treat ancestor `__init__.py` files when inferring dependencies.
    init_files = EnumOption(
        "--init-files",
        help=softwrap(
            f"""
            Infer a target's dependencies on any `__init__.py` files in the packages
            it is located in (recursively upward in the directory structure).

            Even if this is set to `never` or `content_only`, Pants will still always include any
            ancestor `__init__.py` files in the sandbox. Only, they will not be "proper"
            dependencies, e.g. they will not show up in `{bin_name()} dependencies` and their own
            dependencies will not be used.

            By default, Pants only adds a "proper" dependency if there is content in the
            `__init__.py` file. This makes sure that dependencies are added when likely necessary
            to build, while also avoiding adding unnecessary dependencies. While accurate, those
            unnecessary dependencies can complicate setting metadata like the
            `interpreter_constraints` and `resolve` fields.
            """
        ),
        default=InitFilesInference.content_only,
    )
    # Deprecated predecessor of `init_files`; scheduled for removal.
    inits = BoolOption(
        "--inits",
        default=False,
        help=softwrap(
            f"""
            Infer a target's dependencies on any `__init__.py` files in the packages
            it is located in (recursively upward in the directory structure).

            Even if this is disabled, Pants will still include any ancestor `__init__.py` files,
            only they will not be 'proper' dependencies, e.g. they will not show up in
            `{bin_name()} dependencies` and their own dependencies will not be used.

            If you have empty `__init__.py` files, it's safe to leave this option off; otherwise,
            you should enable this option.
            """
        ),
        removal_version="2.14.0.dev1",
        removal_hint=softwrap(
            """
            Use the more powerful option `[python-infer].init_files`. For identical
            behavior, set to 'always'. Otherwise, we recommend the default of `content_only`
            (simply delete the option `[python-infer].inits` to trigger the default).
            """
        ),
    )

    # Test-specific inference: conftest.py files from ancestor directories.
    conftests = BoolOption(
        "--conftests",
        default=True,
        help=softwrap(
            """
            Infer a test target's dependencies on any conftest.py files in the current
            directory and ancestor directories.
            """
        ),
    )
    # Inference from entry-point-style fields on other targets.
    entry_points = BoolOption(
        "--entry-points",
        default=True,
        help=softwrap(
            """
            Infer dependencies on targets' entry points, e.g. `pex_binary`'s
            `entry_point` field, `python_awslambda`'s `handler` field and
            `python_distribution`'s `entry_points` field.
            """
        ),
    )
    # What to do when an import has no inferrable owner.
    unowned_dependency_behavior = EnumOption(
        "--unowned-dependency-behavior",
        default=UnownedDependencyUsage.DoNothing,
        help=softwrap(
            """
            How to handle imports that don't have an inferrable owner.

            Usually when an import cannot be inferred, it represents an issue like Pants not being
            properly configured, e.g. targets not set up. Often, missing dependencies will result
            in confusing runtime errors like `ModuleNotFoundError`, so this option can be helpful
            to error more eagerly.

            To ignore any false positives, either add `# pants: no-infer-dep` to the line of the
            import or put the import inside a `try: except ImportError:` block.
            """
        ),
    )
예제 #27
0
async def _handle_unowned_imports(
    address: Address,
    unowned_dependency_behavior: UnownedDependencyUsage,
    python_setup: PythonSetup,
    unowned_imports: Iterable[str],
    parsed_imports: ParsedPythonImports,
    resolve: str,
) -> None:
    """Warn or error about imports in the target at `address` that no target owns.

    Behavior follows `unowned_dependency_behavior`: `DoNothing` returns
    immediately, `LogWarning` logs the assembled message, and any other value
    raises `UnownedDependencyError`. When multiple resolves are configured, the
    message additionally lists owners of the unowned modules found in *other*
    resolves, since a cross-resolve owner is a common cause of a "missing"
    dependency.
    """
    if not unowned_imports or unowned_dependency_behavior is UnownedDependencyUsage.DoNothing:
        return

    other_resolves_snippet = ""
    if len(python_setup.resolves) > 1:
        # Look up owners of each unowned module across all resolves
        # (resolve=None), then materialize those owners as targets so we can
        # read their resolve field below.
        other_owners_from_other_resolves = await MultiGet(
            Get(PythonModuleOwners, PythonModuleOwnersRequest(imported_module, resolve=None))
            for imported_module in unowned_imports
        )
        other_owners_as_targets = await MultiGet(
            Get(Targets, Addresses(owners.unambiguous + owners.ambiguous))
            for owners in other_owners_from_other_resolves
        )

        # Map each unowned module to the (address, resolve) pairs of owners
        # that live in a resolve different from this target's.
        imports_to_other_owners: DefaultDict[str, list[tuple[Address, ResolveName]]] = defaultdict(
            list
        )
        for imported_module, targets in zip(unowned_imports, other_owners_as_targets):
            for t in targets:
                other_owner_resolve = t[PythonResolveField].normalized_value(python_setup)
                if other_owner_resolve != resolve:
                    imports_to_other_owners[imported_module].append(
                        (t.address, other_owner_resolve)
                    )

        if imports_to_other_owners:
            other_resolves_lines = []
            for import_module, other_owners in sorted(imports_to_other_owners.items()):
                owners_txt = ", ".join(
                    f"'{other_resolve}' from {addr}" for addr, other_resolve in sorted(other_owners)
                )
                other_resolves_lines.append(f"{import_module}: {owners_txt}")
            other_resolves_snippet = "\n\n" + softwrap(
                f"""
                These imports are not in the resolve used by the target (`{resolve}`), but they
                were present in other resolves:

                {bullet_list(other_resolves_lines)}\n\n
                """
            )

    # Include each import's source line number so users can find it quickly.
    unowned_imports_with_lines = [
        f"{module_name} (line: {parsed_imports[module_name].lineno})"
        for module_name in sorted(unowned_imports)
    ]

    msg = softwrap(
        f"""
        Pants cannot infer owners for the following imports in the target {address}:

        {bullet_list(unowned_imports_with_lines)}{other_resolves_snippet}

        If you do not expect an import to be inferrable, add `# pants: no-infer-dep` to the
        import line. Otherwise, see
        {doc_url('troubleshooting#import-errors-and-missing-dependencies')} for common problems.
        """
    )
    if unowned_dependency_behavior is UnownedDependencyUsage.LogWarning:
        logger.warning(msg)
    else:
        raise UnownedDependencyError(msg)
예제 #28
0
class DockerOptions(Subsystem):
    # Subsystem exposing all `[docker]` scope options for Docker support.
    options_scope = "docker"
    help = "Options for interacting with Docker."

    # Raw option value; use the memoized `registries()` method for the parsed form.
    _registries = DictOption[Any](
        "--registries",
        help=softwrap("""
            Configure Docker registries. The schema for a registry entry is as follows:

                {
                    "registry-alias": {
                        "address": "registry-domain:port",
                        "default": bool,
                    },
                    ...
                }

            If no registries are provided in a `docker_image` target, then all default
            addresses will be used, if any.

            The `docker_image.registries` may be provided with a list of registry addresses
            and registry aliases prefixed with `@` to be used instead of the defaults.

            A configured registry is marked as default either by setting `default = true`
            or with an alias of `"default"`.
            """),
        fromfile=True,
    )
    default_repository = StrOption(
        "--default-repository",
        # NOTE: fixed typo "placeheolders" -> "placeholders" in the help text.
        help=(softwrap(f"""
                Configure the default repository name used in the Docker image tag.

                The value is formatted and may reference these variables (in addition to the normal
                placeholders derived from the Dockerfile and build args etc):

                {bullet_list(["name", "directory", "parent_directory"])}

                Example: `--default-repository="{{directory}}/{{name}}"`.

                The `name` variable is the `docker_image`'s target name, `directory` and
                `parent_directory` are the name of the directory in which the BUILD file is for the
                target, and its parent directory respectively.

                Use the `repository` field to set this value directly on a `docker_image` target.

                Any registries or tags are added to the image name as required, and should
                not be part of the repository name.
                """)),
        default="{name}",
    )
    default_context_root = WorkspacePathOption(
        "--default-context-root",
        default="",
        help=softwrap("""
            Provide a default Docker build context root path for `docker_image` targets that
            does not specify their own `context_root` field.

            The context root is relative to the build root by default, but may be prefixed
            with `./` to be relative to the directory of the BUILD file of the `docker_image`.

            Examples:

                --default-context-root=src/docker
                --default-context-root=./relative_to_the_build_file
            """),
    )
    # Raw option value; use the `build_args` property for the deduplicated form.
    _build_args = ShellStrListOption(
        "--build-args",
        help=softwrap(f"""
            Global build arguments (for Docker `--build-arg` options) to use for all
            `docker build` invocations.

            Entries are either strings in the form `ARG_NAME=value` to set an explicit value;
            or just `ARG_NAME` to copy the value from Pants's own environment.

            Example:

                [{options_scope}]
                build_args = ["VAR1=value", "VAR2"]


            Use the `extra_build_args` field on a `docker_image` target for additional
            image specific build arguments.
            """),
    )
    build_target_stage = StrOption(
        "--build-target-stage",
        default=None,
        help=softwrap("""
            Global default value for `target_stage` on `docker_image` targets, overriding
            the field value on the targets, if there is a matching stage in the `Dockerfile`.

            This is useful to provide from the command line, to specify the target stage to
            build for at execution time.
            """),
    )
    build_verbose = BoolOption(
        "--build-verbose",
        default=False,
        help="Whether to log the Docker output to the console. If false, only the image ID is logged.",
    )
    # Raw option value; use the `env_vars` property for the deduplicated form.
    _env_vars = ShellStrListOption(
        "--env-vars",
        help=softwrap("""
            Environment variables to set for `docker` invocations.

            Entries are either strings in the form `ENV_VAR=value` to set an explicit value;
            or just `ENV_VAR` to copy the value from Pants's own environment.
            """),
        advanced=True,
    )
    run_args = ShellStrListOption(
        "--run-args",
        # Interactive/tty flags only make sense when attached to a terminal.
        default=["--interactive", "--tty"] if sys.stdout.isatty() else [],
        help=softwrap(f"""
            Additional arguments to use for `docker run` invocations.

            Example:

                $ {bin_name()} run --{options_scope}-run-args="-p 127.0.0.1:80:8080/tcp\
                    --name demo" src/example:image -- [image entrypoint args]

            To provide the top-level options to the `docker` client, use
            `[{options_scope}].env_vars` to configure the
            [Environment variables]({doc_links['docker_env_vars']}) as appropriate.

            The arguments for the image entrypoint may be passed on the command line after a
            double dash (`--`), or using the `--run-args` option.

            Defaults to `--interactive --tty` when stdout is connected to a terminal.
            """),
    )
    # Raw option value; use the memoized `executable_search_path()` for the expanded form.
    _executable_search_paths = StrListOption(
        "--executable-search-paths",
        default=["<PATH>"],
        help=softwrap("""
            The PATH value that will be used to find the Docker client and any tools required.

            The special string `"<PATH>"` will expand to the contents of the PATH env var.
            """),
        advanced=True,
        metavar="<binary-paths>",
    )
    # Raw option value; use the `tools` property for the deduplicated form.
    _tools = StrListOption(
        "--tools",
        default=[],
        help=softwrap("""
            List any additional executable tools required for Docker to work. The paths to
            these tools will be included in the PATH used in the execution sandbox, so that
            they may be used by the Docker client.
            """),
        advanced=True,
    )

    @property
    def build_args(self) -> tuple[str, ...]:
        """Deduplicated, sorted global `--build-arg` entries."""
        return tuple(sorted(set(self._build_args)))

    @property
    def env_vars(self) -> tuple[str, ...]:
        """Deduplicated, sorted environment-variable entries for `docker` invocations."""
        return tuple(sorted(set(self._env_vars)))

    @property
    def tools(self) -> tuple[str, ...]:
        """Deduplicated, sorted additional tools to expose on the sandbox PATH."""
        return tuple(sorted(set(self._tools)))

    @memoized_method
    def registries(self) -> DockerRegistries:
        """Parse the `[docker].registries` option into a `DockerRegistries` instance."""
        return DockerRegistries.from_dict(self._registries)

    @memoized_method
    def executable_search_path(self, env: Environment) -> tuple[str, ...]:
        """Return the search path entries, expanding `"<PATH>"` from the given environment."""

        def iter_path_entries():
            for entry in self._executable_search_paths:
                if entry == "<PATH>":
                    path = env.get("PATH")
                    if path:
                        yield from path.split(os.pathsep)
                else:
                    yield entry

        return tuple(OrderedSet(iter_path_entries()))
예제 #29
0
async def inject_python_distribution_dependencies(
    request: InjectPythonDistributionDependencies, python_infer_subsystem: PythonInferSubsystem
) -> InjectedDependencies:
    """Inject dependencies that we can infer from entry points in the distribution.

    Covers entry points declared both in the `entry_points` field and in the
    `provides` field. Returns nothing when the `[python-infer].entry_points`
    option is disabled.
    """
    if not python_infer_subsystem.entry_points:
        return InjectedDependencies()

    original_tgt = await Get(
        WrappedTarget,
        WrappedTargetRequest(
            request.dependencies_field.address, description_of_origin="<infallible>"
        ),
    )
    # Resolve the target's explicit deps (used for disambiguation below) plus
    # the entry points from both possible sources, concurrently.
    explicitly_provided_deps, distribution_entry_points, provides_entry_points = await MultiGet(
        Get(ExplicitlyProvidedDependencies, DependenciesRequest(original_tgt.target[Dependencies])),
        Get(
            ResolvedPythonDistributionEntryPoints,
            ResolvePythonDistributionEntryPointsRequest(
                entry_points_field=original_tgt.target[PythonDistributionEntryPointsField]
            ),
        ),
        Get(
            ResolvedPythonDistributionEntryPoints,
            ResolvePythonDistributionEntryPointsRequest(
                provides_field=original_tgt.target[PythonProvidesField]
            ),
        ),
    )

    address = original_tgt.target.address
    # Flatten both sources into (category, name, entry_point) triples.
    all_module_entry_points = [
        (category, name, entry_point)
        for category, entry_points in chain(
            distribution_entry_points.explicit_modules.items(),
            provides_entry_points.explicit_modules.items(),
        )
        for name, entry_point in entry_points.items()
    ]
    # Find owning targets for each entry-point module (across all resolves).
    all_module_owners = iter(
        await MultiGet(
            Get(PythonModuleOwners, PythonModuleOwnersRequest(entry_point.module, resolve=None))
            for _, _, entry_point in all_module_entry_points
        )
    )
    module_owners: OrderedSet[Address] = OrderedSet()
    for (category, name, entry_point), owners in zip(all_module_entry_points, all_module_owners):
        field_str = repr({category: {name: entry_point.spec}})
        # Warn if the module's owner is ambiguous and the target's explicit
        # dependencies do not settle which owner to use.
        explicitly_provided_deps.maybe_warn_of_ambiguous_dependency_inference(
            owners.ambiguous,
            address,
            import_reference="module",
            context=softwrap(
                f"""
                The python_distribution target {address} has the field
                `entry_points={field_str}`, which maps to the Python module
                `{entry_point.module}`
                """
            ),
        )
        # Prefer unambiguous owners; fall back to an owner that the explicit
        # deps disambiguate, if any.
        maybe_disambiguated = explicitly_provided_deps.disambiguated(owners.ambiguous)
        unambiguous_owners = owners.unambiguous or (
            (maybe_disambiguated,) if maybe_disambiguated else ()
        )
        module_owners.update(unambiguous_owners)

    # Also depend on any pex_binary targets referenced by the entry points.
    return InjectedDependencies(
        Addresses(module_owners)
        + distribution_entry_points.pex_binary_addresses
        + provides_entry_points.pex_binary_addresses
    )
예제 #30
0
async def resolve_python_distribution_entry_points(
    request: ResolvePythonDistributionEntryPointsRequest,
) -> ResolvedPythonDistributionEntryPoints:
    """Resolve a `python_distribution`'s entry points to modules and owning targets.

    Each entry point value is either a direct entry point string (e.g.
    `"project.app:main"`) or the address of a `pex_binary` target, in which
    case that target's own `entry_point` field supplies the module. The entry
    points are read from either `request.entry_points_field` or the
    `entry_points` kwarg of `request.provides_field`; if neither yields any,
    an empty result is returned.

    Raises `InvalidEntryPoint` for non-pex_binary target refs and for
    `console_scripts`/`gui_scripts` entries that lack a `:function` suffix.
    """
    if request.entry_points_field:
        if request.entry_points_field.value is None:
            return ResolvedPythonDistributionEntryPoints()
        address = request.entry_points_field.address
        all_entry_points = cast(_EntryPointsDictType, request.entry_points_field.value)

    elif request.provides_field:
        address = request.provides_field.address
        provides_field_value = cast(
            _EntryPointsDictType, request.provides_field.value.kwargs.get("entry_points") or {}
        )

        if provides_field_value:
            all_entry_points = provides_field_value
        else:
            return ResolvedPythonDistributionEntryPoints()
    else:
        return ResolvedPythonDistributionEntryPoints()

    classified_entry_points = list(_classify_entry_points(all_entry_points))

    # Pick out all target addresses up front, so we can use MultiGet later.
    #
    # This calls for a bit of trickery however (using the "y_by_x" mapping dicts), so we keep track
    # of which address belongs to which entry point. I.e. the `address_by_ref` and
    # `binary_entry_point_by_address` variables.

    target_refs = [
        entry_point_str for is_target, _, _, entry_point_str in classified_entry_points if is_target
    ]

    # Intermediate step, as Get(Targets) returns a deduplicated set.. which breaks in case of
    # multiple input refs that maps to the same target.
    target_addresses = await Get(
        Addresses,
        UnparsedAddressInputs(
            target_refs,
            owning_address=address,
            description_of_origin="TODO(#14468)",
        ),
    )
    address_by_ref = dict(zip(target_refs, target_addresses))
    targets = await Get(Targets, Addresses, target_addresses)

    # Check that we only have targets with a pex entry_point field.
    for target in targets:
        if not target.has_field(PexEntryPointField):
            raise InvalidEntryPoint(
                softwrap(
                    f"""
                    All target addresses in the entry_points field must be for pex_binary targets,
                    but the target {address} includes the value {target.address}, which has the
                    target type {target.alias}.

                    Alternatively, you can use a module like "project.app:main".
                    See {doc_url('python-distributions')}.
                    """
                )
            )

    binary_entry_points = await MultiGet(
        Get(
            ResolvedPexEntryPoint,
            ResolvePexEntryPointRequest(target[PexEntryPointField]),
        )
        for target in targets
    )
    binary_entry_point_by_address = {
        target.address: entry_point for target, entry_point in zip(targets, binary_entry_points)
    }

    entry_points: DefaultDict[str, Dict[str, PythonDistributionEntryPoint]] = defaultdict(dict)

    # Parse refs/replace with resolved pex entry point, and validate console entry points have function.
    for is_target, category, name, ref in classified_entry_points:
        owner: Optional[Address] = None
        if is_target:
            owner = address_by_ref[ref]
            entry_point = binary_entry_point_by_address[owner].val
            if entry_point is None:
                # A pex_binary without an `entry_point` contributes nothing here.
                logger.warning(
                    softwrap(
                        f"""
                        The entry point {name} in {category} references a pex_binary target {ref}
                        which does not set `entry_point`. Skipping.
                        """
                    )
                )
                continue
        else:
            entry_point = EntryPoint.parse(ref, f"{name} for {address} {category}")

        if category in ["console_scripts", "gui_scripts"] and not entry_point.function:
            url = "https://python-packaging.readthedocs.io/en/latest/command-line-scripts.html#the-console-scripts-entry-point"
            raise InvalidEntryPoint(
                dedent(
                    f"""\
                Every entry point in `{category}` for {address} must end in the format `:my_func`,
                but {name} set it to {entry_point.spec!r}. For example, set
                `entry_points={{"{category}": {{"{name}": "{entry_point.module}:main"}} }}`.
                See {url}.
                """
                )
            )

        entry_points[category][name] = PythonDistributionEntryPoint(entry_point, owner)

    # Freeze the nested mapping: {category: {name: PythonDistributionEntryPoint}}.
    return ResolvedPythonDistributionEntryPoints(
        FrozenDict(
            {category: FrozenDict(entry_points) for category, entry_points in entry_points.items()}
        )
    )