Example 1
async def get_requirements(
    dep_owner: DependencyOwner,
    union_membership: UnionMembership,
    setup_py_generation: SetupPyGeneration,
) -> ExportedTargetRequirements:
    transitive_targets = await Get(
        TransitiveTargets,
        TransitiveTargetsRequest([dep_owner.exported_target.target.address]),
    )
    ownable_tgts = [
        tgt for tgt in transitive_targets.closure if is_ownable_target(tgt, union_membership)
    ]
    owners = await MultiGet(Get(ExportedTarget, OwnedDependency(tgt)) for tgt in ownable_tgts)
    owned_by_us: Set[Target] = set()
    owned_by_others: Set[Target] = set()
    for tgt, owner in zip(ownable_tgts, owners):
        (owned_by_us if owner == dep_owner.exported_target else owned_by_others).add(tgt)

    # Get all 3rdparty deps of our owned deps.
    #
    # Note that we need only consider requirements that are direct dependencies of our owned deps:
    # If T depends on R indirectly, then it must be via some direct deps U1, U2, ... For each such U,
    # if U is in the owned deps then we'll pick up R through U. And if U is not in the owned deps
    # then it's owned by an exported target ET, and so R will be in the requirements for ET, and we
    # will require ET.
    direct_deps_tgts = await MultiGet(
        Get(Targets, DependenciesRequest(tgt.get(Dependencies))) for tgt in owned_by_us
    )

    transitive_excludes: FrozenOrderedSet[Target] = FrozenOrderedSet()
    uneval_trans_excl = [
        tgt.get(Dependencies).unevaluated_transitive_excludes for tgt in transitive_targets.closure
    ]
    if uneval_trans_excl:
        nested_trans_excl = await MultiGet(
            Get(Targets, UnparsedAddressInputs, unparsed) for unparsed in uneval_trans_excl
        )
        transitive_excludes = FrozenOrderedSet(
            itertools.chain.from_iterable(nested_trans_excl)
        )

    direct_deps_chained = FrozenOrderedSet(itertools.chain.from_iterable(direct_deps_tgts))
    direct_deps_with_excl = direct_deps_chained.difference(transitive_excludes)

    reqs = PexRequirements.create_from_requirement_fields(
        tgt[PythonRequirementsField]
        for tgt in direct_deps_with_excl
        if tgt.has_field(PythonRequirementsField)
    )
    req_strs = list(reqs)

    # Add the requirements on any exported targets on which we depend.
    kwargs_for_exported_targets_we_depend_on = await MultiGet(
        Get(SetupKwargs, OwnedDependency(tgt)) for tgt in owned_by_others
    )
    req_strs.extend(
        f"{kwargs.name}{setup_py_generation.first_party_dependency_version(kwargs.version)}"
        for kwargs in set(kwargs_for_exported_targets_we_depend_on)
    )
    return ExportedTargetRequirements(req_strs)
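A note on the partition idiom above: `zip(ownable_tgts, owners)` pairs each target with its computed owner, and the conditional-set expression routes each pair into one of two buckets in a single pass. A minimal self-contained sketch of the same idiom, using illustrative names rather than Pants types:

from typing import Set

def partition_by_owner(items, owners, us):
    # Route each (item, owner) pair into "ours" or "theirs" in one pass.
    owned_by_us: Set[str] = set()
    owned_by_others: Set[str] = set()
    for item, owner in zip(items, owners):
        (owned_by_us if owner == us else owned_by_others).add(item)
    return owned_by_us, owned_by_others

mine, theirs = partition_by_owner(["a", "b", "c"], ["me", "you", "me"], "me")
assert mine == {"a", "c"} and theirs == {"b"}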
Example 2
async def generate_user_lockfile_goal(
    addresses: Addresses,
    python_setup: PythonSetup,
    workspace: Workspace,
) -> GenerateUserLockfileGoal:
    if python_setup.lockfile is None:
        logger.warning(
            "You ran `./pants generate-user-lockfile`, but `[python].experimental_lockfile` "
            "is not set. Please set this option to the path where you'd like the lockfile for "
            "your code's dependencies to live."
        )
        return GenerateUserLockfileGoal(exit_code=1)

    transitive_targets = await Get(TransitiveTargets, TransitiveTargetsRequest(addresses))
    reqs = PexRequirements.create_from_requirement_fields(
        tgt[PythonRequirementsField]
        # NB: By looking at the dependencies, rather than the closure, we only generate the
        # lockfile for requirements that are actually used in the project.
        for tgt in transitive_targets.dependencies
        if tgt.has_field(PythonRequirementsField)
    )

    if not reqs:
        logger.warning(
            "No third-party requirements found for the transitive closure, so a lockfile will not "
            "be generated."
        )
        return GenerateUserLockfileGoal(exit_code=0)

    result = await Get(
        PythonLockfile,
        PythonLockfileRequest(
            reqs.req_strings,
            # TODO(#12314): Use interpreter constraints from the transitive closure.
            InterpreterConstraints(python_setup.interpreter_constraints),
            resolve_name="not yet implemented",
            lockfile_dest=python_setup.lockfile,
            _description=(
                f"Generate lockfile for {pluralize(len(reqs.req_strings), 'requirement')}: "
                f"{', '.join(reqs.req_strings)}"
            ),
            # TODO(12382): Make this command actually accurate once we figure out the semantics
            #  for user lockfiles. This is currently misleading.
            _regenerate_command="./pants generate-user-lockfile ::",
        ),
    )
    workspace.write_digest(result.digest)
    logger.info(f"Wrote lockfile to {result.path}")

    return GenerateUserLockfileGoal(exit_code=0)
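The `pluralize` helper used in `_description` above comes from Pants' string utilities (`pants.util.strutil`); a minimal stand-in with the same observable behavior (naive "s" pluralization) might look like the sketch below. This is an assumption for illustration, not the actual implementation.

def pluralize(count: int, noun: str) -> str:
    # "1 requirement" / "3 requirements"
    return f"{count} {noun}" if count == 1 else f"{count} {noun}s"

assert pluralize(1, "requirement") == "1 requirement"
assert pluralize(3, "requirement") == "3 requirements"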
Example 3
async def get_requirements(
    dep_owner: DependencyOwner, union_membership: UnionMembership
) -> ExportedTargetRequirements:
    transitive_targets = await Get(
        TransitiveTargets, Addresses([dep_owner.exported_target.target.address])
    )

    ownable_tgts = [
        tgt for tgt in transitive_targets.closure if is_ownable_target(tgt, union_membership)
    ]
    owners = await MultiGet(Get(ExportedTarget, OwnedDependency(tgt)) for tgt in ownable_tgts)
    owned_by_us: Set[Target] = set()
    owned_by_others: Set[Target] = set()
    for tgt, owner in zip(ownable_tgts, owners):
        (owned_by_us if owner == dep_owner.exported_target else owned_by_others).add(tgt)

    # Get all 3rdparty deps of our owned deps.
    #
    # Note that we need only consider requirements that are direct dependencies of our owned deps:
    # If T depends on R indirectly, then it must be via some direct deps U1, U2, ... For each such U,
    # if U is in the owned deps then we'll pick up R through U. And if U is not in the owned deps
    # then it's owned by an exported target ET, and so R will be in the requirements for ET, and we
    # will require ET.
    #
    # TODO: Note that this logic doesn't account for indirection via dep aggregator targets, of type
    #  `target`. But we don't have those in v2 (yet) anyway. Plus, as we move towards buildgen and/or
    #  stricter build graph hygiene, it makes sense to require that targets directly declare their
    #  true dependencies. Plus, in the specific realm of setup-py, since we must exclude indirect
    #  deps across exported target boundaries, it's not a big stretch to just insist that
    #  requirements must be direct deps.
    direct_deps_tgts = await MultiGet(
        Get(Targets, DependenciesRequest(tgt.get(Dependencies))) for tgt in owned_by_us
    )
    reqs = PexRequirements.create_from_requirement_fields(
        tgt[PythonRequirementsField]
        for tgt in itertools.chain.from_iterable(direct_deps_tgts)
        if tgt.has_field(PythonRequirementsField)
    )
    req_strs = list(reqs)

    # Add the requirements on any exported targets on which we depend.
    kwargs_for_exported_targets_we_depend_on = await MultiGet(
        Get(SetupKwargs, OwnedDependency(tgt)) for tgt in owned_by_others
    )
    req_strs.extend(
        f"{kwargs.name}=={kwargs.version}"
        for kwargs in set(kwargs_for_exported_targets_we_depend_on)
    )

    return ExportedTargetRequirements(req_strs)
Example 4
async def setup_user_lockfile_requests(
    requested: _SpecifiedUserResolves, python_setup: PythonSetup
) -> _UserLockfileRequests:
    # First, associate all resolves with their consumers.
    all_build_targets = await Get(UnexpandedTargets, AddressSpecs([DescendantAddresses("")]))
    resolves_to_roots = defaultdict(list)
    for tgt in all_build_targets:
        if not tgt.has_field(PythonResolveField):
            continue
        tgt[PythonResolveField].validate(python_setup)
        resolve = tgt[PythonResolveField].value
        if resolve is None:
            continue
        resolves_to_roots[resolve].append(tgt.address)

    # Expand the resolves for all specified.
    transitive_targets_per_resolve = await MultiGet(
        Get(TransitiveTargets, TransitiveTargetsRequest(resolves_to_roots[resolve]))
        for resolve in requested
    )
    pex_requirements_per_resolve = []
    interpreter_constraints_per_resolve = []
    for transitive_targets in transitive_targets_per_resolve:
        req_fields = []
        ic_fields = []
        for tgt in transitive_targets.closure:
            if tgt.has_field(PythonRequirementsField):
                req_fields.append(tgt[PythonRequirementsField])
            if tgt.has_field(InterpreterConstraintsField):
                ic_fields.append(tgt[InterpreterConstraintsField])
        pex_requirements_per_resolve.append(
            PexRequirements.create_from_requirement_fields(req_fields)
        )
        interpreter_constraints_per_resolve.append(
            InterpreterConstraints.create_from_compatibility_fields(ic_fields, python_setup)
        )

    requests = (
        PythonLockfileRequest(
            requirements.req_strings,
            interpreter_constraints,
            resolve_name=resolve,
            lockfile_dest=python_setup.resolves_to_lockfiles[resolve],
        )
        for resolve, requirements, interpreter_constraints in zip(
            requested, pex_requirements_per_resolve, interpreter_constraints_per_resolve
        )
    )
    return _UserLockfileRequests(requests)
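The first loop is the standard `defaultdict(list)` grouping idiom: each consumer is appended under its resolve name, so later lookups never need a missing-key check. A self-contained sketch with plain data (names are illustrative):

from collections import defaultdict

targets = [("lib/a", "resolve-x"), ("lib/b", None), ("lib/c", "resolve-x")]
resolves_to_roots = defaultdict(list)
for address, resolve in targets:
    if resolve is None:
        continue  # targets without an explicit resolve are skipped
    resolves_to_roots[resolve].append(address)

assert dict(resolves_to_roots) == {"resolve-x": ["lib/a", "lib/c"]}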
Example 5
async def pylint_first_party_plugins(
        pylint: Pylint) -> PylintFirstPartyPlugins:
    if not pylint.source_plugins:
        return PylintFirstPartyPlugins(FrozenOrderedSet(), FrozenOrderedSet(),
                                       EMPTY_DIGEST)

    plugin_target_addresses = await Get(Addresses, UnparsedAddressInputs,
                                        pylint.source_plugins)
    transitive_targets = await Get(
        TransitiveTargets, TransitiveTargetsRequest(plugin_target_addresses))

    requirements_fields: OrderedSet[PythonRequirementsField] = OrderedSet()
    interpreter_constraints_fields: OrderedSet[
        InterpreterConstraintsField] = OrderedSet()
    for tgt in transitive_targets.closure:
        if tgt.has_field(PythonRequirementsField):
            requirements_fields.add(tgt[PythonRequirementsField])
        if tgt.has_field(InterpreterConstraintsField):
            interpreter_constraints_fields.add(
                tgt[InterpreterConstraintsField])

    # NB: Pylint source plugins must be explicitly loaded via PYTHONPATH (i.e. PEX_EXTRA_SYS_PATH).
    # The value must point to the plugin's directory, rather than to a parent's directory, because
    # `load-plugins` takes a module name rather than a path to the module; i.e. `plugin`, but
    # not `path.to.plugin`. (This means users must have specified the parent directory as a
    # source root.)
    stripped_sources = await Get(
        StrippedPythonSourceFiles,
        PythonSourceFilesRequest(transitive_targets.closure))
    prefixed_sources = await Get(
        Digest,
        AddPrefix(stripped_sources.stripped_source_files.snapshot.digest,
                  PylintFirstPartyPlugins.PREFIX),
    )

    return PylintFirstPartyPlugins(
        requirement_strings=PexRequirements.create_from_requirement_fields(
            requirements_fields).req_strings,
        interpreter_constraints_fields=FrozenOrderedSet(
            interpreter_constraints_fields),
        sources_digest=prefixed_sources,
    )
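`OrderedSet` and `FrozenOrderedSet` (from `pants.util.ordered_set`) deduplicate the collected fields while preserving first-insertion order, which keeps the resulting requirement strings stable across runs. With only the standard library, `dict.fromkeys` gives the same guarantee for hashable items:

fields = ["reqA", "reqB", "reqA", "reqC"]
deduped = list(dict.fromkeys(fields))  # dedupe, keep first-seen order
assert deduped == ["reqA", "reqB", "reqC"]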
Example 6
async def mypy_first_party_plugins(mypy: MyPy) -> MyPyFirstPartyPlugins:
    if not mypy.source_plugins:
        return MyPyFirstPartyPlugins(FrozenOrderedSet(), EMPTY_DIGEST, ())

    plugin_target_addresses = await Get(Addresses, UnparsedAddressInputs,
                                        mypy.source_plugins)
    transitive_targets = await Get(
        TransitiveTargets, TransitiveTargetsRequest(plugin_target_addresses))

    requirements = PexRequirements.create_from_requirement_fields(
        plugin_tgt[PythonRequirementsField]
        for plugin_tgt in transitive_targets.closure
        if plugin_tgt.has_field(PythonRequirementsField))

    sources = await Get(PythonSourceFiles,
                        PythonSourceFilesRequest(transitive_targets.closure))
    return MyPyFirstPartyPlugins(
        requirement_strings=requirements.req_strings,
        sources_digest=sources.source_files.snapshot.digest,
        source_roots=sources.source_roots,
    )
Example 7
async def pylint_lint_partition(partition: PylintPartition, pylint: Pylint) -> LintResult:
    requirements_pex_request = Get(
        Pex,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(
            (field_set.address for field_set in partition.field_sets),
            # NB: These constraints must be identical to those of the other PEXes. Otherwise, we
            # risk using a different version for the requirements than the other two PEXes, which
            # can result in a PEX runtime error about missing dependencies.
            hardcoded_interpreter_constraints=partition.interpreter_constraints,
            internal_only=True,
            direct_deps_only=True,
        ),
    )

    plugin_requirements = PexRequirements.create_from_requirement_fields(
        plugin_tgt[PythonRequirementsField]
        for plugin_tgt in partition.plugin_targets
        if plugin_tgt.has_field(PythonRequirementsField)
    )
    # Right now any Pylint transitive requirements will shadow corresponding user
    # requirements, which could lead to problems.
    pylint_pex_request = Get(
        Pex,
        PexRequest(
            output_filename="pylint.pex",
            internal_only=True,
            requirements=PexRequirements([*pylint.all_requirements, *plugin_requirements]),
            entry_point=pylint.entry_point,
            interpreter_constraints=partition.interpreter_constraints,
            # TODO(John Sirois): Support shading python binaries:
            #   https://github.com/pantsbuild/pants/issues/9206
            additional_args=("--pex-path", requirements_pex_request.input.output_filename),
        ),
    )

    config_digest_request = Get(
        Digest,
        PathGlobs(
            globs=[pylint.config] if pylint.config else [],
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            description_of_origin="the option `--pylint-config`",
        ),
    )

    prepare_plugin_sources_request = Get(
        StrippedPythonSourceFiles, PythonSourceFilesRequest(partition.plugin_targets)
    )
    prepare_python_sources_request = Get(
        PythonSourceFiles, PythonSourceFilesRequest(partition.targets_with_dependencies)
    )
    field_set_sources_request = Get(
        SourceFiles, SourceFilesRequest(field_set.sources for field_set in partition.field_sets)
    )

    (
        pylint_pex,
        requirements_pex,
        config_digest,
        prepared_plugin_sources,
        prepared_python_sources,
        field_set_sources,
    ) = await MultiGet(
        pylint_pex_request,
        requirements_pex_request,
        config_digest_request,
        prepare_plugin_sources_request,
        prepare_python_sources_request,
        field_set_sources_request,
    )

    prefixed_plugin_sources = (
        await Get(
            Digest,
            AddPrefix(prepared_plugin_sources.stripped_source_files.snapshot.digest, "__plugins"),
        )
        if pylint.source_plugins
        else EMPTY_DIGEST
    )

    pythonpath = list(prepared_python_sources.source_roots)
    if pylint.source_plugins:
        # NB: Pylint source plugins must be explicitly loaded via PEX_EXTRA_SYS_PATH. The value must
        # point to the plugin's directory, rather than to a parent's directory, because
        # `load-plugins` takes a module name rather than a path to the module; i.e. `plugin`, but
        # not `path.to.plugin`. (This means users must have specified the parent directory as a
        # source root.)
        pythonpath.append("__plugins")

    input_digest = await Get(
        Digest,
        MergeDigests(
            (
                pylint_pex.digest,
                requirements_pex.digest,
                config_digest,
                prefixed_plugin_sources,
                prepared_python_sources.source_files.snapshot.digest,
            )
        ),
    )

    result = await Get(
        FallibleProcessResult,
        PexProcess(
            pylint_pex,
            argv=generate_args(source_files=field_set_sources, pylint=pylint),
            input_digest=input_digest,
            extra_env={"PEX_EXTRA_SYS_PATH": ":".join(pythonpath)},
            description=f"Run Pylint on {pluralize(len(partition.field_sets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    return LintResult.from_fallible_process_result(
        result, partition_description=str(sorted(str(c) for c in partition.interpreter_constraints))
    )
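Note the structure of this rule: every `Get` is created up front and only then awaited together in one `MultiGet`, so the engine can execute the requests concurrently instead of serially. Outside the Pants engine, the analogous standard-library shape is `asyncio.gather`; a runnable sketch:

import asyncio

async def fetch(name: str) -> str:
    await asyncio.sleep(0)  # stand-in for real work/I/O
    return f"result:{name}"

async def main() -> None:
    # Create the coroutines first, then await them together.
    a = fetch("pylint_pex")
    b = fetch("requirements_pex")
    ra, rb = await asyncio.gather(a, b)
    assert (ra, rb) == ("result:pylint_pex", "result:requirements_pex")

asyncio.run(main())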
Example 8
async def pex_from_targets(
    request: PexFromTargetsRequest,
    python_setup: PythonSetup,
    constraints_file: MaybeConstraintsFile,
) -> PexRequest:
    if request.direct_deps_only:
        targets = await Get(Targets, Addresses(request.addresses))
        direct_deps = await MultiGet(
            Get(Targets, DependenciesRequest(tgt.get(Dependencies))) for tgt in targets
        )
        all_targets = FrozenOrderedSet(itertools.chain(*direct_deps, targets))
    else:
        transitive_targets = await Get(
            TransitiveTargets, TransitiveTargetsRequest(request.addresses)
        )
        all_targets = transitive_targets.closure

    input_digests = []
    if request.additional_sources:
        input_digests.append(request.additional_sources)
    if request.include_source_files:
        prepared_sources = await Get(
            StrippedPythonSourceFiles, PythonSourceFilesRequest(all_targets)
        )
        input_digests.append(prepared_sources.stripped_source_files.snapshot.digest)
    merged_input_digest = await Get(Digest, MergeDigests(input_digests))

    if request.hardcoded_interpreter_constraints:
        interpreter_constraints = request.hardcoded_interpreter_constraints
    else:
        calculated_constraints = PexInterpreterConstraints.create_from_targets(
            all_targets, python_setup
        )
        # If there are no targets, we fall back to the global constraints. This is relevant,
        # for example, when running `./pants repl` with no specs.
        interpreter_constraints = calculated_constraints or PexInterpreterConstraints(
            python_setup.interpreter_constraints
        )

    exact_reqs = PexRequirements.create_from_requirement_fields(
        (
            tgt[PythonRequirementsField]
            for tgt in all_targets
            if tgt.has_field(PythonRequirementsField)
        ),
        additional_requirements=request.additional_requirements,
    )

    requirements = exact_reqs
    repository_pex: Pex | None = None
    description = request.description

    if constraints_file.path:
        constraints_file_contents = await Get(DigestContents, Digest, constraints_file.digest)
        constraints_file_reqs = set(
            parse_requirements_file(
                constraints_file_contents[0].content.decode(),
                rel_path=constraints_file.path,
            )
        )

        # In requirement strings, Foo_-Bar.BAZ and foo-bar-baz refer to the same project. We let
        # packaging canonicalize for us.
        # See: https://www.python.org/dev/peps/pep-0503/#normalized-names

        url_reqs = set()  # E.g., 'foobar@ git+https://github.com/foo/bar.git@branch'
        name_reqs = set()  # E.g., foobar>=1.2.3
        name_req_projects = set()

        for req_str in exact_reqs:
            req = Requirement.parse(req_str)
            if req.url:  # type: ignore[attr-defined]
                url_reqs.add(req)
            else:
                name_reqs.add(req)
                name_req_projects.add(canonicalize_project_name(req.project_name))

        constraint_file_projects = {
            canonicalize_project_name(req.project_name) for req in constraints_file_reqs
        }
        # Constraints files must only contain name reqs, not URL reqs (those are already
        # constrained by their very nature). See https://github.com/pypa/pip/issues/8210.
        unconstrained_projects = name_req_projects - constraint_file_projects
        if unconstrained_projects:
            constraints_descr = (
                f"constraints file {constraints_file.path}"
                if python_setup.requirement_constraints
                else f"_python_constraints target {python_setup.requirement_constraints_target}"
            )
            logger.warning(
                f"The {constraints_descr} does not contain entries for the following "
                f"requirements: {', '.join(unconstrained_projects)}"
            )

        if python_setup.resolve_all_constraints:
            if unconstrained_projects:
                logger.warning(
                    "Ignoring `[python_setup].resolve_all_constraints` option because constraints "
                    "file does not cover all requirements."
                )
            else:
                # To get a full set of requirements we must add the URL requirements to the
                # constraints file, since the latter cannot contain URL requirements.
                # NB: We can only add the URL requirements we know about here, i.e., those that
                #  are transitive deps of the targets in play. There may be others in the repo.
                #  So we may end up creating a few different repository pexes, each with identical
                #  name requirements but different subsets of URL requirements. Fortunately since
                #  all these repository pexes will have identical pinned versions of everything,
                #  this is not a correctness issue, only a performance one.
                # TODO: Address this as part of providing proper lockfile support. However we
                #  generate lockfiles, they must be able to include URL requirements.
                all_constraints = {str(req) for req in (constraints_file_reqs | url_reqs)}
                repository_pex = await Get(
                    Pex,
                    PexRequest(
                        description=f"Resolving {python_setup.requirement_constraints}",
                        output_filename="repository.pex",
                        internal_only=request.internal_only,
                        requirements=PexRequirements(all_constraints),
                        interpreter_constraints=interpreter_constraints,
                        platforms=request.platforms,
                        additional_args=["-vvv"],
                    ),
                )
    elif (
        python_setup.resolve_all_constraints
        and python_setup.resolve_all_constraints_was_set_explicitly()
    ):
        raise ValueError(
            "[python-setup].resolve_all_constraints is enabled, so either "
            "[python-setup].requirement_constraints or "
            "[python-setup].requirement_constraints_target must also be provided."
        )

    return PexRequest(
        output_filename=request.output_filename,
        internal_only=request.internal_only,
        requirements=requirements,
        interpreter_constraints=interpreter_constraints,
        platforms=request.platforms,
        main=request.main,
        sources=merged_input_digest,
        additional_inputs=request.additional_inputs,
        repository_pex=repository_pex,
        additional_args=request.additional_args,
        description=description,
    )
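`canonicalize_project_name` applies PEP 503 normalization: runs of `-`, `_`, and `.` collapse to a single `-` and the result is lowercased, so `Foo_-Bar.BAZ` and `foo-bar-baz` compare equal. In Pants this is `packaging.utils.canonicalize_name` under an alias; the rule itself is one regex:

import re

def canonicalize_project_name(name: str) -> str:
    # PEP 503: https://www.python.org/dev/peps/pep-0503/#normalized-names
    return re.sub(r"[-_.]+", "-", name).lower()

assert canonicalize_project_name("Foo_-Bar.BAZ") == "foo-bar-baz"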
Example 9
async def pex_from_targets(request: PexFromTargetsRequest, python_setup: PythonSetup) -> PexRequest:
    if request.direct_deps_only:
        targets = await Get(Targets, Addresses(request.addresses))
        direct_deps = await MultiGet(
            Get(Targets, DependenciesRequest(tgt.get(Dependencies))) for tgt in targets
        )
        all_targets = FrozenOrderedSet(itertools.chain(*direct_deps, targets))
    else:
        transitive_targets = await Get(
            TransitiveTargets, TransitiveTargetsRequest(request.addresses)
        )
        all_targets = transitive_targets.closure

    input_digests = []
    if request.additional_sources:
        input_digests.append(request.additional_sources)
    if request.include_source_files:
        prepared_sources = await Get(
            StrippedPythonSourceFiles, PythonSourceFilesRequest(all_targets)
        )
        input_digests.append(prepared_sources.stripped_source_files.snapshot.digest)
    merged_input_digest = await Get(Digest, MergeDigests(input_digests))

    if request.hardcoded_interpreter_constraints:
        interpreter_constraints = request.hardcoded_interpreter_constraints
    else:
        calculated_constraints = PexInterpreterConstraints.create_from_targets(
            all_targets, python_setup
        )
        # If there are no targets, we fall back to the global constraints. This is relevant,
        # for example, when running `./pants repl` with no specs.
        interpreter_constraints = calculated_constraints or PexInterpreterConstraints(
            python_setup.interpreter_constraints
        )

    exact_reqs = PexRequirements.create_from_requirement_fields(
        (
            tgt[PythonRequirementsField]
            for tgt in all_targets
            if tgt.has_field(PythonRequirementsField)
        ),
        additional_requirements=request.additional_requirements,
    )

    requirements = exact_reqs
    description = request.description

    if python_setup.requirement_constraints:
        # In requirement strings Foo_-Bar.BAZ and foo-bar-baz refer to the same project. We let
        # packaging canonicalize for us.
        # See: https://www.python.org/dev/peps/pep-0503/#normalized-names

        exact_req_projects = {
            canonicalize_project_name(Requirement.parse(req).project_name) for req in exact_reqs
        }
        constraints_file_contents = await Get(
            DigestContents,
            PathGlobs(
                [python_setup.requirement_constraints],
                glob_match_error_behavior=GlobMatchErrorBehavior.error,
                conjunction=GlobExpansionConjunction.all_match,
                description_of_origin="the option `--python-setup-requirement-constraints`",
            ),
        )
        constraints_file_reqs = set(
            parse_requirements_file(
                constraints_file_contents[0].content.decode(),
                rel_path=python_setup.requirement_constraints,
            )
        )
        constraint_file_projects = {
            canonicalize_project_name(req.project_name) for req in constraints_file_reqs
        }
        unconstrained_projects = exact_req_projects - constraint_file_projects
        if unconstrained_projects:
            logger.warning(
                f"The constraints file {python_setup.requirement_constraints} does not contain "
                f"entries for the following requirements: {', '.join(unconstrained_projects)}"
            )

        if python_setup.resolve_all_constraints == ResolveAllConstraintsOption.ALWAYS or (
            python_setup.resolve_all_constraints == ResolveAllConstraintsOption.NONDEPLOYABLES
            and request.internal_only
        ):
            if unconstrained_projects:
                logger.warning(
                    "Ignoring resolve_all_constraints setting in [python_setup] scope "
                    "because constraints file does not cover all requirements."
                )
            else:
                requirements = PexRequirements(str(req) for req in constraints_file_reqs)
                description = description or f"Resolving {python_setup.requirement_constraints}"
    elif (
        python_setup.resolve_all_constraints != ResolveAllConstraintsOption.NEVER
        and python_setup.resolve_all_constraints_was_set_explicitly()
    ):
        raise ValueError(
            f"[python-setup].resolve_all_constraints is set to "
            f"{python_setup.resolve_all_constraints.value}, so "
            f"[python-setup].requirement_constraints must also be provided."
        )

    return PexRequest(
        output_filename=request.output_filename,
        internal_only=request.internal_only,
        requirements=requirements,
        interpreter_constraints=interpreter_constraints,
        platforms=request.platforms,
        entry_point=request.entry_point,
        sources=merged_input_digest,
        additional_inputs=request.additional_inputs,
        additional_args=request.additional_args,
        description=description,
    )
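`parse_requirements_file` is a Pants helper; a rough stand-in for the common case (one requirement per line, `#` comments, blank lines) using the third-party `packaging` library, under the assumption that the file contains no pip-specific directives:

from packaging.requirements import Requirement

def parse_constraints(text: str):
    # Yield one parsed requirement per meaningful line.
    for line in text.splitlines():
        line = line.split("#", 1)[0].strip()  # drop comments and whitespace
        if line:
            yield Requirement(line)

reqs = list(parse_constraints("foo==1.2.3\n# pinned for bar\nbar>=2.0\n"))
assert [str(r) for r in reqs] == ["foo==1.2.3", "bar>=2.0"]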
Example 10
async def mypy_typecheck_partition(partition: MyPyPartition, mypy: MyPy) -> TypecheckResult:
    plugin_target_addresses = await Get(Addresses, UnparsedAddressInputs, mypy.source_plugins)
    plugin_transitive_targets = await Get(
        TransitiveTargets, TransitiveTargetsRequest(plugin_target_addresses)
    )

    plugin_requirements = PexRequirements.create_from_requirement_fields(
        plugin_tgt[PythonRequirementsField]
        for plugin_tgt in plugin_transitive_targets.closure
        if plugin_tgt.has_field(PythonRequirementsField)
    )

    # If the user did not set `--python-version` already, we set it ourselves based on their code's
    # interpreter constraints. This determines what AST is used by MyPy.
    python_version = (
        None
        if partition.python_version_already_configured
        else partition.interpreter_constraints.minimum_python_version()
    )

    # MyPy requires 3.5+ to run, but uses the typed-ast library to work with 2.7, 3.4, 3.5, 3.6,
    # and 3.7. However, typed-ast does not understand 3.8+, so instead we must run MyPy with
    # Python 3.8+ when relevant. We only do this if <3.8 can't be used, as we don't want a
    # loose requirement like `>=3.6` to result in requiring Python 3.8+, which would error if
    # 3.8+ is not installed on the machine.
    tool_interpreter_constraints = (
        partition.interpreter_constraints
        if (
            mypy.options.is_default("interpreter_constraints")
            and partition.interpreter_constraints.requires_python38_or_newer()
        )
        else PexInterpreterConstraints(mypy.interpreter_constraints)
    )

    plugin_sources_get = Get(
        PythonSourceFiles, PythonSourceFilesRequest(plugin_transitive_targets.closure)
    )
    closure_sources_get = Get(PythonSourceFiles, PythonSourceFilesRequest(partition.closure))
    roots_sources_get = Get(
        SourceFiles, SourceFilesRequest(tgt.get(PythonSources) for tgt in partition.root_targets)
    )

    requirements_pex_get = Get(
        Pex,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(
            (tgt.address for tgt in partition.root_targets),
            hardcoded_interpreter_constraints=partition.interpreter_constraints,
            internal_only=True,
        ),
    )

    # TODO(John Sirois): Scope the extra requirements to the partition.
    #  Right now we just use a global set of extra requirements and these might not be compatible
    #  with all partitions. See: https://github.com/pantsbuild/pants/issues/11556
    mypy_extra_requirements_pex_get = Get(
        Pex,
        PexRequest(
            output_filename="mypy_extra_requirements.pex",
            internal_only=True,
            requirements=PexRequirements(mypy.extra_requirements),
            interpreter_constraints=partition.interpreter_constraints,
        ),
    )
    mypy_pex_get = Get(
        VenvPex,
        PexRequest(
            output_filename="mypy.pex",
            internal_only=True,
            main=mypy.main,
            requirements=PexRequirements((*mypy.all_requirements, *plugin_requirements)),
            interpreter_constraints=tool_interpreter_constraints,
        ),
    )

    config_files_get = Get(ConfigFiles, ConfigFilesRequest, mypy.config_request)

    (
        plugin_sources,
        closure_sources,
        roots_sources,
        mypy_pex,
        requirements_pex,
        mypy_extra_requirements_pex,
        config_files,
    ) = await MultiGet(
        plugin_sources_get,
        closure_sources_get,
        roots_sources_get,
        mypy_pex_get,
        requirements_pex_get,
        mypy_extra_requirements_pex_get,
        config_files_get,
    )

    python_files = determine_python_files(roots_sources.snapshot.files)
    file_list_path = "__files.txt"
    file_list_digest_request = Get(
        Digest,
        CreateDigest([FileContent(file_list_path, "\n".join(python_files).encode())]),
    )

    typechecked_venv_pex_request = Get(
        VenvPex,
        PexRequest(
            output_filename="typechecked_venv.pex",
            internal_only=True,
            pex_path=[requirements_pex, mypy_extra_requirements_pex],
            interpreter_constraints=partition.interpreter_constraints,
        ),
    )

    typechecked_venv_pex, file_list_digest = await MultiGet(
        typechecked_venv_pex_request, file_list_digest_request
    )

    merged_input_files = await Get(
        Digest,
        MergeDigests(
            [
                file_list_digest,
                plugin_sources.source_files.snapshot.digest,
                closure_sources.source_files.snapshot.digest,
                typechecked_venv_pex.digest,
                config_files.snapshot.digest,
            ]
        ),
    )

    all_used_source_roots = sorted(
        set(itertools.chain(plugin_sources.source_roots, closure_sources.source_roots))
    )
    env = {
        "PEX_EXTRA_SYS_PATH": ":".join(all_used_source_roots),
        "MYPYPATH": ":".join(all_used_source_roots),
    }

    result = await Get(
        FallibleProcessResult,
        VenvPexProcess(
            mypy_pex,
            argv=generate_argv(
                mypy,
                typechecked_venv_pex,
                file_list_path=file_list_path,
                python_version=python_version,
            ),
            input_digest=merged_input_files,
            extra_env=env,
            description=f"Run MyPy on {pluralize(len(python_files), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    return TypecheckResult.from_fallible_process_result(
        result, partition_description=str(sorted(str(c) for c in partition.interpreter_constraints))
    )
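`determine_python_files` is Pants-internal; for illustration, assume it simply keeps the files MyPy can parse (`.py` and `.pyi`). A minimal stand-in under that assumption:

def determine_python_files(files):
    # Keep only Python sources/stubs; everything else is noise for MyPy.
    return tuple(f for f in files if f.endswith((".py", ".pyi")))

assert determine_python_files(("a.py", "b.pyi", "BUILD")) == ("a.py", "b.pyi")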
Example 11
async def pex_from_targets(
    request: PexFromTargetsRequest,
    python_setup: PythonSetup,
    constraints_file: MaybeConstraintsFile,
) -> PexRequest:
    if request.direct_deps_only:
        targets = await Get(Targets, Addresses(request.addresses))
        direct_deps = await MultiGet(
            Get(Targets, DependenciesRequest(tgt.get(Dependencies)))
            for tgt in targets)
        all_targets = FrozenOrderedSet(itertools.chain(*direct_deps, targets))
    else:
        transitive_targets = await Get(
            TransitiveTargets, TransitiveTargetsRequest(request.addresses))
        all_targets = transitive_targets.closure

    input_digests = []
    if request.additional_sources:
        input_digests.append(request.additional_sources)
    if request.include_source_files:
        prepared_sources = await Get(StrippedPythonSourceFiles,
                                     PythonSourceFilesRequest(all_targets))
        input_digests.append(
            prepared_sources.stripped_source_files.snapshot.digest)
    merged_input_digest = await Get(Digest, MergeDigests(input_digests))

    if request.hardcoded_interpreter_constraints:
        interpreter_constraints = request.hardcoded_interpreter_constraints
    else:
        calculated_constraints = PexInterpreterConstraints.create_from_targets(
            all_targets, python_setup)
        # If there are no targets, we fall back to the global constraints. This is relevant,
        # for example, when running `./pants repl` with no specs.
        interpreter_constraints = calculated_constraints or PexInterpreterConstraints(
            python_setup.interpreter_constraints)

    exact_reqs = PexRequirements.create_from_requirement_fields(
        (tgt[PythonRequirementsField]
         for tgt in all_targets if tgt.has_field(PythonRequirementsField)),
        additional_requirements=request.additional_requirements,
    )

    requirements = exact_reqs
    repository_pex: Pex | None = None
    description = request.description

    if constraints_file.path:
        constraints_file_contents = await Get(DigestContents, Digest,
                                              constraints_file.digest)
        constraints_file_reqs = set(
            parse_requirements_file(
                constraints_file_contents[0].content.decode(),
                rel_path=constraints_file.path,
            ))

        # In requirement strings, Foo_-Bar.BAZ and foo-bar-baz refer to the same project. We let
        # packaging canonicalize for us.
        # See: https://www.python.org/dev/peps/pep-0503/#normalized-names
        exact_req_projects = {
            canonicalize_project_name(Requirement.parse(req).project_name)
            for req in exact_reqs
        }
        constraint_file_projects = {
            canonicalize_project_name(req.project_name)
            for req in constraints_file_reqs
        }
        unconstrained_projects = exact_req_projects - constraint_file_projects
        if unconstrained_projects:
            constraints_descr = (
                f"constraints file {constraints_file.path}"
                if python_setup.requirement_constraints else
                f"_python_constraints target {python_setup.requirement_constraints_target}"
            )
            logger.warning(
                f"The {constraints_descr} does not contain entries for the following "
                f"requirements: {', '.join(unconstrained_projects)}")

        if python_setup.resolve_all_constraints == ResolveAllConstraintsOption.ALWAYS or (
                python_setup.resolve_all_constraints
                == ResolveAllConstraintsOption.NONDEPLOYABLES
                and request.internal_only):
            if unconstrained_projects:
                logger.warning(
                    "Ignoring `[python_setup].resolve_all_constraints` option because constraints "
                    "file does not cover all requirements.")
            else:
                repository_pex = await Get(
                    Pex,
                    PexRequest(
                        description=f"Resolving {python_setup.requirement_constraints}",
                        output_filename="repository.pex",
                        internal_only=request.internal_only,
                        requirements=PexRequirements(
                            str(req) for req in constraints_file_reqs),
                        interpreter_constraints=interpreter_constraints,
                        platforms=request.platforms,
                    ),
                )
    elif (python_setup.resolve_all_constraints !=
          ResolveAllConstraintsOption.NEVER
          and python_setup.resolve_all_constraints_was_set_explicitly()):
        raise ValueError(
            "[python-setup].resolve_all_constraints is set to "
            f"{python_setup.resolve_all_constraints.value}, so "
            "either [python-setup].requirement_constraints or "
            "[python-setup].requirement_constraints_target must also be provided."
        )

    return PexRequest(
        output_filename=request.output_filename,
        internal_only=request.internal_only,
        requirements=requirements,
        interpreter_constraints=interpreter_constraints,
        platforms=request.platforms,
        main=request.main,
        sources=merged_input_digest,
        additional_inputs=request.additional_inputs,
        repository_pex=repository_pex,
        additional_args=request.additional_args,
        description=description,
    )
Example 12
async def mypy_typecheck_partition(partition: MyPyPartition,
                                   mypy: MyPy) -> TypecheckResult:
    plugin_target_addresses = await Get(Addresses, UnparsedAddressInputs,
                                        mypy.source_plugins)
    plugin_transitive_targets_request = Get(
        TransitiveTargets, TransitiveTargetsRequest(plugin_target_addresses))
    plugin_transitive_targets, launcher_script = await MultiGet(
        plugin_transitive_targets_request,
        Get(Digest, CreateDigest([LAUNCHER_FILE])))

    plugin_requirements = PexRequirements.create_from_requirement_fields(
        plugin_tgt[PythonRequirementsField]
        for plugin_tgt in plugin_transitive_targets.closure
        if plugin_tgt.has_field(PythonRequirementsField))

    # If the user did not set `--python-version` already, we set it ourselves based on their code's
    # interpreter constraints. This determines what AST is used by MyPy.
    python_version = (
        None if partition.python_version_already_configured else
        partition.interpreter_constraints.minimum_python_version())

    # MyPy requires 3.5+ to run, but uses the typed-ast library to work with 2.7, 3.4, 3.5, 3.6,
    # and 3.7. However, typed-ast does not understand 3.8, so instead we must run MyPy with
    # Python 3.8 when relevant. We only do this if <3.8 can't be used, as we don't want a
    # loose requirement like `>=3.6` to result in requiring Python 3.8, which would error if
    # 3.8 is not installed on the machine.
    tool_interpreter_constraints = PexInterpreterConstraints(
        ("CPython>=3.8",)
        if (
            mypy.options.is_default("interpreter_constraints")
            and partition.interpreter_constraints.requires_python38_or_newer()
        )
        else mypy.interpreter_constraints
    )

    plugin_sources_request = Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(plugin_transitive_targets.closure))
    typechecked_sources_request = Get(
        PythonSourceFiles, PythonSourceFilesRequest(partition.closure))

    # Normally, this `requirements.pex` would be merged with mypy.pex via `--pex-path`. However,
    # this will cause a runtime error if the interpreter constraints are different between the
    # PEXes and they have incompatible wheels.
    #
    # Instead, we teach MyPy about the requirements by extracting the distributions from
    # requirements.pex and setting EXTRACTED_WHEELS, which our custom launcher script then
    # looks for.
    #
    # Conventionally, MyPy users might instead set `MYPYPATH` for this. However, doing this
    # results in type checking the requirements themselves.
    requirements_pex_request = Get(
        Pex,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(
            (addr for addr in partition.field_set_addresses),
            hardcoded_interpreter_constraints=partition.interpreter_constraints,
            internal_only=True,
        ),
    )
    mypy_pex_request = Get(
        Pex,
        PexRequest(
            output_filename="mypy.pex",
            internal_only=True,
            sources=launcher_script,
            requirements=PexRequirements(
                itertools.chain(mypy.all_requirements, plugin_requirements)),
            interpreter_constraints=tool_interpreter_constraints,
            entry_point=PurePath(LAUNCHER_FILE.path).stem,
        ),
    )

    config_digest_request = Get(Digest, PathGlobs, config_path_globs(mypy))

    (
        plugin_sources,
        typechecked_sources,
        mypy_pex,
        requirements_pex,
        config_digest,
    ) = await MultiGet(
        plugin_sources_request,
        typechecked_sources_request,
        mypy_pex_request,
        requirements_pex_request,
        config_digest_request,
    )

    typechecked_srcs_snapshot = typechecked_sources.source_files.snapshot
    file_list_path = "__files.txt"
    python_files = "\n".join(
        determine_python_files(
            typechecked_sources.source_files.snapshot.files))
    create_file_list_request = Get(
        Digest,
        CreateDigest([FileContent(file_list_path, python_files.encode())]),
    )

    file_list_digest, extracted_pex_distributions = await MultiGet(
        create_file_list_request,
        Get(ExtractedPexDistributions, Pex, requirements_pex))

    merged_input_files = await Get(
        Digest,
        MergeDigests([
            file_list_digest,
            plugin_sources.source_files.snapshot.digest,
            typechecked_srcs_snapshot.digest,
            mypy_pex.digest,
            extracted_pex_distributions.digest,
            config_digest,
        ]),
    )

    all_used_source_roots = sorted(
        set(
            itertools.chain(plugin_sources.source_roots,
                            typechecked_sources.source_roots)))
    env = {
        "PEX_EXTRA_SYS_PATH": ":".join(all_used_source_roots),
        "EXTRACTED_WHEELS": ":".join(extracted_pex_distributions.wheel_directory_paths),
    }

    result = await Get(
        FallibleProcessResult,
        PexProcess(
            mypy_pex,
            argv=generate_argv(mypy,
                               file_list_path=file_list_path,
                               python_version=python_version),
            input_digest=merged_input_files,
            extra_env=env,
            description=f"Run MyPy on {pluralize(len(typechecked_srcs_snapshot.files), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    return TypecheckResult.from_fallible_process_result(
        result,
        partition_description=str(
            sorted(str(c) for c in partition.interpreter_constraints)))
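One small detail above: the entry point is derived with `PurePath(LAUNCHER_FILE.path).stem`, i.e. the launcher's path with directory and extension stripped, which yields an importable module name. The path below is hypothetical:

from pathlib import PurePath

assert PurePath("some/dir/mypy_launcher.py").stem == "mypy_launcher"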
Example 13
async def pex_from_targets(request: PexFromTargetsRequest, python_setup: PythonSetup) -> PexRequest:
    if request.direct_deps_only:
        targets = await Get(Targets, Addresses(request.addresses))
        direct_deps = await MultiGet(
            Get(Targets, DependenciesRequest(tgt.get(Dependencies))) for tgt in targets
        )
        all_targets = FrozenOrderedSet(itertools.chain(*direct_deps, targets))
    else:
        transitive_targets = await Get(
            TransitiveTargets, TransitiveTargetsRequest(request.addresses)
        )
        all_targets = transitive_targets.closure

    if request.hardcoded_interpreter_constraints:
        interpreter_constraints = request.hardcoded_interpreter_constraints
    else:
        calculated_constraints = InterpreterConstraints.create_from_targets(
            all_targets, python_setup
        )
        # If there are no targets, we fall back to the global constraints. This is relevant,
        # for example, when running `./pants repl` with no specs.
        interpreter_constraints = calculated_constraints or InterpreterConstraints(
            python_setup.interpreter_constraints
        )

    sources_digests = []
    if request.additional_sources:
        sources_digests.append(request.additional_sources)
    if request.include_source_files:
        sources = await Get(PythonSourceFiles, PythonSourceFilesRequest(all_targets))
    else:
        sources = PythonSourceFiles.empty()

    additional_inputs_digests = []
    if request.additional_inputs:
        additional_inputs_digests.append(request.additional_inputs)
    additional_args = request.additional_args
    if request.include_local_dists:
        # Note that LocalDistsPexRequest has no `direct_deps_only` mode, so we will build all
        # local dists in the transitive closure even if the request was for direct_deps_only.
        # Since we currently use `direct_deps_only` in one case (building a requirements pex
        # when running pylint) and in that case include_local_dists=False, this seems harmless.
        local_dists = await Get(
            LocalDistsPex,
            LocalDistsPexRequest(
                request.addresses,
                internal_only=request.internal_only,
                interpreter_constraints=interpreter_constraints,
                sources=sources,
            ),
        )
        remaining_sources = local_dists.remaining_sources
        additional_inputs_digests.append(local_dists.pex.digest)
        additional_args += ("--requirements-pex", local_dists.pex.name)
    else:
        remaining_sources = sources

    remaining_sources_stripped = await Get(
        StrippedPythonSourceFiles, PythonSourceFiles, remaining_sources
    )
    sources_digests.append(remaining_sources_stripped.stripped_source_files.snapshot.digest)

    merged_sources_digest, additional_inputs = await MultiGet(
        Get(Digest, MergeDigests(sources_digests)),
        Get(Digest, MergeDigests(additional_inputs_digests)),
    )

    requirements = PexRequirements.create_from_requirement_fields(
        (
            tgt[PythonRequirementsField]
            for tgt in all_targets
            if tgt.has_field(PythonRequirementsField)
        ),
        additional_requirements=request.additional_requirements,
        apply_constraints=True,
    )

    description = request.description

    if requirements:
        repository_pex: Pex | None = None
        if python_setup.requirement_constraints:
            maybe_constraints_repository_pex = await Get(
                _ConstraintsRepositoryPex,
                _ConstraintsRepositoryPexRequest(
                    requirements,
                    request.platforms,
                    interpreter_constraints,
                    request.internal_only,
                    request.additional_lockfile_args,
                ),
            )
            if maybe_constraints_repository_pex.maybe_pex:
                repository_pex = maybe_constraints_repository_pex.maybe_pex
        elif (
            python_setup.resolve_all_constraints
            and python_setup.resolve_all_constraints_was_set_explicitly()
        ):
            raise ValueError(
                "`[python].resolve_all_constraints` is enabled, so "
                "`[python].requirement_constraints` must also be set."
            )
        elif request.resolve_and_lockfile:
            resolve, lockfile = request.resolve_and_lockfile
            repository_pex = await Get(
                Pex,
                PexRequest(
                    description=f"Installing {lockfile} for the resolve `{resolve}`",
                    output_filename=f"{path_safe(resolve)}_lockfile.pex",
                    internal_only=request.internal_only,
                    requirements=Lockfile(
                        file_path=lockfile,
                        file_path_description_of_origin=(
                            f"the resolve `{resolve}` (from "
                            "`[python].experimental_resolves_to_lockfiles`)"
                        ),
                        # TODO(#12314): Hook up lockfile staleness check.
                        lockfile_hex_digest=None,
                        req_strings=None,
                    ),
                    interpreter_constraints=interpreter_constraints,
                    platforms=request.platforms,
                    additional_args=request.additional_lockfile_args,
                ),
            )
        elif python_setup.lockfile:
            repository_pex = await Get(
                Pex,
                PexRequest(
                    description=f"Installing {python_setup.lockfile}",
                    output_filename="lockfile.pex",
                    internal_only=request.internal_only,
                    requirements=Lockfile(
                        file_path=python_setup.lockfile,
                        file_path_description_of_origin=(
                            "the option `[python].experimental_lockfile`"
                        ),
                        # TODO(#12314): Hook up lockfile staleness check once multiple lockfiles
                        # are supported.
                        lockfile_hex_digest=None,
                        req_strings=None,
                    ),
                    interpreter_constraints=interpreter_constraints,
                    platforms=request.platforms,
                    additional_args=request.additional_lockfile_args,
                ),
            )
        requirements = dataclasses.replace(requirements, repository_pex=repository_pex)

    return PexRequest(
        output_filename=request.output_filename,
        internal_only=request.internal_only,
        requirements=requirements,
        interpreter_constraints=interpreter_constraints,
        platforms=request.platforms,
        main=request.main,
        sources=merged_sources_digest,
        additional_inputs=additional_inputs,
        additional_args=additional_args,
        description=description,
    )
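`dataclasses.replace` (stdlib) returns a copy of a frozen dataclass with selected fields overridden, which is how this rule attaches `repository_pex` to `requirements` without mutation. A self-contained sketch with a simplified stand-in type:

import dataclasses
from typing import Optional, Tuple

@dataclasses.dataclass(frozen=True)
class Reqs:  # simplified stand-in for PexRequirements
    req_strings: Tuple[str, ...]
    repository_pex: Optional[str] = None

base = Reqs(req_strings=("foo==1.0",))
updated = dataclasses.replace(base, repository_pex="repository.pex")
assert base.repository_pex is None
assert updated.repository_pex == "repository.pex"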
Example 14
async def pylint_lint_partition(partition: PylintPartition,
                                pylint: Pylint) -> LintResult:
    requirements_pex_get = Get(
        Pex,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(
            (field_set.address for field_set in partition.field_sets),
            # NB: These constraints must be identical to those of the other PEXes. Otherwise, we
            # risk using a different version for the requirements than the other two PEXes, which
            # can result in a PEX runtime error about missing dependencies.
            hardcoded_interpreter_constraints=partition.interpreter_constraints,
            internal_only=True,
            direct_deps_only=True,
        ),
    )

    plugin_requirements = PexRequirements.create_from_requirement_fields(
        plugin_tgt[PythonRequirementsField]
        for plugin_tgt in partition.plugin_targets
        if plugin_tgt.has_field(PythonRequirementsField))
    pylint_pex_get = Get(
        Pex,
        PexRequest(
            output_filename="pylint.pex",
            internal_only=True,
            requirements=PexRequirements(
                [*pylint.all_requirements, *plugin_requirements]),
            interpreter_constraints=partition.interpreter_constraints,
        ),
    )

    prepare_plugin_sources_get = Get(
        StrippedPythonSourceFiles,
        PythonSourceFilesRequest(partition.plugin_targets))
    prepare_python_sources_get = Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(partition.targets_with_dependencies))
    field_set_sources_get = Get(
        SourceFiles,
        SourceFilesRequest(field_set.sources
                           for field_set in partition.field_sets))

    (
        pylint_pex,
        requirements_pex,
        prepared_plugin_sources,
        prepared_python_sources,
        field_set_sources,
    ) = await MultiGet(
        pylint_pex_get,
        requirements_pex_get,
        prepare_plugin_sources_get,
        prepare_python_sources_get,
        field_set_sources_get,
    )

    pylint_runner_pex, config_files = await MultiGet(
        Get(
            VenvPex,
            PexRequest(
                output_filename="pylint_runner.pex",
                interpreter_constraints=partition.interpreter_constraints,
                main=pylint.main,
                internal_only=True,
                pex_path=[pylint_pex, requirements_pex],
            ),
        ),
        Get(ConfigFiles, ConfigFilesRequest,
            pylint.config_request(field_set_sources.snapshot.dirs)),
    )

    prefixed_plugin_sources = (
        await Get(
            Digest,
            AddPrefix(
                prepared_plugin_sources.stripped_source_files.snapshot.digest,
                "__plugins",
            ),
        )
        if pylint.source_plugins
        else EMPTY_DIGEST
    )

    pythonpath = list(prepared_python_sources.source_roots)
    if pylint.source_plugins:
        # NB: Pylint source plugins must be explicitly loaded via PEX_EXTRA_SYS_PATH. The value must
        # point to the plugin's directory, rather than to a parent's directory, because
        # `load-plugins` takes a module name rather than a path to the module; i.e. `plugin`, but
        # not `path.to.plugin`. (This means users must have specified the parent directory as a
        # source root.)
        pythonpath.append("__plugins")

    input_digest = await Get(
        Digest,
        MergeDigests((
            config_files.snapshot.digest,
            prefixed_plugin_sources,
            prepared_python_sources.source_files.snapshot.digest,
        )),
    )

    result = await Get(
        FallibleProcessResult,
        VenvPexProcess(
            pylint_runner_pex,
            argv=generate_argv(field_set_sources, pylint),
            input_digest=input_digest,
            extra_env={"PEX_EXTRA_SYS_PATH": ":".join(pythonpath)},
            description=f"Run Pylint on {pluralize(len(partition.field_sets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    return LintResult.from_fallible_process_result(
        result,
        partition_description=str(
            sorted(str(c) for c in partition.interpreter_constraints)))