Example #1
async def setup_user_lockfile_requests(
        requested: RequestedPythonUserResolveNames, all_targets: AllTargets,
        python_setup: PythonSetup) -> UserGenerateLockfiles:
    if not (python_setup.enable_resolves
            and python_setup.resolves_generate_lockfiles):
        return UserGenerateLockfiles()

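    # Group each third-party requirement target's requirements field under the resolve it belongs to.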
    resolve_to_requirements_fields = defaultdict(set)
    for tgt in all_targets:
        if not tgt.has_fields(
            (PythonRequirementResolveField, PythonRequirementsField)):
            continue
        resolve = tgt[PythonRequirementResolveField].normalized_value(
            python_setup)
        resolve_to_requirements_fields[resolve].add(
            tgt[PythonRequirementsField])

    return UserGenerateLockfiles(
        GeneratePythonLockfile(
            requirements=PexRequirements.create_from_requirement_fields(
                resolve_to_requirements_fields[resolve],
                constraints_strings=(),
            ).req_strings,
            interpreter_constraints=InterpreterConstraints(
                python_setup.resolves_to_interpreter_constraints.get(
                    resolve, python_setup.interpreter_constraints)),
            resolve_name=resolve,
            lockfile_dest=python_setup.resolves[resolve],
            use_pex=python_setup.generate_lockfiles_with_pex,
        ) for resolve in requested)
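
A minimal, self-contained sketch of the grouping step above, using plain tuples in place of Pants' target and field types (the helper and its names are hypothetical, not the Pants API):

from collections import defaultdict

def group_requirements_by_resolve(targets, default_resolve):
    """Group requirement strings by the resolve each target opts into; a target
    with no explicit resolve falls back to `default_resolve`, mirroring
    `normalized_value(python_setup)` above."""
    grouped = defaultdict(set)
    for req_strings, resolve in targets:
        grouped[resolve or default_resolve].update(req_strings)
    return dict(grouped)

# Usage:
# group_requirements_by_resolve(
#     [({"requests>=2.0"}, None), ({"black==24.4.2"}, "tools")],
#     default_resolve="python-default",
# )
# -> {"python-default": {"requests>=2.0"}, "tools": {"black==24.4.2"}}
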
async def determine_requirement_strings_in_closure(
    request: _PexRequirementsRequest, global_requirement_constraints: GlobalRequirementConstraints
) -> PexRequirements:
    transitive_targets = await Get(TransitiveTargets, TransitiveTargetsRequest(request.addresses))
    return PexRequirements.create_from_requirement_fields(
        (
            tgt[PythonRequirementsField]
            for tgt in transitive_targets.closure
            if tgt.has_field(PythonRequirementsField)
        ),
        constraints_strings=(str(constraint) for constraint in global_requirement_constraints),
    )
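
A rough, stand-alone sketch of the filter-and-collect pattern above, with a hypothetical FakeTarget standing in for the real target API:

from dataclasses import dataclass

@dataclass(frozen=True)
class FakeTarget:
    """Hypothetical stand-in for a target: a name plus any requirement strings it declares."""
    name: str
    requirements: tuple[str, ...] = ()

def requirement_strings_in_closure(closure):
    """Keep only targets that declare requirements and collect their strings,
    deduplicating while preserving first-seen order (akin to FrozenOrderedSet)."""
    return tuple(dict.fromkeys(
        req for tgt in closure if tgt.requirements for req in tgt.requirements
    ))

# Usage:
# requirement_strings_in_closure(
#     [FakeTarget("app"), FakeTarget("3rdparty", ("requests>=2.0", "attrs"))]
# )
# -> ("requests>=2.0", "attrs")
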
Example #3
async def pylint_first_party_plugins(
        pylint: Pylint) -> PylintFirstPartyPlugins:
    if not pylint.source_plugins:
        return PylintFirstPartyPlugins(FrozenOrderedSet(), FrozenOrderedSet(),
                                       EMPTY_DIGEST)

    plugin_target_addresses = await Get(Addresses, UnparsedAddressInputs,
                                        pylint.source_plugins)
    transitive_targets = await Get(
        TransitiveTargets, TransitiveTargetsRequest(plugin_target_addresses))

    requirements_fields: OrderedSet[PythonRequirementsField] = OrderedSet()
    interpreter_constraints_fields: OrderedSet[
        InterpreterConstraintsField] = OrderedSet()
    for tgt in transitive_targets.closure:
        if tgt.has_field(PythonRequirementsField):
            requirements_fields.add(tgt[PythonRequirementsField])
        if tgt.has_field(InterpreterConstraintsField):
            interpreter_constraints_fields.add(
                tgt[InterpreterConstraintsField])

    # NB: Pylint source plugins must be explicitly loaded via PYTHONPATH (i.e. PEX_EXTRA_SYS_PATH).
    # The value must point to the plugin's directory, rather than to a parent's directory, because
    # `load-plugins` takes a module name rather than a path to the module; i.e. `plugin`, but
    # not `path.to.plugin`. (This means users must have specified the parent directory as a
    # source root.)
    stripped_sources = await Get(
        StrippedPythonSourceFiles,
        PythonSourceFilesRequest(transitive_targets.closure))
    prefixed_sources = await Get(
        Digest,
        AddPrefix(stripped_sources.stripped_source_files.snapshot.digest,
                  PylintFirstPartyPlugins.PREFIX),
    )

    return PylintFirstPartyPlugins(
        requirement_strings=PexRequirements.create_from_requirement_fields(
            requirements_fields,
            constraints_strings=(),
        ).req_strings,
        interpreter_constraints_fields=FrozenOrderedSet(
            interpreter_constraints_fields),
        sources_digest=prefixed_sources,
    )
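
A stand-alone sketch of the strip-source-roots-then-add-prefix idea from the NB comment above, using plain paths (the helper and the prefix value are hypothetical; the real prefix is PylintFirstPartyPlugins.PREFIX):

import os

def strip_and_prefix(file_path, source_root, prefix="__plugins"):
    """Drop the source root so the plugin is importable by its bare module name
    (as `load-plugins` expects), then relocate it under a dedicated prefix
    directory, mirroring the AddPrefix step above."""
    stripped = os.path.relpath(file_path, source_root)
    return os.path.join(prefix, stripped)

# Usage (the source root is the plugin's parent directory, per the NB comment):
# strip_and_prefix("pants-plugins/pylint_plugins/custom_plugin.py", "pants-plugins/pylint_plugins")
# -> "__plugins/custom_plugin.py"
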
Example #4
async def mypy_first_party_plugins(mypy: MyPy) -> MyPyFirstPartyPlugins:
    if not mypy.source_plugins:
        return MyPyFirstPartyPlugins(FrozenOrderedSet(), EMPTY_DIGEST, ())

    plugin_target_addresses = await Get(Addresses, UnparsedAddressInputs,
                                        mypy.source_plugins)
    transitive_targets = await Get(
        TransitiveTargets, TransitiveTargetsRequest(plugin_target_addresses))

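    # Collect the requirement strings declared by the plugins' third-party dependencies.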
    requirements = PexRequirements.create_from_requirement_fields(
        (plugin_tgt[PythonRequirementsField]
         for plugin_tgt in transitive_targets.closure
         if plugin_tgt.has_field(PythonRequirementsField)),
        constraints_strings=(),
    )

    sources = await Get(PythonSourceFiles,
                        PythonSourceFilesRequest(transitive_targets.closure))
    return MyPyFirstPartyPlugins(
        requirement_strings=requirements.req_strings,
        sources_digest=sources.source_files.snapshot.digest,
        source_roots=sources.source_roots,
    )
Example #5
async def get_requirements(
    dep_owner: DependencyOwner,
    union_membership: UnionMembership,
    setup_py_generation: SetupPyGeneration,
) -> ExportedTargetRequirements:
    transitive_targets = await Get(
        TransitiveTargets,
        TransitiveTargetsRequest([dep_owner.exported_target.target.address]),
    )
    ownable_tgts = [
        tgt for tgt in transitive_targets.closure if is_ownable_target(tgt, union_membership)
    ]
    owners = await MultiGet(Get(ExportedTarget, OwnedDependency(tgt)) for tgt in ownable_tgts)
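    # Partition the ownable targets into those owned by this exported target and those
    # owned by some other exported target.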
    owned_by_us: set[Target] = set()
    owned_by_others: set[Target] = set()
    for tgt, owner in zip(ownable_tgts, owners):
        (owned_by_us if owner == dep_owner.exported_target else owned_by_others).add(tgt)

    # Get all 3rdparty deps of our owned deps.
    #
    # Note that we need only consider requirements that are direct dependencies of our owned deps:
    # If T depends on R indirectly, then it must be via some direct deps U1, U2, ... For each such U,
    # if U is in the owned deps then we'll pick up R through U. And if U is not in the owned deps
    # then it's owned by an exported target ET, and so R will be in the requirements for ET, and we
    # will require ET.
    direct_deps_tgts = await MultiGet(
        Get(Targets, DependenciesRequest(tgt.get(Dependencies))) for tgt in owned_by_us
    )

    transitive_excludes: FrozenOrderedSet[Target] = FrozenOrderedSet()
    uneval_trans_excl = [
        tgt.get(Dependencies).unevaluated_transitive_excludes for tgt in transitive_targets.closure
    ]
    if uneval_trans_excl:
        nested_trans_excl = await MultiGet(
            Get(Targets, UnparsedAddressInputs, unparsed) for unparsed in uneval_trans_excl
        )
        transitive_excludes = FrozenOrderedSet(
            itertools.chain.from_iterable(excludes for excludes in nested_trans_excl)
        )

    direct_deps_chained = FrozenOrderedSet(itertools.chain.from_iterable(direct_deps_tgts))
    direct_deps_with_excl = direct_deps_chained.difference(transitive_excludes)

    req_strs = list(
        PexRequirements.create_from_requirement_fields(
            (
                tgt[PythonRequirementsField]
                for tgt in direct_deps_with_excl
                if tgt.has_field(PythonRequirementsField)
            ),
            constraints_strings=(),
        ).req_strings
    )

    # Add the requirements on any exported targets on which we depend.
    kwargs_for_exported_targets_we_depend_on = await MultiGet(
        Get(SetupKwargs, OwnedDependency(tgt)) for tgt in owned_by_others
    )
    req_strs.extend(
        f"{kwargs.name}{setup_py_generation.first_party_dependency_version(kwargs.version)}"
        for kwargs in set(kwargs_for_exported_targets_we_depend_on)
    )
    return ExportedTargetRequirements(req_strs)
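
A small sketch of the final assembly step above: third-party requirement strings from the owned dependencies plus one pin per exported target we depend on. The helper and the `==`/`~=` choice are simplifications of `first_party_dependency_version`, not the Pants API:

def build_exported_requirements(third_party_req_strings, exported_dep_versions, exact_pins=True):
    """Combine third-party requirement strings with pins on first-party
    exported dependencies (a mapping of distribution name -> version)."""
    op = "==" if exact_pins else "~="
    reqs = list(third_party_req_strings)
    reqs.extend(f"{name}{op}{version}" for name, version in sorted(exported_dep_versions.items()))
    return reqs

# Usage:
# build_exported_requirements(["requests>=2.0"], {"mylib-core": "1.2.3"})
# -> ["requests>=2.0", "mylib-core==1.2.3"]
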
async def _setup_constraints_repository_pex(
    constraints_request: _ConstraintsRepositoryPexRequest,
    python_setup: PythonSetup,
    global_requirement_constraints: GlobalRequirementConstraints,
) -> OptionalPexRequest:
    request = constraints_request.repository_pex_request
    # NB: it isn't safe to resolve against the whole constraints file if
    # platforms are in use. See https://github.com/pantsbuild/pants/issues/12222.
    if not python_setup.resolve_all_constraints or request.platforms or request.complete_platforms:
        return OptionalPexRequest(None)

    constraints_path = python_setup.requirement_constraints
    assert constraints_path is not None

    transitive_targets = await Get(TransitiveTargets, TransitiveTargetsRequest(request.addresses))

    requirements = PexRequirements.create_from_requirement_fields(
        (
            tgt[PythonRequirementsField]
            for tgt in transitive_targets.closure
            if tgt.has_field(PythonRequirementsField)
        ),
        constraints_strings=(str(constraint) for constraint in global_requirement_constraints),
    )

    # In requirement strings, Foo_-Bar.BAZ and foo-bar-baz refer to the same project. We let
    # packaging canonicalize for us.
    # See: https://www.python.org/dev/peps/pep-0503/#normalized-names
    url_reqs = set()  # E.g., 'foobar@ git+https://github.com/foo/bar.git@branch'
    name_reqs = set()  # E.g., foobar>=1.2.3
    name_req_projects = set()
    constraints_file_reqs = set(global_requirement_constraints)

    for req_str in requirements.req_strings:
        req = PipRequirement.parse(req_str)
        if req.url:
            url_reqs.add(req)
        else:
            name_reqs.add(req)
            name_req_projects.add(canonicalize_project_name(req.project_name))

    constraint_file_projects = {
        canonicalize_project_name(req.project_name) for req in constraints_file_reqs
    }
    # Constraints files must only contain name reqs, not URL reqs (those are already
    # constrained by their very nature). See https://github.com/pypa/pip/issues/8210.
    unconstrained_projects = name_req_projects - constraint_file_projects
    if unconstrained_projects:
        logger.warning(
            f"The constraints file {constraints_path} does not contain "
            f"entries for the following requirements: {', '.join(unconstrained_projects)}.\n\n"
            f"Ignoring `[python_setup].resolve_all_constraints` option."
        )
        return OptionalPexRequest(None)

    interpreter_constraints = await Get(
        InterpreterConstraints,
        InterpreterConstraintsRequest,
        request.to_interpreter_constraints_request(),
    )

    # To get a full set of requirements we must add the URL requirements to the
    # constraints file, since the latter cannot contain URL requirements.
    # NB: We can only add the URL requirements we know about here, i.e., those that
    #  are transitive deps of the targets in play. There may be others in the repo.
    #  So we may end up creating a few different repository pexes, each with identical
    #  name requirements but different subsets of URL requirements. Fortunately since
    #  all these repository pexes will have identical pinned versions of everything,
    #  this is not a correctness issue, only a performance one.
    all_constraints = {str(req) for req in (constraints_file_reqs | url_reqs)}
    repository_pex = PexRequest(
        description=f"Resolving {constraints_path}",
        output_filename="repository.pex",
        internal_only=request.internal_only,
        requirements=PexRequirements(
            all_constraints,
            constraints_strings=(str(constraint) for constraint in global_requirement_constraints),
            # TODO: See PexRequirements docs.
            is_all_constraints_resolve=True,
        ),
        interpreter_constraints=interpreter_constraints,
        platforms=request.platforms,
        complete_platforms=request.complete_platforms,
        additional_args=request.additional_lockfile_args,
    )
    return OptionalPexRequest(repository_pex)
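
The coverage check above can be sketched stand-alone with the packaging library (canonicalize_name and Requirement are packaging's real APIs; the function itself is a simplified illustration, not the Pants code path):

from packaging.requirements import Requirement
from packaging.utils import canonicalize_name

def unconstrained_projects(requirement_strings, constraint_strings):
    """Return the canonical (PEP 503) project names that appear in the
    requirements but have no entry in the constraints file. URL requirements
    are skipped, since constraints files cannot contain them."""
    constrained = {canonicalize_name(Requirement(c).name) for c in constraint_strings}
    needed = {
        canonicalize_name(r.name)
        for r in map(Requirement, requirement_strings)
        if r.url is None
    }
    return needed - constrained

# Usage:
# unconstrained_projects(
#     ["Foo_-Bar.BAZ>=1.0", "pkg @ git+https://github.com/org/pkg.git@main"],
#     ["foo-bar-baz==2.1"],
# )
# -> set()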