async def setup_pytest_lockfile(
    _: PytestLockfileSentinel, pytest: PyTest, python_setup: PythonSetup
) -> PythonLockfileRequest:
    """Generate a lockfile request for Pytest that is compatible with every `python_tests`
    target in the repository.

    Although each test target runs in isolation, one lockfile must serve all of them (and
    their transitive deps). We AND the constraints within each test's transitive closure,
    then OR the distinct results, so every interpreter any test could use is covered.
    """
    if not pytest.uses_lockfile:
        return PythonLockfileRequest.from_tool(pytest)

    build_targets = await Get(UnexpandedTargets, AddressSpecs([DescendantAddresses("")]))
    test_closures = await MultiGet(
        Get(TransitiveTargets, TransitiveTargetsRequest([tgt.address]))
        for tgt in build_targets
        if PythonTestFieldSet.is_applicable(tgt)
    )
    per_test_constraints = {
        InterpreterConstraints.create_from_targets(tts.closure, python_setup)
        for tts in test_closures
    }
    merged = InterpreterConstraints(itertools.chain.from_iterable(per_test_constraints))
    fallback = InterpreterConstraints(python_setup.interpreter_constraints)
    return PythonLockfileRequest.from_tool(pytest, merged or fallback)
async def setup_bandit_lockfile(
    _: BanditLockfileSentinel, bandit: Bandit, python_setup: PythonSetup
) -> GeneratePythonLockfile:
    """Generate a lockfile request for Bandit that works with every partition.

    While Bandit will run in partitions, we need a single lockfile that works with every
    partition. This ORs all unique interpreter constraints, so every possible Python
    interpreter used will be covered.
    """
    if not bandit.uses_lockfile:
        return GeneratePythonLockfile.from_tool(
            bandit, use_pex=python_setup.generate_lockfiles_with_pex
        )
    all_tgts = await Get(AllTargets, AllTargetsRequest())
    unique_constraints = {
        InterpreterConstraints.create_from_targets([tgt], python_setup)
        for tgt in all_tgts
        if BanditFieldSet.is_applicable(tgt)
    }
    # `create_from_targets()` returns None for targets without interpreter-constraints
    # fields; drop those entries before merging, since chaining over None raises
    # TypeError. (Mirrors the filtering in the sibling `_bandit_interpreter_constraints`
    # and `_mypy_interpreter_constraints` rules.)
    constraints = InterpreterConstraints(
        itertools.chain.from_iterable(ic for ic in unique_constraints if ic)
    )
    return GeneratePythonLockfile.from_tool(
        bandit,
        constraints or InterpreterConstraints(python_setup.interpreter_constraints),
        use_pex=python_setup.generate_lockfiles_with_pex,
    )
async def package_python_dist(
    field_set: PythonDistributionFieldSet,
    python_setup: PythonSetup,
) -> BuiltPackage:
    """Build the artifacts for a `python_distribution` target via its setup.py chroot.

    Computes interpreter constraints from the target's transitive closure (falling back to
    the global defaults), materializes a setup.py chroot, then either runs the configured
    setup.py commands to produce dists or dumps the chroot itself as the artifact.
    """
    transitive_targets = await Get(TransitiveTargets, TransitiveTargetsRequest([field_set.address]))
    # The first root is the `python_distribution` target being packaged.
    exported_target = ExportedTarget(transitive_targets.roots[0])
    interpreter_constraints = InterpreterConstraints.create_from_targets(
        transitive_targets.closure, python_setup
    ) or InterpreterConstraints(python_setup.interpreter_constraints)
    chroot = await Get(
        SetupPyChroot,
        SetupPyChrootRequest(exported_target, py2=interpreter_constraints.includes_python2()),
    )
    # If commands were provided, run setup.py with them; otherwise just dump the chroot.
    commands = exported_target.target.get(SetupPyCommandsField).value or ()
    if commands:
        validate_commands(commands)
        setup_py_result = await Get(
            RunSetupPyResult,
            RunSetupPyRequest(exported_target, interpreter_constraints, chroot, commands),
        )
        # Snapshot the output digest to learn the produced file names.
        dist_snapshot = await Get(Snapshot, Digest, setup_py_result.output)
        return BuiltPackage(
            setup_py_result.output,
            tuple(BuiltPackageArtifact(path) for path in dist_snapshot.files),
        )
    else:
        # No commands: emit the chroot contents under a `<name>-<version>/` prefix.
        dirname = f"{chroot.setup_kwargs.name}-{chroot.setup_kwargs.version}"
        rel_chroot = await Get(Digest, AddPrefix(chroot.digest, dirname))
        return BuiltPackage(rel_chroot, (BuiltPackageArtifact(dirname),))
async def setup_mypy_lockfile(
    _: MyPyLockfileSentinel,
    first_party_plugins: MyPyFirstPartyPlugins,
    mypy: MyPy,
    python_setup: PythonSetup,
) -> GeneratePythonLockfile:
    """Generate a lockfile request for MyPy, preferring repo-wide interpreter constraints
    when the user has not overridden MyPy's own `interpreter_constraints` option."""
    if not mypy.uses_lockfile:
        return GeneratePythonLockfile.from_tool(
            mypy, use_pex=python_setup.generate_lockfiles_with_pex
        )

    constraints = mypy.interpreter_constraints
    if mypy.options.is_default("interpreter_constraints"):
        targets = await Get(AllTargets, AllTargetsRequest())
        closures = await MultiGet(
            Get(TransitiveTargets, TransitiveTargetsRequest([tgt.address]))
            for tgt in targets
            if MyPyFieldSet.is_applicable(tgt)
        )
        distinct = {
            InterpreterConstraints.create_from_targets(tts.closure, python_setup)
            for tts in closures
        }
        repo_constraints = InterpreterConstraints(itertools.chain.from_iterable(distinct))
        # Only adopt the repo-wide constraints when they require Python 3.8+
        # (presumably MyPy's own floor — behavior preserved from the original).
        if repo_constraints.requires_python38_or_newer(python_setup.interpreter_universe):
            constraints = repo_constraints

    return GeneratePythonLockfile.from_tool(
        mypy,
        constraints,
        extra_requirements=first_party_plugins.requirement_strings,
        use_pex=python_setup.generate_lockfiles_with_pex,
    )
async def setup_mypy_lockfile(
    _: MyPyLockfileSentinel,
    first_party_plugins: MyPyFirstPartyPlugins,
    mypy: MyPy,
    python_setup: PythonSetup,
) -> PythonLockfileRequest:
    """Generate the lockfile request for the MyPy tool.

    If the user kept MyPy's default interpreter constraints, compute repo-wide constraints
    from every MyPy-applicable target's transitive closure and use those instead — but
    only when they satisfy the Python 3.8+ check below.
    """
    if not mypy.uses_lockfile:
        return PythonLockfileRequest.from_tool(mypy)
    constraints = mypy.interpreter_constraints
    if mypy.options.is_default("interpreter_constraints"):
        all_build_targets = await Get(UnexpandedTargets, AddressSpecs([DescendantAddresses("")]))
        all_transitive_targets = await MultiGet(
            Get(TransitiveTargets, TransitiveTargetsRequest([tgt.address]))
            for tgt in all_build_targets
            if MyPyFieldSet.is_applicable(tgt)
        )
        unique_constraints = {
            InterpreterConstraints.create_from_targets(transitive_targets.closure, python_setup)
            for transitive_targets in all_transitive_targets
        }
        # OR the distinct per-closure constraints into a single merged set.
        code_constraints = InterpreterConstraints(
            itertools.chain.from_iterable(unique_constraints)
        )
        # Only swap in the repo constraints when they are new enough (3.8+).
        if code_constraints.requires_python38_or_newer(python_setup.interpreter_universe):
            constraints = code_constraints
    return PythonLockfileRequest.from_tool(
        mypy, constraints, extra_requirements=first_party_plugins.requirement_strings
    )
async def setup_setuptools_lockfile(
    _: SetuptoolsLockfileSentinel, setuptools: Setuptools, python_setup: PythonSetup
) -> GeneratePythonLockfile:
    """Generate a lockfile request for setuptools compatible with every
    `python_distribution` target's transitive closure."""
    if not setuptools.uses_custom_lockfile:
        return GeneratePythonLockfile.from_tool(
            setuptools, use_pex=python_setup.generate_lockfiles_with_pex
        )

    targets = await Get(AllTargets, AllTargetsRequest())
    dist_closures = await MultiGet(
        Get(TransitiveTargets, TransitiveTargetsRequest([tgt.address]))
        for tgt in targets
        if PythonDistributionFieldSet.is_applicable(tgt)
    )
    default_ics = InterpreterConstraints(python_setup.interpreter_constraints)
    # Substitute the global default for closures that declare no constraints of their own.
    distinct = {
        InterpreterConstraints.create_from_targets(tts.closure, python_setup) or default_ics
        for tts in dist_closures
    }
    merged = InterpreterConstraints(itertools.chain.from_iterable(distinct))
    return GeneratePythonLockfile.from_tool(
        setuptools,
        merged or default_ics,
        use_pex=python_setup.generate_lockfiles_with_pex,
    )
async def package_python_dist(
    field_set: PythonDistributionFieldSet,
    python_setup: PythonSetup,
) -> BuiltPackage:
    """Build wheel and/or sdist artifacts for a `python_distribution` target.

    Validates that at least one of `wheel`/`sdist` is requested, computes interpreter
    constraints from the transitive closure (falling back to the global defaults),
    materializes a build chroot, and runs the resolved build system in it.

    Raises:
        NoDistTypeSelected: if neither `wheel` nor `sdist` is enabled on the target.
    """
    transitive_targets = await Get(TransitiveTargets, TransitiveTargetsRequest([field_set.address]))
    exported_target = ExportedTarget(transitive_targets.roots[0])
    dist_tgt = exported_target.target
    wheel = dist_tgt.get(WheelField).value
    sdist = dist_tgt.get(SDistField).value
    if not wheel and not sdist:
        raise NoDistTypeSelected(
            f"In order to package {dist_tgt.address.spec} at least one of {WheelField.alias!r} or "
            f"{SDistField.alias!r} must be `True`."
        )
    wheel_config_settings = dist_tgt.get(WheelConfigSettingsField).value or FrozenDict()
    sdist_config_settings = dist_tgt.get(SDistConfigSettingsField).value or FrozenDict()
    interpreter_constraints = InterpreterConstraints.create_from_targets(
        transitive_targets.closure, python_setup
    ) or InterpreterConstraints(python_setup.interpreter_constraints)
    chroot = await Get(
        DistBuildChroot,
        DistBuildChrootRequest(
            exported_target,
            py2=interpreter_constraints.includes_python2(),
        ),
    )
    # We prefix the entire chroot, and run with this prefix as the cwd, so that we can capture
    # any changes setup made within it without also capturing other artifacts of the pex
    # process invocation.
    chroot_prefix = "chroot"
    working_directory = os.path.join(chroot_prefix, chroot.working_directory)
    prefixed_chroot = await Get(Digest, AddPrefix(chroot.digest, chroot_prefix))
    build_system = await Get(BuildSystem, BuildSystemRequest(prefixed_chroot, working_directory))
    setup_py_result = await Get(
        DistBuildResult,
        DistBuildRequest(
            build_system=build_system,
            interpreter_constraints=interpreter_constraints,
            build_wheel=wheel,
            build_sdist=sdist,
            input=prefixed_chroot,
            working_directory=working_directory,
            target_address_spec=exported_target.target.address.spec,
            wheel_config_settings=wheel_config_settings,
            sdist_config_settings=sdist_config_settings,
        ),
    )
    # Snapshot the output digest so the produced file names become the artifacts.
    dist_snapshot = await Get(Snapshot, Digest, setup_py_result.output)
    return BuiltPackage(
        setup_py_result.output,
        tuple(BuiltPackageArtifact(path) for path in dist_snapshot.files),
    )
async def export_venv(
    request: ExportedVenvRequest, python_setup: PythonSetup, pex_env: PexEnvironment
) -> ExportableData:
    """Export a virtualenv for the requested targets.

    Picks the minimum interpreter satisfying the targets' merged constraints, builds an
    internal venv PEX for their requirements, and returns exportable data symlinking to
    the venv inside the PEX root.

    Raises:
        ExportError: if no interpreter in the universe satisfies the merged constraints.
    """
    # Pick a single interpreter for the venv.
    interpreter_constraints = InterpreterConstraints.create_from_targets(
        request.targets, python_setup
    )
    if not interpreter_constraints:
        # If there were no targets that defined any constraints, fall back to the global ones.
        interpreter_constraints = InterpreterConstraints(python_setup.interpreter_constraints)
    min_interpreter = interpreter_constraints.snap_to_minimum(python_setup.interpreter_universe)
    if not min_interpreter:
        raise ExportError(
            "The following interpreter constraints were computed for all the targets for which "
            f"export was requested: {interpreter_constraints}. There is no python interpreter "
            "compatible with these constraints. Please restrict the target set to one that shares "
            "a compatible interpreter."
        )
    venv_pex = await Get(
        VenvPex,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(
            (tgt.address for tgt in request.targets),
            internal_only=True,
            hardcoded_interpreter_constraints=min_interpreter,
        ),
    )
    complete_pex_env = pex_env.in_workspace()
    venv_abspath = os.path.join(complete_pex_env.pex_root, venv_pex.venv_rel_dir)
    # Run the venv_pex to get the full python version (including patch #), so we
    # can use it in the symlink name.
    res = await Get(
        ProcessResult,
        VenvPexProcess(
            venv_pex=venv_pex,
            description="Create virtualenv",
            argv=["-c", "import sys; print('.'.join(str(x) for x in sys.version_info[0:3]))"],
            input_digest=venv_pex.digest,
        ),
    )
    py_version = res.stdout.strip().decode()
    return ExportableData(
        f"virtualenv for {min_interpreter}",
        os.path.join("python", "virtualenv"),
        symlinks=[Symlink(venv_abspath, py_version)],
    )
async def _black_interpreter_constraints(
    black: Black, python_setup: PythonSetup
) -> InterpreterConstraints:
    """Return Black's interpreter constraints, replaced by the repo's own constraints when
    the user kept the default and the repo requires Python 3.8+."""
    if not black.options.is_default("interpreter_constraints"):
        return black.interpreter_constraints

    all_tgts = await Get(AllTargets, AllTargetsRequest())
    # TODO: fix to use `FieldSet.is_applicable()`.
    unskipped = (tgt for tgt in all_tgts if not tgt.get(SkipBlackField).value)
    repo_constraints = InterpreterConstraints.create_from_targets(unskipped, python_setup)
    if repo_constraints is not None and repo_constraints.requires_python38_or_newer(
        python_setup.interpreter_universe
    ):
        return repo_constraints
    return black.interpreter_constraints
async def _bandit_interpreter_constraints(
    python_setup: PythonSetup,
) -> InterpreterConstraints:
    """Compute one set of interpreter constraints that works with every Bandit partition.

    ORs all unique per-target interpreter constraints, so every possible Python
    interpreter used is covered; falls back to the global defaults when none are found.
    """
    all_tgts = await Get(AllTargets, AllTargetsRequest())
    per_target = {
        InterpreterConstraints.create_from_targets([tgt], python_setup)
        for tgt in all_tgts
        if BanditFieldSet.is_applicable(tgt)
    }
    merged = InterpreterConstraints(
        itertools.chain.from_iterable(ic for ic in per_target if ic)
    )
    if merged:
        return merged
    return InterpreterConstraints(python_setup.interpreter_constraints)
async def interpreter_constraints_for_targets(
    request: InterpreterConstraintsRequest, python_setup: PythonSetup
) -> InterpreterConstraints:
    """Compute the merged interpreter constraints for the transitive closure of the
    requested addresses, honoring any hardcoded override on the request."""
    if request.hardcoded_interpreter_constraints:
        return request.hardcoded_interpreter_constraints

    transitive_targets = await Get(
        TransitiveTargets, TransitiveTargetsRequest(request.addresses)
    )
    computed = InterpreterConstraints.create_from_targets(
        transitive_targets.closure, python_setup
    )
    if computed:
        return computed
    # No targets, or only targets without `interpreter_constraints` (e.g.
    # `python_requirement`) — relevant for `./pants repl` with no specs.
    # Fall back to the global constraints.
    return InterpreterConstraints(python_setup.interpreter_constraints)
async def _mypy_interpreter_constraints(
    mypy: MyPy, python_setup: PythonSetup
) -> InterpreterConstraints:
    """Return MyPy's interpreter constraints, replaced by the merged repo-wide constraints
    when the user kept the default and those constraints require Python 3.8+."""
    if not mypy.options.is_default("interpreter_constraints"):
        return mypy.interpreter_constraints

    all_tgts = await Get(AllTargets, AllTargetsRequest())
    per_target = {
        InterpreterConstraints.create_from_targets([tgt], python_setup)
        for tgt in all_tgts
        if MyPyFieldSet.is_applicable(tgt)
    }
    repo_constraints = InterpreterConstraints(
        itertools.chain.from_iterable(ic for ic in per_target if ic)
    )
    if repo_constraints.requires_python38_or_newer(python_setup.interpreter_universe):
        return repo_constraints
    return mypy.interpreter_constraints
async def mypy_typecheck(
    request: MyPyRequest, mypy: MyPy, python_setup: PythonSetup
) -> CheckResults:
    """Run MyPy over the requested field sets, partitioned by interpreter constraints.

    Field sets whose transitive closures resolve to the same interpreter constraints are
    merged into one partition; partitions are then checked concurrently.
    """
    if mypy.skip:
        return CheckResults([], checker_name="MyPy")
    # When determining how to batch by interpreter constraints, we must consider the entire
    # transitive closure to get the final resulting constraints.
    # TODO(#10863): Improve the performance of this.
    transitive_targets_per_field_set = await MultiGet(
        Get(TransitiveTargets, TransitiveTargetsRequest([field_set.address]))
        for field_set in request.field_sets
    )
    interpreter_constraints_to_transitive_targets = defaultdict(set)
    for transitive_targets in transitive_targets_per_field_set:
        # Fall back to MyPy's own constraints when the closure declares none.
        interpreter_constraints = (
            InterpreterConstraints.create_from_targets(transitive_targets.closure, python_setup)
            or mypy.interpreter_constraints
        )
        interpreter_constraints_to_transitive_targets[interpreter_constraints].add(
            transitive_targets
        )
    partitions = []
    # Sort for deterministic partition ordering.
    for interpreter_constraints, all_transitive_targets in sorted(
        interpreter_constraints_to_transitive_targets.items()
    ):
        combined_roots: OrderedSet[Target] = OrderedSet()
        combined_closure: OrderedSet[Target] = OrderedSet()
        for transitive_targets in all_transitive_targets:
            combined_roots.update(transitive_targets.roots)
            combined_closure.update(transitive_targets.closure)
        partitions.append(
            MyPyPartition(
                FrozenOrderedSet(combined_roots),
                FrozenOrderedSet(combined_closure),
                interpreter_constraints,
            )
        )
    partitioned_results = await MultiGet(
        Get(CheckResult, MyPyPartition, partition) for partition in partitions
    )
    return CheckResults(partitioned_results, checker_name="MyPy")
async def setup_black_lockfile(
    _: BlackLockfileSentinel, black: Black, python_setup: PythonSetup
) -> PythonLockfileRequest:
    """Generate a lockfile request for Black, widening to the repo's constraints when the
    user kept Black's default and the repo requires Python 3.8+."""
    if not black.uses_lockfile:
        return PythonLockfileRequest.from_tool(black)

    constraints = black.interpreter_constraints
    if black.options.is_default("interpreter_constraints"):
        build_targets = await Get(
            UnexpandedTargets, AddressSpecs([DescendantAddresses("")])
        )
        unskipped = (tgt for tgt in build_targets if not tgt.get(SkipBlackField).value)
        repo_constraints = InterpreterConstraints.create_from_targets(
            unskipped, python_setup
        )
        if repo_constraints.requires_python38_or_newer(python_setup.interpreter_universe):
            constraints = repo_constraints
    return PythonLockfileRequest.from_tool(black, constraints)
async def setup_black_lockfile(
    _: BlackLockfileSentinel, black: Black, python_setup: PythonSetup
) -> PythonLockfileRequest:
    """Generate a lockfile request for Black.

    If the user kept Black's default interpreter constraints, compute the repo's own
    constraints from all non-skipped targets and use those when they require Python 3.8+.
    """
    if not black.uses_lockfile:
        return PythonLockfileRequest.from_tool(black)
    constraints = black.interpreter_constraints
    if black.options.is_default("interpreter_constraints"):
        all_tgts = await Get(AllTargets, AllTargetsRequest())
        # TODO: fix to use `FieldSet.is_applicable()`.
        code_constraints = InterpreterConstraints.create_from_targets(
            (tgt for tgt in all_tgts if not tgt.get(SkipBlackField).value), python_setup
        )
        # `create_from_targets()` returns None when no target carries constraint fields;
        # guard before invoking a method on it (mirrors `_black_interpreter_constraints`,
        # which performs the identical computation with this guard).
        if code_constraints is not None and code_constraints.requires_python38_or_newer(
            python_setup.interpreter_universe
        ):
            constraints = code_constraints
    return PythonLockfileRequest.from_tool(black, constraints)
async def infer_python_dependencies_via_imports(
    request: InferPythonImportDependencies,
    python_infer_subsystem: PythonInferSubsystem,
    python_setup: PythonSetup,
) -> InferredDependencies:
    """Infer dependencies for a target by parsing the imports in its sources.

    Parses the source field for imports, maps each imported module to owning targets, and
    returns the unambiguous owners plus any ambiguity the explicit deps disambiguate.
    """
    if not python_infer_subsystem.imports:
        return InferredDependencies([])
    wrapped_tgt = await Get(WrappedTarget, Address, request.sources_field.address)
    explicitly_provided_deps, detected_imports = await MultiGet(
        Get(ExplicitlyProvidedDependencies, DependenciesRequest(wrapped_tgt.target[Dependencies])),
        Get(
            ParsedPythonImports,
            ParsePythonImportsRequest(
                request.sources_field,
                InterpreterConstraints.create_from_targets([wrapped_tgt.target], python_setup),
                string_imports=python_infer_subsystem.string_imports,
            ),
        ),
    )
    # Resolve each imported module to the targets that own it.
    owners_per_import = await MultiGet(
        Get(PythonModuleOwners, PythonModule(imported_module))
        for imported_module in detected_imports
    )
    merged_result: set[Address] = set()
    for owners, imp in zip(owners_per_import, detected_imports):
        merged_result.update(owners.unambiguous)
        address = wrapped_tgt.target.address
        # Warn when an import resolves to multiple owners that explicit deps don't settle.
        explicitly_provided_deps.maybe_warn_of_ambiguous_dependency_inference(
            owners.ambiguous,
            address,
            import_reference="module",
            context=f"The target {address} imports `{imp}`",
        )
        maybe_disambiguated = explicitly_provided_deps.disambiguated(owners.ambiguous)
        if maybe_disambiguated:
            merged_result.add(maybe_disambiguated)
    return InferredDependencies(sorted(merged_result))
async def setup_flake8_lockfile(
    _: Flake8LockfileSentinel, flake8: Flake8, python_setup: PythonSetup
) -> PythonLockfileRequest:
    """Generate a lockfile request for Flake8 that works with every partition.

    ORs all unique per-target interpreter constraints, so every possible Python
    interpreter used is covered.
    """
    if not flake8.uses_lockfile:
        return PythonLockfileRequest.from_tool(flake8)

    build_targets = await Get(UnexpandedTargets, AddressSpecs([DescendantAddresses("")]))
    per_target = {
        InterpreterConstraints.create_from_targets([tgt], python_setup)
        for tgt in build_targets
        if Flake8FieldSet.is_applicable(tgt)
    }
    merged = InterpreterConstraints(itertools.chain.from_iterable(per_target))
    fallback = InterpreterConstraints(python_setup.interpreter_constraints)
    return PythonLockfileRequest.from_tool(flake8, merged or fallback)
async def _pytest_interpreter_constraints(
    python_setup: PythonSetup,
) -> InterpreterConstraints:
    """Compute one set of interpreter constraints covering every `python_test` target.

    Even though each test runs in isolation, one set of constraints must serve all of them
    (and their transitive deps): AND within each test's transitive closure, then OR the
    distinct results, so every interpreter any test could use is covered.
    """
    all_tgts = await Get(AllTargets, AllTargetsRequest())
    test_closures = await MultiGet(
        Get(TransitiveTargets, TransitiveTargetsRequest([tgt.address]))
        for tgt in all_tgts
        if PythonTestFieldSet.is_applicable(tgt)
    )
    per_test = {
        InterpreterConstraints.create_from_targets(tts.closure, python_setup)
        for tts in test_closures
    }
    merged = InterpreterConstraints(
        itertools.chain.from_iterable(ic for ic in per_test if ic)
    )
    if merged:
        return merged
    return InterpreterConstraints(python_setup.interpreter_constraints)
async def _by_interpreter_constraints_and_resolve(
    field_sets: Iterable[FS],
    python_setup: PythonSetup,
) -> Mapping[
    tuple[ResolveName, InterpreterConstraints],
    tuple[OrderedSet[FS], OrderedSet[CoarsenedTarget]],
]:
    """Group field sets and their coarsened targets by (resolve, interpreter constraints).

    Coarsens the field sets' targets (expanding generators), then buckets each root field
    set together with its coarsened target under the pair of its normalized resolve and
    the constraints computed from the coarsened members.
    """
    coarsened_targets = await Get(
        CoarsenedTargets,
        CoarsenedTargetsRequest(
            (field_set.address for field_set in field_sets), expanded_targets=True
        ),
    )
    coarsened_targets_by_address = coarsened_targets.by_address()
    resolve_and_interpreter_constraints_to_coarsened_targets: Mapping[
        tuple[str, InterpreterConstraints],
        tuple[OrderedSet[FS], OrderedSet[CoarsenedTarget]],
    ] = defaultdict(lambda: (OrderedSet(), OrderedSet()))
    for root in field_sets:
        ct = coarsened_targets_by_address[root.address]
        # If there is a cycle in the roots, we still only take the first resolve, as the other
        # members will be validated when the partition is actually built.
        resolve = ct.representative[PythonResolveField].normalized_value(python_setup)
        interpreter_constraints = InterpreterConstraints.create_from_targets(
            ct.members, python_setup
        )
        # If a CoarsenedTarget did not have IntepreterConstraints, then it's because it didn't
        # contain any targets with the field, and so there is no point checking it.
        if interpreter_constraints is None:
            continue
        roots, root_cts = resolve_and_interpreter_constraints_to_coarsened_targets[
            (resolve, interpreter_constraints)
        ]
        roots.add(root)
        root_cts.add(ct)
    return resolve_and_interpreter_constraints_to_coarsened_targets
async def setup_setuptools_lockfile(
    _: SetuptoolsLockfileSentinel, setuptools: Setuptools, python_setup: PythonSetup
) -> PythonLockfileRequest:
    """Generate a lockfile request for setuptools compatible with every
    `python_distribution` target's transitive closure."""
    if not setuptools.uses_lockfile:
        return PythonLockfileRequest.from_tool(setuptools)

    build_targets = await Get(UnexpandedTargets, AddressSpecs([DescendantAddresses("")]))
    dist_closures = await MultiGet(
        Get(TransitiveTargets, TransitiveTargetsRequest([tgt.address]))
        for tgt in build_targets
        if PythonDistributionFieldSet.is_applicable(tgt)
    )
    default_ics = InterpreterConstraints(python_setup.interpreter_constraints)
    # Substitute the global default for closures that declare no constraints of their own.
    distinct = {
        InterpreterConstraints.create_from_targets(tts.closure, python_setup) or default_ics
        for tts in dist_closures
    }
    merged = InterpreterConstraints(itertools.chain.from_iterable(distinct))
    return PythonLockfileRequest.from_tool(setuptools, merged or default_ics)
async def mypy_determine_partitions(
    request: MyPyRequest, mypy: MyPy, python_setup: PythonSetup
) -> MyPyPartitions:
    """Partition the requested field sets by (resolve, interpreter constraints).

    Field sets whose transitive closures share a resolve and constraints are merged into a
    single partition so MyPy runs once per compatible group.
    """
    # When determining how to batch by interpreter constraints, we must consider the entire
    # transitive closure to get the final resulting constraints.
    transitive_targets_per_field_set = await MultiGet(
        Get(TransitiveTargets, TransitiveTargetsRequest([field_set.address]))
        for field_set in request.field_sets
    )
    resolve_and_interpreter_constraints_to_transitive_targets = defaultdict(set)
    for transitive_targets in transitive_targets_per_field_set:
        # Each request had a single root, so roots[0] is that field set's target.
        resolve = transitive_targets.roots[0][PythonResolveField].normalized_value(python_setup)
        # Fall back to MyPy's own constraints when the closure declares none.
        interpreter_constraints = (
            InterpreterConstraints.create_from_targets(transitive_targets.closure, python_setup)
            or mypy.interpreter_constraints
        )
        resolve_and_interpreter_constraints_to_transitive_targets[
            (resolve, interpreter_constraints)
        ].add(transitive_targets)
    partitions = []
    # Sort for deterministic partition ordering.
    for (_resolve, interpreter_constraints), all_transitive_targets in sorted(
        resolve_and_interpreter_constraints_to_transitive_targets.items()
    ):
        combined_roots: OrderedSet[Target] = OrderedSet()
        combined_closure: OrderedSet[Target] = OrderedSet()
        for transitive_targets in all_transitive_targets:
            combined_roots.update(transitive_targets.roots)
            combined_closure.update(transitive_targets.closure)
        partitions.append(
            # Note that we don't need to pass the resolve. pex_from_targets.py will already
            # calculate it by inspecting the roots & validating that all dependees are valid.
            MyPyPartition(
                FrozenOrderedSet(combined_roots),
                FrozenOrderedSet(combined_closure),
                interpreter_constraints,
            )
        )
    return MyPyPartitions(partitions)
async def infer_python_dependencies_via_imports(
    request: InferPythonImportDependencies,
    python_infer_subsystem: PythonInferSubsystem,
    python_setup: PythonSetup,
) -> InferredDependencies:
    """Infer dependencies for a target by parsing the imports in its sources.

    Maps each detected import to owning targets, collecting unambiguous owners and any
    ambiguity resolved by explicit deps. Imports with no owner (and not in the default
    unowned list) are reported per `unowned_dependency_behavior`.

    Raises:
        UnownedDependencyError: if unowned imports exist and the behavior is RaiseError.
    """
    if not python_infer_subsystem.imports:
        return InferredDependencies([])
    wrapped_tgt = await Get(WrappedTarget, Address, request.sources_field.address)
    explicitly_provided_deps, detected_imports = await MultiGet(
        Get(ExplicitlyProvidedDependencies, DependenciesRequest(wrapped_tgt.target[Dependencies])),
        Get(
            ParsedPythonImports,
            ParsePythonImportsRequest(
                cast(PythonSourceField, request.sources_field),
                InterpreterConstraints.create_from_targets([wrapped_tgt.target], python_setup),
                string_imports=python_infer_subsystem.string_imports,
                string_imports_min_dots=python_infer_subsystem.string_imports_min_dots,
            ),
        ),
    )
    # Resolve each imported module to the targets that own it.
    owners_per_import = await MultiGet(
        Get(PythonModuleOwners, PythonModule(imported_module))
        for imported_module in detected_imports
    )
    merged_result: set[Address] = set()
    unowned_imports: set[str] = set()
    address = wrapped_tgt.target.address
    for owners, imp in zip(owners_per_import, detected_imports):
        merged_result.update(owners.unambiguous)
        # Warn when an import resolves to multiple owners that explicit deps don't settle.
        explicitly_provided_deps.maybe_warn_of_ambiguous_dependency_inference(
            owners.ambiguous,
            address,
            import_reference="module",
            context=f"The target {address} imports `{imp}`",
        )
        maybe_disambiguated = explicitly_provided_deps.disambiguated(owners.ambiguous)
        if maybe_disambiguated:
            merged_result.add(maybe_disambiguated)
        # Track imports with no owner at all, unless the top-level module is known-unowned.
        if not owners.unambiguous and imp.split(".")[0] not in DEFAULT_UNOWNED_DEPENDENCIES:
            unowned_imports.add(imp)
    unowned_dependency_behavior = python_infer_subsystem.unowned_dependency_behavior
    if unowned_imports and unowned_dependency_behavior is not UnownedDependencyUsage.DoNothing:
        raise_error = unowned_dependency_behavior is UnownedDependencyUsage.RaiseError
        log = logger.error if raise_error else logger.warning
        log(
            f"The following imports in {address} have no owners:\n\n{bullet_list(unowned_imports)}\n\n"
            "If you are expecting this import to be provided by your own firstparty code, ensure that it is contained within a source root. "
            "Otherwise if you are using a requirements file, consider adding the relevant package.\n"
            "Otherwise consider declaring a `python_requirement_library` target, which can then be inferred.\n"
            f"See {doc_url('python-third-party-dependencies')}"
        )
        if raise_error:
            raise UnownedDependencyError(
                "One or more unowned dependencies detected. Check logs for more details."
            )
    return InferredDependencies(sorted(merged_result))
async def py_constraints(
    addresses: Addresses,
    console: Console,
    py_constraints_subsystem: PyConstraintsSubsystem,
    python_setup: PythonSetup,
    registered_target_types: RegisteredTargetTypes,
    union_membership: UnionMembership,
) -> PyConstraintsGoal:
    """Implement the `py-constraints` goal.

    In `--summary` mode, emit a CSV of every Python target's own and transitive
    constraints plus dependency/dependee counts. Otherwise, print the merged constraints
    for the given addresses and a per-constraint breakdown of contributing targets.
    """
    if py_constraints_subsystem.summary:
        if addresses:
            # Summary mode is repo-wide; explicit specs are an error.
            console.print_stderr(
                "The `py-constraints --summary` goal does not take file/target arguments. Run "
                "`help py-constraints` for more details."
            )
            return PyConstraintsGoal(exit_code=1)
        with_generated_targets, without_generated_targets = await MultiGet(
            Get(Targets, AddressSpecs([DescendantAddresses("")])),
            Get(UnexpandedTargets, AddressSpecs([DescendantAddresses("")])),
        )
        # Union of expanded and unexpanded targets, keeping only those with the field.
        all_python_targets = sorted(
            {
                t
                for t in (*with_generated_targets, *without_generated_targets)
                if t.has_field(InterpreterConstraintsField)
            },
            key=lambda tgt: cast(Address, tgt.address),
        )
        constraints_per_tgt = [
            InterpreterConstraints.create_from_targets([tgt], python_setup)
            for tgt in all_python_targets
        ]
        transitive_targets_per_tgt = await MultiGet(
            Get(TransitiveTargets, TransitiveTargetsRequest([tgt.address]))
            for tgt in all_python_targets
        )
        transitive_constraints_per_tgt = [
            InterpreterConstraints.create_from_targets(transitive_targets.closure, python_setup)
            for transitive_targets in transitive_targets_per_tgt
        ]
        dependees_per_root = await MultiGet(
            Get(Dependees, DependeesRequest([tgt.address], transitive=True, include_roots=False))
            for tgt in all_python_targets
        )
        data = [
            {
                "Target": tgt.address.spec,
                "Constraints": str(constraints),
                "Transitive Constraints": str(transitive_constraints),
                "# Dependencies": len(transitive_targets.dependencies),
                "# Dependees": len(dependees),
            }
            for tgt, constraints, transitive_constraints, transitive_targets, dependees in zip(
                all_python_targets,
                constraints_per_tgt,
                transitive_constraints_per_tgt,
                transitive_targets_per_tgt,
                dependees_per_root,
            )
        ]
        with py_constraints_subsystem.output_sink(console) as stdout:
            writer = csv.DictWriter(
                stdout,
                fieldnames=[
                    "Target",
                    "Constraints",
                    "Transitive Constraints",
                    "# Dependencies",
                    "# Dependees",
                ],
            )
            writer.writeheader()
            for entry in data:
                writer.writerow(entry)
        return PyConstraintsGoal(exit_code=0)
    transitive_targets = await Get(TransitiveTargets, TransitiveTargetsRequest(addresses))
    final_constraints = InterpreterConstraints.create_from_targets(
        transitive_targets.closure, python_setup
    )
    if not final_constraints:
        # Nothing matched; tell the user which target types could have mattered.
        target_types_with_constraints = sorted(
            tgt_type.alias
            for tgt_type in registered_target_types.types
            if tgt_type.class_has_field(InterpreterConstraintsField, union_membership)
        )
        logger.warning(
            "No Python files/targets matched for the `py-constraints` goal. All target types with "
            f"Python interpreter constraints: {', '.join(target_types_with_constraints)}"
        )
        return PyConstraintsGoal(exit_code=0)
    # Group the closure's addresses by their individual constraints for the breakdown.
    constraints_to_addresses = defaultdict(set)
    for tgt in transitive_targets.closure:
        constraints = InterpreterConstraints.create_from_targets([tgt], python_setup)
        if not constraints:
            continue
        constraints_to_addresses[constraints].add(tgt.address)
    with py_constraints_subsystem.output(console) as output_stdout:
        output_stdout(f"Final merged constraints: {final_constraints}\n")
        if len(addresses) > 1:
            merged_constraints_warning = (
                "(These are the constraints used if you were to depend on all of the input "
                "files/targets together, even though they may end up never being used together in "
                "the real world. Consider using a more precise query or running "
                "`./pants py-constraints --summary`.)\n"
            )
            output_stdout(indent(fill(merged_constraints_warning, 80), " "))
        for constraint, addrs in sorted(constraints_to_addresses.items()):
            output_stdout(f"\n{constraint}\n")
            for addr in sorted(addrs):
                output_stdout(f" {addr}\n")
    return PyConstraintsGoal(exit_code=0)
async def setup_pytest_for_target(
    request: TestSetupRequest,
    pytest: PyTest,
    test_subsystem: TestSubsystem,
    python_setup: PythonSetup,
    coverage_config: CoverageConfig,
    coverage_subsystem: CoverageSubsystem,
    test_extra_env: TestExtraEnv,
    global_options: GlobalOptions,
    complete_env: CompleteEnvironment,
) -> TestSetup:
    """Build the Process that runs Pytest for a single test field set.

    Resolves the transitive closure and its interpreter constraints, builds the pytest
    runner PEX (pytest + third-party requirements + local dists), assembles the input
    digest (sources, config, coverage config, plugin digests), and configures JUnit XML
    and coverage options.
    """
    transitive_targets, plugin_setups = await MultiGet(
        Get(TransitiveTargets, TransitiveTargetsRequest([request.field_set.address])),
        Get(AllPytestPluginSetups, AllPytestPluginSetupsRequest(request.field_set.address)),
    )
    all_targets = transitive_targets.closure
    interpreter_constraints = InterpreterConstraints.create_from_targets(all_targets, python_setup)
    requirements_pex_get = Get(
        Pex,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(
            [request.field_set.address],
            internal_only=True,
            resolve_and_lockfile=request.field_set.resolve.resolve_and_lockfile(python_setup),
        ),
    )
    pytest_pex_get = Get(
        Pex,
        PexRequest(
            output_filename="pytest.pex",
            requirements=pytest.pex_requirements(),
            interpreter_constraints=interpreter_constraints,
            internal_only=True,
        ),
    )
    # Ensure that the empty extra output dir exists.
    extra_output_directory_digest_get = Get(Digest, CreateDigest([Directory(_EXTRA_OUTPUT_DIR)]))
    prepared_sources_get = Get(
        PythonSourceFiles, PythonSourceFilesRequest(all_targets, include_files=True)
    )
    # Get the file names for the test_target so that we can specify to Pytest precisely which files
    # to test, rather than using auto-discovery.
    field_set_source_files_get = Get(SourceFiles, SourceFilesRequest([request.field_set.sources]))
    (
        pytest_pex,
        requirements_pex,
        prepared_sources,
        field_set_source_files,
        extra_output_directory_digest,
    ) = await MultiGet(
        pytest_pex_get,
        requirements_pex_get,
        prepared_sources_get,
        field_set_source_files_get,
        extra_output_directory_digest_get,
    )
    local_dists = await Get(
        LocalDistsPex,
        LocalDistsPexRequest(
            [request.field_set.address],
            interpreter_constraints=interpreter_constraints,
            sources=prepared_sources,
        ),
    )
    pytest_runner_pex_get = Get(
        VenvPex,
        PexRequest(
            output_filename="pytest_runner.pex",
            interpreter_constraints=interpreter_constraints,
            main=pytest.main,
            internal_only=True,
            pex_path=[pytest_pex, requirements_pex, local_dists.pex],
        ),
    )
    config_files_get = Get(
        ConfigFiles,
        ConfigFilesRequest,
        pytest.config_request(field_set_source_files.snapshot.dirs),
    )
    pytest_runner_pex, config_files = await MultiGet(pytest_runner_pex_get, config_files_get)
    input_digest = await Get(
        Digest,
        MergeDigests(
            (
                coverage_config.digest,
                local_dists.remaining_sources.source_files.snapshot.digest,
                config_files.snapshot.digest,
                extra_output_directory_digest,
                *(plugin_setup.digest for plugin_setup in plugin_setups),
            )
        ),
    )
    add_opts = [f"--color={'yes' if global_options.options.colors else 'no'}"]
    output_files = []
    results_file_name = None
    # JUnit XML output is only produced for non-interactive runs.
    if pytest.options.junit_xml_dir and not request.is_debug:
        results_file_name = f"{request.field_set.address.path_safe_spec}.xml"
        add_opts.extend(
            (f"--junitxml={results_file_name}", "-o", f"junit_family={pytest.options.junit_family}")
        )
        output_files.append(results_file_name)
    coverage_args = []
    if test_subsystem.use_coverage and not request.is_debug:
        pytest.validate_pytest_cov_included()
        output_files.append(".coverage")
        if coverage_subsystem.filter:
            cov_args = [f"--cov={morf}" for morf in coverage_subsystem.filter]
        else:
            # N.B.: Passing `--cov=` or `--cov=.` to communicate "record coverage for all sources"
            # fails in certain contexts as detailed in:
            #   https://github.com/pantsbuild/pants/issues/12390
            # Instead we focus coverage on just the directories containing python source files
            # materialized to the Process chroot.
            cov_args = [f"--cov={source_root}" for source_root in prepared_sources.source_roots]
        coverage_args = [
            "--cov-report=",  # Turn off output.
            f"--cov-config={coverage_config.path}",
            *cov_args,
        ]
    extra_env = {
        "PYTEST_ADDOPTS": " ".join(add_opts),
        "PEX_EXTRA_SYS_PATH": ":".join(prepared_sources.source_roots),
        **test_extra_env.env,
        # NOTE: `complete_env` intentionally after `test_extra_env` to allow overriding within
        # `python_tests`
        **complete_env.get_subset(request.field_set.extra_env_vars.value or ()),
    }
    # Cache test runs only if they are successful, or not at all if `--test-force`.
    cache_scope = (
        ProcessCacheScope.PER_SESSION if test_subsystem.force else ProcessCacheScope.SUCCESSFUL
    )
    process = await Get(
        Process,
        VenvPexProcess(
            pytest_runner_pex,
            argv=(*pytest.options.args, *coverage_args, *field_set_source_files.files),
            extra_env=extra_env,
            input_digest=input_digest,
            output_directories=(_EXTRA_OUTPUT_DIR,),
            output_files=output_files,
            timeout_seconds=request.field_set.timeout.calculate_from_global_options(pytest),
            execution_slot_variable=pytest.options.execution_slot_var,
            description=f"Run Pytest for {request.field_set.address}",
            level=LogLevel.DEBUG,
            cache_scope=cache_scope,
        ),
    )
    return TestSetup(process, results_file_name=results_file_name)
async def pex_from_targets(request: PexFromTargetsRequest, python_setup: PythonSetup) -> PexRequest:
    """Translate a `PexFromTargetsRequest` into a concrete `PexRequest`.

    Resolves the requested addresses (either direct deps only or the full transitive
    closure), computes interpreter constraints, gathers source files, local dists,
    and third-party requirements, and optionally attaches a "repository PEX"
    (constraints file, named resolve lockfile, or the global lockfile) for the
    requirements to be subsetted from.
    """
    if request.direct_deps_only:
        # Only the requested targets plus their *direct* dependencies.
        targets = await Get(Targets, Addresses(request.addresses))
        direct_deps = await MultiGet(
            Get(Targets, DependenciesRequest(tgt.get(Dependencies))) for tgt in targets
        )
        all_targets = FrozenOrderedSet(itertools.chain(*direct_deps, targets))
    else:
        # Full transitive closure of the requested addresses.
        transitive_targets = await Get(
            TransitiveTargets, TransitiveTargetsRequest(request.addresses)
        )
        all_targets = transitive_targets.closure

    if request.hardcoded_interpreter_constraints:
        interpreter_constraints = request.hardcoded_interpreter_constraints
    else:
        calculated_constraints = InterpreterConstraints.create_from_targets(
            all_targets, python_setup
        )
        # If there are no targets, we fall back to the global constraints. This is relevant,
        # for example, when running `./pants repl` with no specs.
        interpreter_constraints = calculated_constraints or InterpreterConstraints(
            python_setup.interpreter_constraints
        )

    sources_digests = []
    if request.additional_sources:
        sources_digests.append(request.additional_sources)
    if request.include_source_files:
        sources = await Get(PythonSourceFiles, PythonSourceFilesRequest(all_targets))
    else:
        sources = PythonSourceFiles.empty()

    additional_inputs_digests = []
    if request.additional_inputs:
        additional_inputs_digests.append(request.additional_inputs)
    additional_args = request.additional_args
    if request.include_local_dists:
        # Note that LocalDistsPexRequest has no `direct_deps_only` mode, so we will build all
        # local dists in the transitive closure even if the request was for direct_deps_only.
        # Since we currently use `direct_deps_only` in one case (building a requirements pex
        # when running pylint) and in that case include_local_dists=False, this seems harmless.
        local_dists = await Get(
            LocalDistsPex,
            LocalDistsPexRequest(
                request.addresses,
                internal_only=request.internal_only,
                interpreter_constraints=interpreter_constraints,
                sources=sources,
            ),
        )
        # Sources covered by a local dist are removed; only the remainder ships loose.
        remaining_sources = local_dists.remaining_sources
        additional_inputs_digests.append(local_dists.pex.digest)
        additional_args += ("--requirements-pex", local_dists.pex.name)
    else:
        remaining_sources = sources

    remaining_sources_stripped = await Get(
        StrippedPythonSourceFiles, PythonSourceFiles, remaining_sources
    )
    sources_digests.append(remaining_sources_stripped.stripped_source_files.snapshot.digest)

    merged_sources_digest, additional_inputs = await MultiGet(
        Get(Digest, MergeDigests(sources_digests)),
        Get(Digest, MergeDigests(additional_inputs_digests)),
    )

    # Third-party requirements declared by any target in the computed target set.
    requirements = PexRequirements.create_from_requirement_fields(
        (
            tgt[PythonRequirementsField]
            for tgt in all_targets
            if tgt.has_field(PythonRequirementsField)
        ),
        additional_requirements=request.additional_requirements,
        apply_constraints=True,
    )

    description = request.description

    if requirements:
        # Pick at most one "repository" pex to subset requirements from, in priority
        # order: constraints file, explicit resolve+lockfile, global lockfile.
        repository_pex: Pex | None = None
        if python_setup.requirement_constraints:
            maybe_constraints_repository_pex = await Get(
                _ConstraintsRepositoryPex,
                _ConstraintsRepositoryPexRequest(
                    requirements,
                    request.platforms,
                    interpreter_constraints,
                    request.internal_only,
                    request.additional_lockfile_args,
                ),
            )
            if maybe_constraints_repository_pex.maybe_pex:
                repository_pex = maybe_constraints_repository_pex.maybe_pex
        elif (
            python_setup.resolve_all_constraints
            and python_setup.resolve_all_constraints_was_set_explicitly()
        ):
            # resolve_all_constraints is meaningless without a constraints file; fail
            # loudly only when the user set it explicitly.
            raise ValueError(
                "`[python].resolve_all_constraints` is enabled, so "
                "`[python].requirement_constraints` must also be set."
            )
        elif request.resolve_and_lockfile:
            resolve, lockfile = request.resolve_and_lockfile
            repository_pex = await Get(
                Pex,
                PexRequest(
                    description=f"Installing {lockfile} for the resolve `{resolve}`",
                    output_filename=f"{path_safe(resolve)}_lockfile.pex",
                    internal_only=request.internal_only,
                    requirements=Lockfile(
                        file_path=lockfile,
                        file_path_description_of_origin=(
                            f"the resolve `{resolve}` (from "
                            "`[python].experimental_resolves_to_lockfiles`)"
                        ),
                        # TODO(#12314): Hook up lockfile staleness check.
                        lockfile_hex_digest=None,
                        req_strings=None,
                    ),
                    interpreter_constraints=interpreter_constraints,
                    platforms=request.platforms,
                    additional_args=request.additional_lockfile_args,
                ),
            )
        elif python_setup.lockfile:
            repository_pex = await Get(
                Pex,
                PexRequest(
                    description=f"Installing {python_setup.lockfile}",
                    output_filename="lockfile.pex",
                    internal_only=request.internal_only,
                    requirements=Lockfile(
                        file_path=python_setup.lockfile,
                        file_path_description_of_origin=(
                            "the option `[python].experimental_lockfile`"
                        ),
                        # TODO(#12314): Hook up lockfile staleness check once multiple lockfiles
                        # are supported.
                        lockfile_hex_digest=None,
                        req_strings=None,
                    ),
                    interpreter_constraints=interpreter_constraints,
                    platforms=request.platforms,
                    additional_args=request.additional_lockfile_args,
                ),
            )
        requirements = dataclasses.replace(requirements, repository_pex=repository_pex)

    return PexRequest(
        output_filename=request.output_filename,
        internal_only=request.internal_only,
        requirements=requirements,
        interpreter_constraints=interpreter_constraints,
        platforms=request.platforms,
        main=request.main,
        sources=merged_sources_digest,
        additional_inputs=additional_inputs,
        additional_args=additional_args,
        description=description,
    )
async def infer_python_dependencies_via_source(
    request: InferPythonImportDependencies,
    python_infer_subsystem: PythonInferSubsystem,
    python_setup: PythonSetup,
) -> InferredDependencies:
    """Infer dependencies for a Python source by parsing its imports and asset strings.

    Parses the source file for imports (and, optionally, string-literal imports and
    asset references), maps each discovered module to its owning target(s), and
    returns the inferred addresses. May warn about imports with no owner, per the
    configured unowned-dependency behavior.
    """
    # Both inference modes disabled: nothing to do.
    if not python_infer_subsystem.imports and not python_infer_subsystem.assets:
        return InferredDependencies([])

    _wrapped_tgt = await Get(WrappedTarget, Address, request.sources_field.address)
    tgt = _wrapped_tgt.target
    # Parse the file once for both imports and assets.
    parsed_dependencies = await Get(
        ParsedPythonDependencies,
        ParsePythonDependenciesRequest(
            cast(PythonSourceField, request.sources_field),
            InterpreterConstraints.create_from_targets([tgt], python_setup),
            string_imports=python_infer_subsystem.string_imports,
            string_imports_min_dots=python_infer_subsystem.string_imports_min_dots,
            assets=python_infer_subsystem.assets,
            assets_min_slashes=python_infer_subsystem.assets_min_slashes,
        ),
    )

    inferred_deps: set[Address] = set()
    unowned_imports: set[str] = set()
    parsed_imports = parsed_dependencies.imports
    parsed_assets = parsed_dependencies.assets
    # Import inference may be off even when asset inference is on; drop the imports.
    if not python_infer_subsystem.imports:
        parsed_imports = ParsedPythonImports([])

    explicitly_provided_deps = await Get(
        ExplicitlyProvidedDependencies, DependenciesRequest(tgt[Dependencies])
    )

    if parsed_imports:
        resolve = tgt[PythonResolveField].normalized_value(python_setup)
        # Map every imported module to its owning target(s) within this resolve.
        import_deps, unowned_imports = _get_imports_info(
            address=tgt.address,
            owners_per_import=await MultiGet(
                Get(
                    PythonModuleOwners,
                    PythonModuleOwnersRequest(imported_module, resolve=resolve),
                )
                for imported_module in parsed_imports
            ),
            parsed_imports=parsed_imports,
            explicitly_provided_deps=explicitly_provided_deps,
        )
        inferred_deps.update(import_deps)

    if parsed_assets:
        all_asset_targets = await Get(AllAssetTargets, AllAssetTargetsRequest())
        assets_by_path = await Get(AllAssetTargetsByPath, AllAssetTargets, all_asset_targets)
        inferred_deps.update(
            _get_inferred_asset_deps(
                tgt.address,
                request.sources_field.file_path,
                assets_by_path,
                parsed_assets,
                explicitly_provided_deps,
            )
        )

    # Surface imports that had no owning target, per configured behavior.
    _maybe_warn_unowned(
        tgt.address,
        request.sources_field.file_path,
        python_infer_subsystem.unowned_dependency_behavior,
        unowned_imports,
        parsed_imports,
    )

    return InferredDependencies(sorted(inferred_deps))
async def collect_fixture_configs(
    _request: CollectFixtureConfigsRequest,
    pytest: PyTest,
    python_setup: PythonSetup,
    test_extra_env: TestExtraEnv,
    targets: Targets,
) -> CollectedJVMLockfileFixtureConfigs:
    """Run a pytest collection script over the given test targets and gather the
    JVM lockfile fixture definitions they declare.

    Builds a pytest runner PEX containing the collection script plus the tests'
    requirements, runs it over the root source files, and parses the resulting
    `tests.json` into `JVMLockfileFixtureConfig`s.
    """
    addresses = [tgt.address for tgt in targets]
    transitive_targets = await Get(TransitiveTargets, TransitiveTargetsRequest(addresses))

    all_targets = transitive_targets.closure

    interpreter_constraints = InterpreterConstraints.create_from_targets(
        all_targets, python_setup
    )

    # Build pytest, the requirements pex, and both source sets concurrently.
    pytest_pex, requirements_pex, prepared_sources, root_sources = await MultiGet(
        Get(
            Pex,
            PexRequest(
                output_filename="pytest.pex",
                requirements=pytest.pex_requirements(),
                interpreter_constraints=interpreter_constraints,
                internal_only=True,
            ),
        ),
        Get(Pex, RequirementsPexRequest(addresses)),
        # Full closure's sources (incl. files/resources) go into the chroot...
        Get(
            PythonSourceFiles,
            PythonSourceFilesRequest(all_targets, include_files=True, include_resources=True),
        ),
        # ...while only the root targets' sources are passed to pytest as args.
        Get(
            PythonSourceFiles,
            PythonSourceFilesRequest(targets),
        ),
    )

    # The collection script is materialized as an executable file and used as
    # the PEX entry point.
    script_content = FileContent(
        path="collect-fixtures.py", content=COLLECTION_SCRIPT.encode(), is_executable=True
    )
    script_digest = await Get(Digest, CreateDigest([script_content]))

    pytest_runner_pex_get = Get(
        VenvPex,
        PexRequest(
            output_filename="pytest_runner.pex",
            interpreter_constraints=interpreter_constraints,
            main=EntryPoint(PurePath(script_content.path).stem),
            sources=script_digest,
            internal_only=True,
            pex_path=[
                pytest_pex,
                requirements_pex,
            ],
        ),
    )
    # Look for pytest config files in every directory that holds a source file.
    config_file_dirs = list(group_by_dir(prepared_sources.source_files.files).keys())
    config_files_get = Get(
        ConfigFiles,
        ConfigFilesRequest,
        pytest.config_request(config_file_dirs),
    )
    pytest_runner_pex, config_files = await MultiGet(pytest_runner_pex_get, config_files_get)

    pytest_config_digest = config_files.snapshot.digest

    input_digest = await Get(
        Digest,
        MergeDigests(
            (
                prepared_sources.source_files.snapshot.digest,
                pytest_config_digest,
            )
        ),
    )

    extra_env = {
        "PEX_EXTRA_SYS_PATH": ":".join(prepared_sources.source_roots),
        **test_extra_env.env,
    }

    process = await Get(
        Process,
        VenvPexProcess(
            pytest_runner_pex,
            argv=[name for name in root_sources.source_files.files if name.endswith(".py")],
            extra_env=extra_env,
            input_digest=input_digest,
            output_files=("tests.json",),
            description="Collect test lockfile requirements from all tests.",
            level=LogLevel.DEBUG,
            # Never cache across sessions: this is a fixture-collection utility.
            cache_scope=ProcessCacheScope.PER_SESSION,
        ),
    )

    result = await Get(ProcessResult, Process, process)
    digest_contents = await Get(DigestContents, Digest, result.output_digest)
    # The script writes exactly one JSON file; anything else is an internal error.
    assert len(digest_contents) == 1
    assert digest_contents[0].path == "tests.json"

    raw_config_data = json.loads(digest_contents[0].content)

    configs = []
    for item in raw_config_data:
        config = JVMLockfileFixtureConfig(
            definition=JVMLockfileFixtureDefinition.from_kwargs(item["kwargs"]),
            test_file_path=item["test_file_path"],
        )
        configs.append(config)

    return CollectedJVMLockfileFixtureConfigs(configs)
async def export_virtualenv(
    request: _ExportVenvRequest, python_setup: PythonSetup, pex_pex: PexPEX
) -> ExportResult:
    """Export a virtualenv for a resolve (or for the requested root targets).

    Builds a requirements PEX for the roots, then emits post-processing commands
    that use the Pex tool (`PEX_MODULE=pex.tools ... venv`) to convert that PEX
    into a venv under `dist/`.

    Raises:
        ExportError: if no single minimum interpreter satisfies the constraints.
    """
    if request.resolve:
        # Constraints come from the resolve's configuration, falling back to the
        # global interpreter constraints.
        interpreter_constraints = InterpreterConstraints(
            python_setup.resolves_to_interpreter_constraints.get(
                request.resolve, python_setup.interpreter_constraints
            )
        )
    else:
        # Constraints computed from the root targets, falling back to the globals.
        interpreter_constraints = InterpreterConstraints.create_from_targets(
            request.root_python_targets, python_setup
        ) or InterpreterConstraints(python_setup.interpreter_constraints)

    # A venv needs one concrete interpreter: pick the minimum compatible one.
    min_interpreter = interpreter_constraints.snap_to_minimum(python_setup.interpreter_universe)
    if not min_interpreter:
        err_msg = (
            (
                f"The resolve '{request.resolve}' (from `[python].resolves`) has invalid interpreter "
                f"constraints, which are set via `[python].resolves_to_interpreter_constraints`: "
                f"{interpreter_constraints}. Could not determine the minimum compatible interpreter."
            )
            if request.resolve
            else (
                "The following interpreter constraints were computed for all the targets for which "
                f"export was requested: {interpreter_constraints}. There is no python interpreter "
                "compatible with these constraints. Please restrict the target set to one that shares "
                "a compatible interpreter."
            )
        )
        raise ExportError(err_msg)

    requirements_pex = await Get(
        Pex,
        RequirementsPexRequest(
            (tgt.address for tgt in request.root_python_targets),
            internal_only=True,
            hardcoded_interpreter_constraints=min_interpreter,
        ),
    )

    # Get the full python version (including patch #), so we can use it as the venv name.
    res = await Get(
        ProcessResult,
        PexProcess(
            pex=requirements_pex,
            description="Get interpreter version",
            argv=["-c", "import sys; print('.'.join(str(x) for x in sys.version_info[0:3]))"],
        ),
    )
    py_version = res.stdout.strip().decode()

    dest = (
        os.path.join("python", "virtualenvs", path_safe(request.resolve))
        if request.resolve
        else os.path.join("python", "virtualenv")
    )

    merged_digest = await Get(Digest, MergeDigests([pex_pex.digest, requirements_pex.digest]))
    # "{digest_root}" is substituted by the export post-processing machinery.
    pex_pex_path = os.path.join("{digest_root}", pex_pex.exe)
    return ExportResult(
        f"virtualenv for the resolve '{request.resolve}' (using {min_interpreter})",
        dest,
        digest=merged_digest,
        post_processing_cmds=[
            PostProcessingCommand(
                [
                    pex_pex_path,
                    os.path.join("{digest_root}", requirements_pex.name),
                    "venv",
                    "--pip",
                    "--collisions-ok",
                    "--remove=all",
                    f"{{digest_root}}/{py_version}",
                ],
                {"PEX_MODULE": "pex.tools"},
            ),
            # Clean up the pex tool itself after the venv is built.
            PostProcessingCommand(["rm", "-f", pex_pex_path]),
        ],
    )
async def infer_python_dependencies_via_source(
    request: InferPythonImportDependencies,
    python_infer_subsystem: PythonInferSubsystem,
    python_setup: PythonSetup,
) -> InferredDependencies:
    """Infer dependencies for a Python source by parsing its imports and asset strings.

    Newer variant: looks the target up via `WrappedTargetRequest`, bails out when
    interpreter constraints cannot be computed, and delegates unowned-import
    handling to `_handle_unowned_imports` (resolve-aware).
    """
    # Both inference modes disabled: nothing to do.
    if not python_infer_subsystem.imports and not python_infer_subsystem.assets:
        return InferredDependencies([])

    _wrapped_tgt = await Get(
        WrappedTarget,
        WrappedTargetRequest(request.sources_field.address, description_of_origin="<infallible>"),
    )
    tgt = _wrapped_tgt.target
    interpreter_constraints = InterpreterConstraints.create_from_targets([tgt], python_setup)
    if interpreter_constraints is None:
        # TODO: This would represent a target with a PythonSource field, but no
        # InterpreterConstraints field. #15400 would allow inference to require both
        # fields.
        return InferredDependencies([])
    # Parse the file once for both imports and assets.
    parsed_dependencies = await Get(
        ParsedPythonDependencies,
        ParsePythonDependenciesRequest(
            cast(PythonSourceField, request.sources_field),
            interpreter_constraints,
            string_imports=python_infer_subsystem.string_imports,
            string_imports_min_dots=python_infer_subsystem.string_imports_min_dots,
            assets=python_infer_subsystem.assets,
            assets_min_slashes=python_infer_subsystem.assets_min_slashes,
        ),
    )

    inferred_deps: set[Address] = set()
    unowned_imports: set[str] = set()
    parsed_imports = parsed_dependencies.imports
    parsed_assets = parsed_dependencies.assets
    # Import inference may be off even when asset inference is on; drop the imports.
    if not python_infer_subsystem.imports:
        parsed_imports = ParsedPythonImports([])

    explicitly_provided_deps = await Get(
        ExplicitlyProvidedDependencies, DependenciesRequest(tgt[Dependencies])
    )

    resolve = tgt[PythonResolveField].normalized_value(python_setup)

    if parsed_imports:
        # Map every imported module to its owning target(s) within this resolve.
        import_deps, unowned_imports = _get_imports_info(
            address=tgt.address,
            owners_per_import=await MultiGet(
                Get(PythonModuleOwners, PythonModuleOwnersRequest(imported_module, resolve=resolve))
                for imported_module in parsed_imports
            ),
            parsed_imports=parsed_imports,
            explicitly_provided_deps=explicitly_provided_deps,
        )
        inferred_deps.update(import_deps)

    if parsed_assets:
        all_asset_targets = await Get(AllAssetTargets, AllAssetTargetsRequest())
        assets_by_path = await Get(AllAssetTargetsByPath, AllAssetTargets, all_asset_targets)
        inferred_deps.update(
            _get_inferred_asset_deps(
                tgt.address,
                request.sources_field.file_path,
                assets_by_path,
                parsed_assets,
                explicitly_provided_deps,
            )
        )

    # Handle imports with no owning target (warn/error per configured behavior).
    _ = await _handle_unowned_imports(
        tgt.address,
        python_infer_subsystem.unowned_dependency_behavior,
        python_setup,
        unowned_imports,
        parsed_imports,
        resolve=resolve,
    )

    return InferredDependencies(sorted(inferred_deps))
async def package_python_dist(
    field_set: PythonDistributionFieldSet,
    python_setup: PythonSetup,
    union_membership: UnionMembership,
) -> BuiltPackage:
    """Build a wheel and/or sdist for a `python_distribution` target.

    Prepares a build chroot for the exported target, collects any extra
    build-time environments/requirements contributed via the
    `DistBuildEnvironmentRequest` union, runs the PEP 517 build system, and
    captures the produced artifacts.

    Raises:
        NoDistTypeSelected: if neither `wheel` nor `sdist` is enabled on the target.
    """
    transitive_targets = await Get(TransitiveTargets, TransitiveTargetsRequest([field_set.address]))
    exported_target = ExportedTarget(transitive_targets.roots[0])
    dist_tgt = exported_target.target
    wheel = dist_tgt.get(WheelField).value
    sdist = dist_tgt.get(SDistField).value
    if not wheel and not sdist:
        raise NoDistTypeSelected(
            softwrap(
                f"""
                In order to package {dist_tgt.address.spec} at least one of {WheelField.alias!r} or
                {SDistField.alias!r} must be `True`.
                """
            )
        )

    wheel_config_settings = dist_tgt.get(WheelConfigSettingsField).value or FrozenDict()
    sdist_config_settings = dist_tgt.get(SDistConfigSettingsField).value or FrozenDict()
    backend_env_vars = dist_tgt.get(BuildBackendEnvVarsField).value
    if backend_env_vars:
        # Sorted for deterministic cache keys.
        extra_build_time_env = await Get(Environment, EnvironmentRequest(sorted(backend_env_vars)))
    else:
        extra_build_time_env = Environment()

    interpreter_constraints = InterpreterConstraints.create_from_targets(
        transitive_targets.closure, python_setup
    ) or InterpreterConstraints(python_setup.interpreter_constraints)
    chroot = await Get(
        DistBuildChroot,
        DistBuildChrootRequest(
            exported_target,
            interpreter_constraints=interpreter_constraints,
        ),
    )

    # Find the source roots for the build-time 1stparty deps (e.g., deps of setup.py).
    source_roots_result = await Get(
        SourceRootsResult,
        SourceRootsRequest(
            files=[], dirs={PurePath(tgt.address.spec_path) for tgt in transitive_targets.closure}
        ),
    )
    source_roots = tuple(sorted({sr.path for sr in source_roots_result.path_to_root.values()}))

    # Get any extra build-time environment (e.g., native extension requirements).
    build_env_requests = []
    build_env_request_types = union_membership.get(DistBuildEnvironmentRequest)
    for build_env_request_type in build_env_request_types:
        if build_env_request_type.is_applicable(dist_tgt):
            build_env_requests.append(
                build_env_request_type(
                    tuple(tt.address for tt in transitive_targets.closure), interpreter_constraints
                )
            )

    build_envs = await MultiGet(
        [
            Get(DistBuildEnvironment, DistBuildEnvironmentRequest, build_env_request)
            for build_env_request in build_env_requests
        ]
    )
    extra_build_time_requirements = tuple(
        itertools.chain.from_iterable(
            build_env.extra_build_time_requirements for build_env in build_envs
        )
    )
    input_digest = await Get(
        Digest,
        MergeDigests(
            [chroot.digest, *(build_env.extra_build_time_inputs for build_env in build_envs)]
        ),
    )

    # We prefix the entire chroot, and run with this prefix as the cwd, so that we can capture
    # any changes setup made within it without also capturing other artifacts of the pex
    # process invocation.
    chroot_prefix = "chroot"
    working_directory = os.path.join(chroot_prefix, chroot.working_directory)
    prefixed_input = await Get(Digest, AddPrefix(input_digest, chroot_prefix))
    build_system = await Get(BuildSystem, BuildSystemRequest(prefixed_input, working_directory))

    setup_py_result = await Get(
        DistBuildResult,
        DistBuildRequest(
            build_system=build_system,
            interpreter_constraints=interpreter_constraints,
            build_wheel=wheel,
            build_sdist=sdist,
            input=prefixed_input,
            working_directory=working_directory,
            build_time_source_roots=source_roots,
            target_address_spec=exported_target.target.address.spec,
            wheel_config_settings=wheel_config_settings,
            sdist_config_settings=sdist_config_settings,
            extra_build_time_requirements=extra_build_time_requirements,
            extra_build_time_env=extra_build_time_env,
        ),
    )
    dist_snapshot = await Get(Snapshot, Digest, setup_py_result.output)
    return BuiltPackage(
        setup_py_result.output,
        tuple(BuiltPackageArtifact(path) for path in dist_snapshot.files),
    )