def test_get_pyenv_paths(self):
    """Pyenv interpreter paths reflect the fake pyenv root, honoring `pyenv_local`."""
    local_version = "3.5.5"
    known_versions = ["2.7.14", local_version]
    self.create_file(".python-version", f"{local_version}\n")
    with fake_pyenv_root(known_versions, local_version) as (
        pyenv_root,
        expected_paths,
        expected_local_paths,
    ):
        root_func = lambda: pyenv_root
        assert expected_paths == PythonSetup.get_pyenv_paths(pyenv_root_func=root_func)
        assert expected_local_paths == PythonSetup.get_pyenv_paths(
            pyenv_root_func=root_func, pyenv_local=True
        )
def test_get_pyenv_paths(rule_runner: RuleRunner) -> None:
    """get_pyenv_paths returns every installed version, or only the local one."""
    local_version = "3.5.5"
    known_versions = ["2.7.14", local_version]
    rule_runner.write_files({".python-version": local_version + "\n"})
    with fake_pyenv_root(known_versions, local_version) as (
        pyenv_root,
        expected_paths,
        expected_local_paths,
    ):
        env = Environment({"PYENV_ROOT": pyenv_root})
        assert expected_paths == PythonSetup.get_pyenv_paths(env)
        assert expected_local_paths == PythonSetup.get_pyenv_paths(env, pyenv_local=True)
def __init__(self, target, root_dir, extra_targets=None, extra_requirements=None,
             builder=None, platforms=None, interpreter=None, conn_timeout=None):
    """Initialize chroot state: config, interpreter, extra deps, and the egg cache.

    :param target: The primary target to build.
    :param root_dir: The build root directory.
    :param extra_targets: Optional extra targets to include alongside `target`.
    :param extra_requirements: Optional extra requirements to resolve.
    :param builder: Optional PEXBuilder; a temp-dir-backed one is created if omitted.
    :param platforms: Optional platforms to build for.
    :param interpreter: Optional interpreter; defaults to the current one.
    :param conn_timeout: Unused here; retained for interface compatibility.
    """
    self._config = Config.load()
    self._target = target
    self._root = root_dir
    self._platforms = platforms
    self._interpreter = interpreter or PythonInterpreter.get()
    self._extra_targets = [] if extra_targets is None else list(extra_targets)
    self._extra_requirements = [] if extra_requirements is None else list(extra_requirements)
    self._builder = builder or PEXBuilder(tempfile.mkdtemp(), interpreter=self._interpreter)

    # Note: unrelated to the general pants artifact cache.
    scratch = PythonSetup(self._config).scratch_dir('artifact_cache', default_name='artifacts')
    self._egg_cache_root = os.path.join(scratch, str(self._interpreter.identity))

    self._key_generator = CacheKeyGenerator()
    self._build_invalidator = BuildInvalidator(self._egg_cache_root)
def test_pex_execution(self) -> None:
    """Build a pex from two source files, verify its contents, and run it."""
    input_files_content = InputFilesContent(
        (
            FileContent(path="main.py", content=b'print("from main")'),
            FileContent(path="subdir/sub.py", content=b'print("from sub")'),
        )
    )
    input_files = self.request_single_product(Digest, input_files_content)
    pex_output = self.create_pex_and_get_all_data(entry_point="main", input_files=input_files)

    pex_files = pex_output["files"]
    self.assertNotIn("pex", pex_files)
    self.assertIn("main.py", pex_files)
    self.assertIn("subdir/sub.py", pex_files)

    init_subsystem(PythonSetup)
    python_setup = PythonSetup.global_instance()
    env = {"PATH": create_path_env_var(python_setup.interpreter_search_paths)}

    req = ExecuteProcessRequest(
        argv=("python", "test.pex"),
        env=env,
        input_files=pex_output["pex"].directory_digest,
        description="Run the pex and make sure it works",
    )
    result = self.request_single_product(ExecuteProcessResult, req)
    self.assertEqual(result.stdout, b"from main\n")
def test_expand_interpreter_search_paths(self):
    """Special tokens expand to PATH, pexrc, and pyenv-derived entries, in order."""
    local_version = "3.5.5"
    known_versions = ["2.7.14", local_version]
    self.create_file(".python-version", f"{local_version}\n")
    with environment_as(PATH="/env/path1:/env/path2"):
        with setup_pexrc_with_pex_python_path(["/pexrc/path1:/pexrc/path2"]):
            with fake_pyenv_root(known_versions, local_version) as (
                pyenv_root,
                expected_pyenv_paths,
                expected_pyenv_local_paths,
            ):
                raw_paths = [
                    "/foo",
                    "<PATH>",
                    "/bar",
                    "<PEXRC>",
                    "/baz",
                    "<PYENV>",
                    "<PYENV_LOCAL>",
                    "/qux",
                ]
                expanded = PythonSetup.expand_interpreter_search_paths(
                    raw_paths, pyenv_root_func=lambda: pyenv_root
                )
                expected = [
                    "/foo",
                    "/env/path1",
                    "/env/path2",
                    "/bar",
                    "/pexrc/path1",
                    "/pexrc/path2",
                    "/baz",
                    *expected_pyenv_paths,
                    *expected_pyenv_local_paths,
                    "/qux",
                ]
                self.assertListEqual(expected, expanded)
def value_or_global_default(self, python_setup: PythonSetup) -> Tuple[str, ...]:
    """Return this field's `compatibility` value, or the global interpreter constraints.

    If interpreter constraints are supplied by the CLI flag, return those only.
    """
    field_value = self.value
    return python_setup.compatibility_or_constraints(field_value)
def test_get_asdf_paths(rule_runner: RuleRunner) -> None:
    """get_asdf_paths honors .tool-versions (home/local) with an all-installs fallback."""
    # 3.9.4 is intentionally "left out" so that it's only found if the "all installs" fallback is
    # used
    python_versions = ["2.7.14", "3.5.5", "3.7.10", "3.9.4", "3.9.5"]
    home_version_indices = [0, 1, 2]
    local_version_indices = [2, 1, 4]
    local_versions_str = " ".join(materialize_indices(python_versions, local_version_indices))
    rule_runner.write_files(
        {
            ".tool-versions": (
                "nodejs 16.0.1\n"
                "java current\n"
                f"python {local_versions_str}\n"
                "rust 1.52.0\n"
            )
        }
    )
    with fake_asdf_root(python_versions, home_version_indices, local_version_indices) as (
        home_dir,
        asdf_dir,
        expected_asdf_paths,
        expected_asdf_home_paths,
        expected_asdf_local_paths,
    ):
        # Check the "all installed" fallback
        all_paths = PythonSetup.get_asdf_paths(Environment({"ASDF_DATA_DIR": asdf_dir}))
        home_paths = PythonSetup.get_asdf_paths(
            Environment({"HOME": home_dir, "ASDF_DATA_DIR": asdf_dir})
        )
        local_paths = PythonSetup.get_asdf_paths(
            Environment({"HOME": home_dir, "ASDF_DATA_DIR": asdf_dir}), asdf_local=True
        )
        # The order the filesystem returns the "installed" folders is arbitrary
        assert set(expected_asdf_paths) == set(all_paths)
        # These have a fixed order defined by the `.tool-versions` file
        assert expected_asdf_home_paths == home_paths
        assert expected_asdf_local_paths == local_paths
def test_is_python2(constraints, compatibilities):
    """is_python2 should respect config-ranked global interpreter constraints."""
    Subsystem.reset()
    scoped_options = {
        PythonSetup.options_scope: {
            "interpreter_constraints": RankedValue(Rank.CONFIG, constraints)
        }
    }
    init_subsystem(PythonSetup, scoped_options)
    assert is_python2(compatibilities, PythonSetup.global_instance())
def resolve_multi(config, requirements, interpreter=None, platforms=None,
                  conn_timeout=None, ttl=3600):
    """Multi-platform dependency resolution for PEX files.

    Given a pants configuration and a set of requirements, return a list of distributions
    that must be included in order to satisfy them.  That may involve distributions for
    multiple platforms.

    :param config: Pants :class:`Config` object.
    :param requirements: A list of :class:`PythonRequirement` objects to resolve.
    :param interpreter: :class:`PythonInterpreter` for which requirements should be resolved.
                        If None specified, defaults to current interpreter.
    :param platforms: Optional list of platforms against requirements will be resolved. If
                        None specified, the defaults from `config` will be used.
    :param conn_timeout: Optional connection timeout for any remote fetching.
    :param ttl: Time in seconds before we consider re-resolving an open-ended requirement, e.g.
        "flask>=0.2" if a matching distribution is available on disk.  Defaults to 3600.
    """
    interpreter = interpreter or PythonInterpreter.get()
    if not isinstance(interpreter, PythonInterpreter):
        raise TypeError(
            'Expected interpreter to be a PythonInterpreter, got %s' % type(interpreter))

    install_cache = PythonSetup(config).scratch_dir('install_cache', default_name='eggs')
    platforms = get_platforms(
        platforms or config.getlist('python-setup', 'platforms', ['current']))

    distributions = {}
    for platform in platforms:
        translator = Translator.default(
            install_cache=install_cache,
            interpreter=interpreter,
            platform=platform,
            conn_timeout=conn_timeout)
        obtainer = PantsObtainer(
            install_cache=install_cache,
            crawler=crawler_from_config(config, conn_timeout=conn_timeout),
            fetchers=fetchers_from_config(config) or [PyPIFetcher()],
            translators=translator)
        distributions[platform] = resolve(
            requirements=requirements,
            obtainer=obtainer,
            interpreter=interpreter,
            platform=platform)
    return distributions
def create_from_adaptors(
    cls, adaptors: Iterable[TargetAdaptor], python_setup: PythonSetup
) -> "PexInterpreterConstraints":
    """Collect interpreter constraints from each Python target adaptor and merge them."""
    constraint_sets: Set[Iterable[str]] = {
        python_setup.compatibility_or_constraints(adaptor.compatibility)
        for adaptor in adaptors
        if isinstance(adaptor, PythonTargetAdaptor)
    }
    # This will OR within each target and AND across targets.
    merged_constraints = cls.merge_constraint_sets(constraint_sets)
    return PexInterpreterConstraints(merged_constraints)
def create(cls, builder, log=None):
    """Construct a PexBuilderWrapper wired to the global repos/setup subsystems."""
    options = cls.global_instance().get_options()
    log = log or logging.getLogger(__name__)
    return PexBuilderWrapper(
        builder=builder,
        python_repos_subsystem=PythonRepos.global_instance(),
        python_setup_subsystem=PythonSetup.global_instance(),
        setuptools_requirement=PythonRequirement(
            f"setuptools=={options.setuptools_version}"
        ),
        log=log,
    )
def _acceptable_interpreter_constraints(self):
    """Parse the default constraints plus any whitelist into Version objects.

    An explicitly-empty whitelist means "lint everything" and short-circuits to [].
    """
    defaults = PythonSetup.global_instance().interpreter_constraints
    whitelist = self.get_options().interpreter_constraints_whitelist
    if whitelist == []:
        # The user wants to lint everything.
        return []
    if whitelist is None:
        # The user did not pass a whitelist option.
        whitelist = ()
    return [version.parse(v) for v in defaults + whitelist]
def test_expand_interpreter_search_paths(self):
    """<PATH>, <PEXRC>, and <PYENV> tokens expand in place, in order."""
    with environment_as(PATH="/env/path1:/env/path2"):
        with setup_pexrc_with_pex_python_path(["/pexrc/path1:/pexrc/path2"]):
            with fake_pyenv_root(["2.7.14", "3.5.5"]) as (pyenv_root, expected_pyenv_paths):
                raw_paths = ["/foo", "<PATH>", "/bar", "<PEXRC>", "/baz", "<PYENV>", "/qux"]
                expanded = PythonSetup.expand_interpreter_search_paths(
                    raw_paths, pyenv_root_func=lambda: pyenv_root
                )
                expected = [
                    "/foo",
                    "/env/path1",
                    "/env/path2",
                    "/bar",
                    "/pexrc/path1",
                    "/pexrc/path2",
                    "/baz",
                    *expected_pyenv_paths,
                    "/qux",
                ]
                self.assertListEqual(expected, expanded)
def test_expand_interpreter_search_paths(rule_runner: RuleRunner) -> None:
    """Token expansion reads PATH and PYENV_ROOT from the given Environment."""
    local_version = "3.5.5"
    known_versions = ["2.7.14", local_version]
    rule_runner.create_file(".python-version", f"{local_version}\n")
    with setup_pexrc_with_pex_python_path(["/pexrc/path1:/pexrc/path2"]):
        with fake_pyenv_root(known_versions, local_version) as (
            pyenv_root,
            expected_pyenv_paths,
            expected_pyenv_local_paths,
        ):
            env = Environment(
                {"PATH": "/env/path1:/env/path2", "PYENV_ROOT": pyenv_root}
            )
            expanded = PythonSetup.expand_interpreter_search_paths(
                [
                    "/foo",
                    "<PATH>",
                    "/bar",
                    "<PEXRC>",
                    "/baz",
                    "<PYENV>",
                    "<PYENV_LOCAL>",
                    "/qux",
                ],
                env,
            )
            expected = [
                "/foo",
                "/env/path1",
                "/env/path2",
                "/bar",
                "/pexrc/path1",
                "/pexrc/path2",
                "/baz",
                *expected_pyenv_paths,
                *expected_pyenv_local_paths,
                "/qux",
            ]
            assert expected == expanded
def test_get_environment_paths() -> None:
    """PATH entries are split on ':' and returned in order."""
    env = Environment({"PATH": "foo/bar:baz:/qux/quux"})
    assert PythonSetup.get_environment_paths(env) == ["foo/bar", "baz", "/qux/quux"]
def test_get_pyenv_paths(self):
    """Every version under the fake pyenv root is returned."""
    with fake_pyenv_root(["2.7.14", "3.5.5"]) as (pyenv_root, expected_paths):
        self.assertListEqual(
            expected_paths,
            PythonSetup.get_pyenv_paths(pyenv_root_func=lambda: pyenv_root),
        )
def test_get_pex_python_paths(self):
    """PEX_PYTHON_PATH entries configured via pexrc are returned verbatim."""
    pex_paths = ["foo/bar", "baz", "/qux/quux"]
    with setup_pexrc_with_pex_python_path(pex_paths):
        self.assertListEqual(pex_paths, PythonSetup.get_pex_python_paths())
async def find_pex_python(
    python_setup: PythonSetup,
    pex_runtime_env: PexRuntimeEnvironment,
    subprocess_env_vars: SubprocessEnvironmentVars,
    global_options: GlobalOptions,
) -> PexEnvironment:
    """Discover Python interpreters usable for PEX bootstrapping and build a PexEnvironment.

    Searches the interpreter search paths (derived from PATH/HOME/PYENV_ROOT) for each
    configured bootstrap interpreter name, fingerprinting each candidate so pyenv shims
    don't mask interpreter changes.
    """
    # Only these three variables feed interpreter-path expansion below.
    pex_relevant_environment = await Get(
        Environment, EnvironmentRequest(["PATH", "HOME", "PYENV_ROOT"]))
    # PEX files are compatible with bootstrapping via Python 2.7 or Python 3.5+. The bootstrap
    # code will then re-exec itself if the underlying PEX user code needs a more specific python
    # interpreter. As such, we look for many Pythons usable by the PEX bootstrap code here for
    # maximum flexibility.
    all_python_binary_paths = await MultiGet(
        Get(
            BinaryPaths,
            BinaryPathRequest(
                search_path=python_setup.interpreter_search_paths(pex_relevant_environment),
                binary_name=binary_name,
                test=BinaryPathTest(
                    args=[
                        "-c",
                        # N.B.: The following code snippet must be compatible with Python 2.7 and
                        # Python 3.5+.
                        #
                        # We hash the underlying Python interpreter executable to ensure we detect
                        # changes in the real interpreter that might otherwise be masked by Pyenv
                        # shim scripts found on the search path. Naively, just printing out the full
                        # version_info would be enough, but that does not account for supported abi
                        # changes (e.g.: a pyenv switch from a py27mu interpreter to a py27m
                        # interpreter.)
                        #
                        # When hashing, we pick 8192 for efficiency of reads and fingerprint updates
                        # (writes) since it's a common OS buffer size and an even multiple of the
                        # hash block size.
                        dedent(
                            """\
                            import sys
                            major, minor = sys.version_info[:2]
                            if (major, minor) != (2, 7) and not (major == 3 and minor >= 5):
                                sys.exit(1)
                            import hashlib
                            hasher = hashlib.sha256()
                            with open(sys.executable, "rb") as fp:
                                for chunk in iter(lambda: fp.read(8192), b""):
                                    hasher.update(chunk)
                            sys.stdout.write(hasher.hexdigest())
                            """
                        ),
                    ],
                    fingerprint_stdout=False,  # We already emit a usable fingerprint to stdout.
                ),
            ),
        )
        for binary_name in pex_runtime_env.bootstrap_interpreter_names
    )

    def first_python_binary() -> Optional[PythonExecutable]:
        # Preserve the preference order of `bootstrap_interpreter_names`: return the first
        # name for which a binary was found, or None if none were.
        for binary_paths in all_python_binary_paths:
            if binary_paths.first_path:
                return PythonExecutable(
                    path=binary_paths.first_path.path,
                    fingerprint=binary_paths.first_path.fingerprint,
                )
        return None

    return PexEnvironment(
        path=pex_runtime_env.path(pex_relevant_environment),
        interpreter_search_paths=tuple(
            python_setup.interpreter_search_paths(pex_relevant_environment)),
        subprocess_environment_dict=subprocess_env_vars.vars,
        # TODO: This path normalization is duplicated with `engine_initializer.py`. How can we do
        # the normalization only once, via the options system?
        named_caches_dir=Path(
            global_options.options.named_caches_dir).resolve().as_posix(),
        bootstrap_python=first_python_binary(),
    )
async def pex_from_targets(request: PexFromTargetsRequest, python_setup: PythonSetup) -> PexRequest:
    """Translate a PexFromTargetsRequest into a concrete PexRequest.

    Gathers sources, requirements, and interpreter constraints from the transitive closure
    of the requested addresses. If a requirement-constraints file is configured and
    `resolve_all_constraints` applies, the request resolves the full constraints file
    instead of only the exact requirements.
    """
    transitive_targets = await Get(TransitiveTargets, Addresses, request.addresses)
    all_targets = transitive_targets.closure

    input_digests = []
    if request.additional_sources:
        input_digests.append(request.additional_sources)
    if request.include_source_files:
        prepared_sources = await Get(
            StrippedPythonSourceFiles, PythonSourceFilesRequest(all_targets)
        )
        input_digests.append(prepared_sources.stripped_source_files.snapshot.digest)
    merged_input_digest = await Get(Digest, MergeDigests(input_digests))

    interpreter_constraints = PexInterpreterConstraints.create_from_compatibility_fields(
        (
            tgt[PythonInterpreterCompatibility]
            for tgt in all_targets
            if tgt.has_field(PythonInterpreterCompatibility)
        ),
        python_setup,
    )

    exact_reqs = PexRequirements.create_from_requirement_fields(
        (
            tgt[PythonRequirementsField]
            for tgt in all_targets
            if tgt.has_field(PythonRequirementsField)
        ),
        additional_requirements=request.additional_requirements,
    )

    requirements = exact_reqs
    description = request.description

    if python_setup.requirement_constraints:
        # In requirement strings Foo_-Bar.BAZ and foo-bar-baz refer to the same project. We let
        # packaging canonicalize for us.
        # See: https://www.python.org/dev/peps/pep-0503/#normalized-names
        exact_req_projects = {
            canonicalize_project_name(Requirement.parse(req).project_name) for req in exact_reqs
        }
        constraints_file_contents = await Get(
            DigestContents,
            PathGlobs(
                [python_setup.requirement_constraints],
                glob_match_error_behavior=GlobMatchErrorBehavior.error,
                conjunction=GlobExpansionConjunction.all_match,
                description_of_origin="the option `--python-setup-requirement-constraints`",
            ),
        )
        constraints_file_reqs = set(
            parse_requirements(next(iter(constraints_file_contents)).content.decode())
        )
        constraint_file_projects = {
            canonicalize_project_name(req.project_name) for req in constraints_file_reqs
        }
        # Warn (rather than error) when the constraints file misses some requirements.
        unconstrained_projects = exact_req_projects - constraint_file_projects
        if unconstrained_projects:
            logger.warning(
                f"The constraints file {python_setup.requirement_constraints} does not contain "
                f"entries for the following requirements: {', '.join(unconstrained_projects)}"
            )

        if python_setup.resolve_all_constraints == ResolveAllConstraintsOption.ALWAYS or (
            python_setup.resolve_all_constraints == ResolveAllConstraintsOption.NONDEPLOYABLES
            and request.internal_only
        ):
            if unconstrained_projects:
                logger.warning(
                    "Ignoring resolve_all_constraints setting in [python_setup] scope "
                    "because constraints file does not cover all requirements."
                )
            else:
                # Resolve the entire constraints file so the result can be reused across requests.
                requirements = PexRequirements(str(req) for req in constraints_file_reqs)
                description = description or f"Resolving {python_setup.requirement_constraints}"
    elif (
        python_setup.resolve_all_constraints != ResolveAllConstraintsOption.NEVER
        and python_setup.resolve_all_constraints_was_set_explicitly()
    ):
        # An explicit non-NEVER setting requires a constraints file to act on.
        raise ValueError(
            f"[python-setup].resolve_all_constraints is set to "
            f"{python_setup.resolve_all_constraints.value}, so "
            f"[python-setup].requirement_constraints must also be provided."
        )

    return PexRequest(
        output_filename=request.output_filename,
        internal_only=request.internal_only,
        requirements=requirements,
        interpreter_constraints=interpreter_constraints,
        platforms=request.platforms,
        entry_point=request.entry_point,
        sources=merged_input_digest,
        additional_inputs=request.additional_inputs,
        additional_args=request.additional_args,
        description=description,
    )
def test_expand_interpreter_search_paths(rule_runner: RuleRunner) -> None:
    """All special tokens (<PATH>, <PEXRC>, <ASDF*>, <PYENV*>) expand in place, in order."""
    pyenv_local_version = "3.5.5"
    python_versions = ["2.7.14", pyenv_local_version, "3.7.10", "3.9.4", "3.9.5"]
    asdf_home_indices = [0, 1, 2]
    asdf_local_indices = [2, 1, 4]
    asdf_local_versions_str = " ".join(
        materialize_indices(python_versions, asdf_local_indices)
    )
    rule_runner.write_files(
        {
            ".python-version": f"{pyenv_local_version}\n",
            ".tool-versions": (
                "nodejs 16.0.1\n"
                "java current\n"
                f"python {asdf_local_versions_str}\n"
                "rust 1.52.0\n"
            ),
        }
    )
    with setup_pexrc_with_pex_python_path(["/pexrc/path1:/pexrc/path2"]):
        with fake_asdf_root(python_versions, asdf_home_indices, asdf_local_indices) as (
            home_dir,
            asdf_dir,
            expected_asdf_paths,
            expected_asdf_home_paths,
            expected_asdf_local_paths,
        ), fake_pyenv_root(python_versions, pyenv_local_version) as (
            pyenv_root,
            expected_pyenv_paths,
            expected_pyenv_local_paths,
        ):
            env = Environment(
                {
                    "HOME": home_dir,
                    "PATH": "/env/path1:/env/path2",
                    "PYENV_ROOT": pyenv_root,
                    "ASDF_DATA_DIR": asdf_dir,
                }
            )
            raw_paths = [
                "/foo",
                "<PATH>",
                "/bar",
                "<PEXRC>",
                "/baz",
                "<ASDF>",
                "<ASDF_LOCAL>",
                "<PYENV>",
                "<PYENV_LOCAL>",
                "/qux",
            ]
            expanded_paths = PythonSetup.expand_interpreter_search_paths(raw_paths, env)
            expected = [
                "/foo",
                "/env/path1",
                "/env/path2",
                "/bar",
                "/pexrc/path1",
                "/pexrc/path2",
                "/baz",
                *expected_asdf_home_paths,
                *expected_asdf_local_paths,
                *expected_pyenv_paths,
                *expected_pyenv_local_paths,
                "/qux",
            ]
            assert expected == expanded_paths
async def run_setup_pys(
    targets_with_origins: TargetsWithOrigins,
    setup_py_subsystem: SetupPySubsystem,
    console: Console,
    python_setup: PythonSetup,
    distdir: DistDir,
    workspace: Workspace,
    union_membership: UnionMembership,
) -> SetupPy:
    """Run setup.py commands on all exported targets addressed."""
    validate_args(setup_py_subsystem.args)

    # Get all exported targets, ignoring any non-exported targets that happened to be
    # globbed over, but erroring on any explicitly-requested non-exported targets.
    exported_targets: List[ExportedTarget] = []
    explicit_nonexported_targets: List[Target] = []
    for target_with_origin in targets_with_origins:
        tgt = target_with_origin.target
        if _is_exported(tgt):
            exported_targets.append(ExportedTarget(tgt))
        elif isinstance(target_with_origin.origin, SingleAddress):
            explicit_nonexported_targets.append(tgt)
    if explicit_nonexported_targets:
        raise TargetNotExported(
            "Cannot run setup.py on these targets, because they have no `provides=` clause: "
            f'{", ".join(so.address.spec for so in explicit_nonexported_targets)}'
        )

    if setup_py_subsystem.transitive:
        # Expand out to all owners of the entire dep closure.
        transitive_targets = await Get(
            TransitiveTargets, Addresses(et.target.address for et in exported_targets))
        owners = await MultiGet(
            Get(ExportedTarget, OwnedDependency(tgt))
            for tgt in transitive_targets.closure
            if is_ownable_target(tgt, union_membership))
        # FrozenOrderedSet de-dupes owners while preserving discovery order.
        exported_targets = list(FrozenOrderedSet(owners))

    # NOTE(review): `py2` is computed from the originally-addressed targets, not the
    # (possibly transitively expanded) `exported_targets` — confirm this is intended.
    py2 = is_python2(
        python_setup.compatibilities_or_constraints(
            target_with_origin.target.get(PythonInterpreterCompatibility).value
            for target_with_origin in targets_with_origins))
    chroots = await MultiGet(
        Get(SetupPyChroot, SetupPyChrootRequest(exported_target, py2))
        for exported_target in exported_targets)

    # If args were provided, run setup.py with them; Otherwise just dump chroots.
    if setup_py_subsystem.args:
        setup_py_results = await MultiGet(
            Get(
                RunSetupPyResult,
                RunSetupPyRequest(exported_target, chroot, setup_py_subsystem.args),
            )
            for exported_target, chroot in zip(exported_targets, chroots))
        for exported_target, setup_py_result in zip(exported_targets, setup_py_results):
            addr = exported_target.target.address.spec
            console.print_stderr(
                f"Writing dist for {addr} under {distdir.relpath}/.")
            workspace.write_digest(setup_py_result.output,
                                   path_prefix=str(distdir.relpath))
    else:
        # Just dump the chroot.
        for exported_target, chroot in zip(exported_targets, chroots):
            addr = exported_target.target.address.spec
            provides = exported_target.provides
            setup_py_dir = distdir.relpath / f"{provides.name}-{provides.version}"
            console.print_stderr(
                f"Writing setup.py chroot for {addr} to {setup_py_dir}")
            workspace.write_digest(chroot.digest, path_prefix=str(setup_py_dir))

    return SetupPy(0)
def _python_setup(self):
    """Return the global PythonSetup subsystem instance."""
    python_setup = PythonSetup.global_instance()
    return python_setup
def crawler_from_config(config, conn_timeout=None):
    """Build a Crawler whose download cache lives in the python-setup scratch dir."""
    cache_dir = PythonSetup(config).scratch_dir('download_cache', default_name='downloads')
    return Crawler(cache=cache_dir, conn_timeout=conn_timeout)
def _compatible_interpreter(self, unpacked_whls):
    """Return the lowest cached interpreter satisfying the target's compatibility."""
    constraints = PythonSetup.global_instance().compatibility_or_constraints(
        unpacked_whls.compatibility)
    candidates = PythonInterpreterCache.global_instance().setup(filters=constraints)
    return min(candidates)
def test_get_pex_python_paths() -> None:
    """PEX_PYTHON_PATH entries configured via pexrc are returned verbatim."""
    pex_paths = ["foo/bar", "baz", "/qux/quux"]
    with setup_pexrc_with_pex_python_path(pex_paths):
        assert PythonSetup.get_pex_python_paths() == pex_paths
def _cache_dir(config):
    """Return the scratch directory used for the interpreter cache."""
    setup = PythonSetup(config)
    return setup.scratch_dir('interpreter_cache', default_name='interpreters')
async def pex_from_targets(
    request: PexFromTargetsRequest,
    python_setup: PythonSetup,
    constraints_file: MaybeConstraintsFile,
) -> PexRequest:
    """Translate a PexFromTargetsRequest into a concrete PexRequest.

    Gathers sources, requirements, and interpreter constraints from the (direct or
    transitive) dependencies of the requested addresses. When a constraints file is in
    play and `resolve_all_constraints` is enabled, a full "repository" PEX is resolved
    from the constraints plus any URL requirements and attached to the request.
    """
    if request.direct_deps_only:
        targets = await Get(Targets, Addresses(request.addresses))
        direct_deps = await MultiGet(
            Get(Targets, DependenciesRequest(tgt.get(Dependencies))) for tgt in targets
        )
        all_targets = FrozenOrderedSet(itertools.chain(*direct_deps, targets))
    else:
        transitive_targets = await Get(
            TransitiveTargets, TransitiveTargetsRequest(request.addresses)
        )
        all_targets = transitive_targets.closure

    input_digests = []
    if request.additional_sources:
        input_digests.append(request.additional_sources)
    if request.include_source_files:
        prepared_sources = await Get(
            StrippedPythonSourceFiles, PythonSourceFilesRequest(all_targets)
        )
        input_digests.append(prepared_sources.stripped_source_files.snapshot.digest)
    merged_input_digest = await Get(Digest, MergeDigests(input_digests))

    if request.hardcoded_interpreter_constraints:
        interpreter_constraints = request.hardcoded_interpreter_constraints
    else:
        calculated_constraints = PexInterpreterConstraints.create_from_targets(
            all_targets, python_setup
        )
        # If there are no targets, we fall back to the global constraints. This is relevant,
        # for example, when running `./pants repl` with no specs.
        interpreter_constraints = calculated_constraints or PexInterpreterConstraints(
            python_setup.interpreter_constraints
        )

    exact_reqs = PexRequirements.create_from_requirement_fields(
        (
            tgt[PythonRequirementsField]
            for tgt in all_targets
            if tgt.has_field(PythonRequirementsField)
        ),
        additional_requirements=request.additional_requirements,
    )

    requirements = exact_reqs
    repository_pex: Pex | None = None
    description = request.description

    if constraints_file.path:
        constraints_file_contents = await Get(DigestContents, Digest, constraints_file.digest)
        constraints_file_reqs = set(
            parse_requirements_file(
                constraints_file_contents[0].content.decode(),
                rel_path=constraints_file.path,
            )
        )
        # In requirement strings, Foo_-Bar.BAZ and foo-bar-baz refer to the same project. We let
        # packaging canonicalize for us.
        # See: https://www.python.org/dev/peps/pep-0503/#normalized-names
        url_reqs = set()  # E.g., 'foobar@ git+https://github.com/foo/bar.git@branch'
        name_reqs = set()  # E.g., foobar>=1.2.3
        name_req_projects = set()
        for req_str in exact_reqs:
            req = Requirement.parse(req_str)
            if req.url:  # type: ignore[attr-defined]
                url_reqs.add(req)
            else:
                name_reqs.add(req)
                name_req_projects.add(canonicalize_project_name(req.project_name))

        constraint_file_projects = {
            canonicalize_project_name(req.project_name) for req in constraints_file_reqs
        }
        # Constraints files must only contain name reqs, not URL reqs (those are already
        # constrained by their very nature). See https://github.com/pypa/pip/issues/8210.
        unconstrained_projects = name_req_projects - constraint_file_projects
        if unconstrained_projects:
            constraints_descr = (
                f"constraints file {constraints_file.path}"
                if python_setup.requirement_constraints
                else f"_python_constraints target {python_setup.requirement_constraints_target}"
            )
            logger.warning(
                f"The {constraints_descr} does not contain entries for the following "
                f"requirements: {', '.join(unconstrained_projects)}"
            )

        if python_setup.resolve_all_constraints:
            if unconstrained_projects:
                logger.warning(
                    "Ignoring `[python_setup].resolve_all_constraints` option because constraints "
                    "file does not cover all requirements."
                )
            else:
                # To get a full set of requirements we must add the URL requirements to the
                # constraints file, since the latter cannot contain URL requirements.
                # NB: We can only add the URL requirements we know about here, i.e., those that
                # are transitive deps of the targets in play. There may be others in the repo.
                # So we may end up creating a few different repository pexes, each with identical
                # name requirements but different subsets of URL requirements. Fortunately since
                # all these repository pexes will have identical pinned versions of everything,
                # this is not a correctness issue, only a performance one.
                # TODO: Address this as part of providing proper lockfile support. However we
                # generate lockfiles, they must be able to include URL requirements.
                all_constraints = {str(req) for req in (constraints_file_reqs | url_reqs)}
                repository_pex = await Get(
                    Pex,
                    PexRequest(
                        description=f"Resolving {python_setup.requirement_constraints}",
                        output_filename="repository.pex",
                        internal_only=request.internal_only,
                        requirements=PexRequirements(all_constraints),
                        interpreter_constraints=interpreter_constraints,
                        platforms=request.platforms,
                        # Fix: dropped a stray `additional_args=["-vvv"]` debugging leftover that
                        # forced maximal pex verbosity on every repository resolve; no other
                        # PexRequest in this module passes it.
                    ),
                )
    elif (
        python_setup.resolve_all_constraints
        and python_setup.resolve_all_constraints_was_set_explicitly()
    ):
        raise ValueError(
            "[python-setup].resolve_all_constraints is enabled, so either "
            "[python-setup].requirement_constraints or "
            "[python-setup].requirement_constraints_target must also be provided."
        )

    return PexRequest(
        output_filename=request.output_filename,
        internal_only=request.internal_only,
        requirements=requirements,
        interpreter_constraints=interpreter_constraints,
        platforms=request.platforms,
        main=request.main,
        sources=merged_input_digest,
        additional_inputs=request.additional_inputs,
        repository_pex=repository_pex,
        additional_args=request.additional_args,
        description=description,
    )
def test_get_environment_paths(self):
    """PATH entries are split on ':' and returned in order."""
    with environment_as(PATH="foo/bar:baz:/qux/quux"):
        self.assertListEqual(
            ["foo/bar", "baz", "/qux/quux"], PythonSetup.get_environment_paths()
        )
async def pex_from_targets(request: PexFromTargetsRequest, python_setup: PythonSetup) -> PexRequest:
    """Translate a PexFromTargetsRequest into a concrete PexRequest.

    Gathers sources, requirements, and interpreter constraints from the (direct or
    transitive) dependencies of the requested addresses. When requirements exist, a
    "repository" pex may be attached, sourced (in priority order) from a constraints
    file, a per-resolve lockfile, or the global experimental lockfile.
    """
    if request.direct_deps_only:
        targets = await Get(Targets, Addresses(request.addresses))
        direct_deps = await MultiGet(
            Get(Targets, DependenciesRequest(tgt.get(Dependencies))) for tgt in targets)
        all_targets = FrozenOrderedSet(itertools.chain(*direct_deps, targets))
    else:
        transitive_targets = await Get(
            TransitiveTargets, TransitiveTargetsRequest(request.addresses))
        all_targets = transitive_targets.closure

    input_digests = []
    if request.additional_sources:
        input_digests.append(request.additional_sources)
    if request.include_source_files:
        prepared_sources = await Get(StrippedPythonSourceFiles,
                                     PythonSourceFilesRequest(all_targets))
        input_digests.append(
            prepared_sources.stripped_source_files.snapshot.digest)
    merged_input_digest = await Get(Digest, MergeDigests(input_digests))

    if request.hardcoded_interpreter_constraints:
        interpreter_constraints = request.hardcoded_interpreter_constraints
    else:
        calculated_constraints = InterpreterConstraints.create_from_targets(
            all_targets, python_setup)
        # If there are no targets, we fall back to the global constraints. This is relevant,
        # for example, when running `./pants repl` with no specs.
        interpreter_constraints = calculated_constraints or InterpreterConstraints(
            python_setup.interpreter_constraints)

    requirements = PexRequirements.create_from_requirement_fields(
        (tgt[PythonRequirementsField] for tgt in all_targets
         if tgt.has_field(PythonRequirementsField)),
        additional_requirements=request.additional_requirements,
        apply_constraints=True,
    )

    description = request.description

    if requirements:
        repository_pex: Pex | None = None
        if python_setup.requirement_constraints:
            # Delegate constraints handling; the helper returns a pex only when applicable.
            maybe_constraints_repository_pex = await Get(
                _ConstraintsRepositoryPex,
                _ConstraintsRepositoryPexRequest(
                    requirements,
                    request.platforms,
                    interpreter_constraints,
                    request.internal_only,
                    request.additional_lockfile_args,
                ),
            )
            if maybe_constraints_repository_pex.maybe_pex:
                repository_pex = maybe_constraints_repository_pex.maybe_pex
        elif (python_setup.resolve_all_constraints
              and python_setup.resolve_all_constraints_was_set_explicitly()):
            raise ValueError(
                "`[python-setup].resolve_all_constraints` is enabled, so "
                "`[python-setup].requirement_constraints` must also be set.")
        elif request.resolve_and_lockfile:
            # Per-resolve lockfile takes precedence over the global experimental lockfile.
            resolve, lockfile = request.resolve_and_lockfile
            repository_pex = await Get(
                Pex,
                PexRequest(
                    description=f"Installing {lockfile} for the resolve `{resolve}`",
                    output_filename=f"{path_safe(resolve)}_lockfile.pex",
                    internal_only=request.internal_only,
                    requirements=Lockfile(
                        file_path=lockfile,
                        file_path_description_of_origin=(
                            f"the resolve `{resolve}` (from "
                            "`[python-setup].experimental_resolves_to_lockfiles`)"),
                        # TODO(#12314): Hook up lockfile staleness check.
                        lockfile_hex_digest=None,
                        req_strings=None,
                    ),
                    interpreter_constraints=interpreter_constraints,
                    platforms=request.platforms,
                    additional_args=request.additional_lockfile_args,
                ),
            )
        elif python_setup.lockfile:
            repository_pex = await Get(
                Pex,
                PexRequest(
                    description=f"Installing {python_setup.lockfile}",
                    output_filename="lockfile.pex",
                    internal_only=request.internal_only,
                    requirements=Lockfile(
                        file_path=python_setup.lockfile,
                        file_path_description_of_origin=(
                            "the option `[python-setup].experimental_lockfile`"),
                        # TODO(#12314): Hook up lockfile staleness check once multiple lockfiles
                        # are supported.
                        lockfile_hex_digest=None,
                        req_strings=None,
                    ),
                    interpreter_constraints=interpreter_constraints,
                    platforms=request.platforms,
                    additional_args=request.additional_lockfile_args,
                ),
            )
        # Attach whichever repository pex (possibly None) was selected above.
        requirements = dataclasses.replace(requirements,
                                           repository_pex=repository_pex)

    return PexRequest(
        output_filename=request.output_filename,
        internal_only=request.internal_only,
        requirements=requirements,
        interpreter_constraints=interpreter_constraints,
        platforms=request.platforms,
        main=request.main,
        sources=merged_input_digest,
        additional_inputs=request.additional_inputs,
        additional_args=request.additional_args,
        description=description,
    )
async def pex_from_targets(
    request: PexFromTargetsRequest,
    python_setup: PythonSetup,
    constraints_file: MaybeConstraintsFile,
) -> PexRequest:
    """Build a `PexRequest` for the targets named in `request`.

    Resolves the target closure (or only direct dependencies, when
    `request.direct_deps_only` is set), gathers source digests and requirement
    fields, and — when a constraints file is configured — optionally resolves
    the entire constraints list into a `repository_pex` that later PEX builds
    can subset from.

    Raises:
        ValueError: if `[python-setup].resolve_all_constraints` is explicitly
            enabled but no constraints file / constraints target is configured.
    """
    if request.direct_deps_only:
        # Only the addressed targets plus their direct dependencies — no full
        # transitive walk.
        targets = await Get(Targets, Addresses(request.addresses))
        direct_deps = await MultiGet(
            Get(Targets, DependenciesRequest(tgt.get(Dependencies))) for tgt in targets
        )
        all_targets = FrozenOrderedSet(itertools.chain(*direct_deps, targets))
    else:
        transitive_targets = await Get(
            TransitiveTargets, TransitiveTargetsRequest(request.addresses)
        )
        all_targets = transitive_targets.closure

    # Collect the input digests (extra sources plus, optionally, the stripped
    # source files of all targets) and merge them into one digest for the PEX.
    input_digests = []
    if request.additional_sources:
        input_digests.append(request.additional_sources)
    if request.include_source_files:
        prepared_sources = await Get(
            StrippedPythonSourceFiles, PythonSourceFilesRequest(all_targets)
        )
        input_digests.append(prepared_sources.stripped_source_files.snapshot.digest)
    merged_input_digest = await Get(Digest, MergeDigests(input_digests))

    if request.hardcoded_interpreter_constraints:
        interpreter_constraints = request.hardcoded_interpreter_constraints
    else:
        calculated_constraints = PexInterpreterConstraints.create_from_targets(
            all_targets, python_setup
        )
        # If there are no targets, we fall back to the global constraints. This is relevant,
        # for example, when running `./pants repl` with no specs.
        interpreter_constraints = calculated_constraints or PexInterpreterConstraints(
            python_setup.interpreter_constraints
        )

    # Exact requirements declared directly on the targets in scope.
    exact_reqs = PexRequirements.create_from_requirement_fields(
        (
            tgt[PythonRequirementsField]
            for tgt in all_targets
            if tgt.has_field(PythonRequirementsField)
        ),
        additional_requirements=request.additional_requirements,
    )
    requirements = exact_reqs
    repository_pex: Pex | None = None
    description = request.description

    if constraints_file.path:
        constraints_file_contents = await Get(DigestContents, Digest, constraints_file.digest)
        constraints_file_reqs = set(
            parse_requirements_file(
                constraints_file_contents[0].content.decode(),
                rel_path=constraints_file.path,
            )
        )
        # In requirement strings, Foo_-Bar.BAZ and foo-bar-baz refer to the same project. We let
        # packaging canonicalize for us.
        # See: https://www.python.org/dev/peps/pep-0503/#normalized-names
        exact_req_projects = {
            canonicalize_project_name(Requirement.parse(req).project_name) for req in exact_reqs
        }
        constraint_file_projects = {
            canonicalize_project_name(req.project_name) for req in constraints_file_reqs
        }
        # Requirements that the constraints file does not pin at all; these make
        # a full "resolve all constraints" repository PEX unsafe to use.
        unconstrained_projects = exact_req_projects - constraint_file_projects
        if unconstrained_projects:
            constraints_descr = (
                f"constraints file {constraints_file.path}"
                if python_setup.requirement_constraints
                else f"_python_constraints target {python_setup.requirement_constraints_target}"
            )
            logger.warning(
                f"The {constraints_descr} does not contain entries for the following "
                f"requirements: {', '.join(unconstrained_projects)}"
            )

        # Resolve every constraint up front when the option says ALWAYS, or when
        # it says NONDEPLOYABLES and this PEX is internal-only.
        if python_setup.resolve_all_constraints == ResolveAllConstraintsOption.ALWAYS or (
            python_setup.resolve_all_constraints == ResolveAllConstraintsOption.NONDEPLOYABLES
            and request.internal_only
        ):
            if unconstrained_projects:
                # Best-effort: warn and fall back to the exact requirements
                # rather than building an incomplete repository PEX.
                logger.warning(
                    "Ignoring `[python_setup].resolve_all_constraints` option because constraints "
                    "file does not cover all requirements."
                )
            else:
                repository_pex = await Get(
                    Pex,
                    PexRequest(
                        description=f"Resolving {python_setup.requirement_constraints}",
                        output_filename="repository.pex",
                        internal_only=request.internal_only,
                        requirements=PexRequirements(str(req) for req in constraints_file_reqs),
                        interpreter_constraints=interpreter_constraints,
                        platforms=request.platforms,
                    ),
                )
    elif (
        python_setup.resolve_all_constraints != ResolveAllConstraintsOption.NEVER
        and python_setup.resolve_all_constraints_was_set_explicitly()
    ):
        # The user explicitly asked for constraint resolution but gave us no
        # constraints source — fail loudly rather than silently ignore it.
        raise ValueError(
            "[python-setup].resolve_all_constraints is set to "
            f"{python_setup.resolve_all_constraints.value}, so "
            "either [python-setup].requirement_constraints or "
            "[python-setup].requirement_constraints_target must also be provided."
        )

    return PexRequest(
        output_filename=request.output_filename,
        internal_only=request.internal_only,
        requirements=requirements,
        interpreter_constraints=interpreter_constraints,
        platforms=request.platforms,
        main=request.main,
        sources=merged_input_digest,
        additional_inputs=request.additional_inputs,
        repository_pex=repository_pex,
        additional_args=request.additional_args,
        description=description,
    )