async def _pylint_interpreter_constraints(
    first_party_plugins: PylintFirstPartyPlugins,
    python_setup: PythonSetup,
) -> InterpreterConstraints:
    """Compute one set of interpreter constraints that covers every Pylint partition.

    Considers both the targets Pylint will run on and the constraints of any 1st-party
    Pylint plugins.
    """
    # While Pylint will run in partitions, we need a set of constraints that works with every
    # partition. We must also consider any 3rd-party requirements used by 1st-party plugins.
    #
    # This first computes the constraints for each individual target. Then, it ORs all unique
    # resulting interpreter constraints. The net effect is that every possible Python interpreter
    # used will be covered.
    all_tgts = await Get(AllTargets, AllTargetsRequest())
    unique_constraints = {
        InterpreterConstraints.create_from_compatibility_fields(
            (
                tgt[InterpreterConstraintsField],
                *first_party_plugins.interpreter_constraints_fields,
            ),
            python_setup,
        )
        for tgt in all_tgts
        if PylintFieldSet.is_applicable(tgt)
    }
    if not unique_constraints:
        # No applicable targets: fall back to just the 1st-party plugins' own constraints.
        unique_constraints.add(
            InterpreterConstraints.create_from_compatibility_fields(
                first_party_plugins.interpreter_constraints_fields,
                python_setup,
            )
        )
    constraints = InterpreterConstraints(itertools.chain.from_iterable(unique_constraints))
    # Fall back to the global constraints if the OR'd result is empty.
    return constraints or InterpreterConstraints(python_setup.interpreter_constraints)
async def setup_mypy_lockfile(
    _: MyPyLockfileSentinel,
    first_party_plugins: MyPyFirstPartyPlugins,
    mypy: MyPy,
    python_setup: PythonSetup,
) -> PythonLockfileRequest:
    """Generate the dedicated lockfile request for MyPy.

    If MyPy's interpreter constraints were left at their default and the repo's code
    requires Python 3.8+, adopt the code's constraints instead.
    """
    if not mypy.uses_lockfile:
        return PythonLockfileRequest.from_tool(mypy)
    constraints = mypy.interpreter_constraints
    if mypy.options.is_default("interpreter_constraints"):
        # Scan every BUILD target in the repo and compute constraints per applicable closure.
        all_build_targets = await Get(UnexpandedTargets, AddressSpecs([DescendantAddresses("")]))
        all_transitive_targets = await MultiGet(
            Get(TransitiveTargets, TransitiveTargetsRequest([tgt.address]))
            for tgt in all_build_targets
            if MyPyFieldSet.is_applicable(tgt)
        )
        unique_constraints = {
            InterpreterConstraints.create_from_targets(transitive_targets.closure, python_setup)
            for transitive_targets in all_transitive_targets
        }
        code_constraints = InterpreterConstraints(
            itertools.chain.from_iterable(unique_constraints)
        )
        # NOTE(review): only adopts the code's constraints when they require >=3.8 —
        # presumably tied to the interpreters the default MyPy requirement supports; confirm.
        if code_constraints.requires_python38_or_newer(python_setup.interpreter_universe):
            constraints = code_constraints
    return PythonLockfileRequest.from_tool(
        mypy, constraints, extra_requirements=first_party_plugins.requirement_strings
    )
async def setup_pytest_lockfile(
    _: PytestLockfileSentinel, pytest: PyTest, python_setup: PythonSetup
) -> PythonLockfileRequest:
    """Generate the dedicated lockfile request for Pytest, covering all `python_tests`."""
    if not pytest.uses_lockfile:
        return PythonLockfileRequest.from_tool(pytest)
    # Even though we run each python_tests target in isolation, we need a single lockfile that
    # works with them all (and their transitive deps).
    #
    # This first computes the constraints for each individual `python_tests` target
    # (which will AND across each target in the closure). Then, it ORs all unique resulting
    # interpreter constraints. The net effect is that every possible Python interpreter used will
    # be covered.
    all_build_targets = await Get(UnexpandedTargets, AddressSpecs([DescendantAddresses("")]))
    transitive_targets_per_test = await MultiGet(
        Get(TransitiveTargets, TransitiveTargetsRequest([tgt.address]))
        for tgt in all_build_targets
        if PythonTestFieldSet.is_applicable(tgt)
    )
    unique_constraints = {
        InterpreterConstraints.create_from_targets(transitive_targets.closure, python_setup)
        for transitive_targets in transitive_targets_per_test
    }
    constraints = InterpreterConstraints(itertools.chain.from_iterable(unique_constraints))
    return PythonLockfileRequest.from_tool(
        # Fall back to the global constraints if no test declared any.
        pytest, constraints or InterpreterConstraints(python_setup.interpreter_constraints)
    )
async def setup_ipython_lockfile(
    _: IPythonLockfileSentinel, ipython: IPython, python_setup: PythonSetup
) -> GeneratePythonLockfile:
    """Generate the dedicated lockfile request for IPython."""
    if not ipython.uses_lockfile:
        return GeneratePythonLockfile.from_tool(
            ipython, use_pex=python_setup.generate_lockfiles_with_pex
        )
    # IPython is often run against the whole repo (`./pants repl ::`), but it is possible to run
    # on subsets of the codebase with disjoint interpreter constraints, such as
    # `./pants repl py2::` and then `./pants repl py3::`. Still, even with those subsets possible,
    # we need a single lockfile that works with all possible Python interpreters in use.
    #
    # This ORs all unique interpreter constraints. The net effect is that every possible Python
    # interpreter used will be covered.
    all_tgts = await Get(AllTargets, AllTargetsRequest())
    unique_constraints = {
        InterpreterConstraints.create_from_compatibility_fields(
            [tgt[InterpreterConstraintsField]], python_setup
        )
        for tgt in all_tgts
        if tgt.has_field(InterpreterConstraintsField)
    }
    constraints = InterpreterConstraints(itertools.chain.from_iterable(unique_constraints))
    return GeneratePythonLockfile.from_tool(
        ipython,
        # Fall back to the global constraints if no target declared any.
        constraints or InterpreterConstraints(python_setup.interpreter_constraints),
        use_pex=python_setup.generate_lockfiles_with_pex,
    )
async def _flake8_interpreter_constraints(
    first_party_plugins: Flake8FirstPartyPlugins,
    python_setup: PythonSetup,
) -> InterpreterConstraints:
    """Compute one set of interpreter constraints that covers every Flake8 partition."""
    # While Flake8 will run in partitions, we need a set of constraints that works with every
    # partition.
    #
    # This ORs all unique interpreter constraints. The net effect is that every possible Python
    # interpreter used will be covered.
    all_tgts = await Get(AllTargets, AllTargetsRequest())
    unique_constraints = {
        InterpreterConstraints.create_from_compatibility_fields(
            (
                tgt[InterpreterConstraintsField],
                *first_party_plugins.interpreter_constraints_fields,
            ),
            python_setup,
        )
        for tgt in all_tgts
        if Flake8FieldSet.is_applicable(tgt)
    }
    if not unique_constraints:
        # No applicable targets: fall back to just the 1st-party plugins' own constraints.
        unique_constraints.add(
            InterpreterConstraints.create_from_compatibility_fields(
                first_party_plugins.interpreter_constraints_fields,
                python_setup,
            )
        )
    constraints = InterpreterConstraints(itertools.chain.from_iterable(unique_constraints))
    # Fall back to the global constraints if the OR'd result is empty.
    return constraints or InterpreterConstraints(python_setup.interpreter_constraints)
async def package_python_dist(
    field_set: PythonDistributionFieldSet,
    python_setup: PythonSetup,
) -> BuiltPackage:
    """Build a distribution for a `python_distribution` target via setup.py.

    If `setup_py_commands` are configured on the target, run setup.py with them and
    capture the resulting dists; otherwise just materialize the chroot.
    """
    transitive_targets = await Get(
        TransitiveTargets, TransitiveTargetsRequest([field_set.address])
    )
    exported_target = ExportedTarget(transitive_targets.roots[0])
    # Use the closure's constraints, falling back to the global defaults if none are set.
    interpreter_constraints = InterpreterConstraints.create_from_targets(
        transitive_targets.closure, python_setup
    ) or InterpreterConstraints(python_setup.interpreter_constraints)
    chroot = await Get(
        SetupPyChroot,
        SetupPyChrootRequest(exported_target, py2=interpreter_constraints.includes_python2()),
    )
    # If commands were provided, run setup.py with them; Otherwise just dump chroots.
    commands = exported_target.target.get(SetupPyCommandsField).value or ()
    if commands:
        validate_commands(commands)
        setup_py_result = await Get(
            RunSetupPyResult,
            RunSetupPyRequest(exported_target, interpreter_constraints, chroot, commands),
        )
        dist_snapshot = await Get(Snapshot, Digest, setup_py_result.output)
        return BuiltPackage(
            setup_py_result.output,
            tuple(BuiltPackageArtifact(path) for path in dist_snapshot.files),
        )
    else:
        # No commands: expose the chroot itself under a `name-version` directory.
        dirname = f"{chroot.setup_kwargs.name}-{chroot.setup_kwargs.version}"
        rel_chroot = await Get(Digest, AddPrefix(chroot.digest, dirname))
        return BuiltPackage(rel_chroot, (BuiltPackageArtifact(dirname),))
async def setup_mypy_lockfile(
    _: MyPyLockfileSentinel,
    first_party_plugins: MyPyFirstPartyPlugins,
    mypy: MyPy,
    python_setup: PythonSetup,
) -> GeneratePythonLockfile:
    """Generate the dedicated lockfile request for MyPy.

    If MyPy's interpreter constraints were left at their default and the repo's code
    requires Python 3.8+, adopt the code's constraints instead.
    """
    if not mypy.uses_lockfile:
        return GeneratePythonLockfile.from_tool(
            mypy, use_pex=python_setup.generate_lockfiles_with_pex
        )
    constraints = mypy.interpreter_constraints
    if mypy.options.is_default("interpreter_constraints"):
        all_tgts = await Get(AllTargets, AllTargetsRequest())
        all_transitive_targets = await MultiGet(
            Get(TransitiveTargets, TransitiveTargetsRequest([tgt.address]))
            for tgt in all_tgts
            if MyPyFieldSet.is_applicable(tgt)
        )
        unique_constraints = {
            InterpreterConstraints.create_from_targets(transitive_targets.closure, python_setup)
            for transitive_targets in all_transitive_targets
        }
        code_constraints = InterpreterConstraints(
            itertools.chain.from_iterable(unique_constraints)
        )
        # NOTE(review): only adopts the code's constraints when they require >=3.8 —
        # presumably tied to the interpreters the default MyPy requirement supports; confirm.
        if code_constraints.requires_python38_or_newer(python_setup.interpreter_universe):
            constraints = code_constraints
    return GeneratePythonLockfile.from_tool(
        mypy,
        constraints,
        extra_requirements=first_party_plugins.requirement_strings,
        use_pex=python_setup.generate_lockfiles_with_pex,
    )
def test_contains(candidate, target, matches) -> None:
    """`candidate.contains(target)` over the full universe must match the expected bool."""
    universe = ["2.7", "3.5", "3.6", "3.7", "3.8", "3.9", "3.10"]
    outer = InterpreterConstraints(candidate)
    inner = InterpreterConstraints(target)
    assert outer.contains(inner, universe) == matches
def test_interpreter_constraints_minimum_python_version(
    constraints: List[str], expected: str
) -> None:
    """`minimum_python_version` must be independent of the universe's ordering."""
    universe = ["2.7", "3.5", "3.6", "3.7", "3.8", "3.9", "3.10"]
    ics = InterpreterConstraints(constraints)
    # Original order, reversed, and sorted must all agree.
    for ordering in (universe, reversed(universe), sorted(universe)):
        assert ics.minimum_python_version(ordering) == expected
def test_group_field_sets_by_constraints_with_unsorted_inputs() -> None:
    """Grouping must bucket field sets by constraints even when inputs are unsorted."""

    def fs(path: str, constraints: str):
        # Small factory to cut the repetitive Address boilerplate.
        return MockFieldSet.create_for_test(Address(path, target_name="test"), constraints)

    py3_fs = [
        fs("src/python/a_dir/path.py", "==3.6.*"),
        fs("src/python/b_dir/path.py", ">2.7,<3"),
        fs("src/python/c_dir/path.py", "==3.6.*"),
    ]
    ic_36 = InterpreterConstraints([Requirement.parse("CPython==3.6.*")])
    output = InterpreterConstraints.group_field_sets_by_constraints(
        py3_fs,
        python_setup=create_subsystem(PythonSetup, interpreter_constraints=[]),
    )
    # Both ==3.6.* field sets land in the same bucket, in input order.
    assert output[ic_36] == (
        fs("src/python/a_dir/path.py", "==3.6.*"),
        fs("src/python/c_dir/path.py", "==3.6.*"),
    )
async def setup_setuptools_lockfile(
    _: SetuptoolsLockfileSentinel, setuptools: Setuptools, python_setup: PythonSetup
) -> GeneratePythonLockfile:
    """Generate the dedicated lockfile request for Setuptools, covering every
    `python_distribution` closure."""
    if not setuptools.uses_custom_lockfile:
        return GeneratePythonLockfile.from_tool(
            setuptools, use_pex=python_setup.generate_lockfiles_with_pex
        )
    all_tgts = await Get(AllTargets, AllTargetsRequest())
    transitive_targets_per_python_dist = await MultiGet(
        Get(TransitiveTargets, TransitiveTargetsRequest([tgt.address]))
        for tgt in all_tgts
        if PythonDistributionFieldSet.is_applicable(tgt)
    )
    unique_constraints = {
        # Each closure's constraints, falling back to the global defaults when empty.
        InterpreterConstraints.create_from_targets(transitive_targets.closure, python_setup)
        or InterpreterConstraints(python_setup.interpreter_constraints)
        for transitive_targets in transitive_targets_per_python_dist
    }
    constraints = InterpreterConstraints(itertools.chain.from_iterable(unique_constraints))
    return GeneratePythonLockfile.from_tool(
        setuptools,
        constraints or InterpreterConstraints(python_setup.interpreter_constraints),
        use_pex=python_setup.generate_lockfiles_with_pex,
    )
async def setup_bandit_lockfile(
    _: BanditLockfileSentinel, bandit: Bandit, python_setup: PythonSetup
) -> GeneratePythonLockfile:
    """Generate the dedicated lockfile request for Bandit."""
    if not bandit.uses_lockfile:
        return GeneratePythonLockfile.from_tool(
            bandit, use_pex=python_setup.generate_lockfiles_with_pex
        )
    # While Bandit will run in partitions, we need a single lockfile that works with every
    # partition.
    #
    # This ORs all unique interpreter constraints. The net effect is that every possible Python
    # interpreter used will be covered.
    all_tgts = await Get(AllTargets, AllTargetsRequest())
    unique_constraints = {
        InterpreterConstraints.create_from_targets([tgt], python_setup)
        for tgt in all_tgts
        if BanditFieldSet.is_applicable(tgt)
    }
    constraints = InterpreterConstraints(itertools.chain.from_iterable(unique_constraints))
    return GeneratePythonLockfile.from_tool(
        bandit,
        # Fall back to the global constraints if no applicable target declared any.
        constraints or InterpreterConstraints(python_setup.interpreter_constraints),
        use_pex=python_setup.generate_lockfiles_with_pex,
    )
def test_snap_to_minimum(constraints, expected) -> None:
    """`snap_to_minimum` yields the expected constraint, or None when no minimum exists."""
    universe = ["2.7", "3.5", "3.6", "3.7", "3.8", "3.9", "3.10"]
    snapped = InterpreterConstraints(constraints).snap_to_minimum(universe)
    if expected is None:
        assert snapped is None
        return
    assert snapped == InterpreterConstraints([expected])
def test_is_valid_for_v2_only(
    user_ics_iter, expected_ics_iter, user_reqs, expected_reqs, matches
) -> None:
    """V2 lockfile metadata validity must match the expected boolean."""
    metadata = LockfileMetadataV2(
        InterpreterConstraints(expected_ics_iter), reqset(*expected_reqs)
    )
    result = metadata.is_valid_for(
        "", InterpreterConstraints(user_ics_iter), INTERPRETER_UNIVERSE, reqset(*user_reqs)
    )
    assert bool(result) == matches
def test_constraints_are_correctly_sorted_at_construction() -> None:
    """#12578: construction must fully sort the constraints.

    The input list is out of order, and `CPython>=3.6,<4,!=3.7.*` has out-of-order
    component requirements; round-tripping through `str` must yield an equal value.
    """
    inputs = ["CPython==2.7.*", "PyPy", "CPython>=3.6,<4,!=3.7.*"]
    first = InterpreterConstraints(inputs)
    round_tripped = InterpreterConstraints([str(req) for req in first])
    assert first == round_tripped
async def package_python_dist(
    field_set: PythonDistributionFieldSet,
    python_setup: PythonSetup,
) -> BuiltPackage:
    """Build the configured wheel and/or sdist for a `python_distribution` target.

    Raises:
        NoDistTypeSelected: if neither `wheel` nor `sdist` is enabled on the target.
    """
    transitive_targets = await Get(TransitiveTargets, TransitiveTargetsRequest([field_set.address]))
    exported_target = ExportedTarget(transitive_targets.roots[0])
    dist_tgt = exported_target.target
    wheel = dist_tgt.get(WheelField).value
    sdist = dist_tgt.get(SDistField).value
    if not wheel and not sdist:
        raise NoDistTypeSelected(
            f"In order to package {dist_tgt.address.spec} at least one of {WheelField.alias!r} or "
            f"{SDistField.alias!r} must be `True`."
        )
    wheel_config_settings = dist_tgt.get(WheelConfigSettingsField).value or FrozenDict()
    sdist_config_settings = dist_tgt.get(SDistConfigSettingsField).value or FrozenDict()
    # Use the closure's constraints, falling back to the global defaults if none are set.
    interpreter_constraints = InterpreterConstraints.create_from_targets(
        transitive_targets.closure, python_setup
    ) or InterpreterConstraints(python_setup.interpreter_constraints)
    chroot = await Get(
        DistBuildChroot,
        DistBuildChrootRequest(
            exported_target,
            py2=interpreter_constraints.includes_python2(),
        ),
    )
    # We prefix the entire chroot, and run with this prefix as the cwd, so that we can capture
    # any changes setup made within it without also capturing other artifacts of the pex
    # process invocation.
    chroot_prefix = "chroot"
    working_directory = os.path.join(chroot_prefix, chroot.working_directory)
    prefixed_chroot = await Get(Digest, AddPrefix(chroot.digest, chroot_prefix))
    build_system = await Get(BuildSystem, BuildSystemRequest(prefixed_chroot, working_directory))
    setup_py_result = await Get(
        DistBuildResult,
        DistBuildRequest(
            build_system=build_system,
            interpreter_constraints=interpreter_constraints,
            build_wheel=wheel,
            build_sdist=sdist,
            input=prefixed_chroot,
            working_directory=working_directory,
            target_address_spec=exported_target.target.address.spec,
            wheel_config_settings=wheel_config_settings,
            sdist_config_settings=sdist_config_settings,
        ),
    )
    dist_snapshot = await Get(Snapshot, Digest, setup_py_result.output)
    return BuiltPackage(
        setup_py_result.output,
        tuple(BuiltPackageArtifact(path) for path in dist_snapshot.files),
    )
def test_multiple_resolves() -> None:
    """Each requested resolve yields its own lockfile request with its own constraints."""
    rule_runner = RuleRunner(
        rules=[
            setup_user_lockfile_requests,
            SubsystemRule(PythonSetup),
            QueryRule(UserGenerateLockfiles, [RequestedPythonUserResolveNames]),
        ],
        target_types=[PythonRequirementTarget],
    )
    rule_runner.write_files(
        {
            "BUILD": dedent(
                """\
                python_requirement(
                    name='a',
                    requirements=['a'],
                    resolve='a',
                )
                python_requirement(
                    name='b',
                    requirements=['b'],
                    resolve='b',
                )
                """
            ),
        }
    )
    rule_runner.set_options(
        [
            "--python-resolves={'a': 'a.lock', 'b': 'b.lock'}",
            # Override interpreter constraints for 'b', but use default for 'a'.
            "--python-resolves-to-interpreter-constraints={'b': ['==3.7.*']}",
            "--python-enable-resolves",
            "--python-lockfile-generator=pex",
        ],
        env_inherit=PYTHON_BOOTSTRAP_ENV,
    )
    result = rule_runner.request(
        UserGenerateLockfiles, [RequestedPythonUserResolveNames(["a", "b"])]
    )
    assert set(result) == {
        GeneratePythonLockfile(
            requirements=FrozenOrderedSet(["a"]),
            interpreter_constraints=InterpreterConstraints(
                PythonSetup.default_interpreter_constraints
            ),
            resolve_name="a",
            lockfile_dest="a.lock",
            use_pex=True,
        ),
        GeneratePythonLockfile(
            requirements=FrozenOrderedSet(["b"]),
            interpreter_constraints=InterpreterConstraints(["==3.7.*"]),
            resolve_name="b",
            lockfile_dest="b.lock",
            use_pex=True,
        ),
    }
def test_is_valid_for_v1(user_digest, expected_digest, user_ic, expected_ic, matches) -> None:
    """V1 lockfile metadata validity must match the expected boolean."""
    metadata: LockfileMetadata = LockfileMetadataV1(
        InterpreterConstraints(expected_ic), expected_digest
    )
    result = metadata.is_valid_for(
        user_digest,
        InterpreterConstraints(user_ic),
        INTERPRETER_UNIVERSE,
        set(),
    )
    assert bool(result) == matches
async def export_venv(
    request: ExportedVenvRequest, python_setup: PythonSetup, pex_env: PexEnvironment
) -> ExportableData:
    """Export a virtualenv for the requested targets, symlinked by full Python version.

    Raises:
        ExportError: if the targets' combined constraints admit no interpreter.
    """
    # Pick a single interpreter for the venv.
    interpreter_constraints = InterpreterConstraints.create_from_targets(
        request.targets, python_setup
    )
    if not interpreter_constraints:
        # If there were no targets that defined any constraints, fall back to the global ones.
        interpreter_constraints = InterpreterConstraints(python_setup.interpreter_constraints)
    min_interpreter = interpreter_constraints.snap_to_minimum(python_setup.interpreter_universe)
    if not min_interpreter:
        raise ExportError(
            "The following interpreter constraints were computed for all the targets for which "
            f"export was requested: {interpreter_constraints}. There is no python interpreter "
            "compatible with these constraints. Please restrict the target set to one that shares "
            "a compatible interpreter."
        )
    venv_pex = await Get(
        VenvPex,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(
            (tgt.address for tgt in request.targets),
            internal_only=True,
            hardcoded_interpreter_constraints=min_interpreter,
        ),
    )
    complete_pex_env = pex_env.in_workspace()
    venv_abspath = os.path.join(complete_pex_env.pex_root, venv_pex.venv_rel_dir)
    # Run the venv_pex to get the full python version (including patch #), so we
    # can use it in the symlink name.
    res = await Get(
        ProcessResult,
        VenvPexProcess(
            venv_pex=venv_pex,
            description="Create virtualenv",
            argv=["-c", "import sys; print('.'.join(str(x) for x in sys.version_info[0:3]))"],
            input_digest=venv_pex.digest,
        ),
    )
    py_version = res.stdout.strip().decode()
    return ExportableData(
        f"virtualenv for {min_interpreter}",
        os.path.join("python", "virtualenv"),
        symlinks=[Symlink(venv_abspath, py_version)],
    )
async def setup_pylint_lockfile(
    _: PylintLockfileSentinel,
    first_party_plugins: PylintFirstPartyPlugins,
    pylint: Pylint,
    python_setup: PythonSetup,
) -> GeneratePythonLockfile:
    """Generate the dedicated lockfile request for Pylint, covering every partition and
    any 1st-party plugins."""
    if not pylint.uses_lockfile:
        return GeneratePythonLockfile.from_tool(
            pylint, use_pex=python_setup.generate_lockfiles_with_pex
        )
    # While Pylint will run in partitions, we need a single lockfile that works with every
    # partition. We must also consider any 3rd-party requirements used by 1st-party plugins.
    #
    # This first computes the constraints for each individual target, including its direct
    # dependencies (which will AND across each target in the closure). Then, it ORs all unique
    # resulting interpreter constraints. The net effect is that every possible Python interpreter
    # used will be covered.
    all_tgts = await Get(AllTargets, AllTargetsRequest())
    relevant_targets = tuple(tgt for tgt in all_tgts if PylintFieldSet.is_applicable(tgt))
    direct_deps_per_target = await MultiGet(
        Get(Targets, DependenciesRequest(tgt.get(Dependencies))) for tgt in relevant_targets
    )
    unique_constraints = set()
    for tgt, direct_deps in zip(relevant_targets, direct_deps_per_target):
        # AND the target's own constraints with those of its direct dependencies.
        constraints_fields = (
            t[InterpreterConstraintsField]
            for t in (tgt, *direct_deps)
            if t.has_field(InterpreterConstraintsField)
        )
        unique_constraints.add(
            InterpreterConstraints.create_from_compatibility_fields(
                (*constraints_fields, *first_party_plugins.interpreter_constraints_fields),
                python_setup,
            )
        )
    if not unique_constraints:
        # No applicable targets: fall back to just the 1st-party plugins' own constraints.
        unique_constraints.add(
            InterpreterConstraints.create_from_compatibility_fields(
                first_party_plugins.interpreter_constraints_fields,
                python_setup,
            )
        )
    constraints = InterpreterConstraints(itertools.chain.from_iterable(unique_constraints))
    return GeneratePythonLockfile.from_tool(
        pylint,
        constraints or InterpreterConstraints(python_setup.interpreter_constraints),
        extra_requirements=first_party_plugins.requirement_strings,
        use_pex=python_setup.generate_lockfiles_with_pex,
    )
def test_is_valid_for_v1(user_digest, expected_digest, user_ic, expected_ic, matches) -> None:
    """V1 Python lockfile metadata validity must match the expected boolean."""
    metadata: PythonLockfileMetadata = PythonLockfileMetadataV1(
        InterpreterConstraints(expected_ic), expected_digest
    )
    result = metadata.is_valid_for(
        is_tool=True,
        expected_invalidation_digest=user_digest,
        user_interpreter_constraints=InterpreterConstraints(user_ic),
        interpreter_universe=INTERPRETER_UNIVERSE,
        user_requirements=set(),
    )
    assert bool(result) == matches
def assert_lockfile_request(
    build_file: str,
    expected_ics: list[str],
    *,
    extra_expected_requirements: list[str] | None = None,
    extra_args: list[str] | None = None,
) -> None:
    """Request MyPy's lockfile and assert its constraints and requirements.

    NOTE: relies on `rule_runner` and `global_constraint` from the enclosing test's scope.
    """
    rule_runner.write_files({"project/BUILD": build_file, "project/f.py": ""})
    rule_runner.set_options(
        ["--mypy-lockfile=lockfile.txt", *(extra_args or [])],
        env={"PANTS_PYTHON_INTERPRETER_CONSTRAINTS": f"['{global_constraint}']"},
        env_inherit={"PATH", "PYENV_ROOT", "HOME"},
    )
    lockfile_request = rule_runner.request(PythonLockfileRequest, [MyPyLockfileSentinel()])
    assert lockfile_request.interpreter_constraints == InterpreterConstraints(expected_ics)
    # MyPy's own requirements are always present; extras are appended.
    assert lockfile_request.requirements == FrozenOrderedSet(
        [
            MyPy.default_version,
            *MyPy.default_extra_requirements,
            *(extra_expected_requirements or ()),
        ]
    )
def maybe_assert_configured(*, has_config: bool, args: list[str], warning: str = "") -> None:
    """Assert MyPy config-discovery results and python-version autoset behavior.

    NOTE: relies on `rule_runner`, `config_digest`, and `caplog` from the enclosing
    test's scope.
    """
    rule_runner.set_options(
        [f"--mypy-args={repr(args)}", f"--mypy-config-discovery={has_config}"],
        env_inherit={"PATH", "PYENV_ROOT", "HOME"},
    )
    result = rule_runner.request(MyPyConfigFile, [])
    assert result.digest == (config_digest if has_config else EMPTY_DIGEST)
    # The python version counts as "configured" if a config file was found or args were passed.
    should_be_configured = has_config or bool(args)
    assert result._python_version_configured == should_be_configured
    autoset_python_version = result.python_version_to_autoset(
        InterpreterConstraints([">=3.6"]), ["2.7", "3.6", "3.7", "3.8"]
    )
    if should_be_configured:
        # Already configured, so nothing should be autoset.
        assert autoset_python_version is None
    else:
        assert autoset_python_version == "3.6"
    if should_be_configured:
        # A warning about the existing configuration is expected in the log.
        assert caplog.records
        assert warning in caplog.text
        caplog.clear()
    else:
        assert not caplog.records
async def setup_user_lockfile_requests(
    requested: RequestedPythonUserResolveNames,
    all_targets: AllTargets,
    python_setup: PythonSetup,
) -> UserGenerateLockfiles:
    """Create one lockfile request per requested user resolve."""
    if not (python_setup.enable_resolves and python_setup.resolves_generate_lockfiles):
        return UserGenerateLockfiles()
    # Group every `python_requirement`'s requirements field by its resolve.
    resolve_to_requirements_fields = defaultdict(set)
    for tgt in all_targets:
        if not tgt.has_fields((PythonRequirementResolveField, PythonRequirementsField)):
            continue
        resolve = tgt[PythonRequirementResolveField].normalized_value(python_setup)
        resolve_to_requirements_fields[resolve].add(tgt[PythonRequirementsField])
    return UserGenerateLockfiles(
        GeneratePythonLockfile(
            requirements=PexRequirements.create_from_requirement_fields(
                resolve_to_requirements_fields[resolve],
                constraints_strings=(),
            ).req_strings,
            # Per-resolve interpreter constraints if configured, else the global defaults.
            interpreter_constraints=InterpreterConstraints(
                python_setup.resolves_to_interpreter_constraints.get(
                    resolve, python_setup.interpreter_constraints
                )
            ),
            resolve_name=resolve,
            lockfile_dest=python_setup.resolves[resolve],
            use_pex=python_setup.generate_lockfiles_with_pex,
        )
        for resolve in requested
    )
def test_add_header_to_lockfile() -> None:
    """The header must embed the regenerate command and the metadata JSON block."""
    input_lockfile = b"""dave==3.1.4 \\
    --hash=sha256:cab0c0c0c0c0dadacafec0c0c0c0cafedadabeefc0c0c0c0feedbeeffeedbeef \\
    """
    expected = b"""
# This lockfile was autogenerated by Pants. To regenerate, run:
#
#    ./pants lock
#
# --- BEGIN PANTS LOCKFILE METADATA: DO NOT EDIT OR REMOVE ---
# {
#   "version": 2,
#   "valid_for_interpreter_constraints": [
#     "CPython>=3.7"
#   ],
#   "generated_with_requirements": [
#     "ansicolors==0.1.0"
#   ]
# }
# --- END PANTS LOCKFILE METADATA ---
dave==3.1.4 \\
    --hash=sha256:cab0c0c0c0c0dadacafec0c0c0c0cafedadabeefc0c0c0c0feedbeeffeedbeef \\
    """

    def line_by_line(b: bytes) -> list[bytes]:
        # Normalize: strip each line and drop blanks, so layout differences don't matter.
        return [i for i in (j.strip() for j in b.splitlines()) if i]

    metadata = LockfileMetadata.new(InterpreterConstraints([">=3.7"]), reqset("ansicolors==0.1.0"))
    result = metadata.add_header_to_lockfile(input_lockfile, regenerate_command="./pants lock")
    assert line_by_line(result) == line_by_line(expected)
def assert_imports_parsed(
    rule_runner: RuleRunner,
    content: str | None,
    *,
    expected: list[str],
    filename: str = "project/foo.py",
    constraints: str = ">=3.6",
    string_imports: bool = True,
) -> None:
    """Parse imports from `content` and assert they equal `expected` (sorted)."""
    rule_runner.set_options([], env_inherit={"PATH", "PYENV_ROOT", "HOME"})
    files = {"project/BUILD": "python_library(sources=['**/*.py'])"}
    if content is not None:
        # `content is None` simulates a target with no source file present.
        files[filename] = content
    rule_runner.write_files(files)  # type: ignore[arg-type]
    tgt = rule_runner.get_target(Address("project"))
    imports = rule_runner.request(
        ParsedPythonImports,
        [
            ParsePythonImportsRequest(
                tgt[PythonSources],
                InterpreterConstraints([constraints]),
                string_imports=string_imports,
            )
        ],
    )
    assert list(imports) == sorted(expected)
def from_tool(
    cls,
    subsystem: PythonToolRequirementsBase,
    interpreter_constraints: InterpreterConstraints | None = None,
    *,
    extra_requirements: Iterable[str] = (),
) -> PythonLockfileRequest:
    """Create a request for a dedicated lockfile for the tool.

    If the tool determines its interpreter constraints by using the constraints of user code,
    rather than the option `--interpreter-constraints`, you must pass the arg
    `interpreter_constraints`.
    """
    if not subsystem.uses_lockfile:
        # Tool opted out of a lockfile: empty requirements and constraints.
        return cls(
            FrozenOrderedSet(),
            InterpreterConstraints(),
            resolve_name=subsystem.options_scope,
            lockfile_dest=subsystem.lockfile,
        )
    return cls(
        requirements=FrozenOrderedSet((*subsystem.all_requirements, *extra_requirements)),
        interpreter_constraints=(
            interpreter_constraints
            if interpreter_constraints is not None
            else subsystem.interpreter_constraints
        ),
        resolve_name=subsystem.options_scope,
        lockfile_dest=subsystem.lockfile,
    )
def create_pex_and_get_pex_info(
    rule_runner: RuleRunner,
    *,
    pex_type: type[Pex | VenvPex] = Pex,
    requirements: PexRequirements | Lockfile | LockfileContent = PexRequirements(),
    main: MainSpecification | None = None,
    interpreter_constraints: InterpreterConstraints = InterpreterConstraints(),
    platforms: PexPlatforms = PexPlatforms(),
    sources: Digest | None = None,
    additional_pants_args: tuple[str, ...] = (),
    additional_pex_args: tuple[str, ...] = (),
    internal_only: bool = True,
) -> Mapping[str, Any]:
    """Build a PEX via `create_pex_and_get_all_data` and return only its PEX-INFO mapping."""
    all_data = create_pex_and_get_all_data(
        rule_runner,
        pex_type=pex_type,
        requirements=requirements,
        main=main,
        interpreter_constraints=interpreter_constraints,
        platforms=platforms,
        sources=sources,
        additional_pants_args=additional_pants_args,
        additional_pex_args=additional_pex_args,
        internal_only=internal_only,
    )
    return all_data.info
def test_lockfile_validation(rule_runner: RuleRunner) -> None:
    """Check that we properly load and validate lockfile metadata for both types of locks.

    Note that we don't exhaustively test every source of lockfile failure nor the different
    options for `--invalid-lockfile-behavior`, as those are already tested in
    pex_requirements_test.py.
    """
    # We create a lockfile that claims it works with no requirements. It should fail when we try
    # to build a PEX with a requirement.
    lock_content = PythonLockfileMetadata.new(InterpreterConstraints(), set()).add_header_to_lockfile(
        b"", regenerate_command="regen", delimeter="#"
    )
    rule_runner.write_files({"lock.txt": lock_content.decode()})
    lockfile = Lockfile(
        "lock.txt",
        file_path_description_of_origin="a test",
        resolve_name="a",
        # BUG FIX: `FrozenOrderedSet("ansicolors")` iterates the string, producing a set of
        # single characters. Wrap in a list so the set holds the requirement string itself.
        req_strings=FrozenOrderedSet(["ansicolors"]),
    )
    with engine_error(InvalidLockfileError):
        create_pex_and_get_all_data(rule_runner, requirements=lockfile)
    lockfile_content = LockfileContent(
        FileContent("lock.txt", lock_content),
        resolve_name="a",
        req_strings=FrozenOrderedSet(["ansicolors"]),
    )
    with engine_error(InvalidLockfileError):
        create_pex_and_get_all_data(rule_runner, requirements=lockfile_content)
async def flake8_lint(
    request: Flake8Request,
    flake8: Flake8,
    python_setup: PythonSetup,
    first_party_plugins: Flake8FirstPartyPlugins,
) -> LintResults:
    """Run Flake8 on the request's field sets, partitioned by interpreter constraints."""
    if flake8.skip:
        return LintResults([], linter_name=request.name)
    # NB: Flake8 output depends upon which Python interpreter version it's run with
    # (http://flake8.pycqa.org/en/latest/user/invocation.html). We batch targets by their
    # constraints to ensure, for example, that all Python 2 targets run together and all Python 3
    # targets run together.
    results = defaultdict(set)
    for fs in request.field_sets:
        constraints = InterpreterConstraints.create_from_compatibility_fields(
            [fs.interpreter_constraints, *first_party_plugins.interpreter_constraints_fields],
            python_setup,
        )
        results[constraints].add(fs)
    # Sort field sets and partitions for deterministic output ordering.
    partitioned_results = await MultiGet(
        Get(
            LintResult,
            Flake8Partition(tuple(sorted(field_sets, key=lambda fs: fs.address)), constraints),
        )
        for constraints, field_sets in sorted(results.items())
    )
    return LintResults(partitioned_results, linter_name=request.name)