def test_group_field_sets_by_constraints_with_unsorted_inputs() -> None:
    """Grouping must work even when inputs are not pre-sorted by constraint."""

    def make(path: str, constraint: str) -> MockFieldSet:
        # Small factory to cut down on repetition below.
        return MockFieldSet.create_for_test(Address(path, target_name="test"), constraint)

    field_sets = [
        make("src/python/a_dir/path.py", "==3.6.*"),
        make("src/python/b_dir/path.py", ">2.7,<3"),
        make("src/python/c_dir/path.py", "==3.6.*"),
    ]
    ic_36 = PexInterpreterConstraints([Requirement.parse("CPython==3.6.*")])
    output = PexInterpreterConstraints.group_field_sets_by_constraints(
        field_sets,
        python_setup=create_subsystem(PythonSetup, interpreter_constraints=[]),
    )
    # The two ==3.6.* field sets end up in the same group, in address order.
    assert output[ic_36] == (
        make("src/python/a_dir/path.py", "==3.6.*"),
        make("src/python/c_dir/path.py", "==3.6.*"),
    )
async def mypy_typecheck(
    request: MyPyRequest, mypy: MyPy, python_setup: PythonSetup
) -> TypecheckResults:
    """Partition the requested field sets by interpreter constraints and run MyPy on each.

    Returns one TypecheckResult per partition, or an empty result set when MyPy is skipped.
    """
    if mypy.skip:
        return TypecheckResults([], typechecker_name="MyPy")

    # We batch targets by their interpreter constraints to ensure, for example, that all Python 2
    # targets run together and all Python 3 targets run together. We can only do this by setting
    # the `--python-version` option, but we allow the user to set it as a safety valve. We warn if
    # they've set the option.
    config_files = await Get(ConfigFiles, ConfigFilesRequest, mypy.config_request)
    config_content = await Get(DigestContents, Digest, config_files.snapshot.digest)
    # `next(iter(...), None)` tolerates the no-config-file case.
    python_version_configured = check_and_warn_if_python_version_configured(
        config=next(iter(config_content), None), args=mypy.args
    )

    # When determining how to batch by interpreter constraints, we must consider the entire
    # transitive closure to get the final resulting constraints.
    # TODO(#10863): Improve the performance of this.
    transitive_targets_per_field_set = await MultiGet(
        Get(TransitiveTargets, TransitiveTargetsRequest([field_set.address]))
        for field_set in request.field_sets
    )

    # Group each closure under its computed constraints; fall back to MyPy's own
    # configured constraints when the targets declare none.
    interpreter_constraints_to_transitive_targets = defaultdict(set)
    for transitive_targets in transitive_targets_per_field_set:
        interpreter_constraints = PexInterpreterConstraints.create_from_targets(
            transitive_targets.closure, python_setup
        ) or PexInterpreterConstraints(mypy.interpreter_constraints)
        interpreter_constraints_to_transitive_targets[interpreter_constraints].add(
            transitive_targets
        )

    # Build one MyPyPartition per distinct constraint set, merging the roots and
    # closures of every closure that mapped to it. Sorting keeps partitioning stable.
    partitions = []
    for interpreter_constraints, all_transitive_targets in sorted(
        interpreter_constraints_to_transitive_targets.items()
    ):
        combined_roots: OrderedSet[Target] = OrderedSet()
        combined_closure: OrderedSet[Target] = OrderedSet()
        for transitive_targets in all_transitive_targets:
            combined_roots.update(transitive_targets.roots)
            combined_closure.update(transitive_targets.closure)
        partitions.append(
            MyPyPartition(
                FrozenOrderedSet(combined_roots),
                FrozenOrderedSet(combined_closure),
                interpreter_constraints,
                python_version_already_configured=python_version_configured,
            )
        )

    partitioned_results = await MultiGet(
        Get(TypecheckResult, MyPyPartition, partition) for partition in partitions
    )
    return TypecheckResults(partitioned_results, typechecker_name="MyPy")
async def infer_python_dependencies_via_imports(
    request: InferPythonImportDependencies,
    python_infer_subsystem: PythonInferSubsystem,
    python_setup: PythonSetup,
) -> InferredDependencies:
    """Infer a target's dependencies by parsing the imports in its sources."""
    if not python_infer_subsystem.imports:
        # Inference disabled via options: report nothing, and signal that sibling
        # dependencies cannot be inferred either.
        return InferredDependencies([], sibling_dependencies_inferrable=False)

    wrapped_tgt = await Get(WrappedTarget, Address, request.sources_field.address)
    detected_imports = await Get(
        ParsedPythonImports,
        ParsePythonImportsRequest(
            request.sources_field,
            PexInterpreterConstraints.create_from_targets([wrapped_tgt.target], python_setup),
        ),
    )

    if python_infer_subsystem.string_imports:
        candidate_imports = detected_imports.all_imports
    else:
        candidate_imports = detected_imports.explicit_imports

    # Resolve owners for every non-stdlib import in parallel.
    owners_per_import = await MultiGet(
        Get(PythonModuleOwners, PythonModule(module_name))
        for module_name in candidate_imports
        if module_name not in combined_stdlib
    )
    inferred_addresses = sorted(set(itertools.chain.from_iterable(owners_per_import)))
    return InferredDependencies(inferred_addresses, sibling_dependencies_inferrable=True)
def create_pex_and_get_pex_info(
    rule_runner: RuleRunner,
    *,
    requirements=PexRequirements(),
    entry_point=None,
    interpreter_constraints=PexInterpreterConstraints(),
    platforms=PexPlatforms(),
    sources: Optional[Digest] = None,
    additional_pants_args: Tuple[str, ...] = (),
    additional_pex_args: Tuple[str, ...] = (),
    internal_only: bool = True,
) -> Dict:
    """Build a PEX and return just the parsed PEX-INFO metadata."""
    all_data = create_pex_and_get_all_data(
        rule_runner,
        requirements=requirements,
        entry_point=entry_point,
        interpreter_constraints=interpreter_constraints,
        platforms=platforms,
        sources=sources,
        additional_pants_args=additional_pants_args,
        additional_pex_args=additional_pex_args,
        internal_only=internal_only,
    )
    return cast(Dict, all_data["info"])
def create_pex_and_get_pex_info(
    rule_runner: RuleRunner,
    *,
    pex_type: type[Pex | VenvPex] = Pex,
    requirements: PexRequirements = PexRequirements(),
    main: MainSpecification | None = None,
    interpreter_constraints: PexInterpreterConstraints = PexInterpreterConstraints(),
    platforms: PexPlatforms = PexPlatforms(),
    sources: Digest | None = None,
    additional_pants_args: Tuple[str, ...] = (),
    additional_pex_args: Tuple[str, ...] = (),
    internal_only: bool = True,
) -> Dict:
    """Build a PEX (or VenvPex) and return just the parsed PEX-INFO metadata."""
    all_data = create_pex_and_get_all_data(
        rule_runner,
        pex_type=pex_type,
        requirements=requirements,
        main=main,
        interpreter_constraints=interpreter_constraints,
        platforms=platforms,
        sources=sources,
        additional_pants_args=additional_pants_args,
        additional_pex_args=additional_pex_args,
        internal_only=internal_only,
    )
    return cast(Dict, all_data["info"])
async def run_setup_py(req: RunSetupPyRequest, setuptools: Setuptools) -> RunSetupPyResult: """Run a setup.py command on a single exported target.""" # Note that this pex has no entrypoint. We use it to run our generated setup.py, which # in turn imports from and invokes setuptools. setuptools_pex = await Get( VenvPex, PexRequest( output_filename="setuptools.pex", internal_only=True, requirements=PexRequirements(setuptools.all_requirements), interpreter_constraints=( req.interpreter_constraints if setuptools.options.is_default("interpreter_constraints") else PexInterpreterConstraints(setuptools.interpreter_constraints) ), ), ) # The setuptools dist dir, created by it under the chroot (not to be confused with # pants's own dist dir, at the buildroot). dist_dir = "dist/" result = await Get( ProcessResult, VenvPexProcess( setuptools_pex, argv=("setup.py", *req.args), input_digest=req.chroot.digest, # setuptools commands that create dists write them to the distdir. # TODO: Could there be other useful files to capture? output_directories=(dist_dir,), description=f"Run setuptools for {req.exported_target.target.address}", level=LogLevel.DEBUG, ), ) output_digest = await Get(Digest, RemovePrefix(result.output_digest, dist_dir)) return RunSetupPyResult(output_digest)
def assert_imports_parsed(
    rule_runner: RuleRunner,
    content: str | None,
    *,
    expected: list[str],
    filename: str = "project/foo.py",
    constraints: str = ">=3.6",
    string_imports: bool = True,
) -> None:
    """Parse imports from `content` and assert they equal `expected` (sorted)."""
    rule_runner.set_options([], env_inherit={"PATH", "PYENV_ROOT", "HOME"})
    files = {"project/BUILD": "python_library(sources=['**/*.py'])"}
    if content is not None:
        files[filename] = content
    rule_runner.write_files(files)  # type: ignore[arg-type]
    target = rule_runner.get_target(Address("project"))
    parsed = rule_runner.request(
        ParsedPythonImports,
        [
            ParsePythonImportsRequest(
                target[PythonSources],
                PexInterpreterConstraints([constraints]),
                string_imports=string_imports,
            )
        ],
    )
    assert list(parsed) == sorted(expected)
def test_group_field_sets_by_constraints() -> None:
    """Field sets should be grouped under their parsed interpreter constraints."""
    py2_fs = MockFieldSet.create_for_test("//:py2", ">=2.7,<3")
    py3_fs = [
        MockFieldSet.create_for_test("//:py3", "==3.6.*"),
        MockFieldSet.create_for_test("//:py3_second", "==3.6.*"),
    ]
    no_constraints_fs = MockFieldSet.create_for_test("//:no_constraints", None)
    grouped = PexInterpreterConstraints.group_field_sets_by_constraints(
        [py2_fs, *py3_fs, no_constraints_fs],
        python_setup=create_subsystem(PythonSetup, interpreter_constraints=[]),
    )
    expected = FrozenDict(
        {
            PexInterpreterConstraints(): (no_constraints_fs,),
            PexInterpreterConstraints(["CPython>=2.7,<3"]): (py2_fs,),
            PexInterpreterConstraints(["CPython==3.6.*"]): tuple(py3_fs),
        }
    )
    assert grouped == expected
async def package_python_dist(
    field_set: PythonDistributionFieldSet,
    python_setup: PythonSetup,
) -> BuiltPackage:
    """Package a python_distribution target.

    If the target declares setup.py commands, run them and capture the dists they
    produce; otherwise return the generated setup.py chroot itself as the package.
    """
    transitive_targets = await Get(TransitiveTargets, TransitiveTargetsRequest([field_set.address]))
    exported_target = ExportedTarget(transitive_targets.roots[0])
    # Constraints come from the whole closure so the chroot can be flagged as py2 when needed.
    interpreter_constraints = PexInterpreterConstraints.create_from_targets(
        transitive_targets.closure, python_setup
    )
    chroot = await Get(
        SetupPyChroot,
        SetupPyChrootRequest(exported_target, py2=interpreter_constraints.includes_python2()),
    )

    # If commands were provided, run setup.py with them; Otherwise just dump chroots.
    commands = exported_target.target.get(SetupPyCommandsField).value or ()
    if commands:
        validate_commands(commands)
        setup_py_result = await Get(
            RunSetupPyResult,
            RunSetupPyRequest(exported_target, interpreter_constraints, chroot, commands),
        )
        dist_snapshot = await Get(Snapshot, Digest, setup_py_result.output)
        # One artifact per produced dist file.
        return BuiltPackage(
            setup_py_result.output,
            tuple(BuiltPackageArtifact(path) for path in dist_snapshot.files),
        )
    else:
        # No commands: emit the chroot under a "<name>-<version>/" directory.
        dirname = f"{chroot.setup_kwargs.name}-{chroot.setup_kwargs.version}"
        rel_chroot = await Get(Digest, AddPrefix(chroot.digest, dirname))
        return BuiltPackage(rel_chroot, (BuiltPackageArtifact(dirname),))
async def pylint_lint(
    request: PylintRequest, pylint: Pylint, python_setup: PythonSetup
) -> LintResults:
    """Partition the requested field sets by interpreter constraints and run Pylint on each."""
    if pylint.skip:
        return LintResults([], linter_name="Pylint")

    plugin_target_addresses = await Get(Addresses, UnparsedAddressInputs, pylint.source_plugins)
    plugin_targets_request = Get(
        TransitiveTargets, TransitiveTargetsRequest(plugin_target_addresses)
    )
    linted_targets_request = Get(
        Targets, Addresses(field_set.address for field_set in request.field_sets)
    )
    plugin_targets, linted_targets = await MultiGet(plugin_targets_request, linted_targets_request)

    # Constraints fields from plugin targets participate in every partition's constraints.
    plugin_targets_compatibility_fields = tuple(
        plugin_tgt[InterpreterConstraintsField]
        for plugin_tgt in plugin_targets.closure
        if plugin_tgt.has_field(InterpreterConstraintsField)
    )

    # Pylint needs direct dependencies in the chroot to ensure that imports are valid. However, it
    # doesn't lint those direct dependencies nor does it care about transitive dependencies.
    per_target_dependencies = await MultiGet(
        Get(Targets, DependenciesRequest(field_set.dependencies))
        for field_set in request.field_sets
    )

    # We batch targets by their interpreter constraints to ensure, for example, that all Python 2
    # targets run together and all Python 3 targets run together.
    # Note that Pylint uses the AST of the interpreter that runs it. So, we include any plugin
    # targets in this interpreter constraints calculation.
    interpreter_constraints_to_target_setup = defaultdict(set)
    for field_set, tgt, dependencies in zip(
        request.field_sets, linted_targets, per_target_dependencies
    ):
        target_setup = PylintTargetSetup(field_set, Targets([tgt, *dependencies]))
        interpreter_constraints = PexInterpreterConstraints.create_from_compatibility_fields(
            (
                *(
                    tgt[InterpreterConstraintsField]
                    for tgt in [tgt, *dependencies]
                    if tgt.has_field(InterpreterConstraintsField)
                ),
                *plugin_targets_compatibility_fields,
            ),
            python_setup,
        )
        interpreter_constraints_to_target_setup[interpreter_constraints].add(target_setup)

    # One partition per distinct constraint set; sorting keeps the partitioning stable.
    partitions = (
        PylintPartition(
            tuple(sorted(target_setups, key=lambda tgt_setup: tgt_setup.field_set.address)),
            interpreter_constraints,
            Targets(plugin_targets.closure),
        )
        for interpreter_constraints, target_setups in sorted(
            interpreter_constraints_to_target_setup.items()
        )
    )
    partitioned_results = await MultiGet(
        Get(LintResult, PylintPartition, partition) for partition in partitions
    )
    return LintResults(partitioned_results, linter_name="Pylint")
def test_interpreter_constraints(rule_runner: RuleRunner) -> None:
    """The requested interpreter constraints should appear in PEX-INFO."""
    constraints = PexInterpreterConstraints(["CPython>=2.7,<3", "CPython>=3.6"])
    pex_info = create_pex_and_get_pex_info(
        rule_runner, interpreter_constraints=constraints, internal_only=False
    )
    expected = {str(constraint) for constraint in constraints}
    assert set(pex_info["interpreter_constraints"]) == expected
async def setup_isort(setup_request: SetupRequest, isort: Isort) -> Setup:
    """Assemble the Process that runs isort over the request's source files."""
    isort_pex_request = Get(
        Pex,
        PexRequest(
            output_filename="isort.pex",
            internal_only=True,
            requirements=PexRequirements(isort.all_requirements),
            interpreter_constraints=PexInterpreterConstraints(isort.interpreter_constraints),
            entry_point=isort.entry_point,
        ),
    )
    # Any configured `--isort-config` files must all exist (error otherwise).
    config_digest_request = Get(
        Digest,
        PathGlobs(
            globs=isort.config,
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            conjunction=GlobExpansionConjunction.all_match,
            description_of_origin="the option `--isort-config`",
        ),
    )
    source_files_request = Get(
        SourceFiles,
        SourceFilesRequest(field_set.sources for field_set in setup_request.request.field_sets),
    )
    source_files, isort_pex, config_digest = await MultiGet(
        source_files_request, isort_pex_request, config_digest_request
    )
    # If a previous formatter already ran, continue from its output rather than the
    # original sources, so that formatters compose.
    source_files_snapshot = (
        source_files.snapshot
        if setup_request.request.prior_formatter_result is None
        else setup_request.request.prior_formatter_result
    )
    input_digest = await Get(
        Digest,
        MergeDigests((source_files_snapshot.digest, isort_pex.digest, config_digest)),
    )
    process = await Get(
        Process,
        PexProcess(
            isort_pex,
            argv=generate_args(
                source_files=source_files, isort=isort, check_only=setup_request.check_only
            ),
            input_digest=input_digest,
            output_files=source_files_snapshot.files,
            description=f"Run isort on {pluralize(len(setup_request.request.field_sets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    return Setup(process, original_digest=source_files_snapshot.digest)
def assert_merged(*, inp: List[List[str]], expected: List[str]) -> None:
    """Assert that merging the constraint sets `inp` yields exactly `expected`."""
    merged = PexInterpreterConstraints.merge_constraint_sets(inp)
    result = sorted(str(req) for req in merged)
    # Requirement.parse() sorts specs differently than we'd like, so normalize each
    # expected string through Requirement before comparing.
    normalized_expected = sorted(str(Requirement.parse(v)) for v in expected)
    assert result == normalized_expected
async def setup_isort(setup_request: SetupRequest, isort: Isort) -> Setup:
    """Assemble the Process that runs isort over the request's source files."""
    isort_pex_get = Get(
        VenvPex,
        PexRequest(
            output_filename="isort.pex",
            internal_only=True,
            requirements=PexRequirements(isort.all_requirements),
            interpreter_constraints=PexInterpreterConstraints(isort.interpreter_constraints),
            main=isort.main,
        ),
    )
    source_files_get = Get(
        SourceFiles,
        SourceFilesRequest(field_set.sources for field_set in setup_request.request.field_sets),
    )
    source_files, isort_pex = await MultiGet(source_files_get, isort_pex_get)
    # If a previous formatter already ran, continue from its output so formatters compose.
    source_files_snapshot = (
        source_files.snapshot
        if setup_request.request.prior_formatter_result is None
        else setup_request.request.prior_formatter_result
    )
    # Config discovery is driven by the directories the sources live in.
    config_files = await Get(
        ConfigFiles, ConfigFilesRequest, isort.config_request(source_files_snapshot.dirs)
    )

    # Isort 5+ changes how config files are handled. Determine which semantics we should use.
    is_isort5 = False
    if isort.config:
        isort_info = await Get(PexResolveInfo, VenvPex, isort_pex)
        is_isort5 = any(
            dist_info.project_name == "isort" and dist_info.version.major >= 5
            for dist_info in isort_info
        )

    input_digest = await Get(
        Digest, MergeDigests((source_files_snapshot.digest, config_files.snapshot.digest))
    )
    process = await Get(
        Process,
        VenvPexProcess(
            isort_pex,
            argv=generate_argv(
                source_files, isort, is_isort5=is_isort5, check_only=setup_request.check_only
            ),
            input_digest=input_digest,
            output_files=source_files_snapshot.files,
            description=f"Run isort on {pluralize(len(setup_request.request.field_sets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    return Setup(process, original_digest=source_files_snapshot.digest)
def __init__(
    self,
    scheduler: BootstrapScheduler,
    interpreter_constraints: Optional[PexInterpreterConstraints] = None,
) -> None:
    """Capture the scheduler and interpreter constraints.

    When no constraints are given, pin to the exact Python version currently
    running (e.g. ``==3.8.5``).
    """
    self._scheduler = scheduler
    if interpreter_constraints is None:
        current_version = ".".join(str(part) for part in sys.version_info[:3])
        interpreter_constraints = PexInterpreterConstraints([f"=={current_version}"])
    self._interpreter_constraints = interpreter_constraints
async def setup_coverage(coverage: CoverageSubsystem) -> CoverageSetup:
    """Build the PEX used to run coverage.py."""
    coverage_pex = await Get(
        Pex,
        PexRequest(
            output_filename="coverage.pex",
            internal_only=True,
            entry_point=coverage.entry_point,
            requirements=PexRequirements(coverage.all_requirements),
            interpreter_constraints=PexInterpreterConstraints(coverage.interpreter_constraints),
        ),
    )
    return CoverageSetup(coverage_pex)
async def setup_lambdex(lambdex: Lambdex) -> LambdexSetup:
    """Build the PEX used to run lambdex."""
    lambdex_pex = await Get(
        Pex,
        PexRequest(
            output_filename="lambdex.pex",
            internal_only=True,
            entry_point=lambdex.entry_point,
            requirements=PexRequirements(lambdex.all_requirements),
            interpreter_constraints=PexInterpreterConstraints(lambdex.interpreter_constraints),
        ),
    )
    return LambdexSetup(requirements_pex=lambdex_pex)
def create_pex_and_get_all_data(
    rule_runner: RuleRunner,
    *,
    pex_type: type[Pex | VenvPex] = Pex,
    requirements: PexRequirements = PexRequirements(),
    main: MainSpecification | None = None,
    interpreter_constraints: PexInterpreterConstraints = PexInterpreterConstraints(),
    platforms: PexPlatforms = PexPlatforms(),
    sources: Digest | None = None,
    additional_inputs: Digest | None = None,
    additional_pants_args: Tuple[str, ...] = (),
    additional_pex_args: Tuple[str, ...] = (),
    env: Mapping[str, str] | None = None,
    internal_only: bool = True,
) -> Dict:
    """Build a PEX via the rule graph; return it plus its PEX-INFO and file list."""
    request = PexRequest(
        output_filename="test.pex",
        internal_only=internal_only,
        requirements=requirements,
        interpreter_constraints=interpreter_constraints,
        platforms=platforms,
        main=main,
        sources=sources,
        additional_inputs=additional_inputs,
        additional_args=additional_pex_args,
    )
    rule_runner.set_options(
        ["--backend-packages=pants.backend.python", *additional_pants_args],
        env=env,
        env_inherit={"PATH", "PYENV_ROOT", "HOME"},
    )
    pex = rule_runner.request(pex_type, [request])
    # Both Pex and VenvPex expose `.digest`; anything else is unexpected.
    if not isinstance(pex, (Pex, VenvPex)):
        raise AssertionError(f"Expected a Pex or a VenvPex but got a {type(pex)}.")
    rule_runner.scheduler.write_digest(pex.digest)
    pex_path = os.path.join(rule_runner.build_root, "test.pex")
    with zipfile.ZipFile(pex_path, "r") as zipfp:
        with zipfp.open("PEX-INFO", "r") as pex_info:
            pex_info_content = pex_info.readline().decode()
        pex_list = zipfp.namelist()
    return {
        "pex": pex,
        "local_path": pex_path,
        "info": json.loads(pex_info_content),
        "files": pex_list,
    }
async def setup_setuptools(setuptools: Setuptools) -> SetuptoolsSetup:
    """Build the PEX containing setuptools.

    Note that this pex has no entrypoint. We use it to run our generated
    setup.py, which in turn imports from and invokes setuptools.
    """
    setuptools_pex = await Get(
        Pex,
        PexRequest(
            output_filename="setuptools.pex",
            internal_only=True,
            requirements=PexRequirements(setuptools.all_requirements),
            interpreter_constraints=PexInterpreterConstraints(setuptools.interpreter_constraints),
        ),
    )
    return SetuptoolsSetup(requirements_pex=setuptools_pex)
def test_pex_environment(rule_runner: RuleRunner, pex_type: type[Pex | VenvPex]) -> None:
    """Env vars configured via `--subprocess-environment-env-vars` must reach the PEX process."""
    # A tiny program that echoes the two env vars under test.
    sources = rule_runner.request(
        Digest,
        [
            CreateDigest(
                (
                    FileContent(
                        path="main.py",
                        content=textwrap.dedent(
                            """
                            from os import environ
                            print(f"LANG={environ.get('LANG')}")
                            print(f"ftp_proxy={environ.get('ftp_proxy')}")
                            """
                        ).encode(),
                    ),
                )
            ),
        ],
    )
    pex_output = create_pex_and_get_all_data(
        rule_runner,
        pex_type=pex_type,
        main=EntryPoint("main"),
        sources=sources,
        additional_pants_args=(
            "--subprocess-environment-env-vars=LANG",  # Value should come from environment.
            "--subprocess-environment-env-vars=ftp_proxy=dummyproxy",
        ),
        interpreter_constraints=PexInterpreterConstraints(["CPython>=3.6"]),
        env={"LANG": "es_PY.UTF-8"},
    )

    pex = pex_output["pex"]
    # Pick the matching process type for whichever PEX flavor was built.
    pex_process_type = PexProcess if isinstance(pex, Pex) else VenvPexProcess
    process = rule_runner.request(
        Process,
        [
            pex_process_type(
                pex,
                description="Run the pex and check its reported environment",
            ),
        ],
    )
    result = rule_runner.request(ProcessResult, [process])
    assert b"LANG=es_PY.UTF-8" in result.stdout
    assert b"ftp_proxy=dummyproxy" in result.stdout
async def infer_python_dependencies_via_imports(
    request: InferPythonImportDependencies,
    python_infer_subsystem: PythonInferSubsystem,
    python_setup: PythonSetup,
) -> InferredDependencies:
    """Infer a target's dependencies by parsing imports and resolving their owners.

    Unambiguous owners are included directly; ambiguous ones are warned about and
    only included when `disambiguated_via_ignores` resolves them to a single owner.
    """
    if not python_infer_subsystem.imports:
        return InferredDependencies([], sibling_dependencies_inferrable=False)

    wrapped_tgt = await Get(WrappedTarget, Address, request.sources_field.address)
    explicitly_provided_deps, detected_imports = await MultiGet(
        Get(ExplicitlyProvidedDependencies, DependenciesRequest(wrapped_tgt.target[Dependencies])),
        Get(
            ParsedPythonImports,
            ParsePythonImportsRequest(
                request.sources_field,
                PexInterpreterConstraints.create_from_targets([wrapped_tgt.target], python_setup),
            ),
        ),
    )

    relevant_imports = tuple(
        imp
        for imp in (
            detected_imports.all_imports
            if python_infer_subsystem.string_imports
            else detected_imports.explicit_imports
        )
        if imp not in combined_stdlib
    )
    owners_per_import = await MultiGet(
        Get(PythonModuleOwners, PythonModule(imported_module))
        for imported_module in relevant_imports
    )

    merged_result: set[Address] = set()
    # The address belongs to the target being inferred, not to any particular import,
    # so compute it once instead of re-reading it on every loop iteration.
    address = wrapped_tgt.target.address
    for owners, imp in zip(owners_per_import, relevant_imports):
        merged_result.update(owners.unambiguous)
        explicitly_provided_deps.maybe_warn_of_ambiguous_dependency_inference(
            owners.ambiguous,
            address,
            import_reference="module",
            context=f"The target {address} imports `{imp}`",
        )
        maybe_disambiguated = explicitly_provided_deps.disambiguated_via_ignores(owners.ambiguous)
        if maybe_disambiguated:
            merged_result.add(maybe_disambiguated)
    return InferredDependencies(sorted(merged_result), sibling_dependencies_inferrable=True)
async def setup_isort(setup_request: SetupRequest, isort: Isort) -> Setup:
    """Assemble the Process that runs isort over the request's source files."""
    isort_pex_get = Get(
        VenvPex,
        PexRequest(
            output_filename="isort.pex",
            internal_only=True,
            requirements=PexRequirements(isort.all_requirements),
            interpreter_constraints=PexInterpreterConstraints(isort.interpreter_constraints),
            main=isort.main,
        ),
    )
    config_files_get = Get(ConfigFiles, ConfigFilesRequest, isort.config_request)
    source_files_get = Get(
        SourceFiles,
        SourceFilesRequest(field_set.sources for field_set in setup_request.request.field_sets),
    )
    source_files, isort_pex, config_files = await MultiGet(
        source_files_get, isort_pex_get, config_files_get
    )
    # If a previous formatter already ran, continue from its output so formatters compose.
    source_files_snapshot = (
        source_files.snapshot
        if setup_request.request.prior_formatter_result is None
        else setup_request.request.prior_formatter_result
    )
    input_digest = await Get(
        Digest, MergeDigests((source_files_snapshot.digest, config_files.snapshot.digest))
    )
    process = await Get(
        Process,
        VenvPexProcess(
            isort_pex,
            argv=generate_args(
                source_files=source_files, isort=isort, check_only=setup_request.check_only
            ),
            input_digest=input_digest,
            output_files=source_files_snapshot.files,
            description=f"Run isort on {pluralize(len(setup_request.request.field_sets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    return Setup(process, original_digest=source_files_snapshot.digest)
def get_distributions(
    rule_runner: RuleRunner, *, requirements: Iterable[str], constraints: Iterable[str]
) -> ExtractedPexDistributions:
    """Resolve `requirements` pinned by `constraints` and extract the resulting dists."""
    # NB: The constraints are important for determinism.
    rule_runner.set_options(
        [
            "--backend-packages=pants.backend.python",
            "--python-setup-requirement-constraints=constraints.txt",
        ]
    )
    rule_runner.create_file("constraints.txt", "\n".join(constraints))
    pex_request = PexRequest(
        output_filename="test.pex",
        requirements=PexRequirements(requirements),
        interpreter_constraints=PexInterpreterConstraints([">=3.6"]),
        internal_only=True,
    )
    built_pex = rule_runner.request(Pex, [pex_request])
    return rule_runner.request(ExtractedPexDistributions, [built_pex])
async def setup_docformatter(setup_request: SetupRequest, docformatter: Docformatter) -> Setup:
    """Assemble the Process that runs docformatter over the request's source files."""
    docformatter_pex_request = Get(
        VenvPex,
        PexRequest(
            output_filename="docformatter.pex",
            internal_only=True,
            requirements=PexRequirements(docformatter.all_requirements),
            interpreter_constraints=PexInterpreterConstraints(docformatter.interpreter_constraints),
            main=docformatter.main,
        ),
    )
    source_files_request = Get(
        SourceFiles,
        SourceFilesRequest(field_set.sources for field_set in setup_request.request.field_sets),
    )
    source_files, docformatter_pex = await MultiGet(source_files_request, docformatter_pex_request)
    # If a previous formatter already ran, continue from its output so formatters compose.
    source_files_snapshot = (
        source_files.snapshot
        if setup_request.request.prior_formatter_result is None
        else setup_request.request.prior_formatter_result
    )
    process = await Get(
        Process,
        VenvPexProcess(
            docformatter_pex,
            argv=generate_args(
                source_files=source_files,
                docformatter=docformatter,
                check_only=setup_request.check_only,
            ),
            input_digest=source_files_snapshot.digest,
            output_files=source_files_snapshot.files,
            description=(
                f"Run Docformatter on {pluralize(len(setup_request.request.field_sets), 'file')}."
            ),
            level=LogLevel.DEBUG,
        ),
    )
    return Setup(process, original_digest=source_files_snapshot.digest)
async def bandit_lint(
    request: BanditRequest, bandit: Bandit, python_setup: PythonSetup
) -> LintResults:
    """Partition by interpreter constraints, then run Bandit on each partition."""
    if bandit.skip:
        return LintResults([], linter_name="Bandit")

    # NB: Bandit output depends upon which Python interpreter version it's run with
    # (https://github.com/PyCQA/bandit#under-which-version-of-python-should-i-install-bandit).
    # We batch targets by their constraints to ensure, for example, that all Python 2
    # targets run together and all Python 3 targets run together.
    constraints_to_field_sets = PexInterpreterConstraints.group_field_sets_by_constraints(
        request.field_sets, python_setup
    )
    partitioned_results = await MultiGet(
        Get(LintResult, BanditPartition(field_sets, constraints))
        for constraints, field_sets in constraints_to_field_sets.items()
    )
    return LintResults(partitioned_results, linter_name="Bandit")
async def flake8_lint(
    request: Flake8Request, flake8: Flake8, python_setup: PythonSetup
) -> LintResults:
    """Partition by interpreter constraints, then run Flake8 on each partition."""
    if flake8.skip:
        return LintResults([], linter_name="Flake8")

    # NB: Flake8 output depends upon which Python interpreter version it's run with
    # (http://flake8.pycqa.org/en/latest/user/invocation.html). We batch targets by their
    # constraints to ensure, for example, that all Python 2 targets run together and all
    # Python 3 targets run together.
    constraints_to_field_sets = PexInterpreterConstraints.group_field_sets_by_constraints(
        request.field_sets, python_setup
    )
    partitioned_results = await MultiGet(
        Get(LintResult, Flake8Partition(field_sets, constraints))
        for constraints, field_sets in constraints_to_field_sets.items()
    )
    return LintResults(partitioned_results, linter_name="Flake8")
def test_platforms(rule_runner: RuleRunner) -> None:
    """A foreign platform should pick platform-specific wheels and drop constraints."""
    # We use Python 2.7, rather than Python 3, to ensure that the specified platform is
    # actually used.
    pex_output = create_pex_and_get_all_data(
        rule_runner,
        requirements=PexRequirements(["cryptography==2.9"]),
        platforms=PexPlatforms(["linux-x86_64-cp-27-cp27mu"]),
        interpreter_constraints=PexInterpreterConstraints(["CPython>=2.7,<3", "CPython>=3.6"]),
        internal_only=False,  # Internal only PEXes do not support (foreign) platforms.
    )
    files = pex_output["files"]
    assert any("cryptography-2.9-cp27-cp27mu-manylinux2010_x86_64.whl" in fp for fp in files)
    assert not any("cryptography-2.9-cp27-cp27m-" in fp for fp in files)
    assert not any("cryptography-2.9-cp35-abi3" in fp for fp in files)
    # NB: Platforms override interpreter constraints.
    assert pex_output["info"]["interpreter_constraints"] == []
def create_pex_and_get_all_data(
    rule_runner: RuleRunner,
    *,
    requirements=PexRequirements(),
    entry_point=None,
    interpreter_constraints=PexInterpreterConstraints(),
    platforms=PexPlatforms(),
    sources: Optional[Digest] = None,
    additional_inputs: Optional[Digest] = None,
    additional_pants_args: Tuple[str, ...] = (),
    additional_pex_args: Tuple[str, ...] = (),
    env: Optional[Mapping[str, str]] = None,
    internal_only: bool = True,
) -> Dict:
    """Build a PEX via the rule graph; return it plus its PEX-INFO and file list."""
    pex_request = PexRequest(
        output_filename="test.pex",
        internal_only=internal_only,
        requirements=requirements,
        interpreter_constraints=interpreter_constraints,
        platforms=platforms,
        entry_point=entry_point,
        sources=sources,
        additional_inputs=additional_inputs,
        additional_args=additional_pex_args,
    )
    rule_runner.set_options(
        ["--backend-packages=pants.backend.python", *additional_pants_args], env=env
    )
    pex = rule_runner.request(Pex, [pex_request])
    rule_runner.scheduler.write_digest(pex.digest)
    pex_path = os.path.join(rule_runner.build_root, "test.pex")
    with zipfile.ZipFile(pex_path, "r") as zipfp:
        with zipfp.open("PEX-INFO", "r") as pex_info_file:
            pex_info_content = pex_info_file.readline().decode()
        pex_list = zipfp.namelist()
    return {
        "pex": pex,
        "local_path": pex_path,
        "info": json.loads(pex_info_content),
        "files": pex_list,
    }
def assert_imports_parsed(
    rule_runner: RuleRunner,
    content: Optional[str],
    *,
    expected_explicit: List[str],
    expected_string: List[str],
    filename: str = "project/foo.py",
    constraints: str = ">=3.6",
):
    """Parse imports and assert the explicit and string import sets separately."""
    if content:
        rule_runner.create_file(filename, content)
    rule_runner.set_options([], env_inherit={"PATH", "PYENV_ROOT", "HOME"})
    rule_runner.add_to_build_file("project", "python_library(sources=['**/*.py'])")
    target = rule_runner.get_target(Address("project"))
    parsed = rule_runner.request(
        ParsedPythonImports,
        [
            ParsePythonImportsRequest(
                target[PythonSources], PexInterpreterConstraints([constraints])
            )
        ],
    )
    assert set(parsed.explicit_imports) == set(expected_explicit)
    assert set(parsed.string_imports) == set(expected_string)
def test_merge_interpreter_constraints() -> None:
    """Exhaustively check merge_constraint_sets' AND/OR algebra over constraint sets."""

    def assert_merged(*, inp: List[List[str]], expected: List[str]) -> None:
        result = sorted(str(req) for req in PexInterpreterConstraints.merge_constraint_sets(inp))
        # Requirement.parse() sorts specs differently than we'd like, so we convert each str to a
        # Requirement.
        normalized_expected = sorted(str(Requirement.parse(v)) for v in expected)
        assert result == normalized_expected

    # Multiple constraint sets get merged so that they are ANDed.
    # A & B => A & B
    assert_merged(inp=[["CPython==2.7.*"], ["CPython==3.6.*"]], expected=["CPython==2.7.*,==3.6.*"])

    # Multiple constraints within a single constraint set are kept separate so that they are ORed.
    # A | B => A | B
    assert_merged(
        inp=[["CPython==2.7.*", "CPython==3.6.*"]], expected=["CPython==2.7.*", "CPython==3.6.*"]
    )

    # Input constraints already were ANDed.
    # A => A
    assert_merged(inp=[["CPython>=2.7,<3"]], expected=["CPython>=2.7,<3"])

    # Both AND and OR.
    # (A | B) & C => (A & B) | (B & C)
    assert_merged(
        inp=[["CPython>=2.7,<3", "CPython>=3.5"], ["CPython==3.6.*"]],
        expected=["CPython>=2.7,<3,==3.6.*", "CPython>=3.5,==3.6.*"],
    )
    # A & B & (C | D) => (A & B & C) | (A & B & D)
    assert_merged(
        inp=[["CPython==2.7.*"], ["CPython==3.6.*"], ["CPython==3.7.*", "CPython==3.8.*"]],
        expected=["CPython==2.7.*,==3.6.*,==3.7.*", "CPython==2.7.*,==3.6.*,==3.8.*"],
    )
    # (A | B) & (C | D) => (A & C) | (A & D) | (B & C) | (B & D)
    assert_merged(
        inp=[["CPython>=2.7,<3", "CPython>=3.5"], ["CPython==3.6.*", "CPython==3.7.*"]],
        expected=[
            "CPython>=2.7,<3,==3.6.*",
            "CPython>=2.7,<3,==3.7.*",
            "CPython>=3.5,==3.6.*",
            "CPython>=3.5,==3.7.*",
        ],
    )
    # A & (B | C | D) & (E | F) & G =>
    # (A & B & E & G) | (A & B & F & G) | (A & C & E & G) | (A & C & F & G) | (A & D & E & G) | (A & D & F & G)
    assert_merged(
        inp=[
            ["CPython==3.6.5"],
            ["CPython==2.7.14", "CPython==2.7.15", "CPython==2.7.16"],
            ["CPython>=3.6", "CPython==3.5.10"],
            ["CPython>3.8"],
        ],
        expected=[
            "CPython==2.7.14,==3.5.10,==3.6.5,>3.8",
            "CPython==2.7.14,>=3.6,==3.6.5,>3.8",
            "CPython==2.7.15,==3.5.10,==3.6.5,>3.8",
            "CPython==2.7.15,>=3.6,==3.6.5,>3.8",
            "CPython==2.7.16,==3.5.10,==3.6.5,>3.8",
            "CPython==2.7.16,>=3.6,==3.6.5,>3.8",
        ],
    )

    # Deduplicate between constraint_sets
    # (A | B) & (A | B) => A | B. Naively, this should actually resolve as follows:
    #   (A | B) & (A | B) => (A & A) | (A & B) | (B & B) => A | (A & B) | B.
    # But, we first deduplicate each constraint_set. (A | B) & (A | B) can be rewritten as
    # X & X => X.
    assert_merged(
        inp=[["CPython==2.7.*", "CPython==3.6.*"], ["CPython==2.7.*", "CPython==3.6.*"]],
        expected=["CPython==2.7.*", "CPython==3.6.*"],
    )
    # (A | B) & C & (A | B) => (A & C) | (B & C). Alternatively, this can be rewritten as
    # X & Y & X => X & Y.
    assert_merged(
        inp=[
            ["CPython>=2.7,<3", "CPython>=3.5"],
            ["CPython==3.6.*"],
            ["CPython>=3.5", "CPython>=2.7,<3"],
        ],
        expected=["CPython>=2.7,<3,==3.6.*", "CPython>=3.5,==3.6.*"],
    )

    # No specifiers
    assert_merged(inp=[["CPython"]], expected=["CPython"])
    assert_merged(inp=[["CPython"], ["CPython==3.7.*"]], expected=["CPython==3.7.*"])

    # No interpreter is shorthand for CPython, which is how Pex behaves
    assert_merged(inp=[[">=3.5"], ["CPython==3.7.*"]], expected=["CPython>=3.5,==3.7.*"])

    # Different Python interpreters, which are guaranteed to fail when ANDed but are safe when ORed.
    with pytest.raises(ValueError):
        PexInterpreterConstraints.merge_constraint_sets([["CPython==3.7.*"], ["PyPy==43.0"]])
    assert_merged(inp=[["CPython==3.7.*", "PyPy==43.0"]], expected=["CPython==3.7.*", "PyPy==43.0"])

    # Ensure we can handle empty input.
    assert_merged(inp=[], expected=[])