async def run_pyupgrade(request: PyUpgradeRequest, pyupgrade: PyUpgrade) -> PyUpgradeResult:
    """Run pyupgrade over the requested sources and capture the (possibly modified) files."""
    pyupgrade_pex_get = Get(VenvPex, PexRequest, pyupgrade.to_pex_request())
    source_files_get = Get(
        SourceFiles,
        SourceFilesRequest(field_set.source for field_set in request.field_sets),
    )
    source_files, pyupgrade_pex = await MultiGet(source_files_get, pyupgrade_pex_get)
    # If a prior formatter already ran, operate on its output so formatters compose.
    source_files_snapshot = (
        source_files.snapshot
        if request.prior_formatter_result is None
        else request.prior_formatter_result
    )
    result = await Get(
        FallibleProcessResult,
        VenvPexProcess(
            pyupgrade_pex,
            # Fix: pass the snapshot's files (the prior formatter's output), keeping argv
            # consistent with input_digest/output_files, which already use the snapshot.
            argv=(*pyupgrade.args, *source_files_snapshot.files),
            input_digest=source_files_snapshot.digest,
            output_files=source_files_snapshot.files,
            description=f"Run pyupgrade on {pluralize(len(request.field_sets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    return PyUpgradeResult(result, original_snapshot=source_files_snapshot)
async def yapf_fmt(request: YapfRequest, yapf: Yapf) -> FmtResult:
    """Format the request's snapshot with yapf, honoring any discovered config files."""
    if yapf.skip:
        return FmtResult.skip(formatter_name=request.name)
    pex_get = Get(VenvPex, PexRequest, yapf.to_pex_request())
    config_get = Get(
        ConfigFiles, ConfigFilesRequest, yapf.config_request(request.snapshot.dirs)
    )
    yapf_pex, config_files = await MultiGet(pex_get, config_get)
    input_digest = await Get(
        Digest, MergeDigests((request.snapshot.digest, config_files.snapshot.digest))
    )
    # Only pass --style when an explicit config file was set on the subsystem.
    style_args = ("--style", yapf.config) if yapf.config else ()
    result = await Get(
        ProcessResult,
        VenvPexProcess(
            yapf_pex,
            argv=(*yapf.args, "--in-place", *style_args, *request.snapshot.files),
            input_digest=input_digest,
            output_files=request.snapshot.files,
            description=f"Run yapf on {pluralize(len(request.field_sets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    output_snapshot = await Get(Snapshot, Digest, result.output_digest)
    return FmtResult.create(request, result, output_snapshot)
async def setup_autoflake(setup_request: SetupRequest, autoflake: Autoflake) -> Setup:
    """Build the Process that runs Autoflake over the request's sources."""
    pex_get = Get(VenvPex, PexRequest, autoflake.to_pex_request())
    sources_get = Get(
        SourceFiles,
        SourceFilesRequest(
            field_set.source for field_set in setup_request.request.field_sets
        ),
    )
    source_files, autoflake_pex = await MultiGet(sources_get, pex_get)
    # Chain off of any prior formatter's output so formatters compose.
    prior = setup_request.request.prior_formatter_result
    snapshot = source_files.snapshot if prior is None else prior
    process = await Get(
        Process,
        VenvPexProcess(
            autoflake_pex,
            argv=generate_argv(source_files, autoflake, check_only=setup_request.check_only),
            input_digest=snapshot.digest,
            output_files=snapshot.files,
            description=f"Run Autoflake on {pluralize(len(setup_request.request.field_sets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    return Setup(process, original_snapshot=snapshot)
async def setup_black(setup_request: SetupRequest, black: Black, python_setup: PythonSetup) -> Setup:
    """Build the Process that runs Black, choosing interpreter constraints that can parse the code."""
    # Black requires 3.6+ but uses the typed-ast library to work with 2.7, 3.4, 3.5, 3.6, and 3.7.
    # However, typed-ast does not understand 3.8+, so instead we must run Black with Python 3.8+
    # when relevant. We only do this if <3.8 can't be used, as we don't want a loose requirement
    # like `>=3.6` to result in requiring Python 3.8, which would error if 3.8 is not installed on
    # the machine.
    all_interpreter_constraints = InterpreterConstraints.create_from_compatibility_fields(
        (field_set.interpreter_constraints for field_set in setup_request.request.field_sets),
        python_setup,
    )
    # Prefer the code's own constraints only when the user left the tool's constraints at
    # their default AND the code actually requires 3.8+.
    tool_interpreter_constraints = (
        all_interpreter_constraints
        if (
            black.options.is_default("interpreter_constraints")
            and all_interpreter_constraints.requires_python38_or_newer(
                python_setup.interpreter_universe
            )
        )
        else black.interpreter_constraints
    )
    black_pex_get = Get(
        VenvPex,
        PexRequest,
        black.to_pex_request(interpreter_constraints=tool_interpreter_constraints),
    )
    source_files_get = Get(
        SourceFiles,
        SourceFilesRequest(field_set.source for field_set in setup_request.request.field_sets),
    )
    source_files, black_pex = await MultiGet(source_files_get, black_pex_get)
    # If a prior formatter already ran, operate on its output rather than the raw sources.
    source_files_snapshot = (
        source_files.snapshot
        if setup_request.request.prior_formatter_result is None
        else setup_request.request.prior_formatter_result
    )
    config_files = await Get(
        ConfigFiles, ConfigFilesRequest, black.config_request(source_files_snapshot.dirs)
    )
    input_digest = await Get(
        Digest,
        MergeDigests((source_files_snapshot.digest, config_files.snapshot.digest)),
    )
    process = await Get(
        Process,
        VenvPexProcess(
            black_pex,
            argv=generate_argv(source_files, black, check_only=setup_request.check_only),
            input_digest=input_digest,
            output_files=source_files_snapshot.files,
            # Hint to the engine how much parallelism this process could use.
            concurrency_available=len(setup_request.request.field_sets),
            description=f"Run Black on {pluralize(len(setup_request.request.field_sets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    return Setup(process, original_snapshot=source_files_snapshot)
async def run_setup_py(req: RunSetupPyRequest, setuptools: Setuptools) -> RunSetupPyResult:
    """Run a setup.py command on a single exported target."""
    # This pex has no entrypoint: we use it to run our generated setup.py, which
    # in turn imports from and invokes setuptools.
    setuptools_pex = await Get(
        VenvPex,
        PexRequest(
            output_filename="setuptools.pex",
            internal_only=True,
            requirements=PexRequirements(setuptools.all_requirements),
            interpreter_constraints=req.interpreter_constraints,
        ),
    )
    # The setuptools dist dir, created by it under the chroot (not to be confused with
    # pants's own dist dir, at the buildroot).
    dist_dir = "dist/"
    result = await Get(
        ProcessResult,
        VenvPexProcess(
            setuptools_pex,
            argv=("setup.py", *req.args),
            input_digest=req.chroot.digest,
            # setuptools commands that create dists write them to the distdir.
            # TODO: Could there be other useful files to capture?
            output_directories=(dist_dir,),
            description=f"Run setuptools for {req.exported_target.target.address}",
            level=LogLevel.DEBUG,
        ),
    )
    stripped_output = await Get(Digest, RemovePrefix(result.output_digest, dist_dir))
    return RunSetupPyResult(stripped_output)
async def resolve_plugins(
    request: PluginsRequest, global_options: GlobalOptions
) -> ResolvedPluginDistributions:
    """This rule resolves plugins using a VenvPex, and exposes the absolute paths of their dists.

    NB: This relies on the fact that PEX constructs venvs in a stable location (within the
    `named_caches` directory), but consequently needs to disable the process cache: see the
    ProcessCacheScope reference in the body.
    """
    requirements = PexRequirements(
        req_strings=sorted(global_options.plugins),
        constraints_strings=(str(constraint) for constraint in request.constraints),
    )
    if not requirements:
        return ResolvedPluginDistributions()

    # When no interpreter constraints were given, pin to the currently-running interpreter.
    python: PythonExecutable | None = None
    if not request.interpreter_constraints:
        python = cast(
            PythonExecutable,
            PythonExecutable.fingerprinted(
                sys.executable,
                ".".join(map(str, sys.version_info[:3])).encode("utf8"),
            ),
        )

    plugins_pex = await Get(
        VenvPex,
        PexRequest(
            output_filename="pants_plugins.pex",
            internal_only=True,
            python=python,
            requirements=requirements,
            interpreter_constraints=request.interpreter_constraints,
            description=f"Resolving plugins: {', '.join(requirements.req_strings)}",
        ),
    )

    # NB: We run this Process per-restart because it (intentionally) leaks named cache
    # paths in a way that invalidates the Process-cache. See the method doc.
    if global_options.plugins_force_resolve:
        cache_scope = ProcessCacheScope.PER_SESSION
    else:
        cache_scope = ProcessCacheScope.PER_RESTART_SUCCESSFUL

    plugins_process_result = await Get(
        ProcessResult,
        VenvPexProcess(
            plugins_pex,
            argv=(
                "-c",
                "import os, site; print(os.linesep.join(site.getsitepackages()))",
            ),
            description="Extracting plugin locations",
            level=LogLevel.DEBUG,
            cache_scope=cache_scope,
        ),
    )
    site_packages = plugins_process_result.stdout.decode().strip().split("\n")
    return ResolvedPluginDistributions(site_packages)
async def autoflake_fmt(request: AutoflakeRequest, autoflake: Autoflake) -> FmtResult:
    """Rewrite the request's files in place with Autoflake."""
    if autoflake.skip:
        return FmtResult.skip(formatter_name=request.name)
    autoflake_pex = await Get(VenvPex, PexRequest, autoflake.to_pex_request())
    args = (
        "--in-place",
        "--remove-all-unused-imports",
        *autoflake.args,
        *request.snapshot.files,
    )
    result = await Get(
        ProcessResult,
        VenvPexProcess(
            autoflake_pex,
            argv=args,
            input_digest=request.snapshot.digest,
            output_files=request.snapshot.files,
            description=f"Run Autoflake on {pluralize(len(request.field_sets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    output_snapshot = await Get(Snapshot, Digest, result.output_digest)
    return FmtResult.create(request, result, output_snapshot, strip_chroot_path=True)
async def flake8_lint_partition(
    partition: Flake8Partition, flake8: Flake8, lint_subsystem: LintSubsystem
) -> LintResult:
    """Run Flake8 over one interpreter-constraint partition of field sets."""
    pex_get = Get(
        VenvPex,
        PexRequest(
            output_filename="flake8.pex",
            internal_only=True,
            requirements=PexRequirements(flake8.all_requirements),
            interpreter_constraints=partition.interpreter_constraints,
            main=flake8.main,
        ),
    )
    config_get = Get(ConfigFiles, ConfigFilesRequest, flake8.config_request)
    sources_get = Get(
        SourceFiles,
        SourceFilesRequest(field_set.sources for field_set in partition.field_sets),
    )
    flake8_pex, config_files, source_files = await MultiGet(
        pex_get, config_get, sources_get
    )
    input_digest = await Get(
        Digest,
        MergeDigests((source_files.snapshot.digest, config_files.snapshot.digest)),
    )
    # Only emit a report file when the user configured a reports dir.
    report_file_name = "flake8_report.txt" if lint_subsystem.reports_dir else None
    result = await Get(
        FallibleProcessResult,
        VenvPexProcess(
            flake8_pex,
            argv=generate_argv(source_files, flake8, report_file_name=report_file_name),
            input_digest=input_digest,
            output_files=(report_file_name,) if report_file_name else None,
            description=f"Run Flake8 on {pluralize(len(partition.field_sets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    report = None
    if report_file_name:
        # Pull just the report file out of the process's output digest.
        report_digest = await Get(
            Digest,
            DigestSubset(
                result.output_digest,
                PathGlobs(
                    [report_file_name],
                    glob_match_error_behavior=GlobMatchErrorBehavior.warn,
                    description_of_origin="Flake8 report file",
                ),
            ),
        )
        report = LintReport(report_file_name, report_digest)
    partition_description = str(
        sorted(str(c) for c in partition.interpreter_constraints)
    )
    return LintResult.from_fallible_process_result(
        result,
        partition_description=partition_description,
        report=report,
    )
def get_all_data(rule_runner: RuleRunner, pex: Pex | VenvPex) -> PexData:
    """Materialize *pex* into the rule_runner's build root and extract its PEX-INFO.

    Dumps PEX-INFO as JSON via the pex tools (`PEX_TOOLS=1 ... info` for a VenvPex, or
    `python -m pex.tools <pex> info` for a plain Pex), then inspects the materialized
    file to enumerate its contents, handling both zipapp and loose-directory layouts.
    """
    if isinstance(pex, VenvPex):
        digest = pex.digest
        sandbox_path = pex.pex_filename
        # A VenvPex bundles the pex tools; PEX_TOOLS=1 re-routes execution to them.
        process = rule_runner.request(
            Process,
            [
                VenvPexProcess(
                    pex,
                    argv=["info"],
                    extra_env=dict(PEX_TOOLS="1"),
                    description="Extract PEX-INFO.",
                ),
            ],
        )
    else:
        digest = pex.digest
        sandbox_path = pex.name
        # A plain Pex needs the Pex PEX itself to run `-m pex.tools <pex> info`.
        pex_pex = rule_runner.request(PexPEX, [])
        process = rule_runner.request(
            Process,
            [
                PexProcess(
                    Pex(digest=pex_pex.digest, name=pex_pex.exe, python=pex.python),
                    argv=["-m", "pex.tools", pex.name, "info"],
                    input_digest=pex.digest,
                    extra_env=dict(PEX_INTERPRETER="1"),
                    description="Extract PEX-INFO.",
                )
            ],
        )
    rule_runner.scheduler.write_digest(digest)
    # NOTE(review): assumes the pex under test was built with output_filename
    # "test.pex" — confirm against the callers of this helper.
    local_path = PurePath(rule_runner.build_root) / "test.pex"
    result = rule_runner.request(ProcessResult, [process])
    pex_info_content = result.stdout.decode()
    is_zipapp = zipfile.is_zipfile(local_path)
    if is_zipapp:
        with zipfile.ZipFile(local_path, "r") as zipfp:
            files = tuple(zipfp.namelist())
    else:
        # Loose (unzipped) layout: walk the directory tree and record paths
        # relative to the pex root.
        files = tuple(
            os.path.normpath(
                os.path.relpath(os.path.join(root, path), local_path))
            for root, dirs, files in os.walk(local_path)
            for path in dirs + files)
    return PexData(
        pex=pex,
        is_zipapp=is_zipapp,
        sandbox_path=PurePath(sandbox_path),
        local_path=local_path,
        info=json.loads(pex_info_content),
        files=files,
    )
async def setup_isort(setup_request: SetupRequest, isort: Isort) -> Setup:
    """Build the Process that runs isort, handling isort 4 vs. 5 config semantics."""
    pex_get = Get(
        VenvPex,
        PexRequest(
            output_filename="isort.pex",
            internal_only=True,
            requirements=PexRequirements(isort.all_requirements),
            interpreter_constraints=PexInterpreterConstraints(
                isort.interpreter_constraints),
            main=isort.main,
        ),
    )
    sources_get = Get(
        SourceFiles,
        SourceFilesRequest(
            field_set.sources for field_set in setup_request.request.field_sets
        ),
    )
    source_files, isort_pex = await MultiGet(sources_get, pex_get)
    # Chain off of any prior formatter's output so formatters compose.
    prior = setup_request.request.prior_formatter_result
    source_files_snapshot = source_files.snapshot if prior is None else prior
    config_files = await Get(
        ConfigFiles, ConfigFilesRequest, isort.config_request(source_files_snapshot.dirs)
    )
    # Isort 5+ changes how config files are handled. Determine which semantics we should use.
    is_isort5 = False
    if isort.config:
        isort_info = await Get(PexResolveInfo, VenvPex, isort_pex)
        is_isort5 = any(
            dist_info.project_name == "isort" and dist_info.version.major >= 5
            for dist_info in isort_info
        )
    input_digest = await Get(
        Digest,
        MergeDigests((source_files_snapshot.digest, config_files.snapshot.digest)),
    )
    process = await Get(
        Process,
        VenvPexProcess(
            isort_pex,
            argv=generate_argv(
                source_files,
                isort,
                is_isort5=is_isort5,
                check_only=setup_request.check_only,
            ),
            input_digest=input_digest,
            output_files=source_files_snapshot.files,
            description=f"Run isort on {pluralize(len(setup_request.request.field_sets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    return Setup(process, original_digest=source_files_snapshot.digest)
async def flake8_lint_partition(
    partition: Flake8Partition,
    flake8: Flake8,
    first_party_plugins: Flake8FirstPartyPlugins,
) -> LintResult:
    """Run Flake8 (with any first-party plugins) over one partition of field sets."""
    pex_get = Get(
        VenvPex,
        PexRequest,
        flake8.to_pex_request(
            interpreter_constraints=partition.interpreter_constraints,
            extra_requirements=first_party_plugins.requirement_strings,
        ),
    )
    config_get = Get(ConfigFiles, ConfigFilesRequest, flake8.config_request)
    sources_get = Get(
        SourceFiles,
        SourceFilesRequest(field_set.source for field_set in partition.field_sets),
    )
    # Ensure that the empty report dir exists.
    report_dir_get = Get(Digest, CreateDigest([Directory(REPORT_DIR)]))
    flake8_pex, config_files, report_directory, source_files = await MultiGet(
        pex_get, config_get, report_dir_get, sources_get
    )
    input_digest = await Get(
        Digest,
        MergeDigests((
            source_files.snapshot.digest,
            first_party_plugins.sources_digest,
            config_files.snapshot.digest,
            report_directory,
        )),
    )
    result = await Get(
        FallibleProcessResult,
        VenvPexProcess(
            flake8_pex,
            argv=generate_argv(source_files, flake8),
            input_digest=input_digest,
            output_directories=(REPORT_DIR, ),
            # Expose the first-party plugin sources on the pex's sys.path.
            extra_env={"PEX_EXTRA_SYS_PATH": first_party_plugins.PREFIX},
            concurrency_available=len(partition.field_sets),
            description=f"Run Flake8 on {pluralize(len(partition.field_sets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    report = await Get(Digest, RemovePrefix(result.output_digest, REPORT_DIR))
    partition_description = str(
        sorted(str(c) for c in partition.interpreter_constraints)
    )
    return LintResult.from_fallible_process_result(
        result,
        partition_description=partition_description,
        report=report,
    )
async def setup_process_for_parse_terraform_module_sources(
        request: ParseTerraformModuleSources, parser: ParserSetup) -> Process:
    """Build the Process that invokes the Terraform module-source parser pex."""
    return await Get(
        Process,
        VenvPexProcess(
            parser.pex,
            argv=request.paths,
            input_digest=request.sources_digest,
            description="Parse Terraform module sources.",
        ),
    )
async def export_venv(request: ExportedVenvRequest, python_setup: PythonSetup,
                      pex_env: PexEnvironment) -> ExportableData:
    """Export a virtualenv for the requested targets, symlinked under its python version."""
    # Pick a single interpreter for the venv.
    interpreter_constraints = InterpreterConstraints.create_from_targets(
        request.targets, python_setup)
    if not interpreter_constraints:
        # If there were no targets that defined any constraints, fall back to the global ones.
        interpreter_constraints = InterpreterConstraints(
            python_setup.interpreter_constraints)
    min_interpreter = interpreter_constraints.snap_to_minimum(
        python_setup.interpreter_universe)
    if not min_interpreter:
        raise ExportError(
            "The following interpreter constraints were computed for all the targets for which "
            f"export was requested: {interpreter_constraints}. There is no python interpreter "
            "compatible with these constraints. Please restrict the target set to one that shares "
            "a compatible interpreter.")
    venv_pex = await Get(
        VenvPex,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(
            (tgt.address for tgt in request.targets),
            internal_only=True,
            hardcoded_interpreter_constraints=min_interpreter,
        ),
    )
    complete_pex_env = pex_env.in_workspace()
    venv_abspath = os.path.join(complete_pex_env.pex_root, venv_pex.venv_rel_dir)
    # Run the venv_pex to get the full python version (including patch #), so we
    # can use it in the symlink name.
    version_result = await Get(
        ProcessResult,
        VenvPexProcess(
            venv_pex=venv_pex,
            description="Create virtualenv",
            argv=[
                "-c",
                "import sys; print('.'.join(str(x) for x in sys.version_info[0:3]))",
            ],
            input_digest=venv_pex.digest,
        ),
    )
    py_version = version_result.stdout.strip().decode()
    return ExportableData(
        f"virtualenv for {min_interpreter}",
        os.path.join("python", "virtualenv"),
        symlinks=[Symlink(venv_abspath, py_version)],
    )
async def bandit_lint_partition(partition: BanditPartition, bandit: Bandit) -> LintResult:
    """Run Bandit over one interpreter-constraint partition of field sets."""
    pex_get = Get(
        VenvPex,
        PexRequest(
            output_filename="bandit.pex",
            internal_only=True,
            requirements=bandit.pex_requirements(),
            interpreter_constraints=partition.interpreter_constraints,
            main=bandit.main,
        ),
    )
    config_get = Get(ConfigFiles, ConfigFilesRequest, bandit.config_request)
    sources_get = Get(
        SourceFiles,
        SourceFilesRequest(field_set.source for field_set in partition.field_sets),
    )
    # Ensure that the empty report dir exists.
    report_dir_get = Get(Digest, CreateDigest([Directory(REPORT_DIR)]))
    bandit_pex, config_files, report_directory, source_files = await MultiGet(
        pex_get, config_get, report_dir_get, sources_get
    )
    input_digest = await Get(
        Digest,
        MergeDigests((
            source_files.snapshot.digest,
            config_files.snapshot.digest,
            report_directory,
        )),
    )
    result = await Get(
        FallibleProcessResult,
        VenvPexProcess(
            bandit_pex,
            argv=generate_argv(source_files, bandit),
            input_digest=input_digest,
            description=f"Run Bandit on {pluralize(len(partition.field_sets), 'file')}.",
            output_directories=(REPORT_DIR, ),
            level=LogLevel.DEBUG,
        ),
    )
    report = await Get(Digest, RemovePrefix(result.output_digest, REPORT_DIR))
    partition_description = str(
        sorted(str(c) for c in partition.interpreter_constraints)
    )
    return LintResult.from_fallible_process_result(
        result,
        partition_description=partition_description,
        report=report,
    )
async def setup_isort(setup_request: SetupRequest, isort: Isort) -> Setup:
    """Build the Process that runs isort, resolving config via explicit path globs."""
    pex_get = Get(
        VenvPex,
        PexRequest(
            output_filename="isort.pex",
            internal_only=True,
            requirements=PexRequirements(isort.all_requirements),
            interpreter_constraints=PexInterpreterConstraints(isort.interpreter_constraints),
            main=isort.main,
        ),
    )
    config_get = Get(
        Digest,
        PathGlobs(
            globs=isort.config,
            # Error eagerly if the user-specified config files are missing.
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            conjunction=GlobExpansionConjunction.all_match,
            description_of_origin="the option `--isort-config`",
        ),
    )
    sources_get = Get(
        SourceFiles,
        SourceFilesRequest(field_set.sources for field_set in setup_request.request.field_sets),
    )
    source_files, isort_pex, config_digest = await MultiGet(
        sources_get, pex_get, config_get
    )
    # Chain off of any prior formatter's output so formatters compose.
    prior = setup_request.request.prior_formatter_result
    source_files_snapshot = source_files.snapshot if prior is None else prior
    input_digest = await Get(
        Digest, MergeDigests((source_files_snapshot.digest, config_digest))
    )
    process = await Get(
        Process,
        VenvPexProcess(
            isort_pex,
            argv=generate_args(
                source_files=source_files, isort=isort, check_only=setup_request.check_only
            ),
            input_digest=input_digest,
            output_files=source_files_snapshot.files,
            description=f"Run isort on {pluralize(len(setup_request.request.field_sets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    return Setup(process, original_digest=source_files_snapshot.digest)
async def setup_process_for_parse_dockerfile(request: DockerfileParseRequest,
                                             parser: ParserSetup) -> Process:
    """Build the Process that invokes the Dockerfile parser pex."""
    return await Get(
        Process,
        VenvPexProcess(
            parser.pex,
            argv=request.args,
            description="Parse Dockerfile.",
            input_digest=request.sources_digest,
            level=LogLevel.DEBUG,
        ),
    )
async def format_build_file_with_black(request: FormatWithBlackRequest,
                                       black: Black) -> RewrittenBuildFile:
    """Format a single BUILD file with Black, reporting whether anything changed."""
    black_ics = await _find_python_interpreter_constraints_from_lockfile(black)
    pex_get = Get(
        VenvPex, PexRequest, black.to_pex_request(interpreter_constraints=black_ics)
    )
    build_file_get = Get(Digest, CreateDigest([request.to_file_content()]))
    config_get = Get(
        ConfigFiles, ConfigFilesRequest, black.config_request(recursive_dirname(request.path))
    )
    black_pex, build_file_digest, config_files = await MultiGet(
        pex_get, build_file_get, config_get
    )
    input_digest = await Get(
        Digest, MergeDigests((build_file_digest, config_files.snapshot.digest))
    )
    argv = []
    if black.config:
        argv.extend(["--config", black.config])
    argv.extend(black.args)
    argv.append(request.path)
    black_result = await Get(
        ProcessResult,
        VenvPexProcess(
            black_pex,
            argv=argv,
            input_digest=input_digest,
            output_files=(request.path, ),
            description=f"Run Black on {request.path}.",
            level=LogLevel.DEBUG,
        ),
    )
    # Identical digest means Black made no changes.
    if black_result.output_digest == build_file_digest:
        return RewrittenBuildFile(request.path, request.lines, change_descriptions=())
    result_contents = await Get(DigestContents, Digest, black_result.output_digest)
    assert len(result_contents) == 1
    result_lines = tuple(result_contents[0].content.decode("utf-8").splitlines())
    return RewrittenBuildFile(
        request.path, result_lines, change_descriptions=("Format with Black", )
    )
async def setup_isort(setup_request: SetupRequest, isort: Isort) -> Setup:
    """Build the Process that runs isort with auto-discovered config files."""
    pex_get = Get(
        VenvPex,
        PexRequest(
            output_filename="isort.pex",
            internal_only=True,
            requirements=PexRequirements(isort.all_requirements),
            interpreter_constraints=PexInterpreterConstraints(
                isort.interpreter_constraints),
            main=isort.main,
        ),
    )
    config_get = Get(ConfigFiles, ConfigFilesRequest, isort.config_request)
    sources_get = Get(
        SourceFiles,
        SourceFilesRequest(
            field_set.sources for field_set in setup_request.request.field_sets
        ),
    )
    source_files, isort_pex, config_files = await MultiGet(
        sources_get, pex_get, config_get
    )
    # Chain off of any prior formatter's output so formatters compose.
    prior = setup_request.request.prior_formatter_result
    source_files_snapshot = source_files.snapshot if prior is None else prior
    input_digest = await Get(
        Digest,
        MergeDigests((source_files_snapshot.digest, config_files.snapshot.digest)),
    )
    process = await Get(
        Process,
        VenvPexProcess(
            isort_pex,
            argv=generate_args(
                source_files=source_files, isort=isort, check_only=setup_request.check_only
            ),
            input_digest=input_digest,
            output_files=source_files_snapshot.files,
            description=f"Run isort on {pluralize(len(setup_request.request.field_sets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    return Setup(process, original_digest=source_files_snapshot.digest)
async def setup_yapf(setup_request: SetupRequest, yapf: Yapf) -> Setup:
    """Build the Process that runs yapf over the request's sources."""
    pex_get = Get(
        VenvPex,
        PexRequest(
            output_filename="yapf.pex",
            internal_only=True,
            requirements=yapf.pex_requirements(),
            interpreter_constraints=yapf.interpreter_constraints,
            main=yapf.main,
        ),
    )
    sources_get = Get(
        SourceFiles,
        SourceFilesRequest(
            field_set.sources for field_set in setup_request.request.field_sets
        ),
    )
    source_files, yapf_pex = await MultiGet(sources_get, pex_get)
    # Chain off of any prior formatter's output so formatters compose.
    prior = setup_request.request.prior_formatter_result
    source_files_snapshot = source_files.snapshot if prior is None else prior
    config_files = await Get(
        ConfigFiles, ConfigFilesRequest, yapf.config_request(source_files_snapshot.dirs)
    )
    input_digest = await Get(
        Digest,
        MergeDigests((source_files_snapshot.digest, config_files.snapshot.digest)),
    )
    process = await Get(
        Process,
        VenvPexProcess(
            yapf_pex,
            argv=generate_argv(
                source_files,
                yapf,
                check_only=setup_request.check_only,
            ),
            input_digest=input_digest,
            output_files=source_files_snapshot.files,
            description=f"Run yapf on {pluralize(len(setup_request.request.field_sets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    return Setup(process, original_digest=source_files_snapshot.digest)
async def resolve_plugins(
    interpreter_constraints: PexInterpreterConstraints,
    global_options: GlobalOptions,
) -> ResolvedPluginDistributions:
    """This rule resolves plugins using a VenvPex, and exposes the absolute paths of their dists.

    NB: This relies on the fact that PEX constructs venvs in a stable location (within the
    `named_caches` directory), but consequently needs to disable the process cache: see the
    ProcessCacheScope reference in the body.
    """
    requirements = PexRequirements(sorted(global_options.options.plugins))
    if not requirements:
        return ResolvedPluginDistributions()

    plugins_pex = await Get(
        VenvPex,
        PexRequest(
            output_filename="pants_plugins.pex",
            internal_only=True,
            requirements=requirements,
            interpreter_constraints=interpreter_constraints,
            # The repository's constraints are not relevant here, because this resolve is mixed
            # into the Pants' process' path, and never into user code.
            apply_requirement_constraints=False,
            description=f"Resolving plugins: {', '.join(requirements)}",
        ),
    )

    # NB: We run this Process per-restart because it (intentionally) leaks named cache
    # paths in a way that invalidates the Process-cache. See the method doc.
    if global_options.options.plugins_force_resolve:
        cache_scope = ProcessCacheScope.NEVER
    else:
        cache_scope = ProcessCacheScope.PER_RESTART

    plugins_process_result = await Get(
        ProcessResult,
        VenvPexProcess(
            plugins_pex,
            argv=(
                "-c",
                "import os, site; print(os.linesep.join(site.getsitepackages()))",
            ),
            description="Extracting plugin locations",
            level=LogLevel.DEBUG,
            cache_scope=cache_scope,
        ),
    )
    site_packages = plugins_process_result.stdout.decode().strip().split("\n")
    return ResolvedPluginDistributions(site_packages)
async def setup_process_for_parse_terraform_module_sources(
    request: ParseTerraformModuleSources, parser: ParserSetup
) -> Process:
    """Build the Process that invokes the Terraform module-source parser pex."""
    # Summarize the directories being parsed for the process description.
    dir_paths = ", ".join(sorted(group_by_dir(request.paths).keys()))
    return await Get(
        Process,
        VenvPexProcess(
            parser.pex,
            argv=request.paths,
            input_digest=request.sources_digest,
            description=f"Parse Terraform module sources: {dir_paths}",
            level=LogLevel.DEBUG,
        ),
    )
async def setup_docformatter(setup_request: SetupRequest, docformatter: Docformatter) -> Setup:
    """Build the Process that runs Docformatter over the request's sources."""
    pex_get = Get(
        VenvPex,
        PexRequest(
            output_filename="docformatter.pex",
            internal_only=True,
            requirements=PexRequirements(docformatter.all_requirements),
            interpreter_constraints=PexInterpreterConstraints(
                docformatter.interpreter_constraints),
            main=docformatter.main,
        ),
    )
    sources_get = Get(
        SourceFiles,
        SourceFilesRequest(
            field_set.sources for field_set in setup_request.request.field_sets
        ),
    )
    source_files, docformatter_pex = await MultiGet(sources_get, pex_get)
    # Chain off of any prior formatter's output so formatters compose.
    prior = setup_request.request.prior_formatter_result
    source_files_snapshot = source_files.snapshot if prior is None else prior
    process = await Get(
        Process,
        VenvPexProcess(
            docformatter_pex,
            argv=generate_args(
                source_files=source_files,
                docformatter=docformatter,
                check_only=setup_request.check_only,
            ),
            input_digest=source_files_snapshot.digest,
            output_files=source_files_snapshot.files,
            description=(
                f"Run Docformatter on {pluralize(len(setup_request.request.field_sets), 'file')}."
            ),
            level=LogLevel.DEBUG,
        ),
    )
    return Setup(process, original_digest=source_files_snapshot.digest)
async def format_build_file_with_yapf(request: FormatWithYapfRequest,
                                      yapf: Yapf) -> RewrittenBuildFile:
    """Format a single BUILD file with Yapf, reporting whether anything changed."""
    pex_get = Get(VenvPex, PexRequest, yapf.to_pex_request())
    build_file_get = Get(Digest, CreateDigest([request.to_file_content()]))
    config_get = Get(
        ConfigFiles, ConfigFilesRequest, yapf.config_request(recursive_dirname(request.path))
    )
    yapf_pex, build_file_digest, config_files = await MultiGet(
        pex_get, build_file_get, config_get
    )
    input_digest = await Get(
        Digest, MergeDigests((build_file_digest, config_files.snapshot.digest))
    )
    argv = ["--in-place"]
    if yapf.config:
        argv.extend(["--config", yapf.config])
    argv.extend(yapf.args)
    argv.append(request.path)
    yapf_result = await Get(
        ProcessResult,
        VenvPexProcess(
            yapf_pex,
            argv=argv,
            input_digest=input_digest,
            output_files=(request.path, ),
            description=f"Run Yapf on {request.path}.",
            level=LogLevel.DEBUG,
        ),
    )
    # Identical digest means Yapf made no changes.
    if yapf_result.output_digest == build_file_digest:
        return RewrittenBuildFile(request.path, request.lines, change_descriptions=())
    result_contents = await Get(DigestContents, Digest, yapf_result.output_digest)
    assert len(result_contents) == 1
    result_lines = tuple(result_contents[0].content.decode("utf-8").splitlines())
    return RewrittenBuildFile(
        request.path, result_lines, change_descriptions=("Format with Yapf", )
    )
async def docformatter_fmt(request: DocformatterRequest, docformatter: Docformatter) -> FmtResult:
    """Rewrite docstrings in place with Docformatter."""
    if docformatter.skip:
        return FmtResult.skip(formatter_name=request.name)
    docformatter_pex = await Get(VenvPex, PexRequest, docformatter.to_pex_request())
    args = ("--in-place", *docformatter.args, *request.snapshot.files)
    result = await Get(
        ProcessResult,
        VenvPexProcess(
            docformatter_pex,
            argv=args,
            input_digest=request.snapshot.digest,
            output_files=request.snapshot.files,
            description=(f"Run Docformatter on {pluralize(len(request.field_sets), 'file')}."),
            level=LogLevel.DEBUG,
        ),
    )
    output_snapshot = await Get(Snapshot, Digest, result.output_digest)
    return FmtResult.create(request, result, output_snapshot)
async def pyupgrade_fmt(request: PyUpgradeRequest, pyupgrade: PyUpgrade) -> FmtResult:
    """Rewrite the request's files in place with pyupgrade."""
    if pyupgrade.skip:
        return FmtResult.skip(formatter_name=request.name)
    pyupgrade_pex = await Get(VenvPex, PexRequest, pyupgrade.to_pex_request())
    # NOTE: FallibleProcessResult is deliberate — pyupgrade exits non-zero when it
    # rewrites files, which would otherwise raise.
    result = await Get(
        FallibleProcessResult,
        VenvPexProcess(
            pyupgrade_pex,
            argv=(*pyupgrade.args, *request.snapshot.files),
            input_digest=request.snapshot.digest,
            output_files=request.snapshot.files,
            description=f"Run pyupgrade on {pluralize(len(request.field_sets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    output_snapshot = await Get(Snapshot, Digest, result.output_digest)
    return FmtResult.create(request, result, output_snapshot)
async def isort_fmt(request: IsortRequest, isort: Isort) -> FmtResult:
    """Sort imports in the request's files with isort."""
    if isort.skip:
        return FmtResult.skip(formatter_name=request.name)
    pex_get = Get(VenvPex, PexRequest, isort.to_pex_request())
    config_get = Get(
        ConfigFiles, ConfigFilesRequest, isort.config_request(request.snapshot.dirs)
    )
    isort_pex, config_files = await MultiGet(pex_get, config_get)
    # Isort 5+ changes how config files are handled. Determine which semantics we should use.
    is_isort5 = False
    if isort.config:
        isort_info = await Get(PexResolveInfo, VenvPex, isort_pex)
        is_isort5 = any(
            dist_info.project_name == "isort" and dist_info.version.major >= 5
            for dist_info in isort_info
        )
    input_digest = await Get(
        Digest, MergeDigests((request.snapshot.digest, config_files.snapshot.digest))
    )
    result = await Get(
        ProcessResult,
        VenvPexProcess(
            isort_pex,
            argv=generate_argv(request.snapshot.files, isort, is_isort5=is_isort5),
            input_digest=input_digest,
            output_files=request.snapshot.files,
            description=f"Run isort on {pluralize(len(request.field_sets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    output_snapshot = await Get(Snapshot, Digest, result.output_digest)
    return FmtResult.create(request, result, output_snapshot, strip_chroot_path=True)
async def merge_coverage_data(
    data_collection: PytestCoverageDataCollection,
    coverage_setup: CoverageSetup,
) -> MergedCoverageData:
    """Combine per-test `.coverage` files into a single merged coverage database."""
    if len(data_collection) == 1:
        # Nothing to merge.
        return MergedCoverageData(data_collection[0].digest)
    # We prefix each .coverage file with its corresponding address to avoid collisions.
    coverage_digests = await MultiGet(
        Get(Digest, AddPrefix(data.digest, prefix=data.address.path_safe_spec))
        for data in data_collection
    )
    input_digest = await Get(Digest, MergeDigests(coverage_digests))
    prefixes = sorted(
        f"{data.address.path_safe_spec}/.coverage" for data in data_collection
    )
    result = await Get(
        ProcessResult,
        VenvPexProcess(
            coverage_setup.pex,
            argv=("combine", *prefixes),
            input_digest=input_digest,
            output_files=(".coverage", ),
            description=f"Merge {len(prefixes)} Pytest coverage reports.",
            level=LogLevel.DEBUG,
        ),
    )
    return MergedCoverageData(result.output_digest)
async def generate_coverage_reports(
    merged_coverage_data: MergedCoverageData,
    coverage_setup: CoverageSetup,
    coverage_config: CoverageConfig,
    coverage_subsystem: CoverageSubsystem,
    global_options: GlobalOptions,
) -> CoverageReports:
    """Takes all Python test results and generates a single coverage report.

    One report is produced per type requested in `coverage_subsystem.reports`.
    The RAW type is just the merged `.coverage` data file materialized as-is;
    every other type is generated by running `coverage <report_name>` in a
    sandbox containing the merged data, the coverage config, and the
    transitive Python sources.
    """
    # The report is generated against the sources under test, so pull in the
    # transitive closure of everything the covered addresses depend on.
    transitive_targets = await Get(
        TransitiveTargets, TransitiveTargetsRequest(merged_coverage_data.addresses))
    sources = await Get(
        PythonSourceFiles,
        # Coverage sometimes includes non-Python files in its `.coverage` data. We need to
        # ensure that they're present when generating the report. We include all the files included
        # by `pytest_runner.py`.
        PythonSourceFilesRequest(transitive_targets.closure,
                                 include_files=True,
                                 include_resources=True),
    )
    # Sandbox inputs shared by every `coverage` invocation below.
    input_digest = await Get(
        Digest,
        MergeDigests((
            merged_coverage_data.coverage_data,
            coverage_config.digest,
            sources.source_files.snapshot.digest,
        )),
    )
    pex_processes = []
    report_types = []
    result_snapshot = await Get(Snapshot, Digest, merged_coverage_data.coverage_data)
    coverage_reports: list[CoverageReport] = []
    for report_type in coverage_subsystem.reports:
        if report_type == CoverageReportType.RAW:
            # RAW needs no process run: materialize the merged data file itself.
            coverage_reports.append(
                FilesystemCoverageReport(
                    # We don't know yet if the coverage is sufficient, so we let some other report
                    # trigger the failure if necessary.
                    coverage_insufficient=False,
                    report_type=CoverageReportType.RAW.value,
                    result_snapshot=result_snapshot,
                    directory_to_materialize_to=coverage_subsystem.output_dir,
                    report_file=coverage_subsystem.output_dir / ".coverage",
                ))
            continue
        report_types.append(report_type)
        # Only XML and JSON reports write a single well-known output file.
        output_file = (f"coverage.{report_type.value}" if report_type in {
            CoverageReportType.XML, CoverageReportType.JSON
        } else None)
        args = [report_type.report_name, f"--rcfile={coverage_config.path}"]
        if coverage_subsystem.fail_under is not None:
            args.append(f"--fail-under={coverage_subsystem.fail_under}")
        pex_processes.append(
            VenvPexProcess(
                coverage_setup.pex,
                argv=tuple(args),
                input_digest=input_digest,
                # HTML writes a directory; XML/JSON write a single file.
                output_directories=("htmlcov", )
                if report_type == CoverageReportType.HTML else None,
                output_files=(output_file, ) if output_file else None,
                description=
                f"Generate Pytest {report_type.report_name} coverage report.",
                level=LogLevel.DEBUG,
            ))
    # Run all report processes concurrently, using the fallible variant so we
    # can interpret coverage.py's exit codes ourselves below.
    results = await MultiGet(
        Get(FallibleProcessResult, VenvPexProcess, process)
        for process in pex_processes)
    for proc, res in zip(pex_processes, results):
        if res.exit_code not in {0, 2}:
            # coverage.py uses exit code 2 if --fail-under triggers, in which case the
            # reports are still generated.
            raise ProcessExecutionFailure(
                res.exit_code,
                res.stdout,
                res.stderr,
                proc.description,
                local_cleanup=global_options.options.
                process_execution_local_cleanup,
            )
    # In practice if one result triggers --fail-under, they all will, but no need to rely on that.
    result_exit_codes = tuple(res.exit_code for res in results)
    result_stdouts = tuple(res.stdout for res in results)
    result_snapshots = await MultiGet(
        Get(Snapshot, Digest, res.output_digest) for res in results)
    # `report_types` was appended in lockstep with `pex_processes`, so these
    # four sequences zip positionally.
    coverage_reports.extend(
        _get_coverage_report(coverage_subsystem.output_dir, report_type,
                             exit_code != 0, stdout, snapshot)
        for (report_type, exit_code, stdout, snapshot) in zip(
            report_types, result_exit_codes, result_stdouts, result_snapshots))
    return CoverageReports(tuple(coverage_reports))
async def merge_coverage_data(
    data_collection: PytestCoverageDataCollection,
    coverage_setup: CoverageSetup,
    coverage: CoverageSubsystem,
    source_roots: AllSourceRoots,
) -> MergedCoverageData:
    """Merge per-test `.coverage` data files into a single data file.

    When `coverage.global_report` is set, an extra baseline data file is
    produced by `coverage run`-ning an empty script with `source` configured to
    all source roots, and merged in alongside the per-test data (presumably so
    that source files never imported by any test still appear in reports —
    see coverage.py's `source` option semantics).
    """
    if len(data_collection) == 1 and not coverage.global_report:
        # Single data file and no global baseline requested: nothing to merge.
        coverage_data = data_collection[0]
        return MergedCoverageData(coverage_data.digest, (coverage_data.address, ))

    coverage_digest_gets = []
    coverage_data_file_paths = []
    addresses = []
    for data in data_collection:
        # We prefix each .coverage file with its corresponding address to avoid collisions.
        coverage_digest_gets.append(
            Get(Digest,
                AddPrefix(data.digest, prefix=data.address.path_safe_spec)))
        coverage_data_file_paths.append(
            f"{data.address.path_safe_spec}/.coverage")
        addresses.append(data.address)

    if coverage.global_report:
        # Synthesize a config (relative_files + all source roots) and a no-op
        # executable, then run coverage over the no-op to produce the baseline.
        global_coverage_base_dir = PurePath("__global_coverage__")
        global_coverage_config_path = global_coverage_base_dir / "pyproject.toml"
        global_coverage_config_content = toml.dumps({
            "tool": {
                "coverage": {
                    "run": {
                        "relative_files": True,
                        "source":
                        list(source_root.path for source_root in source_roots),
                    }
                }
            }
        }).encode()
        no_op_exe_py_path = global_coverage_base_dir / "no-op-exe.py"
        all_sources_digest, no_op_exe_py_digest, global_coverage_config_digest = await MultiGet(
            Get(
                Digest,
                PathGlobs(globs=[
                    f"{source_root.path}/**/*.py"
                    for source_root in source_roots
                ]),
            ),
            Get(
                Digest,
                CreateDigest(
                    [FileContent(path=str(no_op_exe_py_path), content=b"")])),
            Get(
                Digest,
                CreateDigest([
                    FileContent(
                        path=str(global_coverage_config_path),
                        content=global_coverage_config_content,
                    ),
                ]),
            ),
        )
        extra_sources_digest = await Get(
            Digest, MergeDigests((all_sources_digest, no_op_exe_py_digest)))
        input_digest = await Get(
            Digest,
            MergeDigests(
                (extra_sources_digest, global_coverage_config_digest)))
        result = await Get(
            ProcessResult,
            VenvPexProcess(
                coverage_setup.pex,
                argv=("run", "--rcfile", str(global_coverage_config_path),
                      str(no_op_exe_py_path)),
                input_digest=input_digest,
                output_files=(".coverage", ),
                description="Create base global Pytest coverage report.",
                level=LogLevel.DEBUG,
            ),
        )
        # Treat the baseline like any other per-address data file: prefix it
        # into its own directory and record its path for `coverage combine`.
        coverage_digest_gets.append(
            Get(
                Digest,
                AddPrefix(digest=result.output_digest,
                          prefix=str(global_coverage_base_dir))))
        coverage_data_file_paths.append(
            str(global_coverage_base_dir / ".coverage"))
    else:
        extra_sources_digest = EMPTY_DIGEST

    input_digest = await Get(
        Digest, MergeDigests(await MultiGet(coverage_digest_gets)))
    result = await Get(
        ProcessResult,
        VenvPexProcess(
            coverage_setup.pex,
            argv=("combine", *sorted(coverage_data_file_paths)),
            input_digest=input_digest,
            output_files=(".coverage", ),
            description=
            f"Merge {len(coverage_data_file_paths)} Pytest coverage reports.",
            level=LogLevel.DEBUG,
        ),
    )
    # The extra sources are merged into the returned digest alongside the
    # combined `.coverage` file — NOTE(review): presumably so downstream report
    # generation can resolve the files referenced by the global baseline;
    # confirm against the report-generation rule.
    return MergedCoverageData(
        await Get(Digest,
                  MergeDigests((result.output_digest, extra_sources_digest))),
        tuple(addresses),
    )
def create_pex_and_get_all_data(
    rule_runner: RuleRunner,
    *,
    pex_type: type[Pex | VenvPex] = Pex,
    requirements: PexRequirements = PexRequirements(),
    main: MainSpecification | None = None,
    interpreter_constraints: PexInterpreterConstraints = PexInterpreterConstraints(),
    platforms: PexPlatforms = PexPlatforms(),
    sources: Digest | None = None,
    additional_inputs: Digest | None = None,
    additional_pants_args: Tuple[str, ...] = (),
    additional_pex_args: Tuple[str, ...] = (),
    env: Mapping[str, str] | None = None,
    internal_only: bool = True,
) -> Dict:
    """Build a PEX (or venv PEX) via the rule API and return its metadata.

    The returned dict has keys "pex" (the built artifact), "local_path" (the
    materialized file), "info" (the parsed PEX-INFO), and "files" (the zip's
    name list).
    """
    pex_request = PexRequest(
        output_filename="test.pex",
        internal_only=internal_only,
        requirements=requirements,
        interpreter_constraints=interpreter_constraints,
        platforms=platforms,
        main=main,
        sources=sources,
        additional_inputs=additional_inputs,
        additional_args=additional_pex_args,
    )
    rule_runner.set_options(
        ["--backend-packages=pants.backend.python", *additional_pants_args],
        env=env,
        env_inherit={"PATH", "PYENV_ROOT", "HOME"},
    )
    built = rule_runner.request(pex_type, [pex_request])

    if isinstance(built, Pex):
        digest = built.digest
        # Zipapp PEX: run the Pex PEX's `pex.tools` module against the built
        # artifact to dump its PEX-INFO.
        pex_pex = rule_runner.request(PexPEX, [])
        info_process = rule_runner.request(
            Process,
            [
                PexProcess(
                    Pex(digest=pex_pex.digest, name=pex_pex.exe, python=built.python),
                    argv=["-m", "pex.tools", built.name, "info"],
                    input_digest=built.digest,
                    extra_env=dict(PEX_INTERPRETER="1"),
                    description="Extract PEX-INFO.",
                )
            ],
        )
    elif isinstance(built, VenvPex):
        digest = built.digest
        # Venv PEX: the venv script exposes the tools directly via PEX_TOOLS.
        info_process = rule_runner.request(
            Process,
            [
                VenvPexProcess(
                    built,
                    argv=["info"],
                    extra_env=dict(PEX_TOOLS="1"),
                    description="Extract PEX-INFO.",
                ),
            ],
        )
    else:
        raise AssertionError(f"Expected a Pex or a VenvPex but got a {type(built)}.")

    rule_runner.scheduler.write_digest(digest)
    pex_path = os.path.join(rule_runner.build_root, "test.pex")
    info_result = rule_runner.request(ProcessResult, [info_process])
    with zipfile.ZipFile(pex_path, "r") as zip_file:
        zip_names = zip_file.namelist()
    return {
        "pex": built,
        "local_path": pex_path,
        "info": json.loads(info_result.stdout.decode()),
        "files": zip_names,
    }