async def create_python_repl_request(repl: PythonRepl, pex_env: PexEnvironment) -> ReplRequest:
    """Assemble the ReplRequest for launching a plain Python REPL over `repl.targets`.

    Builds a requirements PEX and gathers first-party sources concurrently, merges
    them into one input digest, and exposes the source roots on the REPL's sys.path.
    """
    # Fan out: the 3rd-party requirements PEX and the first-party sources are
    # independent, so request both before awaiting.
    pex_get = Get(
        Pex,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(
            (tgt.address for tgt in repl.targets), internal_only=True),
    )
    sources_get = Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(repl.targets, include_files=True))
    requirements_pex, sources = await MultiGet(pex_get, sources_get)

    # Single digest containing both the PEX and the source files.
    repl_digest = await Get(
        Digest,
        MergeDigests(
            (requirements_pex.digest, sources.source_files.snapshot.digest)))

    launch_args = pex_env.create_argv(
        repl.in_chroot(requirements_pex.name), python=requirements_pex.python)

    # First-party source roots must be chroot-relative and joined with ":" for
    # PEX_EXTRA_SYS_PATH so the REPL can import the user's code.
    extra_sys_path = ":".join(repl.in_chroot(sr) for sr in sources.source_roots)
    env = dict(
        pex_env.environment_dict(
            python_configured=requirements_pex.python is not None))
    env["PEX_EXTRA_SYS_PATH"] = extra_sys_path

    return ReplRequest(digest=repl_digest, args=launch_args, extra_env=env)
async def create_ipython_repl_request(repl: IPythonRepl, ipython: IPython, pex_env: PexEnvironment) -> ReplRequest:
    """Assemble the ReplRequest for launching an IPython REPL over `repl.targets`.

    Builds three artifacts — the user requirements PEX, the first-party sources, and
    an IPython PEX — and wires them together so IPython runs with the user's code and
    requirements importable.
    """
    # Note that we get an intermediate PexRequest here (instead of going straight to a Pex)
    # so that we can get the interpreter constraints for use in ipython_request.
    requirements_pex_request = await Get(
        PexRequest,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(
            (tgt.address for tgt in repl.targets), internal_only=True),
    )
    requirements_request = Get(Pex, PexRequest, requirements_pex_request)
    sources_request = Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(repl.targets, include_files=True))
    # The IPython tool PEX reuses the requirements PEX's interpreter constraints so
    # both PEXes resolve against a compatible interpreter.
    ipython_request = Get(
        Pex,
        PexRequest(
            output_filename="ipython.pex",
            main=ipython.main,
            requirements=PexRequirements(ipython.all_requirements),
            interpreter_constraints=requirements_pex_request.interpreter_constraints,
            internal_only=True,
        ),
    )
    requirements_pex, sources, ipython_pex = await MultiGet(
        requirements_request, sources_request, ipython_request)
    merged_digest = await Get(
        Digest,
        MergeDigests(
            (requirements_pex.digest, sources.source_files.snapshot.digest,
             ipython_pex.digest)),
    )
    # `list(...)` because we may append the `--ignore-cwd` flag below.
    args = list(
        pex_env.create_argv(repl.in_chroot(ipython_pex.name),
                            python=ipython_pex.python))
    if ipython.options.ignore_cwd:
        args.append("--ignore-cwd")
    chrooted_source_roots = [repl.in_chroot(sr) for sr in sources.source_roots]
    extra_env = {
        **pex_env.environment_dict(python_configured=ipython_pex.python is not None),
        # PEX_PATH layers the user requirements PEX onto the IPython PEX at runtime.
        "PEX_PATH": repl.in_chroot(requirements_pex_request.output_filename),
        # Make the first-party source roots importable from within the REPL.
        "PEX_EXTRA_SYS_PATH": ":".join(chrooted_source_roots),
    }
    return ReplRequest(digest=merged_digest, args=args, extra_env=extra_env)
async def export_venv(request: ExportedVenvRequest, python_setup: PythonSetup, pex_env: PexEnvironment) -> ExportableData:
    """Export a virtualenv for the requested targets' requirements.

    Raises:
        ExportError: if the computed interpreter constraints admit no usable interpreter.
    """
    # Pick a single interpreter for the venv.
    interpreter_constraints = InterpreterConstraints.create_from_targets(
        request.targets, python_setup)
    if not interpreter_constraints:
        # If there were no targets that defined any constraints, fall back to the global ones.
        interpreter_constraints = InterpreterConstraints(
            python_setup.interpreter_constraints)
    min_interpreter = interpreter_constraints.snap_to_minimum(
        python_setup.interpreter_universe)
    if not min_interpreter:
        raise ExportError(
            "The following interpreter constraints were computed for all the targets for which "
            f"export was requested: {interpreter_constraints}. There is no python interpreter "
            "compatible with these constraints. Please restrict the target set to one that shares "
            "a compatible interpreter.")
    venv_pex = await Get(
        VenvPex,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(
            (tgt.address for tgt in request.targets),
            internal_only=True,
            hardcoded_interpreter_constraints=min_interpreter,
        ),
    )
    # The venv lives under the workspace pex_root; compute its absolute path so we
    # can symlink to it from the export directory.
    complete_pex_env = pex_env.in_workspace()
    venv_abspath = os.path.join(complete_pex_env.pex_root, venv_pex.venv_rel_dir)
    # Run the venv_pex to get the full python version (including patch #), so we
    # can use it in the symlink name.
    res = await Get(
        ProcessResult,
        VenvPexProcess(
            venv_pex=venv_pex,
            description="Create virtualenv",
            argv=[
                "-c",
                "import sys; print('.'.join(str(x) for x in sys.version_info[0:3]))"
            ],
            input_digest=venv_pex.digest,
        ),
    )
    # Process stdout is bytes; strip the trailing newline before decoding.
    py_version = res.stdout.strip().decode()
    return ExportableData(
        f"virtualenv for {min_interpreter}",
        os.path.join("python", "virtualenv"),
        symlinks=[Symlink(venv_abspath, py_version)],
    )
async def create_python_repl_request(repl: PythonRepl, pex_env: PexEnvironment) -> ReplRequest:
    """Assemble the ReplRequest for a Python REPL, including locally-built dists.

    Unlike the simpler variant, this also builds a PEX of local distributions and
    layers it onto the requirements PEX via PEX_PATH.
    """
    # Note that we get an intermediate PexRequest here (instead of going straight to a Pex) so
    # that we can get the interpreter constraints for use in local_dists_request.
    requirements_pex_request = await Get(
        PexRequest,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(
            (tgt.address for tgt in repl.targets), internal_only=True),
    )
    requirements_request = Get(Pex, PexRequest, requirements_pex_request)
    local_dists_request = Get(
        LocalDistsPex,
        LocalDistsPexRequest(
            Addresses(tgt.address for tgt in repl.targets),
            internal_only=True,
            # Must match the requirements PEX so the two PEXes are interpreter-compatible.
            interpreter_constraints=requirements_pex_request.interpreter_constraints,
        ),
    )
    sources_request = Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(repl.targets, include_files=True))
    requirements_pex, local_dists, sources = await MultiGet(
        requirements_request, local_dists_request, sources_request)
    merged_digest = await Get(
        Digest,
        MergeDigests((requirements_pex.digest, local_dists.pex.digest,
                      sources.source_files.snapshot.digest)),
    )
    # The REPL runs in the user's workspace, so use the workspace-scoped PEX environment.
    complete_pex_env = pex_env.in_workspace()
    args = complete_pex_env.create_argv(repl.in_chroot(requirements_pex.name),
                                        python=requirements_pex.python)
    chrooted_source_roots = [repl.in_chroot(sr) for sr in sources.source_roots]
    extra_env = {
        **complete_pex_env.environment_dict(python_configured=requirements_pex.python is not None),
        # Make the first-party source roots importable from within the REPL.
        "PEX_EXTRA_SYS_PATH": ":".join(chrooted_source_roots),
        # Layer the local-dists PEX onto the requirements PEX at runtime.
        "PEX_PATH": repl.in_chroot(local_dists.pex.name),
    }
    return ReplRequest(digest=merged_digest, args=args, extra_env=extra_env)
async def pylint_lint_partition(partition: PylintPartition, pylint: Pylint) -> LintResult:
    """Run Pylint over one partition of field sets and report the result.

    Builds the Pylint tool PEX (with any plugin requirements baked in), the user
    requirements PEX, the config file digest, and the stripped/unstripped sources,
    then runs Pylint over exactly the partition's files.
    """
    requirements_pex_request = Get(
        Pex,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(
            (field_set.address for field_set in partition.field_sets),
            # NB: These constraints must be identical to the other PEXes. Otherwise, we risk using
            # a different version for the requirements than the other two PEXes, which can result
            # in a PEX runtime error about missing dependencies.
            hardcoded_interpreter_constraints=partition.interpreter_constraints,
            internal_only=True,
            direct_deps_only=True,
        ),
    )
    # Requirements declared by Pylint source-plugin targets get merged into the tool PEX.
    plugin_requirements = PexRequirements.create_from_requirement_fields(
        plugin_tgt[PythonRequirementsField]
        for plugin_tgt in partition.plugin_targets
        if plugin_tgt.has_field(PythonRequirementsField)
    )
    # Right now any Pylint transitive requirements will shadow corresponding user
    # requirements, which could lead to problems.
    pylint_pex_request = Get(
        Pex,
        PexRequest(
            output_filename="pylint.pex",
            internal_only=True,
            requirements=PexRequirements([*pylint.all_requirements, *plugin_requirements]),
            entry_point=pylint.entry_point,
            interpreter_constraints=partition.interpreter_constraints,
            # TODO(John Sirois): Support shading python binaries:
            # https://github.com/pantsbuild/pants/issues/9206
            additional_args=("--pex-path", requirements_pex_request.input.output_filename),
        ),
    )
    # Error (rather than silently skip) if the user-configured pylint config is missing.
    config_digest_request = Get(
        Digest,
        PathGlobs(
            globs=[pylint.config] if pylint.config else [],
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            description_of_origin="the option `--pylint-config`",
        ),
    )
    prepare_plugin_sources_request = Get(
        StrippedPythonSourceFiles, PythonSourceFilesRequest(partition.plugin_targets)
    )
    prepare_python_sources_request = Get(
        PythonSourceFiles, PythonSourceFilesRequest(partition.targets_with_dependencies)
    )
    field_set_sources_request = Get(
        SourceFiles, SourceFilesRequest(field_set.sources for field_set in partition.field_sets)
    )
    # NB: the unpack order here must match the MultiGet argument order.
    (
        pylint_pex,
        requirements_pex,
        config_digest,
        prepared_plugin_sources,
        prepared_python_sources,
        field_set_sources,
    ) = await MultiGet(
        pylint_pex_request,
        requirements_pex_request,
        config_digest_request,
        prepare_plugin_sources_request,
        prepare_python_sources_request,
        field_set_sources_request,
    )
    # Plugin sources are namespaced under `__plugins` so they cannot collide with
    # the user's own source roots.
    prefixed_plugin_sources = (
        await Get(
            Digest,
            AddPrefix(prepared_plugin_sources.stripped_source_files.snapshot.digest, "__plugins"),
        )
        if pylint.source_plugins
        else EMPTY_DIGEST
    )
    pythonpath = list(prepared_python_sources.source_roots)
    if pylint.source_plugins:
        # NB: Pylint source plugins must be explicitly loaded via PEX_EXTRA_SYS_PATH. The value must
        # point to the plugin's directory, rather than to a parent's directory, because
        # `load-plugins` takes a module name rather than a path to the module; i.e. `plugin`, but
        # not `path.to.plugin`. (This means users must have specified the parent directory as a
        # source root.)
        pythonpath.append("__plugins")
    input_digest = await Get(
        Digest,
        MergeDigests(
            (
                pylint_pex.digest,
                requirements_pex.digest,
                config_digest,
                prefixed_plugin_sources,
                prepared_python_sources.source_files.snapshot.digest,
            )
        ),
    )
    result = await Get(
        FallibleProcessResult,
        PexProcess(
            pylint_pex,
            argv=generate_args(source_files=field_set_sources, pylint=pylint),
            input_digest=input_digest,
            extra_env={"PEX_EXTRA_SYS_PATH": ":".join(pythonpath)},
            description=f"Run Pylint on {pluralize(len(partition.field_sets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    return LintResult.from_fallible_process_result(
        result, partition_description=str(sorted(str(c) for c in partition.interpreter_constraints))
    )
async def pylint_lint_partition(
    partition: PylintPartition, pylint: Pylint, first_party_plugins: PylintFirstPartyPlugins
) -> LintResult:
    """Run Pylint over one partition of field sets and report the result.

    Builds a Pylint tool PEX and a user-requirements PEX, composes them into a
    single venv-style runner PEX, and invokes it over the partition's files.
    """
    requirements_pex_get = Get(
        Pex,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(
            (field_set.address for field_set in partition.field_sets),
            # NB: These constraints must be identical to the other PEXes. Otherwise, we risk using
            # a different version for the requirements than the other two PEXes, which can result
            # in a PEX runtime error about missing dependencies.
            hardcoded_interpreter_constraints=partition.interpreter_constraints,
            internal_only=True,
            direct_deps_only=True,
        ),
    )
    pylint_pex_get = Get(
        Pex,
        PexRequest(
            output_filename="pylint.pex",
            internal_only=True,
            # First-party plugin requirements are folded into the tool's own requirements.
            requirements=pylint.pex_requirements(
                extra_requirements=first_party_plugins.requirement_strings,
            ),
            interpreter_constraints=partition.interpreter_constraints,
        ),
    )
    prepare_python_sources_get = Get(
        PythonSourceFiles, PythonSourceFilesRequest(partition.targets_with_dependencies)
    )
    field_set_sources_get = Get(
        SourceFiles, SourceFilesRequest(field_set.sources for field_set in partition.field_sets)
    )
    pylint_pex, requirements_pex, prepared_python_sources, field_set_sources = await MultiGet(
        pylint_pex_get,
        requirements_pex_get,
        prepare_python_sources_get,
        field_set_sources_get,
    )
    # The runner PEX depends on the two PEXes above (via pex_path), so it is built
    # in a second round; config discovery is batched into the same MultiGet.
    pylint_runner_pex, config_files = await MultiGet(
        Get(
            VenvPex,
            PexRequest(
                output_filename="pylint_runner.pex",
                interpreter_constraints=partition.interpreter_constraints,
                main=pylint.main,
                internal_only=True,
                pex_path=[pylint_pex, requirements_pex],
            ),
        ),
        Get(
            ConfigFiles, ConfigFilesRequest, pylint.config_request(field_set_sources.snapshot.dirs)
        ),
    )
    pythonpath = list(prepared_python_sources.source_roots)
    if first_party_plugins:
        # Plugins live under their own prefix and must be on PEX_EXTRA_SYS_PATH so
        # Pylint's `load-plugins` (which takes a module name) can import them.
        pythonpath.append(first_party_plugins.PREFIX)
    input_digest = await Get(
        Digest,
        MergeDigests(
            (
                config_files.snapshot.digest,
                first_party_plugins.sources_digest,
                prepared_python_sources.source_files.snapshot.digest,
            )
        ),
    )
    result = await Get(
        FallibleProcessResult,
        VenvPexProcess(
            pylint_runner_pex,
            argv=generate_argv(field_set_sources, pylint),
            input_digest=input_digest,
            extra_env={"PEX_EXTRA_SYS_PATH": ":".join(pythonpath)},
            description=f"Run Pylint on {pluralize(len(partition.field_sets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    return LintResult.from_fallible_process_result(
        result, partition_description=str(sorted(str(c) for c in partition.interpreter_constraints))
    )
async def setup_pytest_for_target(
    request: TestSetupRequest,
    pytest: PyTest,
    test_subsystem: TestSubsystem,
    python_setup: PythonSetup,
    coverage_config: CoverageConfig,
    coverage_subsystem: CoverageSubsystem,
    test_extra_env: TestExtraEnv,
    global_options: GlobalOptions,
) -> TestSetup:
    """Prepare the Process that will run Pytest for a single test target.

    Builds the pytest and requirements PEXes, composes them into a runner PEX,
    stages runtime package dependencies, and configures junit-xml and coverage
    output based on the relevant subsystem options.
    """
    transitive_targets = await Get(
        TransitiveTargets, TransitiveTargetsRequest([request.field_set.address])
    )
    all_targets = transitive_targets.closure
    interpreter_constraints = PexInterpreterConstraints.create_from_targets(
        all_targets, python_setup
    )
    requirements_pex_request = Get(
        Pex,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements([request.field_set.address], internal_only=True),
    )
    pytest_pex_request = Get(
        Pex,
        PexRequest(
            output_filename="pytest.pex",
            requirements=PexRequirements(pytest.get_requirement_strings()),
            interpreter_constraints=interpreter_constraints,
            internal_only=True,
        ),
    )
    prepared_sources_request = Get(
        PythonSourceFiles, PythonSourceFilesRequest(all_targets, include_files=True)
    )

    # Create any assets that the test depends on through the `runtime_package_dependencies` field.
    assets: Tuple[BuiltPackage, ...] = ()
    unparsed_runtime_packages = (
        request.field_set.runtime_package_dependencies.to_unparsed_address_inputs()
    )
    if unparsed_runtime_packages.values:
        runtime_package_targets = await Get(
            Targets, UnparsedAddressInputs, unparsed_runtime_packages
        )
        field_sets_per_target = await Get(
            FieldSetsPerTarget,
            FieldSetsPerTargetRequest(PackageFieldSet, runtime_package_targets),
        )
        assets = await MultiGet(
            Get(BuiltPackage, PackageFieldSet, field_set)
            for field_set in field_sets_per_target.field_sets
        )

    # Get the file names for the test_target so that we can specify to Pytest precisely which files
    # to test, rather than using auto-discovery.
    field_set_source_files_request = Get(
        SourceFiles, SourceFilesRequest([request.field_set.sources])
    )
    pytest_pex, requirements_pex, prepared_sources, field_set_source_files = await MultiGet(
        pytest_pex_request,
        requirements_pex_request,
        prepared_sources_request,
        field_set_source_files_request,
    )
    pytest_runner_pex = await Get(
        VenvPex,
        PexRequest(
            output_filename="pytest_runner.pex",
            interpreter_constraints=interpreter_constraints,
            # TODO(John Sirois): Switch to ConsoleScript once Pex supports discovering console
            #  scripts via the PEX_PATH: https://github.com/pantsbuild/pex/issues/1257
            main=EntryPoint("pytest"),
            internal_only=True,
            pex_path=[pytest_pex, requirements_pex],
        ),
    )
    input_digest = await Get(
        Digest,
        MergeDigests(
            (
                coverage_config.digest,
                prepared_sources.source_files.snapshot.digest,
                *(binary.digest for binary in assets),
            )
        ),
    )
    # Extra pytest options are passed via the PYTEST_ADDOPTS environment variable.
    add_opts = [f"--color={'yes' if global_options.options.colors else 'no'}"]
    output_files = []

    results_file_name = None
    if pytest.options.junit_xml_dir and not request.is_debug:
        results_file_name = f"{request.field_set.address.path_safe_spec}.xml"
        add_opts.extend(
            (f"--junitxml={results_file_name}", "-o", f"junit_family={pytest.options.junit_family}")
        )
        output_files.append(results_file_name)

    coverage_args = []
    if test_subsystem.use_coverage and not request.is_debug:
        output_files.append(".coverage")
        # Default to covering the current directory when no filter is given.
        cov_paths = coverage_subsystem.filter if coverage_subsystem.filter else (".",)
        coverage_args = [
            "--cov-report=",  # Turn off output.
            *itertools.chain.from_iterable(["--cov", cov_path] for cov_path in cov_paths),
        ]
    extra_env = {
        "PYTEST_ADDOPTS": " ".join(add_opts),
        "PEX_EXTRA_SYS_PATH": ":".join(prepared_sources.source_roots),
    }
    extra_env.update(test_extra_env.env)

    # Cache test runs only if they are successful, or not at all if `--test-force`.
    cache_scope = ProcessCacheScope.NEVER if test_subsystem.force else ProcessCacheScope.SUCCESSFUL
    process = await Get(
        Process,
        VenvPexProcess(
            pytest_runner_pex,
            argv=(*pytest.options.args, *coverage_args, *field_set_source_files.files),
            extra_env=extra_env,
            input_digest=input_digest,
            output_files=output_files,
            timeout_seconds=request.field_set.timeout.calculate_from_global_options(pytest),
            execution_slot_variable=pytest.options.execution_slot_var,
            description=f"Run Pytest for {request.field_set.address}",
            level=LogLevel.DEBUG,
            cache_scope=cache_scope,
        ),
    )
    return TestSetup(process, results_file_name=results_file_name)
async def mypy_typecheck_partition(partition: MyPyPartition, mypy: MyPy) -> TypecheckResult:
    """Run MyPy over one partition of targets and report the result.

    Resolves MyPy source plugins, picks a compatible interpreter for the tool,
    builds the tool/requirements PEXes, and runs MyPy against an explicit file
    list (no auto-discovery).
    """
    plugin_target_addresses = await Get(Addresses, UnparsedAddressInputs, mypy.source_plugins)
    plugin_transitive_targets = await Get(
        TransitiveTargets, TransitiveTargetsRequest(plugin_target_addresses)
    )
    # Requirements declared by plugin targets get baked into the MyPy tool PEX.
    plugin_requirements = PexRequirements.create_from_requirement_fields(
        plugin_tgt[PythonRequirementsField]
        for plugin_tgt in plugin_transitive_targets.closure
        if plugin_tgt.has_field(PythonRequirementsField)
    )

    # If the user did not set `--python-version` already, we set it ourselves based on their code's
    # interpreter constraints. This determines what AST is used by MyPy.
    python_version = (
        None
        if partition.python_version_already_configured
        else partition.interpreter_constraints.minimum_python_version()
    )

    # MyPy requires 3.5+ to run, but uses the typed-ast library to work with 2.7, 3.4, 3.5, 3.6,
    # and 3.7. However, typed-ast does not understand 3.8+, so instead we must run MyPy with
    # Python 3.8+ when relevant. We only do this if <3.8 can't be used, as we don't want a
    # loose requirement like `>=3.6` to result in requiring Python 3.8+, which would error if
    # 3.8+ is not installed on the machine.
    tool_interpreter_constraints = (
        partition.interpreter_constraints
        if (
            mypy.options.is_default("interpreter_constraints")
            and partition.interpreter_constraints.requires_python38_or_newer()
        )
        else PexInterpreterConstraints(mypy.interpreter_constraints)
    )

    plugin_sources_get = Get(
        PythonSourceFiles, PythonSourceFilesRequest(plugin_transitive_targets.closure)
    )
    closure_sources_get = Get(PythonSourceFiles, PythonSourceFilesRequest(partition.closure))
    roots_sources_get = Get(
        SourceFiles, SourceFilesRequest(tgt.get(PythonSources) for tgt in partition.root_targets)
    )
    requirements_pex_get = Get(
        Pex,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(
            (tgt.address for tgt in partition.root_targets),
            hardcoded_interpreter_constraints=partition.interpreter_constraints,
            internal_only=True,
        ),
    )

    # TODO(John Sirois): Scope the extra requirements to the partition.
    #  Right now we just use a global set of extra requirements and these might not be compatible
    #  with all partitions. See: https://github.com/pantsbuild/pants/issues/11556
    mypy_extra_requirements_pex_get = Get(
        Pex,
        PexRequest(
            output_filename="mypy_extra_requirements.pex",
            internal_only=True,
            requirements=PexRequirements(mypy.extra_requirements),
            interpreter_constraints=partition.interpreter_constraints,
        ),
    )
    mypy_pex_get = Get(
        VenvPex,
        PexRequest(
            output_filename="mypy.pex",
            internal_only=True,
            main=mypy.main,
            requirements=PexRequirements((*mypy.all_requirements, *plugin_requirements)),
            interpreter_constraints=tool_interpreter_constraints,
        ),
    )
    config_files_get = Get(ConfigFiles, ConfigFilesRequest, mypy.config_request)
    # NB: the unpack order here must match the MultiGet argument order.
    (
        plugin_sources,
        closure_sources,
        roots_sources,
        mypy_pex,
        requirements_pex,
        mypy_extra_requirements_pex,
        config_files,
    ) = await MultiGet(
        plugin_sources_get,
        closure_sources_get,
        roots_sources_get,
        mypy_pex_get,
        requirements_pex_get,
        mypy_extra_requirements_pex_get,
        config_files_get,
    )

    python_files = determine_python_files(roots_sources.snapshot.files)
    # MyPy is given an explicit file list rather than relying on discovery.
    file_list_path = "__files.txt"
    file_list_digest_request = Get(
        Digest,
        CreateDigest([FileContent(file_list_path, "\n".join(python_files).encode())]),
    )
    # Compose the user requirements with MyPy's extra requirements into one venv PEX
    # so MyPy can resolve third-party types against it.
    typechecked_venv_pex_request = Get(
        VenvPex,
        PexRequest(
            output_filename="typechecked_venv.pex",
            internal_only=True,
            pex_path=[requirements_pex, mypy_extra_requirements_pex],
            interpreter_constraints=partition.interpreter_constraints,
        ),
    )
    typechecked_venv_pex, file_list_digest = await MultiGet(
        typechecked_venv_pex_request, file_list_digest_request
    )
    merged_input_files = await Get(
        Digest,
        MergeDigests(
            [
                file_list_digest,
                plugin_sources.source_files.snapshot.digest,
                closure_sources.source_files.snapshot.digest,
                typechecked_venv_pex.digest,
                config_files.snapshot.digest,
            ]
        ),
    )
    all_used_source_roots = sorted(
        set(itertools.chain(plugin_sources.source_roots, closure_sources.source_roots))
    )
    env = {
        "PEX_EXTRA_SYS_PATH": ":".join(all_used_source_roots),
        "MYPYPATH": ":".join(all_used_source_roots),
    }
    result = await Get(
        FallibleProcessResult,
        VenvPexProcess(
            mypy_pex,
            argv=generate_argv(
                mypy,
                typechecked_venv_pex,
                file_list_path=file_list_path,
                python_version=python_version,
            ),
            input_digest=merged_input_files,
            extra_env=env,
            description=f"Run MyPy on {pluralize(len(python_files), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    return TypecheckResult.from_fallible_process_result(
        result, partition_description=str(sorted(str(c) for c in partition.interpreter_constraints))
    )
async def create_pex_binary_run_request( field_set: PexBinaryFieldSet, pex_binary_defaults: PexBinaryDefaults, pex_env: PexEnvironment, ) -> RunRequest: entry_point, transitive_targets = await MultiGet( Get( ResolvedPexEntryPoint, ResolvePexEntryPointRequest(field_set.entry_point), ), Get(TransitiveTargets, TransitiveTargetsRequest([field_set.address])), ) # Note that we get an intermediate PexRequest here (instead of going straight to a Pex) # so that we can get the interpreter constraints for use in runner_pex_request. requirements_pex_request = await Get( PexRequest, PexFromTargetsRequest, PexFromTargetsRequest.for_requirements([field_set.address], internal_only=True), ) requirements_request = Get(Pex, PexRequest, requirements_pex_request) sources_request = Get( PythonSourceFiles, PythonSourceFilesRequest(transitive_targets.closure, include_files=True)) output_filename = f"{field_set.address.target_name}.pex" runner_pex_request = Get( Pex, PexRequest( output_filename=output_filename, interpreter_constraints=requirements_pex_request. interpreter_constraints, additional_args=field_set.generate_additional_args( pex_binary_defaults), internal_only=True, # Note that the entry point file is not in the PEX itself. It's loaded by setting # `PEX_EXTRA_SYS_PATH`. 
# TODO(John Sirois): Support ConsoleScript in PexBinary targets: # https://github.com/pantsbuild/pants/issues/11619 main=entry_point.val, ), ) requirements, sources, runner_pex = await MultiGet(requirements_request, sources_request, runner_pex_request) merged_digest = await Get( Digest, MergeDigests([ requirements.digest, sources.source_files.snapshot.digest, runner_pex.digest ]), ) def in_chroot(relpath: str) -> str: return os.path.join("{chroot}", relpath) args = pex_env.create_argv(in_chroot(runner_pex.name), python=runner_pex.python) chrooted_source_roots = [in_chroot(sr) for sr in sources.source_roots] extra_env = { **pex_env.environment_dict(python_configured=runner_pex.python is not None), "PEX_PATH": in_chroot(requirements_pex_request.output_filename), "PEX_EXTRA_SYS_PATH": ":".join(chrooted_source_roots), } return RunRequest(digest=merged_digest, args=args, extra_env=extra_env)
async def setup_pytest_for_target(
    request: TestSetupRequest,
    pytest: PyTest,
    test_subsystem: TestSubsystem,
    python_setup: PythonSetup,
    coverage_config: CoverageConfig,
    coverage_subsystem: CoverageSubsystem,
    test_extra_env: TestExtraEnv,
    global_options: GlobalOptions,
) -> TestSetup:
    """Prepare the Process that will run Pytest for a single test target.

    Builds the pytest and requirements PEXes, composes them into a runner PEX,
    stages runtime package dependencies and config files, and configures
    junit-xml and coverage output based on the relevant subsystem options.
    """
    transitive_targets = await Get(
        TransitiveTargets, TransitiveTargetsRequest([request.field_set.address]))
    all_targets = transitive_targets.closure
    interpreter_constraints = PexInterpreterConstraints.create_from_targets(
        all_targets, python_setup)
    requirements_pex_get = Get(
        Pex,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements([request.field_set.address],
                                               internal_only=True),
    )
    pytest_pex_get = Get(
        Pex,
        PexRequest(
            output_filename="pytest.pex",
            requirements=PexRequirements(pytest.get_requirement_strings()),
            interpreter_constraints=interpreter_constraints,
            internal_only=True,
        ),
    )
    # An empty directory the test may write auxiliary outputs into; captured below
    # via `output_directories`.
    extra_output_directory_digest_get = Get(
        Digest, CreateDigest([Directory(_EXTRA_OUTPUT_DIR)]))
    prepared_sources_get = Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(all_targets, include_files=True))
    build_package_dependencies_get = Get(
        BuiltPackageDependencies,
        BuildPackageDependenciesRequest(
            request.field_set.runtime_package_dependencies),
    )
    # Get the file names for the test_target so that we can specify to Pytest precisely which files
    # to test, rather than using auto-discovery.
    field_set_source_files_get = Get(
        SourceFiles, SourceFilesRequest([request.field_set.sources]))
    # NB: the unpack order here must match the MultiGet argument order.
    (
        pytest_pex,
        requirements_pex,
        prepared_sources,
        field_set_source_files,
        built_package_dependencies,
        extra_output_directory_digest,
    ) = await MultiGet(
        pytest_pex_get,
        requirements_pex_get,
        prepared_sources_get,
        field_set_source_files_get,
        build_package_dependencies_get,
        extra_output_directory_digest_get,
    )
    # The runner PEX depends on the two PEXes above (via pex_path), so it is built
    # in a second round; config discovery is batched into the same MultiGet.
    pytest_runner_pex_get = Get(
        VenvPex,
        PexRequest(
            output_filename="pytest_runner.pex",
            interpreter_constraints=interpreter_constraints,
            main=ConsoleScript("pytest"),
            internal_only=True,
            pex_path=[pytest_pex, requirements_pex],
        ),
    )
    config_files_get = Get(
        ConfigFiles,
        ConfigFilesRequest,
        pytest.config_request(field_set_source_files.snapshot.dirs),
    )
    pytest_runner_pex, config_files = await MultiGet(pytest_runner_pex_get, config_files_get)

    input_digest = await Get(
        Digest,
        MergeDigests((
            coverage_config.digest,
            prepared_sources.source_files.snapshot.digest,
            config_files.snapshot.digest,
            extra_output_directory_digest,
            *(pkg.digest for pkg in built_package_dependencies),
        )),
    )
    # Extra pytest options are passed via the PYTEST_ADDOPTS environment variable.
    add_opts = [f"--color={'yes' if global_options.options.colors else 'no'}"]
    output_files = []

    results_file_name = None
    if pytest.options.junit_xml_dir and not request.is_debug:
        results_file_name = f"{request.field_set.address.path_safe_spec}.xml"
        add_opts.extend((f"--junitxml={results_file_name}", "-o",
                         f"junit_family={pytest.options.junit_family}"))
        output_files.append(results_file_name)

    coverage_args = []
    if test_subsystem.use_coverage and not request.is_debug:
        output_files.append(".coverage")
        # Default to covering the current directory when no filter is given.
        cov_paths = coverage_subsystem.filter if coverage_subsystem.filter else (
            ".", )
        coverage_args = [
            "--cov-report=",  # Turn off output.
            f"--cov-config={coverage_config.path}",
            *itertools.chain.from_iterable(["--cov", cov_path] for cov_path in cov_paths),
        ]
    extra_env = {
        "PYTEST_ADDOPTS": " ".join(add_opts),
        "PEX_EXTRA_SYS_PATH": ":".join(prepared_sources.source_roots),
        **test_extra_env.env,
    }

    # Cache test runs only if they are successful, or not at all if `--test-force`.
    cache_scope = ProcessCacheScope.NEVER if test_subsystem.force else ProcessCacheScope.SUCCESSFUL
    process = await Get(
        Process,
        VenvPexProcess(
            pytest_runner_pex,
            argv=(*pytest.options.args, *coverage_args, *field_set_source_files.files),
            extra_env=extra_env,
            input_digest=input_digest,
            output_directories=(_EXTRA_OUTPUT_DIR, ),
            output_files=output_files,
            timeout_seconds=request.field_set.timeout.calculate_from_global_options(pytest),
            execution_slot_variable=pytest.options.execution_slot_var,
            description=f"Run Pytest for {request.field_set.address}",
            level=LogLevel.DEBUG,
            cache_scope=cache_scope,
        ),
    )
    return TestSetup(process, results_file_name=results_file_name)
async def mypy_typecheck_partition(
    partition: MyPyPartition,
    config_file: MyPyConfigFile,
    first_party_plugins: MyPyFirstPartyPlugins,
    mypy: MyPy,
    python_setup: PythonSetup,
) -> CheckResult:
    """Run MyPy over one partition of targets and report the result.

    Builds the MyPy tool PEX (with first-party plugin requirements), a venv PEX of
    the user's third-party requirements for `--python-executable`, and runs MyPy
    against an explicit file list (no auto-discovery).
    """
    # MyPy requires 3.5+ to run, but uses the typed-ast library to work with 2.7, 3.4, 3.5, 3.6,
    # and 3.7. However, typed-ast does not understand 3.8+, so instead we must run MyPy with
    # Python 3.8+ when relevant. We only do this if <3.8 can't be used, as we don't want a
    # loose requirement like `>=3.6` to result in requiring Python 3.8+, which would error if
    # 3.8+ is not installed on the machine.
    tool_interpreter_constraints = (partition.interpreter_constraints if (
        mypy.options.is_default("interpreter_constraints")
        and partition.interpreter_constraints.requires_python38_or_newer(
            python_setup.interpreter_universe)) else mypy.interpreter_constraints)

    closure_sources_get = Get(PythonSourceFiles, PythonSourceFilesRequest(partition.closure))
    roots_sources_get = Get(
        SourceFiles,
        SourceFilesRequest(
            tgt.get(PythonSources) for tgt in partition.root_targets))

    # See `requirements_venv_pex` for how this will get wrapped in a `VenvPex`.
    requirements_pex_get = Get(
        Pex,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(
            (tgt.address for tgt in partition.root_targets),
            hardcoded_interpreter_constraints=partition.interpreter_constraints,
            internal_only=True,
        ),
    )
    mypy_pex_get = Get(
        VenvPex,
        PexRequest(
            output_filename="mypy.pex",
            internal_only=True,
            main=mypy.main,
            # First-party plugin requirements are folded into the tool's own requirements.
            requirements=mypy.pex_requirements(
                extra_requirements=first_party_plugins.requirement_strings,
            ),
            interpreter_constraints=tool_interpreter_constraints,
        ),
    )

    closure_sources, roots_sources, mypy_pex, requirements_pex = await MultiGet(
        closure_sources_get, roots_sources_get, mypy_pex_get, requirements_pex_get)

    python_files = determine_python_files(roots_sources.snapshot.files)
    # MyPy is given an explicit file list rather than relying on discovery.
    file_list_path = "__files.txt"
    file_list_digest_request = Get(
        Digest,
        CreateDigest(
            [FileContent(file_list_path, "\n".join(python_files).encode())]),
    )

    # This creates a venv with all the 3rd-party requirements used by the code. We tell MyPy to
    # use this venv by setting `--python-executable`. Note that this Python interpreter is
    # different than what we run MyPy with.
    #
    # We could have directly asked the `PexFromTargetsRequest` to return a `VenvPex`, rather than
    # `Pex`, but that would mean missing out on sharing a cache with other goals like `test` and
    # `run`.
    requirements_venv_pex_request = Get(
        VenvPex,
        PexRequest(
            output_filename="requirements_venv.pex",
            internal_only=True,
            pex_path=[requirements_pex],
            interpreter_constraints=partition.interpreter_constraints,
        ),
    )

    requirements_venv_pex, file_list_digest = await MultiGet(
        requirements_venv_pex_request, file_list_digest_request)

    merged_input_files = await Get(
        Digest,
        MergeDigests([
            file_list_digest,
            first_party_plugins.sources_digest,
            closure_sources.source_files.snapshot.digest,
            requirements_venv_pex.digest,
            config_file.digest,
        ]),
    )

    all_used_source_roots = sorted(
        set(
            itertools.chain(first_party_plugins.source_roots,
                            closure_sources.source_roots)))
    env = {
        "PEX_EXTRA_SYS_PATH": ":".join(all_used_source_roots),
        "MYPYPATH": ":".join(all_used_source_roots),
    }

    result = await Get(
        FallibleProcessResult,
        VenvPexProcess(
            mypy_pex,
            argv=generate_argv(
                mypy,
                venv_python=requirements_venv_pex.python.argv0,
                file_list_path=file_list_path,
                python_version=config_file.python_version_to_autoset(
                    partition.interpreter_constraints,
                    python_setup.interpreter_universe),
            ),
            input_digest=merged_input_files,
            extra_env=env,
            output_directories=(REPORT_DIR, ),
            description=f"Run MyPy on {pluralize(len(python_files), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    # Strip the report directory prefix so the report sits at the output root.
    report = await Get(Digest, RemovePrefix(result.output_digest, REPORT_DIR))
    return CheckResult.from_fallible_process_result(
        result,
        partition_description=str(
            sorted(str(c) for c in partition.interpreter_constraints)),
        report=report,
    )
async def mypy_typecheck_partition(partition: MyPyPartition, mypy: MyPy) -> TypecheckResult:
    """Run MyPy over one partition of targets that share interpreter constraints.

    Builds (concurrently, via `MultiGet`) the MyPy PEX (with plugins and a custom
    launcher script), a requirements PEX for the code under check, and the source
    files, then invokes MyPy once over the whole partition and converts the process
    result into a `TypecheckResult`.

    NOTE(review): presumably registered as a Pants `@rule` (decorator not visible
    in this chunk) — confirm before restructuring the `Get`/`MultiGet` calls, since
    the engine statically inspects them.
    """
    # Resolve the user's configured MyPy source plugins into concrete addresses.
    plugin_target_addresses = await Get(Addresses, UnparsedAddressInputs, mypy.source_plugins)
    plugin_transitive_targets_request = Get(
        TransitiveTargets, TransitiveTargetsRequest(plugin_target_addresses))
    # Fetch plugin closure and materialize the launcher script in parallel.
    plugin_transitive_targets, launcher_script = await MultiGet(
        plugin_transitive_targets_request,
        Get(Digest, CreateDigest([LAUNCHER_FILE])))
    # Third-party requirements declared by the plugins themselves; these get
    # installed into the MyPy PEX below so the plugins are importable.
    plugin_requirements = PexRequirements.create_from_requirement_fields(
        plugin_tgt[PythonRequirementsField]
        for plugin_tgt in plugin_transitive_targets.closure
        if plugin_tgt.has_field(PythonRequirementsField))

    # If the user did not set `--python-version` already, we set it ourselves based on their code's
    # interpreter constraints. This determines what AST is used by MyPy.
    python_version = (
        None if partition.python_version_already_configured else
        partition.interpreter_constraints.minimum_python_version())

    # MyPy requires 3.5+ to run, but uses the typed-ast library to work with 2.7, 3.4, 3.5, 3.6,
    # and 3.7. However, typed-ast does not understand 3.8, so instead we must run MyPy with
    # Python 3.8 when relevant. We only do this if <3.8 can't be used, as we don't want a
    # loose requirement like `>=3.6` to result in requiring Python 3.8, which would error if
    # 3.8 is not installed on the machine.
    tool_interpreter_constraints = PexInterpreterConstraints((
        "CPython>=3.8", ) if (
            mypy.options.is_default("interpreter_constraints")
            and partition.interpreter_constraints.requires_python38_or_newer()
        ) else mypy.interpreter_constraints)

    plugin_sources_request = Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(plugin_transitive_targets.closure))
    typechecked_sources_request = Get(
        PythonSourceFiles, PythonSourceFilesRequest(partition.closure))

    # Normally, this `requirements.pex` would be merged with mypy.pex via `--pex-path`. However,
    # this will cause a runtime error if the interpreter constraints are different between the
    # PEXes and they have incompatible wheels.
    #
    # Instead, we teach MyPy about the requirements by extracting the distributions from
    # requirements.pex and setting EXTRACTED_WHEELS, which our custom launcher script then
    # looks for.
    #
    # Conventionally, MyPy users might instead set `MYPYPATH` for this. However, doing this
    # results in type checking the requirements themselves.
    requirements_pex_request = Get(
        Pex,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(
            (addr for addr in partition.field_set_addresses),
            hardcoded_interpreter_constraints=partition.interpreter_constraints,
            internal_only=True,
        ),
    )
    # The MyPy tool PEX: MyPy + plugin requirements, entered through the custom
    # launcher script (entry point derived from the launcher file's stem).
    mypy_pex_request = Get(
        Pex,
        PexRequest(
            output_filename="mypy.pex",
            internal_only=True,
            sources=launcher_script,
            requirements=PexRequirements(
                itertools.chain(mypy.all_requirements, plugin_requirements)),
            interpreter_constraints=tool_interpreter_constraints,
            entry_point=PurePath(LAUNCHER_FILE.path).stem,
        ),
    )
    config_digest_request = Get(Digest, PathGlobs, config_path_globs(mypy))

    (
        plugin_sources,
        typechecked_sources,
        mypy_pex,
        requirements_pex,
        config_digest,
    ) = await MultiGet(
        plugin_sources_request,
        typechecked_sources_request,
        mypy_pex_request,
        requirements_pex_request,
        config_digest_request,
    )

    typechecked_srcs_snapshot = typechecked_sources.source_files.snapshot
    # Pass the explicit file list via a file (rather than argv) to avoid relying
    # on MyPy auto-discovery; `determine_python_files` filters to Python files.
    file_list_path = "__files.txt"
    python_files = "\n".join(
        determine_python_files(typechecked_sources.source_files.snapshot.files))
    create_file_list_request = Get(
        Digest,
        CreateDigest([FileContent(file_list_path, python_files.encode())]),
    )
    file_list_digest, extracted_pex_distributions = await MultiGet(
        create_file_list_request,
        Get(ExtractedPexDistributions, Pex, requirements_pex))

    merged_input_files = await Get(
        Digest,
        MergeDigests([
            file_list_digest,
            plugin_sources.source_files.snapshot.digest,
            typechecked_srcs_snapshot.digest,
            mypy_pex.digest,
            extracted_pex_distributions.digest,
            config_digest,
        ]),
    )

    all_used_source_roots = sorted(
        set(
            itertools.chain(plugin_sources.source_roots,
                            typechecked_sources.source_roots)))
    env = {
        # Makes first-party source roots importable by MyPy's own process.
        "PEX_EXTRA_SYS_PATH": ":".join(all_used_source_roots),
        # Consumed by the custom launcher script (see comment above) to expose
        # the third-party distributions extracted from requirements.pex.
        "EXTRACTED_WHEELS": ":".join(extracted_pex_distributions.wheel_directory_paths),
    }

    result = await Get(
        FallibleProcessResult,
        PexProcess(
            mypy_pex,
            argv=generate_argv(mypy,
                               file_list_path=file_list_path,
                               python_version=python_version),
            input_digest=merged_input_files,
            extra_env=env,
            description=f"Run MyPy on {pluralize(len(typechecked_srcs_snapshot.files), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    # Describe the partition by its (sorted) interpreter constraints so results
    # from different partitions are distinguishable in output.
    return TypecheckResult.from_fallible_process_result(
        result,
        partition_description=str(
            sorted(str(c) for c in partition.interpreter_constraints)))
async def setup_pytest_for_target(
    request: TestSetupRequest,
    pytest: PyTest,
    test_subsystem: TestSubsystem,
    python_setup: PythonSetup,
    coverage_config: CoverageConfig,
    coverage_subsystem: CoverageSubsystem,
    test_extra_env: TestExtraEnv,
    global_options: GlobalOptions,
    complete_env: CompleteEnvironment,
) -> TestSetup:
    """Prepare the sandboxed `Process` that runs Pytest for one test field set.

    Builds the Pytest PEX, the requirements PEX, local distributions, sources,
    config files, and coverage/JUnit options, then assembles a `VenvPexProcess`
    wrapped in a `TestSetup`.

    NOTE(review): presumably registered as a Pants `@rule` (decorator not visible
    in this chunk) — confirm before restructuring the `Get`/`MultiGet` calls.
    """
    transitive_targets, plugin_setups = await MultiGet(
        Get(TransitiveTargets,
            TransitiveTargetsRequest([request.field_set.address])),
        Get(AllPytestPluginSetups,
            AllPytestPluginSetupsRequest(request.field_set.address)),
    )
    all_targets = transitive_targets.closure

    interpreter_constraints = InterpreterConstraints.create_from_targets(
        all_targets, python_setup)

    requirements_pex_get = Get(
        Pex,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(
            [request.field_set.address],
            internal_only=True,
            resolve_and_lockfile=request.field_set.resolve.resolve_and_lockfile(python_setup),
        ),
    )
    pytest_pex_get = Get(
        Pex,
        PexRequest(
            output_filename="pytest.pex",
            requirements=pytest.pex_requirements(),
            interpreter_constraints=interpreter_constraints,
            internal_only=True,
        ),
    )

    # Ensure that the empty extra output dir exists.
    extra_output_directory_digest_get = Get(
        Digest, CreateDigest([Directory(_EXTRA_OUTPUT_DIR)]))

    prepared_sources_get = Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(all_targets, include_files=True))

    # Get the file names for the test_target so that we can specify to Pytest precisely which files
    # to test, rather than using auto-discovery.
    field_set_source_files_get = Get(
        SourceFiles, SourceFilesRequest([request.field_set.sources]))

    (
        pytest_pex,
        requirements_pex,
        prepared_sources,
        field_set_source_files,
        extra_output_directory_digest,
    ) = await MultiGet(
        pytest_pex_get,
        requirements_pex_get,
        prepared_sources_get,
        field_set_source_files_get,
        extra_output_directory_digest_get,
    )

    # Build any first-party distributions (e.g. native extensions) used by the
    # tests; must run after `prepared_sources` is available, hence a second await.
    local_dists = await Get(
        LocalDistsPex,
        LocalDistsPexRequest(
            [request.field_set.address],
            interpreter_constraints=interpreter_constraints,
            sources=prepared_sources,
        ),
    )

    # The runner PEX composes Pytest, the requirements, and local dists via
    # pex_path so one venv contains everything the test process imports.
    pytest_runner_pex_get = Get(
        VenvPex,
        PexRequest(
            output_filename="pytest_runner.pex",
            interpreter_constraints=interpreter_constraints,
            main=pytest.main,
            internal_only=True,
            pex_path=[pytest_pex, requirements_pex, local_dists.pex],
        ),
    )
    config_files_get = Get(
        ConfigFiles,
        ConfigFilesRequest,
        pytest.config_request(field_set_source_files.snapshot.dirs),
    )
    pytest_runner_pex, config_files = await MultiGet(pytest_runner_pex_get,
                                                     config_files_get)

    input_digest = await Get(
        Digest,
        MergeDigests((
            coverage_config.digest,
            # Sources not already baked into the local dists PEX.
            local_dists.remaining_sources.source_files.snapshot.digest,
            config_files.snapshot.digest,
            extra_output_directory_digest,
            *(plugin_setup.digest for plugin_setup in plugin_setups),
        )),
    )

    add_opts = [f"--color={'yes' if global_options.options.colors else 'no'}"]
    output_files = []

    results_file_name = None
    if pytest.options.junit_xml_dir and not request.is_debug:
        results_file_name = f"{request.field_set.address.path_safe_spec}.xml"
        add_opts.extend((f"--junitxml={results_file_name}", "-o",
                         f"junit_family={pytest.options.junit_family}"))
        output_files.append(results_file_name)

    coverage_args = []
    if test_subsystem.use_coverage and not request.is_debug:
        pytest.validate_pytest_cov_included()
        output_files.append(".coverage")

        if coverage_subsystem.filter:
            cov_args = [f"--cov={morf}" for morf in coverage_subsystem.filter]
        else:
            # N.B.: Passing `--cov=` or `--cov=.` to communicate "record coverage for all sources"
            # fails in certain contexts as detailed in:
            #   https://github.com/pantsbuild/pants/issues/12390
            # Instead we focus coverage on just the directories containing python source files
            # materialized to the Process chroot.
            cov_args = [
                f"--cov={source_root}"
                for source_root in prepared_sources.source_roots
            ]

        coverage_args = [
            "--cov-report=",  # Turn off output.
            f"--cov-config={coverage_config.path}",
            *cov_args,
        ]

    extra_env = {
        "PYTEST_ADDOPTS": " ".join(add_opts),
        "PEX_EXTRA_SYS_PATH": ":".join(prepared_sources.source_roots),
        **test_extra_env.env,
        # NOTE: `complete_env` intentionally after `test_extra_env` to allow overriding within
        # `python_tests`
        **complete_env.get_subset(request.field_set.extra_env_vars.value or ()),
    }

    # Cache test runs only if they are successful, or not at all if `--test-force`.
    cache_scope = (ProcessCacheScope.PER_SESSION
                   if test_subsystem.force else ProcessCacheScope.SUCCESSFUL)

    process = await Get(
        Process,
        VenvPexProcess(
            pytest_runner_pex,
            argv=(*pytest.options.args, *coverage_args,
                  *field_set_source_files.files),
            extra_env=extra_env,
            input_digest=input_digest,
            output_directories=(_EXTRA_OUTPUT_DIR, ),
            output_files=output_files,
            timeout_seconds=request.field_set.timeout.calculate_from_global_options(pytest),
            execution_slot_variable=pytest.options.execution_slot_var,
            description=f"Run Pytest for {request.field_set.address}",
            level=LogLevel.DEBUG,
            cache_scope=cache_scope,
        ),
    )
    return TestSetup(process, results_file_name=results_file_name)
async def create_python_binary_run_request(
    field_set: PythonBinaryFieldSet,
    python_binary_defaults: PythonBinaryDefaults,
    pex_env: PexEnvironment,
) -> RunRequest:
    """Build a `RunRequest` for executing a `python_binary` target.

    Resolves the entry point (inferring it from the target's single source file
    when not set explicitly), builds the requirements PEX and a runner PEX, and
    wires them together at runtime via PEX_PATH / PEX_EXTRA_SYS_PATH.

    Raises:
        InvalidFieldException: when `entry_point` is unset and the target does
            not have exactly one source file.

    NOTE(review): presumably registered as a Pants `@rule` (decorator not visible
    in this chunk) — confirm before restructuring the `Get`/`MultiGet` calls.
    """
    entry_point = field_set.entry_point.value
    if entry_point is None:
        # No explicit entry point: derive it from the target's sole source file.
        binary_source_paths = await Get(
            Paths, PathGlobs,
            field_set.sources.path_globs(FilesNotFoundBehavior.error))
        if len(binary_source_paths.files) != 1:
            raise InvalidFieldException(
                "No `entry_point` was set for the target "
                f"{repr(field_set.address)}, so it must have exactly one source, but it has "
                f"{len(binary_source_paths.files)}")
        entry_point_path = binary_source_paths.files[0]
        source_root = await Get(
            SourceRoot,
            SourceRootRequest,
            SourceRootRequest.for_file(entry_point_path),
        )
        # Convert a source-root-relative file path into a module-style entry point.
        entry_point = PythonBinarySources.translate_source_file_to_entry_point(
            os.path.relpath(entry_point_path, source_root.path))
    transitive_targets = await Get(TransitiveTargets,
                                   Addresses([field_set.address]))

    # Note that we get an intermediate PexRequest here (instead of going straight to a Pex)
    # so that we can get the interpreter constraints for use in runner_pex_request.
    requirements_pex_request = await Get(
        PexRequest,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements([field_set.address],
                                               internal_only=True),
    )
    requirements_request = Get(Pex, PexRequest, requirements_pex_request)

    sources_request = Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(transitive_targets.closure,
                                 include_files=True))

    output_filename = f"{field_set.address.target_name}.pex"
    runner_pex_request = Get(
        Pex,
        PexRequest(
            output_filename=output_filename,
            interpreter_constraints=requirements_pex_request.interpreter_constraints,
            additional_args=field_set.generate_additional_args(python_binary_defaults),
            internal_only=True,
            # Note that the entry point file is not in the Pex itself, but on the
            # PEX_PATH. This works fine!
            entry_point=entry_point,
        ),
    )

    requirements, sources, runner_pex = await MultiGet(requirements_request,
                                                       sources_request,
                                                       runner_pex_request)

    merged_digest = await Get(
        Digest,
        MergeDigests([
            requirements.digest, sources.source_files.snapshot.digest,
            runner_pex.digest
        ]),
    )

    def in_chroot(relpath: str) -> str:
        # `{chroot}` is substituted with the sandbox path at execution time.
        return os.path.join("{chroot}", relpath)

    args = pex_env.create_argv(in_chroot(runner_pex.name),
                               python=runner_pex.python)

    chrooted_source_roots = [in_chroot(sr) for sr in sources.source_roots]
    extra_env = {
        **pex_env.environment_dict(python_configured=runner_pex.python is not None),
        # Expose the requirements PEX to the runner PEX at runtime.
        "PEX_PATH": in_chroot(requirements_pex_request.output_filename),
        # Make first-party source roots importable.
        "PEX_EXTRA_SYS_PATH": ":".join(chrooted_source_roots),
    }

    return RunRequest(digest=merged_digest, args=args, extra_env=extra_env)
async def setup_pytest_for_target(
    request: TestSetupRequest,
    pytest: PyTest,
    test_subsystem: TestSubsystem,
    python_setup: PythonSetup,
    coverage_config: CoverageConfig,
    coverage_subsystem: CoverageSubsystem,
    test_extra_env: TestExtraEnv,
    global_options: GlobalOptions,
) -> TestSetup:
    """Prepare the sandboxed `Process` that runs Pytest for one test field set.

    Builds the Pytest PEX (with the requirements PEX attached via `--pex-path`),
    the prepared sources, any runtime packages, and the coverage/JUnit options,
    then assembles a `PexProcess` wrapped in a `TestSetup`.

    NOTE(review): presumably registered as a Pants `@rule` (decorator not visible
    in this chunk) — confirm before restructuring the `Get`/`MultiGet` calls.
    """
    transitive_targets = await Get(
        TransitiveTargets,
        TransitiveTargetsRequest([request.field_set.address]))
    all_targets = transitive_targets.closure

    interpreter_constraints = PexInterpreterConstraints.create_from_targets(
        all_targets, python_setup)

    # Defaults to zip_safe=False.
    requirements_pex_request = Get(
        Pex,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements([request.field_set.address],
                                               internal_only=True),
    )
    pytest_pex_request = Get(
        Pex,
        PexRequest(
            output_filename="pytest.pex",
            requirements=PexRequirements(pytest.get_requirement_strings()),
            interpreter_constraints=interpreter_constraints,
            entry_point="pytest:main",
            internal_only=True,
            additional_args=(
                # NB: We set `--not-zip-safe` because Pytest plugin discovery, which uses
                # `importlib_metadata` and thus `zipp`, does not play nicely when doing import
                # magic directly from zip files. `zipp` has pathologically bad behavior with large
                # zipfiles.
                # TODO: this does have a performance cost as the pex must now be expanded to disk.
                # Long term, it would be better to fix Zipp (whose fix would then need to be used
                # by importlib_metadata and then by Pytest). See
                # https://github.com/jaraco/zipp/pull/26.
                "--not-zip-safe",
                # TODO(John Sirois): Support shading python binaries:
                # https://github.com/pantsbuild/pants/issues/9206
                "--pex-path",
                # `.input` is the underlying PexFromTargetsRequest of the Get above;
                # we only need its output filename here, not the built Pex itself.
                requirements_pex_request.input.output_filename,
            ),
        ),
    )

    prepared_sources_request = Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(all_targets, include_files=True))

    # Create any assets that the test depends on through the `runtime_package_dependencies` field.
    assets: Tuple[BuiltPackage, ...] = ()
    unparsed_runtime_packages = (request.field_set.runtime_package_dependencies
                                 .to_unparsed_address_inputs())
    if unparsed_runtime_packages.values:
        runtime_package_targets = await Get(Targets, UnparsedAddressInputs,
                                            unparsed_runtime_packages)
        field_sets_per_target = await Get(
            FieldSetsPerTarget,
            FieldSetsPerTargetRequest(PackageFieldSet, runtime_package_targets),
        )
        assets = await MultiGet(
            Get(BuiltPackage, PackageFieldSet, field_set)
            for field_set in field_sets_per_target.field_sets)

    # Get the file names for the test_target so that we can specify to Pytest precisely which files
    # to test, rather than using auto-discovery.
    field_set_source_files_request = Get(
        SourceFiles, SourceFilesRequest([request.field_set.sources]))

    pytest_pex, requirements_pex, prepared_sources, field_set_source_files = await MultiGet(
        pytest_pex_request,
        requirements_pex_request,
        prepared_sources_request,
        field_set_source_files_request,
    )

    input_digest = await Get(
        Digest,
        MergeDigests((
            coverage_config.digest,
            prepared_sources.source_files.snapshot.digest,
            requirements_pex.digest,
            pytest_pex.digest,
            *(binary.digest for binary in assets),
        )),
    )

    add_opts = [f"--color={'yes' if global_options.options.colors else 'no'}"]
    output_files = []

    results_file_name = None
    if pytest.options.junit_xml_dir and not request.is_debug:
        results_file_name = f"{request.field_set.address.path_safe_spec}.xml"
        add_opts.extend((f"--junitxml={results_file_name}", "-o",
                         f"junit_family={pytest.options.junit_family}"))
        output_files.append(results_file_name)

    coverage_args = []
    if test_subsystem.use_coverage and not request.is_debug:
        output_files.append(".coverage")
        # Fall back to covering the chroot root (".") when no filter is set.
        cov_paths = coverage_subsystem.filter if coverage_subsystem.filter else (
            ".", )
        coverage_args = [
            "--cov-report=",  # Turn off output.
            *itertools.chain.from_iterable(["--cov", cov_path]
                                           for cov_path in cov_paths),
        ]

    extra_env = {
        "PYTEST_ADDOPTS": " ".join(add_opts),
        "PEX_EXTRA_SYS_PATH": ":".join(prepared_sources.source_roots),
    }

    extra_env.update(test_extra_env.env)

    # Cache test runs only if they are successful, or not at all if `--test-force`.
    cache_scope = ProcessCacheScope.NEVER if test_subsystem.force else ProcessCacheScope.SUCCESSFUL

    process = await Get(
        Process,
        PexProcess(
            pytest_pex,
            argv=(*pytest.options.args, *coverage_args,
                  *field_set_source_files.files),
            extra_env=extra_env,
            input_digest=input_digest,
            output_files=output_files,
            timeout_seconds=request.field_set.timeout.calculate_from_global_options(pytest),
            execution_slot_variable=pytest.options.execution_slot_var,
            description=f"Run Pytest for {request.field_set.address}",
            level=LogLevel.DEBUG,
            cache_scope=cache_scope,
        ),
    )
    return TestSetup(process, results_file_name=results_file_name)
async def pylint_lint_partition(partition: PylintPartition, pylint: Pylint) -> LintResult:
    """Run Pylint over one partition of targets that share interpreter constraints.

    Builds (concurrently) the Pylint PEX with plugin requirements, the
    requirements PEX for the code under lint, stripped plugin sources, and the
    target sources; composes them into a runner PEX and invokes Pylint once.

    NOTE(review): presumably registered as a Pants `@rule` (decorator not visible
    in this chunk) — confirm before restructuring the `Get`/`MultiGet` calls.
    """
    requirements_pex_get = Get(
        Pex,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(
            (field_set.address for field_set in partition.field_sets),
            # NB: These constraints must be identical to the other PEXes. Otherwise, we risk using
            # a different version for the requirements than the other two PEXes, which can result
            # in a PEX runtime error about missing dependencies.
            hardcoded_interpreter_constraints=partition.interpreter_constraints,
            internal_only=True,
            direct_deps_only=True,
        ),
    )

    # Third-party requirements declared by Pylint source plugins, so the plugins
    # are importable inside the Pylint PEX.
    plugin_requirements = PexRequirements.create_from_requirement_fields(
        plugin_tgt[PythonRequirementsField]
        for plugin_tgt in partition.plugin_targets
        if plugin_tgt.has_field(PythonRequirementsField))

    pylint_pex_get = Get(
        Pex,
        PexRequest(
            output_filename="pylint.pex",
            internal_only=True,
            requirements=PexRequirements(
                [*pylint.all_requirements, *plugin_requirements]),
            interpreter_constraints=partition.interpreter_constraints,
        ),
    )

    prepare_plugin_sources_get = Get(
        StrippedPythonSourceFiles,
        PythonSourceFilesRequest(partition.plugin_targets))
    prepare_python_sources_get = Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(partition.targets_with_dependencies))
    field_set_sources_get = Get(
        SourceFiles,
        SourceFilesRequest(field_set.sources
                           for field_set in partition.field_sets))

    (
        pylint_pex,
        requirements_pex,
        prepared_plugin_sources,
        prepared_python_sources,
        field_set_sources,
    ) = await MultiGet(
        pylint_pex_get,
        requirements_pex_get,
        prepare_plugin_sources_get,
        prepare_python_sources_get,
        field_set_sources_get,
    )

    pylint_runner_pex, config_files = await MultiGet(
        Get(
            VenvPex,
            PexRequest(
                output_filename="pylint_runner.pex",
                interpreter_constraints=partition.interpreter_constraints,
                main=pylint.main,
                internal_only=True,
                # Compose Pylint and the code's requirements into one venv.
                pex_path=[pylint_pex, requirements_pex],
            ),
        ),
        Get(ConfigFiles, ConfigFilesRequest,
            pylint.config_request(field_set_sources.snapshot.dirs)),
    )

    # Plugin sources are prefixed under `__plugins` so they can be put on
    # PEX_EXTRA_SYS_PATH without colliding with the linted sources.
    prefixed_plugin_sources = (await Get(
        Digest,
        AddPrefix(
            prepared_plugin_sources.stripped_source_files.snapshot.digest,
            "__plugins"),
    ) if pylint.source_plugins else EMPTY_DIGEST)

    pythonpath = list(prepared_python_sources.source_roots)
    if pylint.source_plugins:
        # NB: Pylint source plugins must be explicitly loaded via PEX_EXTRA_SYS_PATH. The value must
        # point to the plugin's directory, rather than to a parent's directory, because
        # `load-plugins` takes a module name rather than a path to the module; i.e. `plugin`, but
        # not `path.to.plugin`. (This means users must have specified the parent directory as a
        # source root.)
        pythonpath.append("__plugins")

    input_digest = await Get(
        Digest,
        MergeDigests((
            config_files.snapshot.digest,
            prefixed_plugin_sources,
            prepared_python_sources.source_files.snapshot.digest,
        )),
    )

    result = await Get(
        FallibleProcessResult,
        VenvPexProcess(
            pylint_runner_pex,
            argv=generate_argv(field_set_sources, pylint),
            input_digest=input_digest,
            extra_env={"PEX_EXTRA_SYS_PATH": ":".join(pythonpath)},
            description=f"Run Pylint on {pluralize(len(partition.field_sets), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    # Describe the partition by its (sorted) interpreter constraints so results
    # from different partitions are distinguishable in output.
    return LintResult.from_fallible_process_result(
        result,
        partition_description=str(
            sorted(str(c) for c in partition.interpreter_constraints)))