async def create_pex_from_target_closure(request: CreatePexFromTargetClosure,
                                         python_setup: PythonSetup) -> Pex:
    """Create a PEX from the transitive closure of the requested build file addresses.

    Interpreter constraints and requirements are gathered from every target in
    the closure; chrooted source files are embedded only when
    `request.include_source_files` is set.
    """
    transitive_hydrated_targets = await Get[TransitiveHydratedTargets](
        BuildFileAddresses, request.build_file_addresses)
    all_targets = transitive_hydrated_targets.closure
    all_target_adaptors = [t.adaptor for t in all_targets]
    interpreter_constraints = PexInterpreterConstraints.create_from_adaptors(
        adaptors=tuple(all_target_adaptors), python_setup=python_setup)
    if request.include_source_files:
        # Only hydrate and chroot sources when the caller asked for them.
        chrooted_sources = await Get[ChrootedPythonSources](
            HydratedTargets(all_targets))
    requirements = PexRequirements.create_from_adaptors(
        adaptors=all_target_adaptors,
        additional_requirements=request.additional_requirements)
    create_pex_request = CreatePex(
        output_filename=request.output_filename,
        requirements=requirements,
        interpreter_constraints=interpreter_constraints,
        entry_point=request.entry_point,
        # NB: `chrooted_sources` is only bound when include_source_files is
        # true; the conditional short-circuits so it is never evaluated unbound.
        input_files_digest=chrooted_sources.digest if request.include_source_files else None,
        additional_args=request.additional_args,
    )
    pex = await Get[Pex](CreatePex, create_pex_request)
    return pex
async def create_pex_from_target_closure(request: CreatePexFromTargetClosure,
                                         python_setup: PythonSetup) -> Pex:
    """Create a PEX from the transitive closure of the requested addresses.

    Merges any caller-supplied additional input files with the (optionally
    chrooted) sources of the closure into a single digest for the PEX.
    """
    transitive_hydrated_targets = await Get[TransitiveHydratedTargets](
        Addresses, request.addresses)
    all_targets = transitive_hydrated_targets.closure
    all_target_adaptors = [t.adaptor for t in all_targets]
    interpreter_constraints = PexInterpreterConstraints.create_from_adaptors(
        adaptors=all_target_adaptors, python_setup=python_setup)
    input_digests = []
    if request.additional_input_files:
        input_digests.append(request.additional_input_files)
    if request.include_source_files:
        chrooted_sources = await Get[ChrootedPythonSources](
            HydratedTargets(all_targets))
        input_digests.append(chrooted_sources.snapshot.directory_digest)
    # NOTE(review): assumes merging an empty tuple of directories is valid when
    # neither additional input files nor sources were collected — confirm.
    merged_input_digest = await Get[Digest](
        DirectoriesToMerge(directories=tuple(input_digests)))
    requirements = PexRequirements.create_from_adaptors(
        adaptors=all_target_adaptors,
        additional_requirements=request.additional_requirements)
    create_pex_request = CreatePex(
        output_filename=request.output_filename,
        requirements=requirements,
        interpreter_constraints=interpreter_constraints,
        entry_point=request.entry_point,
        input_files_digest=merged_input_digest,
        additional_args=request.additional_args,
    )
    pex = await Get[Pex](CreatePex, create_pex_request)
    return pex
def create_python_binary(python_binary_adaptor: PythonBinaryAdaptor,
                         python_setup: PythonSetup) -> CreatedBinary:
    """Build a `.pex` binary for a `python_binary` target (v1-engine generator rule).

    Resolves the target's transitive closure, strips source roots, injects
    missing `__init__.py` files, and produces a PEX whose entry point is either
    the target's declared `entry_point` or a module specifier derived from its
    single source file.
    """
    transitive_hydrated_targets = yield Get(
        TransitiveHydratedTargets,
        BuildFileAddresses((python_binary_adaptor.address, )))
    all_targets = transitive_hydrated_targets.closure
    all_target_adaptors = [t.adaptor for t in all_targets]
    # Fix: pass target *adaptors*, not the HydratedTargets themselves — every
    # other call site in this file passes adaptors (and filters with
    # `isinstance(..., PythonTargetAdaptor)`), so hydrated targets would be
    # silently skipped and no interpreter constraints collected.
    interpreter_constraints = PexInterpreterConstraints.create_from_adaptors(
        adaptors=tuple(all_target_adaptors), python_setup=python_setup)
    source_root_stripped_sources = yield [
        Get(SourceRootStrippedSources, HydratedTarget, hydrated_target)
        for hydrated_target in all_targets
    ]
    #TODO(#8420) This way of calculating the entry point works but is a bit hackish.
    entry_point = None
    if hasattr(python_binary_adaptor, 'entry_point'):
        entry_point = python_binary_adaptor.entry_point
    else:
        # No explicit entry point: derive one iff the binary has exactly one
        # source file, using its source-root-stripped path.
        sources_snapshot = python_binary_adaptor.sources.snapshot
        if len(sources_snapshot.files) == 1:
            target = transitive_hydrated_targets.roots[0]
            output = yield Get(SourceRootStrippedSources, HydratedTarget, target)
            root_filename = output.snapshot.files[0]
            entry_point = PythonBinary.translate_source_path_to_py_module_specifier(
                root_filename)
    stripped_sources_digests = [
        stripped_sources.snapshot.directory_digest
        for stripped_sources in source_root_stripped_sources
    ]
    sources_digest = yield Get(
        Digest, DirectoriesToMerge(directories=tuple(stripped_sources_digests)))
    inits_digest = yield Get(InjectedInitDigest, Digest, sources_digest)
    all_input_digests = [sources_digest, inits_digest.directory_digest]
    merged_input_files = yield Get(
        Digest, DirectoriesToMerge,
        DirectoriesToMerge(directories=tuple(all_input_digests)))
    requirements = PexRequirements.create_from_adaptors(all_target_adaptors)
    output_filename = f"{python_binary_adaptor.address.target_name}.pex"
    create_requirements_pex = CreatePex(
        output_filename=output_filename,
        requirements=requirements,
        interpreter_constraints=interpreter_constraints,
        entry_point=entry_point,
        input_files_digest=merged_input_files,
    )
    pex = yield Get(Pex, CreatePex, create_requirements_pex)
    yield CreatedBinary(digest=pex.directory_digest,
                        binary_name=pex.output_filename)
async def lint(
    wrapped_target: Flake8Target,
    flake8: Flake8,
    python_setup: PythonSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> LintResult:
    """Run Flake8 over the wrapped target's sources and return its lint result.

    Honors `--flake8-skip`; builds a Flake8 PEX constrained to the target's
    interpreter, merges it with the target's sources and optional config file,
    and executes it.
    """
    if flake8.options.skip:
        return LintResult.noop()
    target = wrapped_target.target
    # NB: Flake8 output depends upon which Python interpreter version it's run with. We ensure that
    # each target runs with its own interpreter constraints. See
    # http://flake8.pycqa.org/en/latest/user/invocation.html.
    interpreter_constraints = PexInterpreterConstraints.create_from_adaptors(
        adaptors=[target] if isinstance(target, PythonTargetAdaptor) else [],
        python_setup=python_setup
    )
    config_path: Optional[str] = flake8.options.config
    config_snapshot = await Get[Snapshot](
        PathGlobs(include=tuple([config_path] if config_path else []))
    )
    # Hoist the PEX filename so the CreatePex request and the execute request
    # cannot drift apart (the execute request previously used a placeholder-free
    # f-string duplicating the literal).
    pex_filename = "flake8.pex"
    requirements_pex = await Get[Pex](
        CreatePex(
            output_filename=pex_filename,
            requirements=PexRequirements(requirements=tuple(flake8.get_requirement_specs())),
            interpreter_constraints=interpreter_constraints,
            entry_point=flake8.get_entry_point(),
        )
    )
    merged_input_files = await Get[Digest](
        DirectoriesToMerge(
            directories=(
                target.sources.snapshot.directory_digest,
                requirements_pex.directory_digest,
                config_snapshot.directory_digest,
            )
        ),
    )
    request = requirements_pex.create_execute_request(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        pex_path=f"./{pex_filename}",
        pex_args=generate_args(wrapped_target, flake8),
        input_files=merged_input_files,
        description=f'Run Flake8 for {target.address.reference()}',
    )
    result = await Get[FallibleExecuteProcessResult](ExecuteProcessRequest, request)
    return LintResult.from_fallible_execute_process_result(result)
async def lint(
    wrapped_target: BanditTarget,
    bandit: Bandit,
    python_setup: PythonSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> LintResult:
    """Run Bandit over the wrapped target's sources and return its lint result.

    Honors `--bandit-skip`; builds a Bandit PEX constrained to the target's
    interpreter, merges it with the target's sources and optional config file,
    and executes it.
    """
    if bandit.options.skip:
        return LintResult.noop()
    target = wrapped_target.target
    # NB: Bandit output depends upon which Python interpreter version it's run with. We ensure that
    # each target runs with its own interpreter constraints. See
    # https://github.com/PyCQA/bandit#under-which-version-of-python-should-i-install-bandit.
    interpreter_constraints = PexInterpreterConstraints.create_from_adaptors(
        adaptors=[target] if isinstance(target, PythonTargetAdaptor) else [],
        python_setup=python_setup)
    config_path: Optional[str] = bandit.options.config
    # Surface a helpful error if the configured config file does not exist.
    config_snapshot = await Get[Snapshot](PathGlobs(
        globs=tuple([config_path] if config_path else []),
        glob_match_error_behavior=GlobMatchErrorBehavior.error,
        description_of_origin="the option `--bandit-config`",
    ))
    # Hoist the PEX filename so the CreatePex request and the execute request
    # cannot drift apart (the execute request previously used a placeholder-free
    # f-string duplicating the literal).
    pex_filename = "bandit.pex"
    requirements_pex = await Get[Pex](CreatePex(
        output_filename=pex_filename,
        requirements=PexRequirements(
            requirements=tuple(bandit.get_requirement_specs())),
        interpreter_constraints=interpreter_constraints,
        entry_point=bandit.get_entry_point(),
    ))
    merged_input_files = await Get[Digest](DirectoriesToMerge(directories=(
        target.sources.snapshot.directory_digest,
        requirements_pex.directory_digest,
        config_snapshot.directory_digest,
    )), )
    request = requirements_pex.create_execute_request(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        pex_path=f"./{pex_filename}",
        pex_args=generate_args(wrapped_target, bandit),
        input_files=merged_input_files,
        description=f'Run Bandit for {target.address.reference()}',
    )
    result = await Get[FallibleExecuteProcessResult](ExecuteProcessRequest, request)
    return LintResult.from_fallible_execute_process_result(result)
async def create_pex_from_target_closure(request: CreatePexFromTargetClosure,
                                         python_setup: PythonSetup) -> Pex:
    """Create a PEX from the transitive closure of the requested build file addresses.

    When source files are requested, they are source-root-stripped, merged, and
    have missing `__init__.py` files injected before being embedded in the PEX.
    """
    transitive_hydrated_targets = await Get[TransitiveHydratedTargets](
        BuildFileAddresses, request.build_file_addresses)
    all_targets = transitive_hydrated_targets.closure
    all_target_adaptors = [t.adaptor for t in all_targets]
    # Fix: pass the target *adaptors*, not the HydratedTargets themselves —
    # `all_target_adaptors` is computed right above and is what every other
    # `create_from_adaptors` call site in this file passes; hydrated targets
    # would not match the adaptor type checks used to collect constraints.
    interpreter_constraints = PexInterpreterConstraints.create_from_adaptors(
        adaptors=tuple(all_target_adaptors), python_setup=python_setup)
    merged_input_files: Optional[Digest] = None
    if request.include_source_files:
        # Strip source roots from every target's sources, concurrently.
        source_root_stripped_sources = await MultiGet(
            Get[SourceRootStrippedSources](HydratedTarget, target_adaptor)
            for target_adaptor in all_targets)
        stripped_sources_digests = [
            stripped_sources.snapshot.directory_digest
            for stripped_sources in source_root_stripped_sources
        ]
        sources_digest = await Get[Digest](
            DirectoriesToMerge(directories=tuple(stripped_sources_digests)))
        inits_digest = await Get[InjectedInitDigest](Digest, sources_digest)
        all_input_digests = [sources_digest, inits_digest.directory_digest]
        merged_input_files = await Get[Digest](
            DirectoriesToMerge,
            DirectoriesToMerge(directories=tuple(all_input_digests)))
    requirements = PexRequirements.create_from_adaptors(
        adaptors=all_target_adaptors,
        additional_requirements=request.additional_requirements)
    create_pex_request = CreatePex(
        output_filename=request.output_filename,
        requirements=requirements,
        interpreter_constraints=interpreter_constraints,
        entry_point=request.entry_point,
        input_files_digest=merged_input_files,
    )
    pex = await Get[Pex](CreatePex, create_pex_request)
    return pex
async def legacy_pex_from_targets(request: LegacyPexFromTargetsRequest,
                                  python_setup: PythonSetup) -> PexRequest:
    """Translate a legacy-target request into a `PexRequest` for the closure.

    Python and resource targets are split out so that only their importable
    sources are chrooted; any caller-supplied additional input files are merged
    into the same digest.
    """
    transitive_hydrated_targets = await Get[TransitiveHydratedTargets](
        Addresses, request.addresses)
    all_targets = transitive_hydrated_targets.closure
    python_targets = [
        t for t in all_targets if isinstance(t.adaptor, PythonTargetAdaptor)
    ]
    resource_targets = [
        t for t in all_targets
        if isinstance(t.adaptor, (FilesAdaptor, ResourcesAdaptor))
    ]
    all_target_adaptors = [t.adaptor for t in all_targets]
    interpreter_constraints = PexInterpreterConstraints.create_from_adaptors(
        adaptors=all_target_adaptors, python_setup=python_setup)
    input_digests = []
    if request.additional_input_files:
        input_digests.append(request.additional_input_files)
    if request.include_source_files:
        # Only Python and resource targets contribute importable sources.
        prepared_sources = await Get[ImportablePythonSources](
            HydratedTargets(python_targets + resource_targets))
        input_digests.append(prepared_sources.snapshot.directory_digest)
    merged_input_digest = await Get[Digest](
        DirectoriesToMerge(directories=tuple(input_digests)))
    requirements = PexRequirements.create_from_adaptors(
        adaptors=all_target_adaptors,
        additional_requirements=request.additional_requirements)
    return PexRequest(
        output_filename=request.output_filename,
        requirements=requirements,
        interpreter_constraints=interpreter_constraints,
        entry_point=request.entry_point,
        input_files_digest=merged_input_digest,
        additional_args=request.additional_args,
    )
async def lint(
    linter: Flake8Linter,
    flake8: Flake8,
    python_setup: PythonSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> LintResult:
    """Run Flake8 over the linter's targets and return a single lint result.

    Honors `--flake8-skip`; builds a Flake8 PEX constrained to the targets'
    interpreters, merges it with all source files and the optional config
    file, then runs it against only the specified source files.
    """
    if flake8.options.skip:
        return LintResult.noop()
    adaptors_with_origins = linter.adaptors_with_origins
    # NB: Flake8 output depends upon which Python interpreter version it's run with. We ensure that
    # each target runs with its own interpreter constraints. See
    # http://flake8.pycqa.org/en/latest/user/invocation.html.
    interpreter_constraints = PexInterpreterConstraints.create_from_adaptors(
        (adaptor_with_origin.adaptor for adaptor_with_origin in adaptors_with_origins),
        python_setup=python_setup,
    )
    # Hoist the PEX filename so the CreatePex request and the execute request
    # cannot drift apart (the execute request previously used a placeholder-free
    # f-string duplicating the literal).
    pex_filename = "flake8.pex"
    requirements_pex = await Get[Pex](CreatePex(
        output_filename=pex_filename,
        requirements=PexRequirements(flake8.get_requirement_specs()),
        interpreter_constraints=interpreter_constraints,
        entry_point=flake8.get_entry_point(),
    ))
    config_path: Optional[str] = flake8.options.config
    # Surface a helpful error if the configured config file does not exist.
    config_snapshot = await Get[Snapshot](PathGlobs(
        globs=tuple([config_path] if config_path else []),
        glob_match_error_behavior=GlobMatchErrorBehavior.error,
        description_of_origin="the option `--flake8-config`",
    ))
    all_source_files = await Get[SourceFiles](AllSourceFilesRequest(
        adaptor_with_origin.adaptor
        for adaptor_with_origin in adaptors_with_origins))
    specified_source_files = await Get[SourceFiles](
        SpecifiedSourceFilesRequest(adaptors_with_origins))
    merged_input_files = await Get[Digest](DirectoriesToMerge(directories=(
        all_source_files.snapshot.directory_digest,
        requirements_pex.directory_digest,
        config_snapshot.directory_digest,
    )), )
    address_references = ", ".join(
        sorted(adaptor_with_origin.adaptor.address.reference()
               for adaptor_with_origin in adaptors_with_origins))
    request = requirements_pex.create_execute_request(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        pex_path=f"./{pex_filename}",
        pex_args=generate_args(specified_source_files=specified_source_files,
                               flake8=flake8),
        input_files=merged_input_files,
        description=f"Run Flake8 for {address_references}",
    )
    result = await Get[FallibleExecuteProcessResult](ExecuteProcessRequest, request)
    return LintResult.from_fallible_execute_process_result(result)
async def lint(
    linter: PylintLinter,
    pylint: Pylint,
    python_setup: PythonSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> LintResult:
    """Run Pylint over the linter's targets and return a single lint result.

    Honors `--pylint-skip`; chroots the targets' sources plus their direct
    dependencies (Pylint validates imports), builds a Pylint PEX constrained to
    the targets' interpreters, and runs it against only the specified files.
    """
    if pylint.options.skip:
        return LintResult.noop()
    adaptors_with_origins = linter.adaptors_with_origins
    # Pylint needs direct dependencies in the chroot to ensure that imports are valid. However, it
    # doesn't lint those direct dependencies nor does it care about transitive dependencies.
    hydrated_targets = [
        HydratedTarget(adaptor_with_origin.adaptor)
        for adaptor_with_origin in adaptors_with_origins
    ]
    dependencies = await MultiGet(
        Get[HydratedTarget](Address, dependency)
        for dependency in itertools.chain.from_iterable(
            ht.adaptor.dependencies for ht in hydrated_targets))
    chrooted_python_sources = await Get[ImportablePythonSources](
        HydratedTargets([*hydrated_targets, *dependencies]))
    # NB: Pylint output depends upon which Python interpreter version it's run with. We ensure that
    # each target runs with its own interpreter constraints. See
    # http://pylint.pycqa.org/en/latest/faq.html#what-versions-of-python-is-pylint-supporting.
    interpreter_constraints = PexInterpreterConstraints.create_from_adaptors(
        (adaptor_with_origin.adaptor for adaptor_with_origin in adaptors_with_origins),
        python_setup=python_setup,
    )
    # Hoist the PEX filename so the PexRequest and the execute request cannot
    # drift apart (the execute request previously used a placeholder-free
    # f-string duplicating the literal).
    pex_filename = "pylint.pex"
    requirements_pex = await Get[Pex](PexRequest(
        output_filename=pex_filename,
        requirements=PexRequirements(pylint.get_requirement_specs()),
        interpreter_constraints=interpreter_constraints,
        entry_point=pylint.get_entry_point(),
    ))
    config_path: Optional[str] = pylint.options.config
    # Surface a helpful error if the configured config file does not exist.
    config_snapshot = await Get[Snapshot](PathGlobs(
        globs=tuple([config_path] if config_path else []),
        glob_match_error_behavior=GlobMatchErrorBehavior.error,
        description_of_origin="the option `--pylint-config`",
    ))
    merged_input_files = await Get[Digest](DirectoriesToMerge(directories=(
        requirements_pex.directory_digest,
        config_snapshot.directory_digest,
        chrooted_python_sources.snapshot.directory_digest,
    )), )
    specified_source_files = await Get[SourceFiles](
        LegacySpecifiedSourceFilesRequest(adaptors_with_origins,
                                          strip_source_roots=True))
    address_references = ", ".join(
        sorted(adaptor_with_origin.adaptor.address.reference()
               for adaptor_with_origin in adaptors_with_origins))
    request = requirements_pex.create_execute_request(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        pex_path=f"./{pex_filename}",
        pex_args=generate_args(specified_source_files=specified_source_files,
                               pylint=pylint),
        input_files=merged_input_files,
        description=f"Run Pylint for {address_references}",
    )
    result = await Get[FallibleExecuteProcessResult](ExecuteProcessRequest, request)
    return LintResult.from_fallible_execute_process_result(result)
async def setup_pytest_for_target(
    adaptor_with_origin: PythonTestsAdaptorWithOrigin,
    pytest: PyTest,
    test_options: TestOptions,
    python_setup: PythonSetup,
) -> TestTargetSetup:
    """Prepare everything needed to run pytest for one python_tests target.

    Builds three PEXes (pytest itself, the target's requirements, and a
    combined test-runner PEX joining the two via PEX_PATH), chroots the
    closure's sources, optionally wires in coverage, and returns a
    `TestTargetSetup` describing the run.
    """
    # TODO: Rather than consuming the TestOptions subsystem, the TestRunner should pass on coverage
    # configuration via #7490.
    adaptor = adaptor_with_origin.adaptor
    test_addresses = Addresses((adaptor.address,))
    # TODO(John Sirois): PexInterpreterConstraints are gathered in the same way by the
    # `create_pex_from_target_closure` rule, factor up.
    transitive_hydrated_targets = await Get[TransitiveHydratedTargets](Addresses, test_addresses)
    all_targets = transitive_hydrated_targets.closure
    all_target_adaptors = [t.adaptor for t in all_targets]
    interpreter_constraints = PexInterpreterConstraints.create_from_adaptors(
        adaptors=all_target_adaptors, python_setup=python_setup
    )
    # Ensure all pexes we merge via PEX_PATH to form the test runner use the interpreter constraints
    # of the tests. This is handled by CreatePexFromTargetClosure, but we must pass this through for
    # CreatePex requests.
    create_pex = functools.partial(CreatePex, interpreter_constraints=interpreter_constraints)
    # NB: We set `--not-zip-safe` because Pytest plugin discovery, which uses
    # `importlib_metadata` and thus `zipp`, does not play nicely when doing import magic directly
    # from zip files. `zipp` has pathologically bad behavior with large zipfiles.
    # TODO: this does have a performance cost as the pex must now be expanded to disk. Long term,
    # it would be better to fix Zipp (whose fix would then need to be used by importlib_metadata
    # and then by Pytest). See https://github.com/jaraco/zipp/pull/26.
    additional_args_for_pytest = ("--not-zip-safe",)
    run_coverage = test_options.values.run_coverage
    # The coverage plugin file is only materialized when coverage is requested.
    plugin_file_digest: Optional[Digest] = (
        await Get[Digest](InputFilesContent, get_coverage_plugin_input()) if run_coverage else None
    )
    pytest_pex = await Get[Pex](
        CreatePex,
        create_pex(
            output_filename="pytest.pex",
            requirements=PexRequirements(pytest.get_requirement_strings()),
            additional_args=additional_args_for_pytest,
            input_files_digest=plugin_file_digest,
        ),
    )
    requirements_pex = await Get[Pex](
        CreatePexFromTargetClosure(
            addresses=test_addresses,
            output_filename="requirements.pex",
            include_source_files=False,
            additional_args=additional_args_for_pytest,
        )
    )
    # The test runner joins the pytest and requirements PEXes via PEX_PATH.
    test_runner_pex = await Get[Pex](
        CreatePex,
        create_pex(
            output_filename="test_runner.pex",
            entry_point="pytest:main",
            interpreter_constraints=interpreter_constraints,
            additional_args=(
                "--pex-path",
                ":".join(
                    pex_request.output_filename
                    # TODO(John Sirois): Support shading python binaries:
                    # https://github.com/pantsbuild/pants/issues/9206
                    # Right now any pytest transitive requirements will shadow corresponding user
                    # requirements which will lead to problems when APIs that are used by either
                    # `pytest:main` or the tests themselves break between the two versions.
                    for pex_request in (pytest_pex, requirements_pex)
                ),
            ),
        ),
    )
    chrooted_sources = await Get[ChrootedPythonSources](HydratedTargets(all_targets))
    directories_to_merge = [
        chrooted_sources.snapshot.directory_digest,
        requirements_pex.directory_digest,
        pytest_pex.directory_digest,
        test_runner_pex.directory_digest,
    ]
    # Get the file names for the test_target so that we can specify to Pytest precisely which files
    # to test, rather than using auto-discovery.
    specified_source_files = await Get[SourceFiles](
        SpecifiedSourceFilesRequest([adaptor_with_origin], strip_source_roots=True)
    )
    specified_source_file_names = specified_source_files.snapshot.files
    coverage_args = []
    if run_coverage:
        coveragerc = await Get[Coveragerc](
            CoveragercRequest(HydratedTargets(all_targets), test_time=True)
        )
        directories_to_merge.append(coveragerc.digest)
        packages_to_cover = get_packages_to_cover(
            target=adaptor,
            specified_source_files=specified_source_files,
        )
        coverage_args = [
            "--cov-report=",  # To not generate any output. https://pytest-cov.readthedocs.io/en/latest/config.html
        ]
        for package in packages_to_cover:
            coverage_args.extend(["--cov", package])
    merged_input_files = await Get[Digest](
        DirectoriesToMerge(directories=tuple(directories_to_merge))
    )
    timeout_seconds = calculate_timeout_seconds(
        timeouts_enabled=pytest.options.timeouts,
        target_timeout=getattr(adaptor, "timeout", None),
        timeout_default=pytest.options.timeout_default,
        timeout_maximum=pytest.options.timeout_maximum,
    )
    return TestTargetSetup(
        test_runner_pex=test_runner_pex,
        args=(*pytest.options.args, *coverage_args, *sorted(specified_source_file_names)),
        input_files_digest=merged_input_files,
        timeout_seconds=timeout_seconds,
    )
async def run_python_test(
    test_target: PythonTestsAdaptor,
    pytest: PyTest,
    python_setup: PythonSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment
) -> TestResult:
    """Runs pytest for one target."""
    # TODO(7726): replace this with a proper API to get the `closure` for a
    # TransitiveHydratedTarget.
    transitive_hydrated_targets = await Get(
        TransitiveHydratedTargets, BuildFileAddresses((test_target.address, )))
    all_targets = transitive_hydrated_targets.closure
    all_target_adaptors = tuple(t.adaptor for t in all_targets)
    interpreter_constraints = PexInterpreterConstraints.create_from_adaptors(
        adaptors=tuple(all_target_adaptors), python_setup=python_setup)
    output_pytest_requirements_pex_filename = 'pytest-with-requirements.pex'
    # Bundle pytest's own requirements together with the closure's.
    requirements = PexRequirements.create_from_adaptors(
        adaptors=all_target_adaptors,
        additional_requirements=pytest.get_requirement_strings())
    resolved_requirements_pex = await Get(
        Pex,
        CreatePex(
            output_filename=output_pytest_requirements_pex_filename,
            requirements=requirements,
            interpreter_constraints=interpreter_constraints,
            entry_point="pytest:main",
        ))
    # Get the file names for the test_target, adjusted for the source root. This allows us to
    # specify to Pytest which files to test and thus to avoid the test auto-discovery defined by
    # https://pytest.org/en/latest/goodpractices.html#test-discovery. In addition to a performance
    # optimization, this ensures that any transitive sources, such as a test project file named
    # test_fail.py, do not unintentionally end up being run as tests.
    source_root_stripped_test_target_sources = await Get(
        SourceRootStrippedSources, Address, test_target.address.to_address())
    # Strip source roots from every target in the closure, concurrently.
    source_root_stripped_sources = await MultiGet(
        Get(SourceRootStrippedSources, HydratedTarget, target_adaptor)
        for target_adaptor in all_targets)
    stripped_sources_digests = tuple(
        stripped_sources.snapshot.directory_digest
        for stripped_sources in source_root_stripped_sources)
    sources_digest = await Get(
        Digest, DirectoriesToMerge(directories=stripped_sources_digests))
    inits_digest = await Get(InjectedInitDigest, Digest, sources_digest)
    merged_input_files = await Get(
        Digest,
        DirectoriesToMerge(directories=(
            sources_digest,
            inits_digest.directory_digest,
            resolved_requirements_pex.directory_digest,
        )),
    )
    test_target_sources_file_names = sorted(
        source_root_stripped_test_target_sources.snapshot.files)
    request = resolved_requirements_pex.create_execute_request(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        pex_path=f'./{output_pytest_requirements_pex_filename}',
        pex_args=(*pytest.get_args(), *test_target_sources_file_names),
        input_files=merged_input_files,
        description=f'Run Pytest for {test_target.address.reference()}',
        # TODO(#8584): hook this up to TestRunnerTaskMixin so that we can configure the default timeout
        # and also use the specified max timeout time.
        timeout_seconds=getattr(test_target, 'timeout', 60))
    result = await Get(FallibleExecuteProcessResult, ExecuteProcessRequest, request)
    return TestResult.from_fallible_execute_process_result(result)
async def lint(
    linter: BanditLinter,
    bandit: Bandit,
    python_setup: PythonSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> LintResult:
    """Run Bandit over the linter's targets and return a single lint result.

    Honors `--bandit-skip`; builds a Bandit PEX constrained to the targets'
    interpreters, merges it with all source files and the optional config
    file, then runs it against only the specified source files.
    """
    if bandit.options.skip:
        return LintResult.noop()
    adaptors_with_origins = linter.adaptors_with_origins
    # NB: Bandit output depends upon which Python interpreter version it's run with. We ensure that
    # each target runs with its own interpreter constraints. See
    # https://github.com/PyCQA/bandit#under-which-version-of-python-should-i-install-bandit.
    interpreter_constraints = PexInterpreterConstraints.create_from_adaptors(
        (adaptor_with_origin.adaptor for adaptor_with_origin in adaptors_with_origins),
        python_setup=python_setup,
    )
    # Hoist the PEX filename so the PexRequest and the execute request cannot
    # drift apart (the execute request previously used a placeholder-free
    # f-string duplicating the literal).
    pex_filename = "bandit.pex"
    requirements_pex = await Get[Pex](PexRequest(
        output_filename=pex_filename,
        requirements=PexRequirements(bandit.get_requirement_specs()),
        interpreter_constraints=interpreter_constraints,
        entry_point=bandit.get_entry_point(),
    ))
    config_path: Optional[str] = bandit.options.config
    # Surface a helpful error if the configured config file does not exist.
    config_snapshot = await Get[Snapshot](PathGlobs(
        globs=tuple([config_path] if config_path else []),
        glob_match_error_behavior=GlobMatchErrorBehavior.error,
        description_of_origin="the option `--bandit-config`",
    ))
    all_source_files = await Get[SourceFiles](LegacyAllSourceFilesRequest(
        adaptor_with_origin.adaptor
        for adaptor_with_origin in adaptors_with_origins))
    specified_source_files = await Get[SourceFiles](
        LegacySpecifiedSourceFilesRequest(adaptors_with_origins))
    merged_input_files = await Get[Digest](DirectoriesToMerge(directories=(
        all_source_files.snapshot.directory_digest,
        requirements_pex.directory_digest,
        config_snapshot.directory_digest,
    )), )
    address_references = ", ".join(
        sorted(adaptor_with_origin.adaptor.address.reference()
               for adaptor_with_origin in adaptors_with_origins))
    request = requirements_pex.create_execute_request(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        pex_path=f"./{pex_filename}",
        pex_args=generate_args(specified_source_files=specified_source_files,
                               bandit=bandit),
        input_files=merged_input_files,
        description=f"Run Bandit for {address_references}",
    )
    result = await Get[FallibleExecuteProcessResult](ExecuteProcessRequest, request)
    return LintResult.from_fallible_execute_process_result(result)
async def setup_pytest_for_target(
    pytest_runner: PytestRunner,
    pytest: PyTest,
    test_options: TestOptions,
    python_setup: PythonSetup,
) -> TestTargetSetup:
    """Prepare everything needed to run pytest for one python_tests target.

    Requests three PEXes (pytest itself, the target's requirements, and a
    test-runner PEX joining the two via PEX_PATH) plus chrooted sources and
    specified source files — all resolved concurrently via one MultiGet — and
    returns a `TestTargetSetup` describing the run.
    """
    # TODO: Rather than consuming the TestOptions subsystem, the TestRunner should pass on coverage
    # configuration via #7490.
    adaptor_with_origin = pytest_runner.adaptor_with_origin
    adaptor = adaptor_with_origin.adaptor
    test_addresses = Addresses((adaptor.address,))
    # TODO(John Sirois): PexInterpreterConstraints are gathered in the same way by the
    # `create_pex_from_target_closure` rule, factor up.
    transitive_hydrated_targets = await Get[TransitiveHydratedTargets](Addresses, test_addresses)
    all_targets = transitive_hydrated_targets.closure
    all_target_adaptors = [t.adaptor for t in all_targets]
    interpreter_constraints = PexInterpreterConstraints.create_from_adaptors(
        adaptors=all_target_adaptors, python_setup=python_setup
    )
    # Ensure all pexes we merge via PEX_PATH to form the test runner use the interpreter constraints
    # of the tests. This is handled by CreatePexFromTargetClosure, but we must pass this through for
    # CreatePex requests.
    pex_request = functools.partial(PexRequest, interpreter_constraints=interpreter_constraints)
    # NB: We set `--not-zip-safe` because Pytest plugin discovery, which uses
    # `importlib_metadata` and thus `zipp`, does not play nicely when doing import magic directly
    # from zip files. `zipp` has pathologically bad behavior with large zipfiles.
    # TODO: this does have a performance cost as the pex must now be expanded to disk. Long term,
    # it would be better to fix Zipp (whose fix would then need to be used by importlib_metadata
    # and then by Pytest). See https://github.com/jaraco/zipp/pull/26.
    additional_args_for_pytest = ("--not-zip-safe",)
    run_coverage = test_options.values.run_coverage
    # The coverage plugin file is only materialized when coverage is requested.
    plugin_file_digest: Optional[Digest] = (
        await Get[Digest](InputFilesContent, get_coverage_plugin_input()) if run_coverage else None
    )
    pytest_pex_request = pex_request(
        output_filename="pytest.pex",
        requirements=PexRequirements(pytest.get_requirement_strings()),
        additional_args=additional_args_for_pytest,
        input_files_digest=plugin_file_digest,
    )
    requirements_pex_request = LegacyPexFromTargetsRequest(
        addresses=test_addresses,
        output_filename="requirements.pex",
        include_source_files=False,
        additional_args=additional_args_for_pytest,
    )
    test_runner_pex_request = pex_request(
        output_filename="test_runner.pex",
        entry_point="pytest:main",
        interpreter_constraints=interpreter_constraints,
        additional_args=(
            "--pex-path",
            # TODO(John Sirois): Support shading python binaries:
            # https://github.com/pantsbuild/pants/issues/9206
            # Right now any pytest transitive requirements will shadow corresponding user
            # requirements which will lead to problems when APIs that are used by either
            # `pytest:main` or the tests themselves break between the two versions.
            ":".join(
                (pytest_pex_request.output_filename, requirements_pex_request.output_filename)
            ),
        ),
    )
    # Get the file names for the test_target so that we can specify to Pytest precisely which files
    # to test, rather than using auto-discovery.
    specified_source_files_request = LegacySpecifiedSourceFilesRequest(
        [adaptor_with_origin], strip_source_roots=True
    )
    # TODO: Replace this with appropriate target API logic.
    python_targets = [t for t in all_targets if isinstance(t.adaptor, PythonTargetAdaptor)]
    resource_targets = [
        t for t in all_targets if isinstance(t.adaptor, (FilesAdaptor, ResourcesAdaptor))
    ]
    # TODO(John Sirois): Support exploiting concurrency better:
    # https://github.com/pantsbuild/pants/issues/9294
    # Some awkward code follows in order to execute 5-6 items concurrently given the current state
    # of MultiGet typing / API. Improve this since we should encourage full concurrency in general.
    requests: List[Get[Any]] = [
        Get[Pex](PexRequest, pytest_pex_request),
        Get[Pex](LegacyPexFromTargetsRequest, requirements_pex_request),
        Get[Pex](PexRequest, test_runner_pex_request),
        Get[ImportablePythonSources](HydratedTargets(python_targets + resource_targets)),
        Get[SourceFiles](LegacySpecifiedSourceFilesRequest, specified_source_files_request),
    ]
    if run_coverage:
        requests.append(
            Get[Coveragerc](CoveragercRequest(HydratedTargets(python_targets), test_time=True)),
        )
    # The optional sixth (coveragerc) result lands in `rest`; the cast gives the
    # type checker the two possible tuple shapes.
    (
        pytest_pex,
        requirements_pex,
        test_runner_pex,
        chrooted_sources,
        specified_source_files,
        *rest,
    ) = cast(
        Union[
            Tuple[Pex, Pex, Pex, ImportablePythonSources, SourceFiles],
            Tuple[Pex, Pex, Pex, ImportablePythonSources, SourceFiles, Coveragerc],
        ],
        await MultiGet(requests),
    )
    directories_to_merge = [
        chrooted_sources.snapshot.directory_digest,
        requirements_pex.directory_digest,
        pytest_pex.directory_digest,
        test_runner_pex.directory_digest,
    ]
    if run_coverage:
        coveragerc = rest[0]
        directories_to_merge.append(coveragerc.digest)
    merged_input_files = await Get[Digest](
        DirectoriesToMerge(directories=tuple(directories_to_merge))
    )
    coverage_args = []
    if run_coverage:
        coverage_args = [
            "--cov-report=",  # To not generate any output. https://pytest-cov.readthedocs.io/en/latest/config.html
        ]
        # TODO: replace this with proper usage of the Target API.
        coverage_field = PythonCoverage(getattr(adaptor, "coverage", None), address=adaptor.address)
        for package in coverage_field.determine_packages_to_cover(
            specified_source_files=specified_source_files
        ):
            coverage_args.extend(["--cov", package])
    # TODO: replace this with proper usage of the Target API.
    timeout_field = PythonTestsTimeout(getattr(adaptor, "timeout", None), address=adaptor.address)
    specified_source_file_names = sorted(specified_source_files.snapshot.files)
    return TestTargetSetup(
        test_runner_pex=test_runner_pex,
        args=(*pytest.options.args, *coverage_args, *specified_source_file_names),
        input_files_digest=merged_input_files,
        timeout_seconds=timeout_field.calculate_from_global_options(pytest),
    )