async def generate_coverage_reports(
    merged_coverage_data: MergedCoverageData,
    coverage_setup: CoverageSetup,
    coverage_config: CoverageConfig,
    coverage_subsystem: CoverageSubsystem,
    transitive_targets: TransitiveTargets,
) -> CoverageReports:
    """Takes all Python test results and generates a single coverage report.

    For each report type configured on `[coverage-py]`, either reuses the merged
    `.coverage` data file directly (RAW) or schedules one `coverage` invocation per
    report type and collects the resulting reports.
    """
    # Sources are merged into the input so `coverage` can resolve the file paths
    # recorded in the coverage data.
    sources = await Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(transitive_targets.closure, include_resources=False),
    )
    input_digest = await Get(
        Digest,
        MergeDigests(
            (
                merged_coverage_data.coverage_data,
                coverage_config.digest,
                coverage_setup.pex.digest,
                sources.source_files.snapshot.digest,
            )
        ),
    )
    pex_processes: List[PexProcess] = []
    report_types: List[CoverageReportType] = []
    coverage_reports: List[CoverageReport] = []
    for report_type in coverage_subsystem.reports:
        if report_type == CoverageReportType.RAW:
            # The RAW report is the merged `.coverage` data file itself; no process needed.
            coverage_reports.append(
                FilesystemCoverageReport(
                    report_type=CoverageReportType.RAW,
                    result_digest=merged_coverage_data.coverage_data,
                    directory_to_materialize_to=coverage_subsystem.output_dir,
                    report_file=coverage_subsystem.output_dir / ".coverage",
                )
            )
            continue
        report_types.append(report_type)
        # Only XML and JSON reports write a single output file; HTML writes the
        # `htmlcov` directory (captured via output_directories below).
        output_file = (
            f"coverage.{report_type.value}"
            if report_type in {CoverageReportType.XML, CoverageReportType.JSON}
            else None
        )
        pex_processes.append(
            PexProcess(
                coverage_setup.pex,
                # We pass `--ignore-errors` because Pants dynamically injects missing `__init__.py`
                # files and this will cause Coverage to fail.
                argv=(report_type.report_name, "--ignore-errors"),
                input_digest=input_digest,
                output_directories=("htmlcov",) if report_type == CoverageReportType.HTML else None,
                output_files=(output_file,) if output_file else None,
                description=f"Generate Pytest {report_type.report_name} coverage report.",
                level=LogLevel.DEBUG,
            )
        )
    # Run all report-generating processes concurrently.
    results = await MultiGet(Get(ProcessResult, PexProcess, process) for process in pex_processes)
    coverage_reports.extend(
        _get_coverage_reports(coverage_subsystem.output_dir, report_types, results)
    )
    return CoverageReports(tuple(coverage_reports))
async def create_ipython_repl_request(
    repl: IPythonRepl, ipython: IPython, pex_env: PexEnvironment
) -> ReplRequest:
    """Build a ReplRequest that launches IPython with the user's requirements and sources.

    The user's third-party requirements are built into one PEX and IPython into another;
    the requirements PEX is attached at runtime via PEX_PATH, and source roots are exposed
    via PEX_EXTRA_SYS_PATH.
    """
    # Note that we get an intermediate PexRequest here (instead of going straight to a Pex)
    # so that we can get the interpreter constraints for use in ipython_request.
    requirements_pex_request = await Get(
        PexRequest,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(
            Addresses(tgt.address for tgt in repl.targets), internal_only=True
        ),
    )
    requirements_request = Get(Pex, PexRequest, requirements_pex_request)
    sources_request = Get(
        PythonSourceFiles, PythonSourceFilesRequest(repl.targets, include_files=True)
    )
    # The IPython PEX reuses the interpreter constraints computed for the requirements PEX,
    # so that both PEXes resolve against a compatible interpreter.
    ipython_request = Get(
        Pex,
        PexRequest(
            output_filename="ipython.pex",
            entry_point=ipython.entry_point,
            requirements=PexRequirements(ipython.all_requirements),
            interpreter_constraints=requirements_pex_request.interpreter_constraints,
            internal_only=True,
        ),
    )
    requirements_pex, sources, ipython_pex = await MultiGet(
        requirements_request, sources_request, ipython_request
    )
    merged_digest = await Get(
        Digest,
        MergeDigests(
            (requirements_pex.digest, sources.source_files.snapshot.digest, ipython_pex.digest)
        ),
    )
    args = [repl.in_chroot(ipython_pex.name)]
    if ipython.options.ignore_cwd:
        args.append("--ignore-cwd")
    # Paths must be rewritten to their locations inside the REPL chroot.
    chrooted_source_roots = [repl.in_chroot(sr) for sr in sources.source_roots]
    extra_env = {
        **pex_env.environment_dict,
        "PEX_PATH": repl.in_chroot(requirements_pex_request.output_filename),
        "PEX_EXTRA_SYS_PATH": ":".join(chrooted_source_roots),
    }
    return ReplRequest(digest=merged_digest, args=args, extra_env=extra_env)
async def create_python_binary_run_request(
    field_set: PythonBinaryFieldSet, python_binary_defaults: PythonBinaryDefaults
) -> RunRequest:
    """Build a RunRequest that executes a python_binary target via its PEX.

    The entry point is taken from the `entry_point` field, or derived from the target's
    single source file when the field is unset.

    Raises:
        InvalidFieldException: if neither `sources` nor `entry_point` is defined.
    """
    entry_point = field_set.entry_point.value
    if entry_point is None:
        # TODO: This is overkill? We don't need to hydrate the sources and strip snapshots,
        # we only need the path relative to the source root.
        binary_sources = await Get(HydratedSources, HydrateSourcesRequest(field_set.sources))
        stripped_binary_sources = await Get(
            StrippedSourceFiles, SourceFiles(binary_sources.snapshot, ())
        )
        entry_point = PythonBinarySources.translate_source_file_to_entry_point(
            stripped_binary_sources.snapshot.files
        )
    if entry_point is None:
        raise InvalidFieldException(
            "You must either specify `sources` or `entry_point` for the target "
            f"{repr(field_set.address)} in order to run it, but both fields were undefined."
        )
    transitive_targets = await Get(TransitiveTargets, Addresses([field_set.address]))
    output_filename = f"{field_set.address.target_name}.pex"
    # Sources are provided loosely (not baked into the PEX) so they can be placed on
    # PEX_EXTRA_SYS_PATH below.
    pex_request = Get(
        Pex,
        PexFromTargetsRequest(
            addresses=Addresses([field_set.address]),
            platforms=PexPlatforms.create_from_platforms_field(field_set.platforms),
            output_filename=output_filename,
            additional_args=field_set.generate_additional_args(python_binary_defaults),
            include_source_files=False,
        ),
    )
    sources_request = Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(transitive_targets.closure, include_files=True),
    )
    pex, sources = await MultiGet(pex_request, sources_request)
    merged_digest = await Get(
        Digest, MergeDigests([pex.digest, sources.source_files.snapshot.digest])
    )
    return RunRequest(
        digest=merged_digest,
        binary_name=pex.output_filename,
        prefix_args=("-m", entry_point),
        env={"PEX_EXTRA_SYS_PATH": ":".join(sources.source_roots)},
    )
async def get_sources(request: SetupPySourcesRequest) -> SetupPySources:
    """Compute the sources that setup.py should package for the requested targets.

    Returns the stripped source digest plus the packages, namespace packages, and
    package data (non-Python resources) discovered within it.
    """
    # The two source requests are independent of each other, so resolve them
    # concurrently via MultiGet instead of awaiting each Get in sequence.
    python_sources, all_sources = await MultiGet(
        Get(
            StrippedPythonSourceFiles,
            PythonSourceFilesRequest(
                targets=request.targets, include_resources=False, include_files=False
            ),
        ),
        Get(
            StrippedPythonSourceFiles,
            PythonSourceFilesRequest(
                targets=request.targets, include_resources=True, include_files=True
            ),
        ),
    )
    python_files = set(python_sources.stripped_source_files.snapshot.files)
    all_files = set(all_sources.stripped_source_files.snapshot.files)
    # Anything in the full set that is not a Python source becomes package data.
    resource_files = all_files - python_files
    # `__init__.py` contents are inspected to distinguish regular packages from
    # namespace packages.
    init_py_digest_contents = await Get(
        DigestContents,
        DigestSubset(
            python_sources.stripped_source_files.snapshot.digest, PathGlobs(["**/__init__.py"])
        ),
    )
    packages, namespace_packages, package_data = find_packages(
        python_files=python_files,
        resource_files=resource_files,
        init_py_digest_contents=init_py_digest_contents,
        py2=request.py2,
    )
    return SetupPySources(
        digest=all_sources.stripped_source_files.snapshot.digest,
        packages=packages,
        namespace_packages=namespace_packages,
        package_data=package_data,
    )
async def create_python_repl_request(repl: PythonRepl) -> ReplRequest:
    """Build a ReplRequest that launches the plain Python interpreter REPL."""
    repl_pex, source_files = await MultiGet(
        Get(
            Pex,
            PexFromTargetsRequest(
                (tgt.address for tgt in repl.targets),
                output_filename="python.pex",
                include_source_files=False,
            ),
        ),
        Get(PythonSourceFiles, PythonSourceFilesRequest(repl.targets)),
    )
    combined_digest = await Get(
        Digest, MergeDigests((repl_pex.digest, source_files.source_files.snapshot.digest))
    )
    # Expose the user's source roots on the REPL's sys.path.
    extra_sys_path = ":".join(source_files.source_roots)
    return ReplRequest(
        digest=combined_digest,
        binary_name=repl_pex.output_filename,
        env={"PEX_EXTRA_SYS_PATH": extra_sys_path},
    )
async def create_ipython_repl_request(repl: IPythonRepl, ipython: IPython) -> ReplRequest:
    """Build a ReplRequest that launches IPython over the user's targets."""
    ipython_pex, source_files = await MultiGet(
        Get(
            Pex,
            PexFromTargetsRequest(
                (tgt.address for tgt in repl.targets),
                output_filename="ipython.pex",
                entry_point=ipython.entry_point,
                additional_requirements=ipython.all_requirements,
                include_source_files=True,
            ),
        ),
        Get(PythonSourceFiles, PythonSourceFilesRequest(repl.targets)),
    )
    combined_digest = await Get(
        Digest, MergeDigests((ipython_pex.digest, source_files.source_files.snapshot.digest))
    )
    # Expose the user's source roots on the REPL's sys.path.
    extra_sys_path = ":".join(source_files.source_roots)
    return ReplRequest(
        digest=combined_digest,
        binary_name=ipython_pex.output_filename,
        env={"PEX_EXTRA_SYS_PATH": extra_sys_path},
    )
async def create_python_repl_request(repl: PythonRepl, pex_env: PexEnvironment) -> ReplRequest:
    """Build a ReplRequest that runs the plain Python REPL with the user's requirements PEX."""
    requirements_pex, source_files = await MultiGet(
        Get(
            Pex,
            PexFromTargetsRequest,
            PexFromTargetsRequest.for_requirements(
                Addresses(tgt.address for tgt in repl.targets), internal_only=True
            ),
        ),
        Get(PythonSourceFiles, PythonSourceFilesRequest(repl.targets, include_files=True)),
    )
    combined_digest = await Get(
        Digest,
        MergeDigests((requirements_pex.digest, source_files.source_files.snapshot.digest)),
    )
    # Source roots must be rewritten to their locations inside the REPL chroot so
    # they can be imported via PEX_EXTRA_SYS_PATH.
    roots_in_chroot = [repl.in_chroot(root) for root in source_files.source_roots]
    env = dict(pex_env.environment_dict)
    env["PEX_EXTRA_SYS_PATH"] = ":".join(roots_in_chroot)
    return ReplRequest(
        digest=combined_digest,
        args=(repl.in_chroot(requirements_pex.name),),
        extra_env=env,
    )
def get_unstripped_sources(
    self,
    targets: Iterable[Target],
    *,
    include_resources: bool = True,
    include_files: bool = False,
    source_roots: Optional[List[str]] = None,
    extra_args: Optional[List[str]] = None,
) -> PythonSourceFiles:
    """Request PythonSourceFiles for `targets` without stripping source roots.

    `source_roots` defaults to `['src/python']` when not provided; `extra_args` are
    appended to the options bootstrapper arguments.
    """
    bootstrapper_args = [
        "--backend-packages=pants.backend.python",
        f"--source-root-patterns={source_roots or ['src/python']}",
        *(extra_args or []),
    ]
    sources_request = PythonSourceFilesRequest(
        targets, include_resources=include_resources, include_files=include_files
    )
    return self.request_product(
        PythonSourceFiles,
        [sources_request, create_options_bootstrapper(args=bootstrapper_args)],
    )
async def pex_from_targets(request: PexFromTargetsRequest, python_setup: PythonSetup) -> PexRequest:
    """Translate a PexFromTargetsRequest into a concrete PexRequest.

    Gathers the transitive closure's sources, interpreter constraints, and requirements.
    When a constraints file is configured, optionally resolves the entire constraints
    file (per `[python-setup].resolve_all_constraints`) instead of just the exact
    requirements, to maximize resolve reuse.

    Raises:
        ValueError: if `resolve_all_constraints` is explicitly enabled but no
            constraints file is configured.
    """
    transitive_targets = await Get(TransitiveTargets, Addresses, request.addresses)
    all_targets = transitive_targets.closure

    input_digests = []
    if request.additional_sources:
        input_digests.append(request.additional_sources)
    if request.include_source_files:
        prepared_sources = await Get(
            StrippedPythonSourceFiles, PythonSourceFilesRequest(all_targets)
        )
        input_digests.append(prepared_sources.stripped_source_files.snapshot.digest)
    merged_input_digest = await Get(Digest, MergeDigests(input_digests))

    interpreter_constraints = PexInterpreterConstraints.create_from_compatibility_fields(
        (
            tgt[PythonInterpreterCompatibility]
            for tgt in all_targets
            if tgt.has_field(PythonInterpreterCompatibility)
        ),
        python_setup,
    )

    exact_reqs = PexRequirements.create_from_requirement_fields(
        (
            tgt[PythonRequirementsField]
            for tgt in all_targets
            if tgt.has_field(PythonRequirementsField)
        ),
        additional_requirements=request.additional_requirements,
    )

    requirements = exact_reqs
    description = request.description

    if python_setup.requirement_constraints:
        # In requirement strings Foo_-Bar.BAZ and foo-bar-baz refer to the same project. We let
        # packaging canonicalize for us.
        # See: https://www.python.org/dev/peps/pep-0503/#normalized-names
        exact_req_projects = {
            canonicalize_project_name(Requirement.parse(req).project_name) for req in exact_reqs
        }
        constraints_file_contents = await Get(
            DigestContents,
            PathGlobs(
                [python_setup.requirement_constraints],
                glob_match_error_behavior=GlobMatchErrorBehavior.error,
                conjunction=GlobExpansionConjunction.all_match,
                description_of_origin="the option `--python-setup-requirement-constraints`",
            ),
        )
        constraints_file_reqs = set(
            parse_requirements(next(iter(constraints_file_contents)).content.decode())
        )
        constraint_file_projects = {
            canonicalize_project_name(req.project_name) for req in constraints_file_reqs
        }
        # Warn about requirements the constraints file does not pin; this also disables
        # the resolve-all-constraints optimization below.
        unconstrained_projects = exact_req_projects - constraint_file_projects
        if unconstrained_projects:
            logger.warning(
                f"The constraints file {python_setup.requirement_constraints} does not contain "
                f"entries for the following requirements: {', '.join(unconstrained_projects)}"
            )

        if python_setup.resolve_all_constraints == ResolveAllConstraintsOption.ALWAYS or (
            python_setup.resolve_all_constraints == ResolveAllConstraintsOption.NONDEPLOYABLES
            and request.internal_only
        ):
            if unconstrained_projects:
                logger.warning(
                    "Ignoring resolve_all_constraints setting in [python_setup] scope "
                    "because constraints file does not cover all requirements."
                )
            else:
                # Resolve the entire constraints file so the resolve can be shared
                # across requests with different requirement subsets.
                requirements = PexRequirements(str(req) for req in constraints_file_reqs)
                description = description or f"Resolving {python_setup.requirement_constraints}"
    elif (
        python_setup.resolve_all_constraints != ResolveAllConstraintsOption.NEVER
        and python_setup.resolve_all_constraints_was_set_explicitly()
    ):
        raise ValueError(
            f"[python-setup].resolve_all_constraints is set to "
            f"{python_setup.resolve_all_constraints.value}, so "
            f"[python-setup].requirement_constraints must also be provided."
        )

    return PexRequest(
        output_filename=request.output_filename,
        internal_only=request.internal_only,
        requirements=requirements,
        interpreter_constraints=interpreter_constraints,
        platforms=request.platforms,
        entry_point=request.entry_point,
        sources=merged_input_digest,
        additional_inputs=request.additional_inputs,
        additional_args=request.additional_args,
        description=description,
    )
async def mypy_typecheck(request: MyPyRequest, mypy: MyPy) -> TypecheckResults:
    """Run MyPy over the transitive closure of the requested field sets.

    Builds a MyPy PEX, writes the list of `.py` files to check into a file list
    (to avoid argv length limits), and invokes MyPy once over all of them.
    """
    if request.skip if hasattr(request, "skip") else mypy.skip:  # NOTE(review): placeholder — see below
        pass
    if mypy.skip:
        return TypecheckResults([], typechecker_name="MyPy")
    transitive_targets = await Get(
        TransitiveTargets, Addresses(fs.address for fs in request.field_sets)
    )
    prepared_sources_request = Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(transitive_targets.closure),
    )
    pex_request = Get(
        Pex,
        PexRequest(
            output_filename="mypy.pex",
            internal_only=True,
            requirements=PexRequirements(mypy.all_requirements),
            # NB: This only determines what MyPy is run with. The user can specify what version
            # their code is with `--python-version`. See
            # https://mypy.readthedocs.io/en/stable/config_file.html#platform-configuration. We do
            # not auto-configure this for simplicity and to avoid Pants magically setting values for
            # users.
            interpreter_constraints=PexInterpreterConstraints(mypy.interpreter_constraints),
            entry_point=mypy.entry_point,
        ),
    )
    config_digest_request = Get(
        Digest,
        PathGlobs(
            globs=[mypy.config] if mypy.config else [],
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            description_of_origin="the option `--mypy-config`",
        ),
    )
    prepared_sources, pex, config_digest = await MultiGet(
        prepared_sources_request, pex_request, config_digest_request
    )
    srcs_snapshot = prepared_sources.source_files.snapshot
    # MyPy is given the file list via a file rather than argv, so only `.py` files
    # (not resources/files) are included.
    file_list_path = "__files.txt"
    python_files = "\n".join(f for f in srcs_snapshot.files if f.endswith(".py"))
    file_list_digest = await Get(
        Digest,
        CreateDigest([FileContent(file_list_path, python_files.encode())]),
    )
    merged_input_files = await Get(
        Digest,
        MergeDigests([file_list_digest, srcs_snapshot.digest, pex.digest, config_digest]),
    )
    result = await Get(
        FallibleProcessResult,
        PexProcess(
            pex,
            argv=generate_args(mypy, file_list_path=file_list_path),
            input_digest=merged_input_files,
            # Source roots must be importable so MyPy can follow imports.
            extra_env={"PEX_EXTRA_SYS_PATH": ":".join(prepared_sources.source_roots)},
            description=f"Run MyPy on {pluralize(len(srcs_snapshot.files), 'file')}.",
            level=LogLevel.DEBUG,
        ),
    )
    return TypecheckResults(
        [TypecheckResult.from_fallible_process_result(result)], typechecker_name="MyPy"
    )
async def create_python_binary_run_request(
    field_set: PythonBinaryFieldSet,
    python_binary_defaults: PythonBinaryDefaults,
    pex_env: PexEnvironment,
) -> RunRequest:
    """Build a RunRequest that executes a python_binary target.

    A thin runner PEX is built (sharing interpreter constraints with the requirements
    PEX), the requirements PEX is attached via PEX_PATH, and loose sources are exposed
    via PEX_EXTRA_SYS_PATH — all paths rewritten into the run chroot.

    Raises:
        InvalidFieldException: if neither `sources` nor `entry_point` is defined.
    """
    entry_point = field_set.entry_point.value
    if entry_point is None:
        # TODO: This is overkill? We don't need to hydrate the sources and strip snapshots,
        # we only need the path relative to the source root.
        binary_sources = await Get(HydratedSources, HydrateSourcesRequest(field_set.sources))
        stripped_binary_sources = await Get(
            StrippedSourceFiles, SourceFiles(binary_sources.snapshot, ())
        )
        entry_point = PythonBinarySources.translate_source_file_to_entry_point(
            stripped_binary_sources.snapshot.files
        )
    if entry_point is None:
        raise InvalidFieldException(
            "You must either specify `sources` or `entry_point` for the target "
            f"{repr(field_set.address)} in order to run it, but both fields were undefined."
        )

    transitive_targets = await Get(TransitiveTargets, Addresses([field_set.address]))

    # Note that we get an intermediate PexRequest here (instead of going straight to a Pex)
    # so that we can get the interpreter constraints for use in runner_pex_request.
    requirements_pex_request = await Get(
        PexRequest,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(Addresses([field_set.address]), internal_only=True),
    )
    requirements_request = Get(Pex, PexRequest, requirements_pex_request)
    sources_request = Get(
        PythonSourceFiles, PythonSourceFilesRequest(transitive_targets.closure, include_files=True)
    )
    output_filename = f"{field_set.address.target_name}.pex"
    runner_pex_request = Get(
        Pex,
        PexRequest(
            output_filename=output_filename,
            interpreter_constraints=requirements_pex_request.interpreter_constraints,
            additional_args=field_set.generate_additional_args(python_binary_defaults),
            internal_only=True,
        ),
    )
    requirements, sources, runner_pex = await MultiGet(
        requirements_request, sources_request, runner_pex_request
    )
    merged_digest = await Get(
        Digest,
        MergeDigests(
            [requirements.digest, sources.source_files.snapshot.digest, runner_pex.digest]
        ),
    )

    def in_chroot(relpath: str) -> str:
        # `{chroot}` is substituted with the materialized run directory at execution time.
        return os.path.join("{chroot}", relpath)

    chrooted_source_roots = [in_chroot(sr) for sr in sources.source_roots]
    extra_env = {
        **pex_env.environment_dict,
        "PEX_PATH": in_chroot(requirements_pex_request.output_filename),
        "PEX_EXTRA_SYS_PATH": ":".join(chrooted_source_roots),
    }

    return RunRequest(
        digest=merged_digest,
        args=(in_chroot(runner_pex.name), "-m", entry_point),
        extra_env=extra_env,
    )
async def pex_from_targets(request: PexFromTargetsRequest, python_setup: PythonSetup) -> PexRequest:
    """Translate a PexFromTargetsRequest into a concrete PexRequest.

    Gathers the transitive closure's sources, interpreter constraints, and requirements.
    When a constraints file is configured and `resolve_all_constraints` is enabled, the
    entire constraints file is resolved (to maximize resolve reuse) unless the file does
    not cover every requirement.

    Raises:
        ValueError: if `resolve_all_constraints` is set but no constraints file is
            configured.
    """
    transitive_targets = await Get(TransitiveTargets, Addresses, request.addresses)
    all_targets = transitive_targets.closure

    input_digests = []
    if request.additional_sources:
        input_digests.append(request.additional_sources)
    if request.include_source_files:
        prepared_sources = await Get(
            StrippedPythonSourceFiles, PythonSourceFilesRequest(all_targets)
        )
        input_digests.append(prepared_sources.stripped_source_files.snapshot.digest)
    merged_input_digest = await Get(Digest, MergeDigests(input_digests))

    interpreter_constraints = PexInterpreterConstraints.create_from_compatibility_fields(
        (
            tgt[PythonInterpreterCompatibility]
            for tgt in all_targets
            if tgt.has_field(PythonInterpreterCompatibility)
        ),
        python_setup,
    )

    exact_reqs = PexRequirements.create_from_requirement_fields(
        (
            tgt[PythonRequirementsField]
            for tgt in all_targets
            if tgt.has_field(PythonRequirementsField)
        ),
        additional_requirements=request.additional_requirements,
    )

    requirements = exact_reqs
    if python_setup.requirement_constraints:
        exact_req_projects = {Requirement.parse(req).project_name for req in exact_reqs}
        constraints_file_contents = await Get(
            DigestContents,
            PathGlobs(
                [python_setup.requirement_constraints],
                glob_match_error_behavior=GlobMatchErrorBehavior.error,
                conjunction=GlobExpansionConjunction.all_match,
                description_of_origin="the option `--python-setup-requirement-constraints`",
            ),
        )
        constraints_file_reqs = set(
            parse_requirements(next(iter(constraints_file_contents)).content.decode())
        )
        constraint_file_projects = {req.project_name for req in constraints_file_reqs}
        unconstrained_projects = exact_req_projects - constraint_file_projects
        if unconstrained_projects:
            logger.warning(
                f"The constraints file {python_setup.requirement_constraints} does not contain "
                f"entries for the following requirements: {', '.join(unconstrained_projects)}"
            )
        if python_setup.resolve_all_constraints:
            if unconstrained_projects:
                # BUGFIX: the two adjacent literals previously concatenated to
                # "...scopeBecause..." — a missing separator. Message now matches
                # the sibling implementation of this rule.
                logger.warning(
                    "Ignoring resolve_all_constraints setting in [python_setup] scope "
                    "because constraints file does not cover all requirements."
                )
            else:
                # Resolve the entire constraints file so the resolve can be shared
                # across requests with different requirement subsets.
                requirements = PexRequirements(str(req) for req in constraints_file_reqs)
    elif python_setup.resolve_all_constraints:
        raise ValueError(
            "resolve_all_constraints in the [python-setup] scope is set, so "
            "requirement_constraints in [python-setup] must also be provided."
        )

    return PexRequest(
        output_filename=request.output_filename,
        requirements=requirements,
        interpreter_constraints=interpreter_constraints,
        platforms=request.platforms,
        entry_point=request.entry_point,
        sources=merged_input_digest,
        additional_inputs=request.additional_inputs,
        additional_args=request.additional_args,
        description=request.description,
    )
async def pylint_lint_partition(partition: PylintPartition, pylint: Pylint) -> LintResult:
    """Run Pylint over one partition of field sets sharing interpreter constraints."""
    # We build one PEX with Pylint requirements and another with all direct 3rd-party dependencies.
    # Splitting this into two PEXes gives us finer-grained caching. We then merge via `--pex-path`.
    plugin_requirements = PexRequirements.create_from_requirement_fields(
        plugin_tgt[PythonRequirementsField]
        for plugin_tgt in partition.plugin_targets
        if plugin_tgt.has_field(PythonRequirementsField)
    )
    target_requirements = PexRequirements.create_from_requirement_fields(
        tgt[PythonRequirementsField]
        for tgt in partition.targets_with_dependencies
        if tgt.has_field(PythonRequirementsField)
    )
    pylint_pex_request = Get(
        Pex,
        PexRequest(
            output_filename="pylint.pex",
            requirements=PexRequirements([*pylint.all_requirements, *plugin_requirements]),
            interpreter_constraints=partition.interpreter_constraints,
        ),
    )
    requirements_pex_request = Get(
        Pex,
        PexRequest(
            output_filename="requirements.pex",
            requirements=target_requirements,
            interpreter_constraints=partition.interpreter_constraints,
        ),
    )
    # TODO(John Sirois): Support shading python binaries:
    #   https://github.com/pantsbuild/pants/issues/9206
    # Right now any Pylint transitive requirements will shadow corresponding user
    # requirements, which could lead to problems.
    pylint_runner_pex_args = ["--pex-path", ":".join(["pylint.pex", "requirements.pex"])]
    pylint_runner_pex_request = Get(
        Pex,
        PexRequest(
            output_filename="pylint_runner.pex",
            entry_point=pylint.entry_point,
            interpreter_constraints=partition.interpreter_constraints,
            additional_args=pylint_runner_pex_args,
        ),
    )
    config_digest_request = Get(
        Digest,
        PathGlobs(
            globs=[pylint.config] if pylint.config else [],
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            description_of_origin="the option `--pylint-config`",
        ),
    )
    prepare_plugin_sources_request = Get(
        StrippedPythonSourceFiles,
        PythonSourceFilesRequest(partition.plugin_targets),
    )
    prepare_python_sources_request = Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(partition.targets_with_dependencies),
    )
    # Only the field sets' own files are passed on the Pylint command line.
    field_set_sources_request = Get(
        SourceFiles,
        SourceFilesRequest(field_set.sources for field_set in partition.field_sets),
    )
    (
        pylint_pex,
        requirements_pex,
        pylint_runner_pex,
        config_digest,
        prepared_plugin_sources,
        prepared_python_sources,
        field_set_sources,
    ) = await MultiGet(
        pylint_pex_request,
        requirements_pex_request,
        pylint_runner_pex_request,
        config_digest_request,
        prepare_plugin_sources_request,
        prepare_python_sources_request,
        field_set_sources_request,
    )
    # Plugin sources are prefixed under `__plugins` so they can't collide with user code.
    prefixed_plugin_sources = (
        await Get(
            Digest,
            AddPrefix(
                prepared_plugin_sources.stripped_source_files.snapshot.digest, "__plugins"
            ),
        )
        if pylint.source_plugins
        else EMPTY_DIGEST
    )
    pythonpath = list(prepared_python_sources.source_roots)
    if pylint.source_plugins:
        # NB: Pylint source plugins must be explicitly loaded via PEX_EXTRA_SYS_PATH. The value must
        # point to the plugin's directory, rather than to a parent's directory, because
        # `load-plugins` takes a module name rather than a path to the module; i.e. `plugin`, but
        # not `path.to.plugin`. (This means users must have specified the parent directory as a
        # source root.)
        pythonpath.append("__plugins")
    input_digest = await Get(
        Digest,
        MergeDigests(
            (
                pylint_pex.digest,
                requirements_pex.digest,
                pylint_runner_pex.digest,
                config_digest,
                prefixed_plugin_sources,
                prepared_python_sources.source_files.snapshot.digest,
            )
        ),
    )
    address_references = ", ".join(
        sorted(field_set.address.spec for field_set in partition.field_sets)
    )
    result = await Get(
        FallibleProcessResult,
        PexProcess(
            pylint_runner_pex,
            argv=generate_args(source_files=field_set_sources, pylint=pylint),
            input_digest=input_digest,
            extra_env={"PEX_EXTRA_SYS_PATH": ":".join(pythonpath)},
            description=(
                f"Run Pylint on {pluralize(len(partition.field_sets), 'target')}: "
                f"{address_references}."
            ),
        ),
    )
    return LintResult.from_fallible_process_result(result, linter_name="Pylint")
async def setup_pytest_for_target(
    field_set: PythonTestFieldSet,
    pytest: PyTest,
    test_subsystem: TestSubsystem,
    python_setup: PythonSetup,
    coverage_config: CoverageConfig,
    coverage_subsystem: CoverageSubsystem,
) -> TestTargetSetup:
    """Prepare everything needed to run Pytest for one python_tests target.

    Builds three PEXes (pytest itself, the target's requirements, and a runner that
    joins the first two via `--pex-path`), merges them with the prepared sources, and
    assembles the Pytest command line (including coverage flags when enabled).
    """
    test_addresses = Addresses((field_set.address,))
    transitive_targets = await Get(TransitiveTargets, Addresses, test_addresses)
    all_targets = transitive_targets.closure
    interpreter_constraints = PexInterpreterConstraints.create_from_compatibility_fields(
        (
            tgt[PythonInterpreterCompatibility]
            for tgt in all_targets
            if tgt.has_field(PythonInterpreterCompatibility)
        ),
        python_setup,
    )
    # NB: We set `--not-zip-safe` because Pytest plugin discovery, which uses
    # `importlib_metadata` and thus `zipp`, does not play nicely when doing import magic directly
    # from zip files. `zipp` has pathologically bad behavior with large zipfiles.
    # TODO: this does have a performance cost as the pex must now be expanded to disk. Long term,
    # it would be better to fix Zipp (whose fix would then need to be used by importlib_metadata
    # and then by Pytest). See https://github.com/jaraco/zipp/pull/26.
    additional_args_for_pytest = ("--not-zip-safe",)
    pytest_pex_request = Get(
        Pex,
        PexRequest(
            output_filename="pytest.pex",
            requirements=PexRequirements(pytest.get_requirement_strings()),
            interpreter_constraints=interpreter_constraints,
            additional_args=additional_args_for_pytest,
            internal_only=True,
        ),
    )
    # Defaults to zip_safe=False.
    requirements_pex_request = Get(
        Pex,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(test_addresses, internal_only=True),
    )
    test_runner_pex_request = Get(
        Pex,
        PexRequest(
            interpreter_constraints=interpreter_constraints,
            output_filename="test_runner.pex",
            entry_point="pytest:main",
            additional_args=(
                "--pex-path",
                # TODO(John Sirois): Support shading python binaries:
                #   https://github.com/pantsbuild/pants/issues/9206
                # Right now any pytest transitive requirements will shadow corresponding user
                # requirements which will lead to problems when APIs that are used by either
                # `pytest:main` or the tests themselves break between the two versions.
                ":".join(
                    (
                        pytest_pex_request.subject.output_filename,
                        requirements_pex_request.subject.output_filename,
                    )
                ),
            ),
            internal_only=True,
        ),
    )
    prepared_sources_request = Get(
        PythonSourceFiles, PythonSourceFilesRequest(all_targets, include_files=True)
    )
    # Get the file names for the test_target so that we can specify to Pytest precisely which files
    # to test, rather than using auto-discovery.
    field_set_source_files_request = Get(SourceFiles, SourceFilesRequest([field_set.sources]))
    (
        pytest_pex,
        requirements_pex,
        test_runner_pex,
        prepared_sources,
        field_set_source_files,
    ) = await MultiGet(
        pytest_pex_request,
        requirements_pex_request,
        test_runner_pex_request,
        prepared_sources_request,
        field_set_source_files_request,
    )
    input_digest = await Get(
        Digest,
        MergeDigests(
            (
                coverage_config.digest,
                prepared_sources.source_files.snapshot.digest,
                requirements_pex.digest,
                pytest_pex.digest,
                test_runner_pex.digest,
            )
        ),
    )
    coverage_args = []
    if test_subsystem.use_coverage:
        # When no filter is configured, measure coverage for everything under the build root.
        cov_paths = coverage_subsystem.filter if coverage_subsystem.filter else (".",)
        coverage_args = [
            "--cov-report=",  # Turn off output.
            *itertools.chain.from_iterable(["--cov", cov_path] for cov_path in cov_paths),
        ]
    return TestTargetSetup(
        test_runner_pex=test_runner_pex,
        args=(*pytest.options.args, *coverage_args, *field_set_source_files.files),
        input_digest=input_digest,
        source_roots=prepared_sources.source_roots,
        timeout_seconds=field_set.timeout.calculate_from_global_options(pytest),
        xml_dir=pytest.options.junit_xml_dir,
        junit_family=pytest.options.junit_family,
        execution_slot_variable=pytest.options.execution_slot_var,
    )
async def setup_pytest_for_target(
    request: TestSetupRequest,
    pytest: PyTest,
    test_subsystem: TestSubsystem,
    python_setup: PythonSetup,
    coverage_config: CoverageConfig,
    coverage_subsystem: CoverageSubsystem,
    global_options: GlobalOptions,
) -> TestSetup:
    """Prepare the Process that runs Pytest for one python_tests target.

    Builds three PEXes (pytest itself, the target's requirements, and a runner that
    joins the first two via `--pex-path`), merges them with the prepared sources, and
    constructs the Pytest process, including JUnit XML and coverage outputs when
    configured and not in debug mode.
    """
    test_addresses = Addresses((request.field_set.address,))
    transitive_targets = await Get(TransitiveTargets, Addresses, test_addresses)
    all_targets = transitive_targets.closure
    interpreter_constraints = PexInterpreterConstraints.create_from_compatibility_fields(
        (
            tgt[PythonInterpreterCompatibility]
            for tgt in all_targets
            if tgt.has_field(PythonInterpreterCompatibility)
        ),
        python_setup,
    )
    # NB: We set `--not-zip-safe` because Pytest plugin discovery, which uses
    # `importlib_metadata` and thus `zipp`, does not play nicely when doing import magic directly
    # from zip files. `zipp` has pathologically bad behavior with large zipfiles.
    # TODO: this does have a performance cost as the pex must now be expanded to disk. Long term,
    # it would be better to fix Zipp (whose fix would then need to be used by importlib_metadata
    # and then by Pytest). See https://github.com/jaraco/zipp/pull/26.
    additional_args_for_pytest = ("--not-zip-safe",)
    pytest_pex_request = Get(
        Pex,
        PexRequest(
            output_filename="pytest.pex",
            requirements=PexRequirements(pytest.get_requirement_strings()),
            interpreter_constraints=interpreter_constraints,
            additional_args=additional_args_for_pytest,
            internal_only=True,
        ),
    )
    # Defaults to zip_safe=False.
    requirements_pex_request = Get(
        Pex,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(test_addresses, internal_only=True),
    )
    test_runner_pex_request = Get(
        Pex,
        PexRequest(
            interpreter_constraints=interpreter_constraints,
            output_filename="test_runner.pex",
            entry_point="pytest:main",
            additional_args=(
                "--pex-path",
                # TODO(John Sirois): Support shading python binaries:
                #   https://github.com/pantsbuild/pants/issues/9206
                # Right now any pytest transitive requirements will shadow corresponding user
                # requirements which will lead to problems when APIs that are used by either
                # `pytest:main` or the tests themselves break between the two versions.
                ":".join(
                    (
                        pytest_pex_request.subject.output_filename,
                        requirements_pex_request.subject.output_filename,
                    )
                ),
            ),
            internal_only=True,
        ),
    )
    prepared_sources_request = Get(
        PythonSourceFiles, PythonSourceFilesRequest(all_targets, include_files=True)
    )
    # Get the file names for the test_target so that we can specify to Pytest precisely which files
    # to test, rather than using auto-discovery.
    field_set_source_files_request = Get(
        SourceFiles, SourceFilesRequest([request.field_set.sources])
    )
    (
        pytest_pex,
        requirements_pex,
        test_runner_pex,
        prepared_sources,
        field_set_source_files,
    ) = await MultiGet(
        pytest_pex_request,
        requirements_pex_request,
        test_runner_pex_request,
        prepared_sources_request,
        field_set_source_files_request,
    )
    input_digest = await Get(
        Digest,
        MergeDigests(
            (
                coverage_config.digest,
                prepared_sources.source_files.snapshot.digest,
                requirements_pex.digest,
                pytest_pex.digest,
                test_runner_pex.digest,
            )
        ),
    )
    add_opts = [f"--color={'yes' if global_options.options.colors else 'no'}"]
    output_files = []
    results_file_name = None
    # JUnit XML output is skipped in debug mode since outputs are not captured there.
    if pytest.options.junit_xml_dir and not request.is_debug:
        results_file_name = f"{request.field_set.address.path_safe_spec}.xml"
        add_opts.extend(
            (f"--junitxml={results_file_name}", "-o", f"junit_family={pytest.options.junit_family}")
        )
        output_files.append(results_file_name)
    coverage_args = []
    if test_subsystem.use_coverage and not request.is_debug:
        output_files.append(".coverage")
        # When no filter is configured, measure coverage for everything under the build root.
        cov_paths = coverage_subsystem.filter if coverage_subsystem.filter else (".",)
        coverage_args = [
            "--cov-report=",  # Turn off output.
            *itertools.chain.from_iterable(["--cov", cov_path] for cov_path in cov_paths),
        ]
    extra_env = {
        "PYTEST_ADDOPTS": " ".join(add_opts),
        "PEX_EXTRA_SYS_PATH": ":".join(prepared_sources.source_roots),
    }
    if test_subsystem.force and not request.is_debug:
        # This is a slightly hacky way to force the process to run: since the env var
        # value is unique, this input combination will never have been seen before,
        # and therefore never cached. The two downsides are:
        #  1. This leaks into the test's environment, albeit with a funky var name that is
        #     unlikely to cause problems in practice.
        #  2. This run will be cached even though it can never be re-used.
        # TODO: A more principled way of forcing rules to run?
        uuid = await Get(UUID, UUIDRequest())
        extra_env["__PANTS_FORCE_TEST_RUN__"] = str(uuid)
    process = await Get(
        Process,
        PexProcess(
            test_runner_pex,
            argv=(*pytest.options.args, *coverage_args, *field_set_source_files.files),
            extra_env=extra_env,
            input_digest=input_digest,
            output_files=output_files,
            timeout_seconds=request.field_set.timeout.calculate_from_global_options(pytest),
            execution_slot_variable=pytest.options.execution_slot_var,
            description=f"Run Pytest for {request.field_set.address}",
            level=LogLevel.DEBUG,
        ),
    )
    return TestSetup(process, results_file_name=results_file_name)