async def create_python_binary(
    field_set: PythonBinaryFieldSet, python_binary_defaults: PythonBinaryDefaults
) -> CreatedBinary:
    """Build a `.pex` file for a python binary target and wrap it as a `CreatedBinary`.

    If the target declares no explicit `entry_point`, one is derived from the target's
    source file (relative to its source root).
    """
    entry_point = field_set.entry_point.value
    if entry_point is None:
        # No explicit entry point: derive one from the target's source file.
        # TODO: This is overkill? We don't need to hydrate the sources and strip snapshots,
        # we only need the path relative to the source root.
        hydrated = await Get(HydratedSources, HydrateSourcesRequest(field_set.sources))
        stripped = await Get(StrippedSourceFiles, SourceFiles(hydrated.snapshot, ()))
        entry_point = PythonBinarySources.translate_source_file_to_entry_point(
            stripped.snapshot.files
        )
    pex_name = f"{field_set.address.target_name}.pex"
    result = await Get(
        TwoStepPex,
        TwoStepPexFromTargetsRequest(
            PexFromTargetsRequest(
                addresses=[field_set.address],
                entry_point=entry_point,
                platforms=PexPlatforms.create_from_platforms_field(field_set.platforms),
                output_filename=pex_name,
                additional_args=field_set.generate_additional_args(python_binary_defaults),
            )
        ),
    )
    built_pex = result.pex
    return CreatedBinary(digest=built_pex.digest, binary_name=built_pex.output_filename)
async def create_python_awslambda(
    config: PythonAwsLambdaConfiguration,
    lambdex_setup: LambdexSetup,
    python_setup: PythonSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> CreatedAWSLambda:
    """Build a PEX for the lambda target, then run lambdex over it to produce the AWS
    Lambda artifact.
    """
    # TODO: We must enforce that everything is built for Linux, no matter the local platform.
    pex_filename = f"{config.address.target_name}.pex"
    pex_request = PexFromTargetsRequest(
        addresses=Addresses([config.address]),
        entry_point=None,
        output_filename=pex_filename,
    )
    pex = await Get[Pex](PexFromTargetsRequest, pex_request)
    # Merge the target's pex with the lambdex tool pex so both are visible to one process.
    merged_input_files = await Get[Digest](
        DirectoriesToMerge(
            directories=(pex.directory_digest, lambdex_setup.requirements_pex.directory_digest)
        )
    )
    # NB: Lambdex modifies its input pex in-place, so the input file is also the output file.
    lambdex_args = ("build", "-e", config.handler.value, pex_filename)
    process_request = lambdex_setup.requirements_pex.create_execute_request(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        pex_path="./lambdex.pex",
        pex_args=lambdex_args,
        input_files=merged_input_files,
        output_files=(pex_filename,),
        description=f"Run Lambdex for {config.address.reference()}",
    )
    result = await Get[ExecuteProcessResult](ExecuteProcessRequest, process_request)
    return CreatedAWSLambda(digest=result.output_directory_digest, name=pex_filename)
async def create_python_binary(config: PythonBinaryConfiguration) -> CreatedBinary:
    """Build a `.pex` for a python binary target, inferring the entry point from the
    target's single source file when one is not declared.
    """
    entry_point: Optional[str] = config.entry_point.value
    if entry_point is None:
        stripped_sources = await Get[SourceFiles](AllSourceFilesRequest(
            [config.sources], strip_source_roots=True))
        # NB: `PythonBinarySources` enforces that we have 0-1 sources.
        if len(stripped_sources.files) == 1:
            entry_point = PythonBinary.translate_source_path_to_py_module_specifier(
                stripped_sources.files[0]
            )
    pex_name = f"{config.address.target_name}.pex"
    two_step_result = await Get[TwoStepPex](TwoStepPexFromTargetsRequest(
        PexFromTargetsRequest(
            addresses=Addresses([config.address]),
            entry_point=entry_point,
            platforms=PexPlatforms.create_from_platforms_field(config.platforms),
            output_filename=pex_name,
            additional_args=config.generate_additional_args(),
        )))
    built = two_step_result.pex
    return CreatedBinary(digest=built.directory_digest, binary_name=built.output_filename)
async def run_python_repl(repl: PythonRepl) -> ReplBinary:
    """Build the PEX that backs a plain Python REPL over the requested targets."""
    target_addresses = Addresses(tgt.address for tgt in repl.targets)
    result = await Get[TwoStepPex](
        TwoStepPexFromTargetsRequest(
            PexFromTargetsRequest(
                addresses=target_addresses,
                output_filename="python-repl.pex",
            )
        )
    )
    pex = result.pex
    return ReplBinary(digest=pex.digest, binary_name=pex.output_filename)
async def create_python_awslambda(
    field_set: PythonAwsLambdaFieldSet, lambdex_setup: LambdexSetup
) -> CreatedAWSLambda:
    """Build a Linux-platform PEX for the lambda target, then run lambdex over it to
    inject the AWS-facing handler wrapper.
    """
    # Lambdas typically use the .zip suffix, so we use that instead of .pex.
    pex_filename = f"{field_set.address.target_name}.zip"
    # We hardcode the platform value to the appropriate one for each AWS Lambda runtime.
    # (Running the "hello world" lambda in the example code will report the platform, and can be
    # used to verify correctness of these platform strings.)
    py_major, py_minor = field_set.runtime.to_interpreter_version()
    platform = f"linux_x86_64-cp-{py_major}{py_minor}-cp{py_major}{py_minor}"
    # set pymalloc ABI flag - this was removed in python 3.8 https://bugs.python.org/issue36707
    # NOTE(review): the condition relies on CPython 2.x minors all being < 8 (true for real
    # Lambda runtimes); `(py_major, py_minor) < (3, 8)` would state the intent more directly.
    if py_major <= 3 and py_minor < 8:
        platform += "m"
    # CPython 2.7 Lambda runtimes additionally use the wide-unicode ("u") ABI suffix.
    if (py_major, py_minor) == (2, 7):
        platform += "u"
    pex_request = TwoStepPexFromTargetsRequest(
        PexFromTargetsRequest(
            addresses=[field_set.address],
            internal_only=False,
            entry_point=None,
            output_filename=pex_filename,
            platforms=PexPlatforms([platform]),
            additional_args=[
                # Ensure we can resolve manylinux wheels in addition to any AMI-specific wheels.
                "--manylinux=manylinux2014",
                # When we're executing Pex on Linux, allow a local interpreter to be resolved if
                # available and matching the AMI platform.
                "--resolve-local-platforms",
            ],
        ))
    pex_result = await Get(TwoStepPex, TwoStepPexFromTargetsRequest, pex_request)
    # Merge the built pex with the lambdex tool pex so one process sees both.
    input_digest = await Get(
        Digest, MergeDigests((pex_result.pex.digest, lambdex_setup.requirements_pex.digest)))
    # NB: Lambdex modifies its input pex in-place, so the input file is also the output file.
    result = await Get(
        ProcessResult,
        PexProcess(
            lambdex_setup.requirements_pex,
            argv=("build", "-e", field_set.handler.value, pex_filename),
            input_digest=input_digest,
            output_files=(pex_filename,),
            description=f"Setting up handler in {pex_filename}",
        ),
    )
    return CreatedAWSLambda(
        digest=result.output_digest,
        zip_file_relpath=pex_filename,
        runtime=field_set.runtime.value,
        # The AWS-facing handler function is always lambdex_handler.handler, which is the wrapper
        # injected by lambdex that manages invocation of the actual handler.
        handler="lambdex_handler.handler",
    )
async def run_ipython_repl(repl: IPythonRepl, ipython: IPython) -> ReplBinary:
    """Build the PEX that backs an IPython REPL over the requested targets."""
    target_addresses = Addresses(tgt.address for tgt in repl.targets)
    result = await Get[TwoStepPex](
        TwoStepPexFromTargetsRequest(
            PexFromTargetsRequest(
                addresses=target_addresses,
                output_filename="ipython-repl.pex",
                entry_point=ipython.get_entry_point(),
                additional_requirements=ipython.get_requirement_specs(),
            )
        )
    )
    pex = result.pex
    return ReplBinary(digest=pex.digest, binary_name=pex.output_filename)
async def create_ipython_repl_request(
    repl: IPythonRepl, ipython: IPython, pex_env: PexEnvironment
) -> ReplRequest:
    """Assemble the chroot digest, argv, and environment for an IPython REPL session."""
    # Note that we get an intermediate PexRequest here (instead of going straight to a Pex)
    # so that we can get the interpreter constraints for use in ipython_request.
    requirements_pex_request = await Get(
        PexRequest,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(
            Addresses(tgt.address for tgt in repl.targets), internal_only=True
        ),
    )
    requirements_request = Get(Pex, PexRequest, requirements_pex_request)
    sources_request = Get(
        PythonSourceFiles, PythonSourceFilesRequest(repl.targets, include_files=True)
    )
    # The IPython tool pex reuses the requirements pex's interpreter constraints so the
    # two pexes joined via PEX_PATH agree on interpreter selection.
    ipython_request = Get(
        Pex,
        PexRequest(
            output_filename="ipython.pex",
            entry_point=ipython.entry_point,
            requirements=PexRequirements(ipython.all_requirements),
            interpreter_constraints=requirements_pex_request.interpreter_constraints,
            internal_only=True,
        ),
    )
    requirements_pex, sources, ipython_pex = await MultiGet(
        requirements_request, sources_request, ipython_request
    )
    merged_digest = await Get(
        Digest,
        MergeDigests(
            (requirements_pex.digest, sources.source_files.snapshot.digest, ipython_pex.digest)
        ),
    )
    args = [repl.in_chroot(ipython_pex.name)]
    if ipython.options.ignore_cwd:
        args.append("--ignore-cwd")
    # Source roots must be expressed relative to the chroot the REPL process runs in.
    chrooted_source_roots = [repl.in_chroot(sr) for sr in sources.source_roots]
    extra_env = {
        **pex_env.environment_dict,
        "PEX_PATH": repl.in_chroot(requirements_pex_request.output_filename),
        "PEX_EXTRA_SYS_PATH": ":".join(chrooted_source_roots),
    }
    return ReplRequest(digest=merged_digest, args=args, extra_env=extra_env)
async def create_python_awslambda(
    field_set: PythonAwsLambdaFieldSet,
    lambdex_setup: LambdexSetup,
    python_setup: PythonSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> CreatedAWSLambda:
    """Build a manylinux-platform PEX for the lambda target, then run lambdex over it to
    inject the AWS-facing handler wrapper.
    """
    # Lambdas typically use the .zip suffix, so we use that instead of .pex.
    pex_filename = f"{field_set.address.target_name}.zip"
    # We hardcode the platform value to the appropriate one for each AWS Lambda runtime.
    # (Running the "hello world" lambda in the example code will report the platform, and can be
    # used to verify correctness of these platform strings.)
    py_major, py_minor = field_set.runtime.to_interpreter_version()
    platform = f"manylinux2014_x86_64-cp-{py_major}{py_minor}-cp{py_major}{py_minor}"
    # set pymalloc ABI flag - this was removed in python 3.8 https://bugs.python.org/issue36707
    # NOTE(review): condition assumes CPython 2.x minors are < 8 (true for real runtimes);
    # `(py_major, py_minor) < (3, 8)` would state the intent more directly.
    if py_major <= 3 and py_minor < 8:
        platform += "m"
    # CPython 2.7 additionally uses the wide-unicode ("u") ABI suffix.
    if (py_major, py_minor) == (2, 7):
        platform += "u"
    pex_request = TwoStepPexFromTargetsRequest(
        PexFromTargetsRequest(
            addresses=Addresses([field_set.address]),
            entry_point=None,
            output_filename=pex_filename,
            platforms=PexPlatforms([platform]),
        ))
    pex_result = await Get[TwoStepPex](TwoStepPexFromTargetsRequest, pex_request)
    # Merge the built pex with the lambdex tool pex so one process sees both.
    input_digest = await Get[Digest](MergeDigests(
        (pex_result.pex.digest, lambdex_setup.requirements_pex.digest)))
    # NB: Lambdex modifies its input pex in-place, so the input file is also the output file.
    lambdex_args = ("build", "-e", field_set.handler.value, pex_filename)
    process = lambdex_setup.requirements_pex.create_process(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        pex_path="./lambdex.pex",
        pex_args=lambdex_args,
        input_digest=input_digest,
        output_files=(pex_filename,),
        description=f"Setting up handler in {pex_filename}",
    )
    result = await Get[ProcessResult](Process, process)
    # Note that the AWS-facing handler function is always lambdex_handler.handler, which
    # is the wrapper injected by lambdex that manages invocation of the actual handler.
    return CreatedAWSLambda(
        digest=result.output_digest,
        name=pex_filename,
        runtime=field_set.runtime.value,
        handler="lambdex_handler.handler",
    )
async def create_python_binary_run_request(
    field_set: PythonBinaryFieldSet, python_binary_defaults: PythonBinaryDefaults
) -> RunRequest:
    """Build the pex and loose sources needed to `run` a python binary target.

    Raises:
        InvalidFieldException: if neither `sources` nor `entry_point` yields an entry point.
    """
    entry_point = field_set.entry_point.value
    if entry_point is None:
        # TODO: This is overkill? We don't need to hydrate the sources and strip snapshots,
        # we only need the path relative to the source root.
        binary_sources = await Get(HydratedSources, HydrateSourcesRequest(field_set.sources))
        stripped_binary_sources = await Get(
            StrippedSourceFiles, SourceFiles(binary_sources.snapshot, ()))
        entry_point = PythonBinarySources.translate_source_file_to_entry_point(
            stripped_binary_sources.snapshot.files)
    if entry_point is None:
        raise InvalidFieldException(
            "You must either specify `sources` or `entry_point` for the target "
            f"{repr(field_set.address)} in order to run it, but both fields were undefined."
        )
    transitive_targets = await Get(TransitiveTargets, Addresses([field_set.address]))
    output_filename = f"{field_set.address.target_name}.pex"
    # Sources are excluded from the pex and provided loose alongside it (see
    # PEX_EXTRA_SYS_PATH below), so edits don't require rebuilding the pex.
    pex_request = Get(
        Pex,
        PexFromTargetsRequest(
            addresses=Addresses([field_set.address]),
            platforms=PexPlatforms.create_from_platforms_field(
                field_set.platforms),
            output_filename=output_filename,
            additional_args=field_set.generate_additional_args(
                python_binary_defaults),
            include_source_files=False,
        ),
    )
    sources_request = Get(
        PythonSourceFiles,
        PythonSourceFilesRequest(transitive_targets.closure, include_files=True),
    )
    pex, sources = await MultiGet(pex_request, sources_request)
    merged_digest = await Get(
        Digest, MergeDigests([pex.digest, sources.source_files.snapshot.digest]))
    # The entry point is invoked as a module (`-m`) against the loose sources.
    return RunRequest(
        digest=merged_digest,
        binary_name=pex.output_filename,
        prefix_args=("-m", entry_point),
        env={"PEX_EXTRA_SYS_PATH": ":".join(sources.source_roots)},
    )
async def create_python_repl_request(repl: PythonRepl) -> ReplRequest:
    """Assemble the digest and environment needed to launch a plain Python REPL."""
    requirements_get = Get(
        Pex,
        PexFromTargetsRequest(
            (tgt.address for tgt in repl.targets),
            output_filename="python.pex",
            include_source_files=False,
        ),
    )
    sources_get = Get(PythonSourceFiles, PythonSourceFilesRequest(repl.targets))
    repl_pex, source_files = await MultiGet(requirements_get, sources_get)
    combined = await Get(
        Digest, MergeDigests((repl_pex.digest, source_files.source_files.snapshot.digest))
    )
    return ReplRequest(
        digest=combined,
        binary_name=repl_pex.output_filename,
        env={"PEX_EXTRA_SYS_PATH": ":".join(source_files.source_roots)},
    )
async def create_python_binary(field_set: PythonBinaryFieldSet) -> CreatedBinary:
    """Build a `.pex` for a python binary target, deriving the entry point from the
    target's sources when one is not declared.
    """
    entry_point = field_set.entry_point.value
    if entry_point is None:
        stripped = await Get[SourceFiles](AllSourceFilesRequest(
            [field_set.sources], strip_source_roots=True))
        entry_point = PythonBinarySources.translate_source_file_to_entry_point(
            stripped.files)
    pex_name = f"{field_set.address.target_name}.pex"
    result = await Get[TwoStepPex](TwoStepPexFromTargetsRequest(
        PexFromTargetsRequest(
            addresses=Addresses([field_set.address]),
            entry_point=entry_point,
            platforms=PexPlatforms.create_from_platforms_field(field_set.platforms),
            output_filename=pex_name,
            additional_args=field_set.generate_additional_args(),
        )))
    built = result.pex
    return CreatedBinary(digest=built.digest, binary_name=built.output_filename)
async def create_ipython_repl_request(repl: IPythonRepl, ipython: IPython) -> ReplRequest:
    """Assemble the digest and environment needed to launch an IPython REPL."""
    ipython_pex_get = Get(
        Pex,
        PexFromTargetsRequest(
            (tgt.address for tgt in repl.targets),
            output_filename="ipython.pex",
            entry_point=ipython.entry_point,
            additional_requirements=ipython.all_requirements,
            include_source_files=True,
        ),
    )
    sources_get = Get(PythonSourceFiles, PythonSourceFilesRequest(repl.targets))
    ipython_pex, source_files = await MultiGet(ipython_pex_get, sources_get)
    combined = await Get(
        Digest, MergeDigests((ipython_pex.digest, source_files.source_files.snapshot.digest))
    )
    return ReplRequest(
        digest=combined,
        binary_name=ipython_pex.output_filename,
        env={"PEX_EXTRA_SYS_PATH": ":".join(source_files.source_roots)},
    )
async def create_python_repl_request(repl: PythonRepl, pex_env: PexEnvironment) -> ReplRequest:
    """Assemble the chroot digest, argv, and environment for a plain Python REPL."""
    requirements_get = Get(
        Pex,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(
            Addresses(tgt.address for tgt in repl.targets), internal_only=True
        ),
    )
    sources_get = Get(
        PythonSourceFiles, PythonSourceFilesRequest(repl.targets, include_files=True)
    )
    requirements_pex, source_files = await MultiGet(requirements_get, sources_get)
    combined = await Get(
        Digest,
        MergeDigests((requirements_pex.digest, source_files.source_files.snapshot.digest)),
    )
    roots_in_chroot = [repl.in_chroot(root) for root in source_files.source_roots]
    env = {
        **pex_env.environment_dict,
        "PEX_EXTRA_SYS_PATH": ":".join(roots_in_chroot),
    }
    return ReplRequest(
        digest=combined,
        args=(repl.in_chroot(requirements_pex.name),),
        extra_env=env,
    )
async def setup_pytest_for_target(
    field_set: PythonTestFieldSet,
    pytest: PyTest,
    test_options: TestOptions,
    python_setup: PythonSetup,
) -> TestTargetSetup:
    """Assemble the pexes, sources, and CLI args needed to run Pytest over one test target."""
    # TODO: Rather than consuming the TestOptions subsystem, the TestRunner should pass on coverage
    # configuration via #7490.
    test_addresses = Addresses((field_set.address,))
    transitive_targets = await Get[TransitiveTargets](Addresses, test_addresses)
    all_targets = transitive_targets.closure
    interpreter_constraints = PexInterpreterConstraints.create_from_compatibility_fields(
        (tgt[PythonInterpreterCompatibility]
         for tgt in all_targets
         if tgt.has_field(PythonInterpreterCompatibility)),
        python_setup,
    )
    # Ensure all pexes we merge via PEX_PATH to form the test runner use the interpreter constraints
    # of the tests. This is handled by CreatePexFromTargetClosure, but we must pass this through for
    # CreatePex requests.
    pex_request = functools.partial(
        PexRequest, interpreter_constraints=interpreter_constraints)
    # NB: We set `--not-zip-safe` because Pytest plugin discovery, which uses
    # `importlib_metadata` and thus `zipp`, does not play nicely when doing import magic directly
    # from zip files. `zipp` has pathologically bad behavior with large zipfiles.
    # TODO: this does have a performance cost as the pex must now be expanded to disk. Long term,
    # it would be better to fix Zipp (whose fix would then need to be used by importlib_metadata
    # and then by Pytest). See https://github.com/jaraco/zipp/pull/26.
    additional_args_for_pytest = ("--not-zip-safe",)
    use_coverage = test_options.values.use_coverage
    # Only materialize the coverage plugin file when coverage is actually enabled.
    plugin_file_digest: Optional[Digest] = (
        await Get[Digest](InputFilesContent, COVERAGE_PLUGIN_INPUT) if use_coverage else None
    )
    pytest_pex_request = Get[Pex](
        PexRequest,
        pex_request(
            output_filename="pytest.pex",
            requirements=PexRequirements(pytest.get_requirement_strings()),
            additional_args=additional_args_for_pytest,
            sources=plugin_file_digest,
        ),
    )
    requirements_pex_request = Get[Pex](PexFromTargetsRequest(
        addresses=test_addresses,
        output_filename="requirements.pex",
        include_source_files=False,
        additional_args=additional_args_for_pytest,
    ))
    test_runner_pex_request = Get[Pex](
        PexRequest,
        pex_request(
            output_filename="test_runner.pex",
            entry_point="pytest:main",
            interpreter_constraints=interpreter_constraints,
            additional_args=(
                "--pex-path",
                # TODO(John Sirois): Support shading python binaries:
                # https://github.com/pantsbuild/pants/issues/9206
                # Right now any pytest transitive requirements will shadow corresponding user
                # requirements which will lead to problems when APIs that are used by either
                # `pytest:main` or the tests themselves break between the two versions.
                ":".join((
                    pytest_pex_request.subject.output_filename,
                    requirements_pex_request.subject.output_filename,
                )),
            ),
        ),
    )
    prepared_sources_request = Get[ImportablePythonSources](Targets(all_targets))
    # Get the file names for the test_target so that we can specify to Pytest precisely which files
    # to test, rather than using auto-discovery.
    specified_source_files_request = Get[SourceFiles](
        SpecifiedSourceFilesRequest([(field_set.sources, field_set.origin)],
                                    strip_source_roots=True))
    requests = (
        pytest_pex_request,
        requirements_pex_request,
        test_runner_pex_request,
        prepared_sources_request,
        specified_source_files_request,
    )
    # When coverage is enabled, a CoverageConfig is fetched alongside the other requests;
    # otherwise an empty config is substituted so the unpacking has the same shape either way.
    (
        coverage_config,
        pytest_pex,
        requirements_pex,
        test_runner_pex,
        prepared_sources,
        specified_source_files,
    ) = (await MultiGet(
        Get(
            CoverageConfig,
            CoverageConfigRequest(
                Targets((tgt for tgt in all_targets if tgt.has_field(PythonSources))),
                is_test_time=True,
            ),
        ),
        *requests,
    ) if use_coverage else (CoverageConfig(EMPTY_DIGEST), *await MultiGet(*requests)))
    digests_to_merge = [
        coverage_config.digest,
        prepared_sources.snapshot.digest,
        requirements_pex.digest,
        pytest_pex.digest,
        test_runner_pex.digest,
    ]
    input_digest = await Get[Digest](MergeDigests(digests_to_merge))
    coverage_args = []
    if use_coverage:
        coverage_args = [
            "--cov-report=",  # To not generate any output. https://pytest-cov.readthedocs.io/en/latest/config.html
        ]
        for package in field_set.coverage.determine_packages_to_cover(
                specified_source_files=specified_source_files):
            coverage_args.extend(["--cov", package])
    specified_source_file_names = sorted(specified_source_files.snapshot.files)
    return TestTargetSetup(
        test_runner_pex=test_runner_pex,
        args=(*pytest.options.args, *coverage_args, *specified_source_file_names),
        input_digest=input_digest,
        timeout_seconds=field_set.timeout.calculate_from_global_options(pytest),
        xml_dir=pytest.options.junit_xml_dir,
        junit_family=pytest.options.junit_family,
    )
def test_constraints_validation(self) -> None:
    """Verify how PexFromTargetsRequest combines requirement constraints with the
    `resolve-all-constraints` option, and that enabling it without a constraints file errors.
    """
    self.add_to_build_file(
        "",
        dedent(
            """
            python_requirement_library(name="foo", requirements=[python_requirement("foo>=0.1.2")])
            python_requirement_library(name="bar", requirements=[
                python_requirement("bar==5.5.5")])
            python_requirement_library(name="baz", requirements=[python_requirement("baz")])
            python_library(name="tgt", sources=[], dependencies=[":foo", ":bar", ":baz"])
            """
        ),
    )
    # constraints1 pins every requirement (plus an extra, qux); constraints2 omits baz.
    self.create_file(
        "constraints1.txt",
        dedent(
            """
            foo==1.0.0
            bar==5.5.5
            baz==2.2.2
            qux==3.4.5
            """
        ),
    )
    self.create_file(
        "constraints2.txt",
        dedent(
            """
            foo==1.0.0
            bar==5.5.5
            qux==3.4.5
            """
        ),
    )
    request = PexFromTargetsRequest([Address.parse("//:tgt")], output_filename="dummy.pex")

    def get_pex_request(constraints_file: Optional[str], resolve_all: bool) -> PexRequest:
        # Runs the rule under test with the given python-setup options.
        args = [
            "--backend-packages=pants.backend.python",
            f"--python-setup-resolve-all-constraints={resolve_all}",
        ]
        if constraints_file:
            args.append(
                f"--python-setup-requirement-constraints={constraints_file}"
            )
        return self.request_single_product(
            PexRequest, Params(request, create_options_bootstrapper(args=args)))

    # Without resolve-all, only the declared requirements are used, unmodified.
    pex_req1 = get_pex_request("constraints1.txt", False)
    assert pex_req1.requirements == PexRequirements(
        ["foo>=0.1.2", "bar==5.5.5", "baz"])
    # With resolve-all, the full constraints file contents are used instead.
    pex_req2 = get_pex_request("constraints1.txt", True)
    assert pex_req2.requirements == PexRequirements(
        ["foo==1.0.0", "bar==5.5.5", "baz==2.2.2", "qux==3.4.5"])
    # resolve-all without a constraints file is an error.
    with self.assertRaises(ExecutionError) as err:
        get_pex_request(None, True)
    assert len(err.exception.wrapped_exceptions) == 1
    assert isinstance(err.exception.wrapped_exceptions[0], ValueError)
    assert (
        "resolve_all_constraints in the [python-setup] scope is set, so "
        "requirement_constraints in [python-setup] must also be provided."
    ) in str(err.exception)
async def setup_pytest_for_target(
    request: TestSetupRequest,
    pytest: PyTest,
    test_subsystem: TestSubsystem,
    python_setup: PythonSetup,
    coverage_config: CoverageConfig,
    coverage_subsystem: CoverageSubsystem,
    global_options: GlobalOptions,
) -> TestSetup:
    """Assemble the Process used to run Pytest over one test field set."""
    test_addresses = Addresses((request.field_set.address,))
    transitive_targets = await Get(TransitiveTargets, Addresses, test_addresses)
    all_targets = transitive_targets.closure
    interpreter_constraints = PexInterpreterConstraints.create_from_compatibility_fields(
        (tgt[PythonInterpreterCompatibility]
         for tgt in all_targets
         if tgt.has_field(PythonInterpreterCompatibility)),
        python_setup,
    )
    # NB: We set `--not-zip-safe` because Pytest plugin discovery, which uses
    # `importlib_metadata` and thus `zipp`, does not play nicely when doing import magic directly
    # from zip files. `zipp` has pathologically bad behavior with large zipfiles.
    # TODO: this does have a performance cost as the pex must now be expanded to disk. Long term,
    # it would be better to fix Zipp (whose fix would then need to be used by importlib_metadata
    # and then by Pytest). See https://github.com/jaraco/zipp/pull/26.
    additional_args_for_pytest = ("--not-zip-safe",)
    pytest_pex_request = Get(
        Pex,
        PexRequest(
            output_filename="pytest.pex",
            requirements=PexRequirements(pytest.get_requirement_strings()),
            interpreter_constraints=interpreter_constraints,
            additional_args=additional_args_for_pytest,
            internal_only=True,
        ),
    )
    # Defaults to zip_safe=False.
    requirements_pex_request = Get(
        Pex,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(test_addresses, internal_only=True),
    )
    test_runner_pex_request = Get(
        Pex,
        PexRequest(
            interpreter_constraints=interpreter_constraints,
            output_filename="test_runner.pex",
            entry_point="pytest:main",
            additional_args=(
                "--pex-path",
                # TODO(John Sirois): Support shading python binaries:
                # https://github.com/pantsbuild/pants/issues/9206
                # Right now any pytest transitive requirements will shadow corresponding user
                # requirements which will lead to problems when APIs that are used by either
                # `pytest:main` or the tests themselves break between the two versions.
                ":".join((
                    pytest_pex_request.subject.output_filename,
                    requirements_pex_request.subject.output_filename,
                )),
            ),
            internal_only=True,
        ),
    )
    prepared_sources_request = Get(
        PythonSourceFiles, PythonSourceFilesRequest(all_targets, include_files=True))
    # Get the file names for the test_target so that we can specify to Pytest precisely which files
    # to test, rather than using auto-discovery.
    field_set_source_files_request = Get(
        SourceFiles, SourceFilesRequest([request.field_set.sources]))
    (
        pytest_pex,
        requirements_pex,
        test_runner_pex,
        prepared_sources,
        field_set_source_files,
    ) = await MultiGet(
        pytest_pex_request,
        requirements_pex_request,
        test_runner_pex_request,
        prepared_sources_request,
        field_set_source_files_request,
    )
    input_digest = await Get(
        Digest,
        MergeDigests((
            coverage_config.digest,
            prepared_sources.source_files.snapshot.digest,
            requirements_pex.digest,
            pytest_pex.digest,
            test_runner_pex.digest,
        )),
    )
    add_opts = [f"--color={'yes' if global_options.options.colors else 'no'}"]
    output_files = []
    results_file_name = None
    # JUnit XML and coverage outputs are only captured for non-interactive (non-debug) runs.
    if pytest.options.junit_xml_dir and not request.is_debug:
        results_file_name = f"{request.field_set.address.path_safe_spec}.xml"
        add_opts.extend((f"--junitxml={results_file_name}", "-o",
                         f"junit_family={pytest.options.junit_family}"))
        output_files.append(results_file_name)
    coverage_args = []
    if test_subsystem.use_coverage and not request.is_debug:
        output_files.append(".coverage")
        cov_paths = coverage_subsystem.filter if coverage_subsystem.filter else (".",)
        coverage_args = [
            "--cov-report=",  # Turn off output.
            *itertools.chain.from_iterable(["--cov", cov_path] for cov_path in cov_paths),
        ]
    extra_env = {
        "PYTEST_ADDOPTS": " ".join(add_opts),
        "PEX_EXTRA_SYS_PATH": ":".join(prepared_sources.source_roots),
    }
    if test_subsystem.force and not request.is_debug:
        # This is a slightly hacky way to force the process to run: since the env var
        # value is unique, this input combination will never have been seen before,
        # and therefore never cached. The two downsides are:
        # 1. This leaks into the test's environment, albeit with a funky var name that is
        # unlikely to cause problems in practice.
        # 2. This run will be cached even though it can never be re-used.
        # TODO: A more principled way of forcing rules to run?
        uuid = await Get(UUID, UUIDRequest())
        extra_env["__PANTS_FORCE_TEST_RUN__"] = str(uuid)
    process = await Get(
        Process,
        PexProcess(
            test_runner_pex,
            argv=(*pytest.options.args, *coverage_args, *field_set_source_files.files),
            extra_env=extra_env,
            input_digest=input_digest,
            output_files=output_files,
            timeout_seconds=request.field_set.timeout.
            calculate_from_global_options(pytest),
            execution_slot_variable=pytest.options.execution_slot_var,
            description=f"Run Pytest for {request.field_set.address}",
            level=LogLevel.DEBUG,
        ),
    )
    return TestSetup(process, results_file_name=results_file_name)
async def create_python_binary_run_request(
    field_set: PythonBinaryFieldSet,
    python_binary_defaults: PythonBinaryDefaults,
    pex_env: PexEnvironment,
) -> RunRequest:
    """Build the runner pex, requirements pex, and loose sources needed to `run` a
    python binary target.

    Raises:
        InvalidFieldException: if neither `sources` nor `entry_point` yields an entry point.
    """
    entry_point = field_set.entry_point.value
    if entry_point is None:
        # TODO: This is overkill? We don't need to hydrate the sources and strip snapshots,
        # we only need the path relative to the source root.
        binary_sources = await Get(HydratedSources, HydrateSourcesRequest(field_set.sources))
        stripped_binary_sources = await Get(
            StrippedSourceFiles, SourceFiles(binary_sources.snapshot, ())
        )
        entry_point = PythonBinarySources.translate_source_file_to_entry_point(
            stripped_binary_sources.snapshot.files
        )
    if entry_point is None:
        raise InvalidFieldException(
            "You must either specify `sources` or `entry_point` for the target "
            f"{repr(field_set.address)} in order to run it, but both fields were undefined."
        )
    transitive_targets = await Get(TransitiveTargets, Addresses([field_set.address]))
    # Note that we get an intermediate PexRequest here (instead of going straight to a Pex)
    # so that we can get the interpreter constraints for use in runner_pex_request.
    requirements_pex_request = await Get(
        PexRequest,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(Addresses([field_set.address]), internal_only=True),
    )
    requirements_request = Get(Pex, PexRequest, requirements_pex_request)
    sources_request = Get(
        PythonSourceFiles, PythonSourceFilesRequest(transitive_targets.closure, include_files=True)
    )
    output_filename = f"{field_set.address.target_name}.pex"
    runner_pex_request = Get(
        Pex,
        PexRequest(
            output_filename=output_filename,
            interpreter_constraints=requirements_pex_request.interpreter_constraints,
            additional_args=field_set.generate_additional_args(python_binary_defaults),
            internal_only=True,
        ),
    )
    requirements, sources, runner_pex = await MultiGet(
        requirements_request, sources_request, runner_pex_request
    )
    merged_digest = await Get(
        Digest,
        MergeDigests(
            [requirements.digest, sources.source_files.snapshot.digest, runner_pex.digest]
        ),
    )

    def in_chroot(relpath: str) -> str:
        # Paths handed to the runner must be relative to the sandbox the process runs in.
        return os.path.join("{chroot}", relpath)

    chrooted_source_roots = [in_chroot(sr) for sr in sources.source_roots]
    extra_env = {
        **pex_env.environment_dict,
        "PEX_PATH": in_chroot(requirements_pex_request.output_filename),
        "PEX_EXTRA_SYS_PATH": ":".join(chrooted_source_roots),
    }
    # The runner pex invokes the entry point as a module (`-m`) against the loose sources.
    return RunRequest(
        digest=merged_digest,
        args=(in_chroot(runner_pex.name), "-m", entry_point),
        extra_env=extra_env,
    )
def test_constraints_validation(self) -> None:
    """Verify how PexFromTargetsRequest combines requirement constraints with
    `resolve_all_constraints`, including project-name normalization of constraint entries,
    and that ALWAYS without a constraints file errors.
    """
    self.add_to_build_file(
        "",
        dedent(
            """
            python_requirement_library(name="foo", requirements=["foo-bar>=0.1.2"])
            python_requirement_library(name="bar", requirements=["bar==5.5.5"])
            python_requirement_library(name="baz", requirements=["baz"])
            python_library(name="tgt", sources=[], dependencies=[":foo", ":bar", ":baz"])
            """
        ),
    )
    # Note the deliberately denormalized spelling of foo-bar in constraints1.
    self.create_file(
        "constraints1.txt",
        dedent(
            """
            Foo._-BAR==1.0.0
            bar==5.5.5
            baz==2.2.2
            qux==3.4.5
            """
        ),
    )
    self.create_file(
        "constraints2.txt",
        dedent(
            """
            foo==1.0.0
            bar==5.5.5
            qux==3.4.5
            """
        ),
    )
    request = PexFromTargetsRequest([Address.parse("//:tgt")],
                                    output_filename="demo.pex",
                                    internal_only=True)

    def get_pex_request(
            constraints_file: Optional[str],
            resolve_all: Optional[ResolveAllConstraintsOption]) -> PexRequest:
        # Runs the rule under test with the given python-setup options.
        args = [
            "--backend-packages=pants.backend.python",
        ]
        if resolve_all:
            args.append(
                f"--python-setup-resolve-all-constraints={resolve_all.value}"
            )
        if constraints_file:
            args.append(
                f"--python-setup-requirement-constraints={constraints_file}"
            )
        return self.request_single_product(
            PexRequest, Params(request, create_options_bootstrapper(args=args)))

    # NEVER: only the declared requirements are used, unmodified.
    pex_req1 = get_pex_request("constraints1.txt", ResolveAllConstraintsOption.NEVER)
    assert pex_req1.requirements == PexRequirements(
        ["foo-bar>=0.1.2", "bar==5.5.5", "baz"])
    # ALWAYS: the full constraints file contents are used, spelling preserved.
    pex_req2 = get_pex_request("constraints1.txt", ResolveAllConstraintsOption.ALWAYS)
    assert pex_req2.requirements == PexRequirements(
        ["Foo._-BAR==1.0.0", "bar==5.5.5", "baz==2.2.2", "qux==3.4.5"])
    # ALWAYS without a constraints file is an error.
    with self.assertRaises(ExecutionError) as err:
        get_pex_request(None, ResolveAllConstraintsOption.ALWAYS)
    assert len(err.exception.wrapped_exceptions) == 1
    assert isinstance(err.exception.wrapped_exceptions[0], ValueError)
    assert ("[python-setup].resolve_all_constraints is set to always, so "
            "[python-setup].requirement_constraints must also be provided."
            ) in str(err.exception)
    # Shouldn't error, as we don't explicitly set --resolve-all-constraints.
    get_pex_request(None, None)
async def setup_pytest_for_target(
    config: PythonTestConfiguration,
    pytest: PyTest,
    test_options: TestOptions,
    python_setup: PythonSetup,
) -> TestTargetSetup:
    """Build everything needed to run Pytest against one python test target.

    Resolves three PEXes concurrently (pytest itself, the target's third-party
    requirements, and a `test_runner.pex` that chains the first two via PEX_PATH),
    merges them with the prepared sources (and the coverage config, when coverage
    is enabled) into a single input digest, and returns a TestTargetSetup with the
    final Pytest argument list and timeout.
    """
    # TODO: Rather than consuming the TestOptions subsystem, the TestRunner should pass on coverage
    # configuration via #7490.

    test_addresses = Addresses((config.address,))

    # TODO(John Sirois): PexInterpreterConstraints are gathered in the same way by the
    # `create_pex_from_target_closure` rule, factor up.
    transitive_targets = await Get[TransitiveTargets](Addresses, test_addresses)
    all_targets = transitive_targets.closure

    # Partition the closure into python targets (sources to test) and resource/file
    # targets (loose data the tests may read at runtime).
    # TODO: factor this up? It's mostly duplicated with pex_from_targets.py.
    python_targets = []
    resource_targets = []
    for tgt in all_targets:
        if tgt.has_field(PythonSources):
            python_targets.append(tgt)
        # NB: PythonRequirementsFileSources is a subclass of FilesSources. We filter it out so that
        # requirements.txt is not included in the PEX and so that irrelevant changes to it (e.g.
        # whitespace changes) do not invalidate the PEX.
        if tgt.has_field(ResourcesSources) or (
            tgt.has_field(FilesSources) and not tgt.has_field(PythonRequirementsFileSources)
        ):
            resource_targets.append(tgt)

    interpreter_constraints = PexInterpreterConstraints.create_from_compatibility_fields(
        (tgt.get(PythonInterpreterCompatibility) for tgt in python_targets), python_setup
    )

    # Ensure all pexes we merge via PEX_PATH to form the test runner use the interpreter constraints
    # of the tests. This is handled by CreatePexFromTargetClosure, but we must pass this through for
    # CreatePex requests.
    pex_request = functools.partial(PexRequest, interpreter_constraints=interpreter_constraints)

    # NB: We set `--not-zip-safe` because Pytest plugin discovery, which uses
    # `importlib_metadata` and thus `zipp`, does not play nicely when doing import magic directly
    # from zip files. `zipp` has pathologically bad behavior with large zipfiles.
    # TODO: this does have a performance cost as the pex must now be expanded to disk. Long term,
    # it would be better to fix Zipp (whose fix would then need to be used by importlib_metadata
    # and then by Pytest). See https://github.com/jaraco/zipp/pull/26.
    additional_args_for_pytest = ("--not-zip-safe",)

    run_coverage = test_options.values.run_coverage
    # Only materialize the coverage plugin's file when coverage was actually requested.
    plugin_file_digest: Optional[Digest] = (
        await Get[Digest](InputFilesContent, COVERAGE_PLUGIN_INPUT) if run_coverage else None
    )
    pytest_pex_request = pex_request(
        output_filename="pytest.pex",
        requirements=PexRequirements(pytest.get_requirement_strings()),
        additional_args=additional_args_for_pytest,
        sources=plugin_file_digest,
    )

    # Third-party requirements only; the test sources are provided separately below.
    requirements_pex_request = PexFromTargetsRequest(
        addresses=test_addresses,
        output_filename="requirements.pex",
        include_source_files=False,
        additional_args=additional_args_for_pytest,
    )

    test_runner_pex_request = pex_request(
        output_filename="test_runner.pex",
        entry_point="pytest:main",
        interpreter_constraints=interpreter_constraints,
        additional_args=(
            "--pex-path",
            # TODO(John Sirois): Support shading python binaries:
            #   https://github.com/pantsbuild/pants/issues/9206
            # Right now any pytest transitive requirements will shadow corresponding user
            # requirements which will lead to problems when APIs that are used by either
            # `pytest:main` or the tests themselves break between the two versions.
            ":".join(
                (pytest_pex_request.output_filename, requirements_pex_request.output_filename)
            ),
        ),
    )

    # Get the file names for the test_target so that we can specify to Pytest precisely which files
    # to test, rather than using auto-discovery.
    specified_source_files_request = SpecifiedSourceFilesRequest(
        [(config.sources, config.origin)], strip_source_roots=True
    )

    # TODO(John Sirois): Support exploiting concurrency better:
    #   https://github.com/pantsbuild/pants/issues/9294
    # Some awkward code follows in order to execute 5-6 items concurrently given the current state
    # of MultiGet typing / API. Improve this since we should encourage full concurrency in general.
    # NB: the order of `requests` must match the tuple-unpack below; the optional
    # CoverageConfig request, when present, lands in `rest`.
    requests: List[Get[Any]] = [
        Get[Pex](PexRequest, pytest_pex_request),
        Get[Pex](PexFromTargetsRequest, requirements_pex_request),
        Get[Pex](PexRequest, test_runner_pex_request),
        Get[ImportablePythonSources](Targets(python_targets + resource_targets)),
        Get[SourceFiles](SpecifiedSourceFilesRequest, specified_source_files_request),
    ]
    if run_coverage:
        requests.append(
            Get[CoverageConfig](CoverageConfigRequest(Targets(python_targets), is_test_time=True)),
        )

    (
        pytest_pex,
        requirements_pex,
        test_runner_pex,
        prepared_sources,
        specified_source_files,
        *rest,
    ) = cast(
        Union[
            Tuple[Pex, Pex, Pex, ImportablePythonSources, SourceFiles],
            Tuple[Pex, Pex, Pex, ImportablePythonSources, SourceFiles, CoverageConfig],
        ],
        await MultiGet(requests),
    )

    # Merge everything Pytest needs into one digest for the sandboxed process.
    directories_to_merge = [
        prepared_sources.snapshot.directory_digest,
        requirements_pex.directory_digest,
        pytest_pex.directory_digest,
        test_runner_pex.directory_digest,
    ]
    if run_coverage:
        coverage_config = rest[0]
        directories_to_merge.append(coverage_config.digest)
    merged_input_files = await Get[Digest](
        DirectoriesToMerge(directories=tuple(directories_to_merge))
    )

    coverage_args = []
    if run_coverage:
        coverage_args = [
            "--cov-report=",  # To not generate any output. https://pytest-cov.readthedocs.io/en/latest/config.html
        ]
        for package in config.coverage.determine_packages_to_cover(
            specified_source_files=specified_source_files
        ):
            coverage_args.extend(["--cov", package])

    # Sort for a stable argument list (helps caching and reproducibility).
    specified_source_file_names = sorted(specified_source_files.snapshot.files)
    return TestTargetSetup(
        test_runner_pex=test_runner_pex,
        args=(*pytest.options.args, *coverage_args, *specified_source_file_names),
        input_files_digest=merged_input_files,
        timeout_seconds=config.timeout.calculate_from_global_options(pytest),
    )
async def setup_pytest_for_target(
    field_set: PythonTestFieldSet,
    pytest: PyTest,
    test_subsystem: TestSubsystem,
    python_setup: PythonSetup,
    coverage_config: CoverageConfig,
    coverage_subsystem: CoverageSubsystem,
) -> TestTargetSetup:
    """Build everything needed to run Pytest against one python test field set.

    Resolves three PEXes concurrently (pytest itself, the target's third-party
    requirements, and a `test_runner.pex` that chains the first two via --pex-path),
    merges them with the prepared sources and the coverage config into a single
    input digest, and returns a TestTargetSetup with the final Pytest argument
    list, source roots, and timeout.
    """
    test_addresses = Addresses((field_set.address,))

    transitive_targets = await Get(TransitiveTargets, Addresses, test_addresses)
    all_targets = transitive_targets.closure

    # Constrain every PEX below to the interpreters compatible with the whole closure.
    interpreter_constraints = PexInterpreterConstraints.create_from_compatibility_fields(
        (
            tgt[PythonInterpreterCompatibility]
            for tgt in all_targets
            if tgt.has_field(PythonInterpreterCompatibility)
        ),
        python_setup,
    )

    # NB: We set `--not-zip-safe` because Pytest plugin discovery, which uses
    # `importlib_metadata` and thus `zipp`, does not play nicely when doing import magic directly
    # from zip files. `zipp` has pathologically bad behavior with large zipfiles.
    # TODO: this does have a performance cost as the pex must now be expanded to disk. Long term,
    # it would be better to fix Zipp (whose fix would then need to be used by importlib_metadata
    # and then by Pytest). See https://github.com/jaraco/zipp/pull/26.
    additional_args_for_pytest = ("--not-zip-safe",)

    pytest_pex_request = Get(
        Pex,
        PexRequest(
            output_filename="pytest.pex",
            requirements=PexRequirements(pytest.get_requirement_strings()),
            interpreter_constraints=interpreter_constraints,
            additional_args=additional_args_for_pytest,
            internal_only=True,
        ),
    )

    # Defaults to zip_safe=False.
    requirements_pex_request = Get(
        Pex,
        PexFromTargetsRequest,
        PexFromTargetsRequest.for_requirements(test_addresses, internal_only=True),
    )

    test_runner_pex_request = Get(
        Pex,
        PexRequest(
            interpreter_constraints=interpreter_constraints,
            output_filename="test_runner.pex",
            entry_point="pytest:main",
            additional_args=(
                "--pex-path",
                # TODO(John Sirois): Support shading python binaries:
                #   https://github.com/pantsbuild/pants/issues/9206
                # Right now any pytest transitive requirements will shadow corresponding user
                # requirements which will lead to problems when APIs that are used by either
                # `pytest:main` or the tests themselves break between the two versions.
                # NB: `.subject` reads the output filenames off the not-yet-awaited Get
                # requests above, so no extra resolution is triggered here.
                ":".join(
                    (
                        pytest_pex_request.subject.output_filename,
                        requirements_pex_request.subject.output_filename,
                    )
                ),
            ),
            internal_only=True,
        ),
    )

    prepared_sources_request = Get(
        PythonSourceFiles, PythonSourceFilesRequest(all_targets, include_files=True)
    )

    # Get the file names for the test_target so that we can specify to Pytest precisely which files
    # to test, rather than using auto-discovery.
    field_set_source_files_request = Get(SourceFiles, SourceFilesRequest([field_set.sources]))

    # NB: the MultiGet argument order must match this tuple-unpack order.
    (
        pytest_pex,
        requirements_pex,
        test_runner_pex,
        prepared_sources,
        field_set_source_files,
    ) = await MultiGet(
        pytest_pex_request,
        requirements_pex_request,
        test_runner_pex_request,
        prepared_sources_request,
        field_set_source_files_request,
    )

    # Merge everything Pytest needs into one digest for the sandboxed process.
    input_digest = await Get(
        Digest,
        MergeDigests(
            (
                coverage_config.digest,
                prepared_sources.source_files.snapshot.digest,
                requirements_pex.digest,
                pytest_pex.digest,
                test_runner_pex.digest,
            )
        ),
    )

    coverage_args = []
    if test_subsystem.use_coverage:
        # Default to covering the whole chroot (".") when no filter paths were given.
        cov_paths = coverage_subsystem.filter if coverage_subsystem.filter else (".",)
        coverage_args = [
            "--cov-report=",  # Turn off output.
            *itertools.chain.from_iterable(["--cov", cov_path] for cov_path in cov_paths),
        ]
    return TestTargetSetup(
        test_runner_pex=test_runner_pex,
        args=(*pytest.options.args, *coverage_args, *field_set_source_files.files),
        input_digest=input_digest,
        source_roots=prepared_sources.source_roots,
        timeout_seconds=field_set.timeout.calculate_from_global_options(pytest),
        xml_dir=pytest.options.junit_xml_dir,
        junit_family=pytest.options.junit_family,
        execution_slot_variable=pytest.options.execution_slot_var,
    )