def test_merge_interpreter_constraints() -> None:
    """Exercise `PexInterpreterConstraints.merge_constraint_sets`.

    Each inner list is a set of ORed constraints; the outer list ANDs those sets together.
    Merging therefore distributes AND over OR, deduplicates, and sorts the result.
    """

    def assert_merged(*, inp: List[List[str]], expected: List[str]) -> None:
        assert PexInterpreterConstraints.merge_constraint_sets(inp) == expected

    # Multiple constraint sets get merged so that they are ANDed.
    # A & B => A & B
    assert_merged(inp=[["CPython==2.7.*"], ["CPython==3.6.*"]], expected=["CPython==2.7.*,==3.6.*"])
    # Multiple constraints within a single constraint set are kept separate so that they are ORed.
    # A | B => A | B
    assert_merged(
        inp=[["CPython==2.7.*", "CPython==3.6.*"]], expected=["CPython==2.7.*", "CPython==3.6.*"]
    )
    # Input constraints already were ANDed.
    # A => A
    assert_merged(inp=[["CPython>=2.7,<3"]], expected=["CPython>=2.7,<3"])
    # Both AND and OR.
    # (A | B) & C => (A & C) | (B & C)
    assert_merged(
        inp=[["CPython>=2.7,<3", "CPython>=3.5"], ["CPython==3.6.*"]],
        expected=["CPython>=2.7,<3,==3.6.*", "CPython>=3.5,==3.6.*"],
    )
    # A & B & (C | D) => (A & B & C) | (A & B & D)
    assert_merged(
        inp=[["CPython==2.7.*"], ["CPython==3.6.*"], ["CPython==3.7.*", "CPython==3.8.*"]],
        expected=["CPython==2.7.*,==3.6.*,==3.7.*", "CPython==2.7.*,==3.6.*,==3.8.*"],
    )
    # (A | B) & (C | D) => (A & C) | (A & D) | (B & C) | (B & D)
    assert_merged(
        inp=[["CPython>=2.7,<3", "CPython>=3.5"], ["CPython==3.6.*", "CPython==3.7.*"]],
        expected=[
            "CPython>=2.7,<3,==3.6.*",
            "CPython>=2.7,<3,==3.7.*",
            "CPython>=3.5,==3.6.*",
            "CPython>=3.5,==3.7.*",
        ],
    )
    # A & (B | C | D) & (E | F) & G =>
    # (A & B & E & G) | (A & B & F & G) | (A & C & E & G) | (A & C & F & G) |
    # (A & D & E & G) | (A & D & F & G)
    assert_merged(
        inp=[
            ["CPython==3.6.5"],
            ["CPython==2.7.14", "CPython==2.7.15", "CPython==2.7.16"],
            ["CPython>=3.6", "CPython==3.5.10"],
            ["CPython>3.8"],
        ],
        expected=[
            "CPython==2.7.14,==3.5.10,==3.6.5,>3.8",
            "CPython==2.7.14,>=3.6,==3.6.5,>3.8",
            "CPython==2.7.15,==3.5.10,==3.6.5,>3.8",
            "CPython==2.7.15,>=3.6,==3.6.5,>3.8",
            "CPython==2.7.16,==3.5.10,==3.6.5,>3.8",
            "CPython==2.7.16,>=3.6,==3.6.5,>3.8",
        ],
    )
    # Deduplicate between constraint_sets.
    # (A | B) & (A | B) => A | B. Naively, this should actually resolve as follows:
    # (A | B) & (A | B) => (A & A) | (A & B) | (B & B) => A | (A & B) | B.
    # But, we first deduplicate each constraint_set. (A | B) & (A | B) can be rewritten as
    # X & X => X.
    assert_merged(
        inp=[["CPython==2.7.*", "CPython==3.6.*"], ["CPython==2.7.*", "CPython==3.6.*"]],
        expected=["CPython==2.7.*", "CPython==3.6.*"],
    )
    # (A | B) & C & (A | B) => (A & C) | (B & C). Alternatively, this can be rewritten as
    # X & Y & X => X & Y.
    assert_merged(
        inp=[
            ["CPython>=2.7,<3", "CPython>=3.5"],
            ["CPython==3.6.*"],
            ["CPython>=3.5", "CPython>=2.7,<3"],
        ],
        expected=["CPython>=2.7,<3,==3.6.*", "CPython>=3.5,==3.6.*"],
    )
    # No specifiers.
    assert_merged(inp=[["CPython"]], expected=["CPython"])
    assert_merged(inp=[["CPython"], ["CPython==3.7.*"]], expected=["CPython==3.7.*"])
    # No interpreter is shorthand for CPython, which is how Pex behaves.
    assert_merged(inp=[[">=3.5"], ["CPython==3.7.*"]], expected=["CPython>=3.5,==3.7.*"])
    # Different Python interpreters, which are guaranteed to fail when ANDed but are safe when ORed.
    with pytest.raises(ValueError):
        PexInterpreterConstraints.merge_constraint_sets([["CPython==3.7.*"], ["PyPy==43.0"]])
    assert_merged(
        inp=[["CPython==3.7.*", "PyPy==43.0"]], expected=["CPython==3.7.*", "PyPy==43.0"]
    )
    # Ensure we can handle empty input.
    assert_merged(inp=[], expected=[])
def test_interpreter_constraints(self) -> None:
    """The interpreter constraints requested at build time should round-trip into PEX-INFO."""
    requested = PexInterpreterConstraints(["CPython>=2.7,<3", "CPython>=3.6"])
    info = self.create_pex_and_get_pex_info(interpreter_constraints=requested)
    recorded = set(info["interpreter_constraints"])
    assert recorded == set(requested)
async def setup(
    request: SetupRequest,
    black: Black,
    python_setup: PythonSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> Setup:
    """Assemble everything needed to run Black over the requested targets.

    Builds the Black PEX, materializes the optional `--black-config` file, gathers the
    source files to operate on, and merges it all into a single execute request.
    """
    adaptors_with_origins = request.formatter.adaptors_with_origins

    # A PEX containing Black itself, pinned to Black's own interpreter constraints.
    requirements_pex = await Get[Pex](
        CreatePex(
            output_filename="black.pex",
            requirements=PexRequirements(black.get_requirement_specs()),
            interpreter_constraints=PexInterpreterConstraints(
                black.default_interpreter_constraints
            ),
            entry_point=black.get_entry_point(),
        )
    )

    # Materialize the user's config file, if one was specified via `--black-config`.
    config_path: Optional[str] = black.options.config
    config_globs = (config_path,) if config_path else ()
    config_snapshot = await Get[Snapshot](
        PathGlobs(
            globs=config_globs,
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            description_of_origin="the option `--black-config`",
        )
    )

    # Reuse the snapshot from a prior formatter in the chain when available;
    # otherwise gather all source files fresh.
    prior_result = request.formatter.prior_formatter_result
    if prior_result is not None:
        all_source_files_snapshot = prior_result
    else:
        all_source_files = await Get[SourceFiles](
            AllSourceFilesRequest(
                adaptor_with_origin.adaptor for adaptor_with_origin in adaptors_with_origins
            )
        )
        all_source_files_snapshot = all_source_files.snapshot

    specified_source_files = await Get[SourceFiles](
        SpecifiedSourceFilesRequest(adaptors_with_origins)
    )

    # Black needs the sources, its own PEX, and the config file in one chroot.
    merged_input_files = await Get[Digest](
        DirectoriesToMerge(
            directories=(
                all_source_files_snapshot.directory_digest,
                requirements_pex.directory_digest,
                config_snapshot.directory_digest,
            )
        ),
    )

    address_references = ", ".join(
        sorted(
            adaptor_with_origin.adaptor.address.reference()
            for adaptor_with_origin in adaptors_with_origins
        )
    )

    process_request = requirements_pex.create_execute_request(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        pex_path="./black.pex",
        pex_args=generate_args(
            specified_source_files=specified_source_files,
            black=black,
            check_only=request.check_only,
        ),
        input_files=merged_input_files,
        # Output every file we put in, so formatting edits are captured.
        output_files=all_source_files_snapshot.files,
        description=f"Run black for {address_references}",
    )
    return Setup(process_request)
def assert_merged(*, inp: List[List[str]], expected: List[str]) -> None:
    """Assert that merging the constraint sets in `inp` yields exactly `expected`."""
    merged = PexInterpreterConstraints.merge_constraint_sets(inp)
    assert merged == expected
async def pylint_lint(
    configs: PylintConfigurations,
    pylint: Pylint,
    python_setup: PythonSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> LintResult:
    """Run Pylint over the specified targets and return its (possibly failing) result.

    Builds a Pylint PEX, materializes the optional `--pylint-config` file, stages the
    targets' sources (plus direct dependencies) into a chroot, and executes Pylint there.
    """
    if pylint.options.skip:
        return LintResult.noop()

    # Pylint needs direct dependencies in the chroot to ensure that imports are valid. However, it
    # doesn't lint those direct dependencies nor does it care about transitive dependencies.
    addresses = []
    for config in configs:
        addresses.append(config.address)
        addresses.extend(config.dependencies.value or ())

    targets = await Get[Targets](Addresses(addresses))
    chrooted_python_sources = await Get[ImportablePythonSources](Targets, targets)

    # NB: Pylint output depends upon which Python interpreter version it's run with. We ensure that
    # each target runs with its own interpreter constraints. See
    # http://pylint.pycqa.org/en/latest/faq.html#what-versions-of-python-is-pylint-supporting.
    interpreter_constraints = PexInterpreterConstraints.create_from_compatibility_fields(
        (config.compatibility for config in configs), python_setup
    )
    requirements_pex = await Get[Pex](
        PexRequest(
            output_filename="pylint.pex",
            requirements=PexRequirements(pylint.get_requirement_specs()),
            interpreter_constraints=interpreter_constraints,
            entry_point=pylint.get_entry_point(),
        )
    )

    # Materialize the user's config file, if one was specified.
    config_path: Optional[str] = pylint.options.config
    config_snapshot = await Get[Snapshot](
        PathGlobs(
            globs=tuple([config_path] if config_path else []),
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            description_of_origin="the option `--pylint-config`",
        )
    )

    merged_input_files = await Get[Digest](
        DirectoriesToMerge(
            directories=(
                requirements_pex.directory_digest,
                config_snapshot.directory_digest,
                chrooted_python_sources.snapshot.directory_digest,
            )
        ),
    )

    # Only lint the files the user actually specified, not everything staged in the chroot.
    specified_source_files = await Get[SourceFiles](
        SpecifiedSourceFilesRequest(
            ((config.sources, config.origin) for config in configs), strip_source_roots=True
        )
    )

    address_references = ", ".join(sorted(config.address.reference() for config in configs))

    request = requirements_pex.create_execute_request(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        # Fixed: this was an f-string with no placeholders (lint F541).
        pex_path="./pylint.pex",
        pex_args=generate_args(specified_source_files=specified_source_files, pylint=pylint),
        input_files=merged_input_files,
        description=f"Run Pylint on {pluralize(len(configs), 'target')}: {address_references}.",
    )
    result = await Get[FallibleProcessResult](Process, request)
    return LintResult.from_fallible_process_result(result)
async def pylint_lint(
    field_sets: PylintFieldSets,
    pylint: Pylint,
    python_setup: PythonSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment,
) -> LintResult:
    """Run Pylint over the specified field sets and return its (possibly failing) result.

    Builds three PEXes (Pylint itself, the targets' 3rd-party requirements, and a runner that
    joins them via `--pex-path`), stages sources and config into a chroot, and executes Pylint.
    """
    if pylint.skip:
        return LintResult.noop()

    # Pylint needs direct dependencies in the chroot to ensure that imports are valid. However, it
    # doesn't lint those direct dependencies nor does it care about transitive dependencies.
    addresses_with_dependencies = []
    for field_set in field_sets:
        addresses_with_dependencies.append(field_set.address)
        addresses_with_dependencies.extend(field_set.dependencies.value or ())
    targets = await Get[Targets](Addresses(addresses_with_dependencies))

    # NB: Pylint output depends upon which Python interpreter version it's run with. We ensure that
    # each target runs with its own interpreter constraints. See
    # http://pylint.pycqa.org/en/latest/faq.html#what-versions-of-python-is-pylint-supporting.
    interpreter_constraints = PexInterpreterConstraints.create_from_compatibility_fields(
        (field_set.compatibility for field_set in field_sets), python_setup
    )

    # We build one PEX with Pylint requirements and another with all direct 3rd-party dependencies.
    # Splitting this into two PEXes gives us finer-grained caching. We then merge via `--pex-path`.
    pylint_pex_request = Get[Pex](
        PexRequest(
            output_filename="pylint.pex",
            requirements=PexRequirements(pylint.get_requirement_specs()),
            interpreter_constraints=interpreter_constraints,
            entry_point=pylint.get_entry_point(),
        )
    )
    requirements_pex_request = Get[Pex](
        PexRequest(
            output_filename="requirements.pex",
            requirements=PexRequirements.create_from_requirement_fields(
                tgt[PythonRequirementsField]
                for tgt in targets
                if tgt.has_field(PythonRequirementsField)
            ),
            interpreter_constraints=interpreter_constraints,
        )
    )
    pylint_runner_pex_request = Get[Pex](
        PexRequest(
            output_filename="pylint_runner.pex",
            entry_point=pylint.get_entry_point(),
            interpreter_constraints=interpreter_constraints,
            additional_args=(
                "--pex-path",
                # TODO(John Sirois): Support shading python binaries:
                # https://github.com/pantsbuild/pants/issues/9206
                # Right now any Pylint transitive requirements will shadow corresponding user
                # requirements which could lead to problems.
                ":".join(["pylint.pex", "requirements.pex"]),
            ),
        )
    )
    config_snapshot_request = Get[Snapshot](
        PathGlobs(
            globs=[pylint.config] if pylint.config else [],
            glob_match_error_behavior=GlobMatchErrorBehavior.error,
            description_of_origin="the option `--pylint-config`",
        )
    )
    prepare_python_sources_request = Get[ImportablePythonSources](Targets, targets)
    specified_source_files_request = Get[SourceFiles](
        SpecifiedSourceFilesRequest(
            ((field_set.sources, field_set.origin) for field_set in field_sets),
            strip_source_roots=True,
        )
    )

    # Kick off all six requests concurrently for better scheduling.
    (
        pylint_pex,
        requirements_pex,
        pylint_runner_pex,
        config_snapshot,
        prepared_python_sources,
        specified_source_files,
    ) = cast(
        Tuple[Pex, Pex, Pex, Snapshot, ImportablePythonSources, SourceFiles],
        await MultiGet(
            [
                pylint_pex_request,
                requirements_pex_request,
                pylint_runner_pex_request,
                config_snapshot_request,
                prepare_python_sources_request,
                specified_source_files_request,
            ]
        ),
    )

    input_digest = await Get[Digest](
        MergeDigests(
            (
                pylint_pex.digest,
                requirements_pex.digest,
                pylint_runner_pex.digest,
                config_snapshot.digest,
                prepared_python_sources.snapshot.digest,
            )
        ),
    )

    address_references = ", ".join(
        sorted(field_set.address.reference() for field_set in field_sets)
    )

    process = requirements_pex.create_process(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        # Fixed: this was an f-string with no placeholders (lint F541).
        pex_path="./pylint_runner.pex",
        pex_args=generate_args(specified_source_files=specified_source_files, pylint=pylint),
        input_digest=input_digest,
        description=f"Run Pylint on {pluralize(len(field_sets), 'target')}: {address_references}.",
    )
    result = await Get[FallibleProcessResult](Process, process)
    return LintResult.from_fallible_process_result(result, linter_name="Pylint")
def run_python_test(
    test_target: PythonTestsAdaptor,
    pytest: PyTest,
    python_setup: PythonSetup,
    subprocess_encoding_environment: SubprocessEncodingEnvironment
) -> TestResult:
    """Runs pytest for one target.

    Old-style (yield-based) rule: `yield Get(...)` requests products from the engine, and
    yielding a final `TestResult` completes the rule.
    """
    # TODO(7726): replace this with a proper API to get the `closure` for a
    # TransitiveHydratedTarget.
    transitive_hydrated_targets = yield Get(
        TransitiveHydratedTargets, BuildFileAddresses((test_target.address,))
    )
    all_targets = transitive_hydrated_targets.closure
    all_target_adaptors = tuple(t.adaptor for t in all_targets)

    # Get the file names for the test_target, adjusted for the source root. This allows us to
    # specify to Pytest which files to test and thus to avoid the test auto-discovery defined by
    # https://pytest.org/en/latest/goodpractices.html#test-discovery. In addition to a performance
    # optimization, this ensures that any transitive sources, such as a test project file named
    # test_fail.py, do not unintentionally end up being run as tests.
    source_root_stripped_test_target_sources = yield Get(
        SourceRootStrippedSources, Address, test_target.address.to_address()
    )

    # Target has no test files: trivially succeed.
    # NOTE(review): this relies on the old engine treating a yielded TestResult as the rule's
    # final value (so execution does not fall through) — confirm against the engine version.
    if not source_root_stripped_test_target_sources.snapshot.files:
        yield TestResult(
            status=Status.SUCCESS,
            stdout="",
            stderr="",
        )

    # Yielding a list of Gets runs them all concurrently and returns their results as a list.
    source_root_stripped_sources = yield [
        Get(SourceRootStrippedSources, HydratedTarget, target_adaptor)
        for target_adaptor in all_targets
    ]

    stripped_sources_digests = [
        stripped_sources.snapshot.directory_digest
        for stripped_sources in source_root_stripped_sources
    ]
    sources_digest = yield Get(
        Digest, DirectoriesToMerge(directories=tuple(stripped_sources_digests)),
    )
    # Add missing __init__.py files so the merged sources form importable packages.
    inits_digest = yield Get(InjectedInitDigest, Digest, sources_digest)

    interpreter_constraints = PexInterpreterConstraints.create_from_adaptors(
        adaptors=tuple(all_target_adaptors), python_setup=python_setup
    )

    # Build a single PEX bundling pytest together with the targets' requirements.
    output_pytest_requirements_pex_filename = 'pytest-with-requirements.pex'
    requirements = PexRequirements.create_from_adaptors(
        adaptors=all_target_adaptors,
        additional_requirements=pytest.get_requirement_strings()
    )
    resolved_requirements_pex = yield Get(
        Pex, CreatePex(
            output_filename=output_pytest_requirements_pex_filename,
            requirements=requirements,
            interpreter_constraints=interpreter_constraints,
            entry_point="pytest:main",
        )
    )

    # The chroot needs the sources, the injected __init__ files, and the pytest PEX.
    merged_input_files = yield Get(
        Digest,
        DirectoriesToMerge(
            directories=(
                sources_digest,
                inits_digest.directory_digest,
                resolved_requirements_pex.directory_digest,
            )
        ),
    )

    # Pass the test target's own files explicitly to avoid pytest auto-discovery (see above).
    test_target_sources_file_names = sorted(
        source_root_stripped_test_target_sources.snapshot.files
    )
    request = resolved_requirements_pex.create_execute_request(
        python_setup=python_setup,
        subprocess_encoding_environment=subprocess_encoding_environment,
        pex_path=f'./{output_pytest_requirements_pex_filename}',
        pex_args=test_target_sources_file_names,
        input_files=merged_input_files,
        description=f'Run Pytest for {test_target.address.reference()}',
        # TODO(#8584): hook this up to TestRunnerTaskMixin so that we can configure the default
        # timeout and also use the specified max timeout time.
        timeout_seconds=getattr(test_target, 'timeout', 60)
    )
    # Fallible: a non-zero exit code from pytest is a test failure, not a rule error.
    result = yield Get(FallibleExecuteProcessResult, ExecuteProcessRequest, request)
    status = Status.SUCCESS if result.exit_code == 0 else Status.FAILURE
    yield TestResult(
        status=status,
        stdout=result.stdout.decode(),
        stderr=result.stderr.decode(),
    )