def test_address_specs_filter_by_exclude_pattern(address_specs_rule_runner: RuleRunner) -> None:
    """Targets whose normalized spec matches --exclude-target-regexp are dropped from resolution."""
    address_specs_rule_runner.set_options(["--exclude-target-regexp=exclude_me.*"])
    address_specs_rule_runner.create_file("demo/f.txt")
    address_specs_rule_runner.add_to_build_file(
        "demo",
        dedent(
            """\
            mock_tgt(name="exclude_me", sources=["f.txt"])
            mock_tgt(name="not_me", sources=["f.txt"])
            """
        ),
    )

    # Sibling resolution (`demo:`) should only surface the non-excluded target.
    assert resolve_address_specs(address_specs_rule_runner, [SiblingAddresses("demo")]) == {
        AddressWithOrigin(Address("demo", target_name="not_me"), SiblingAddresses("demo"))
    }

    # The same filtering should work when given literal addresses, including file addresses.
    # The filtering will operate against the normalized Address.spec.
    literals_result = resolve_address_specs(
        address_specs_rule_runner,
        [
            AddressLiteralSpec("demo", "exclude_me"),
            AddressLiteralSpec("demo", "not_me"),
            AddressLiteralSpec("demo/f.txt", "exclude_me"),
            AddressLiteralSpec("demo/f.txt", "not_me"),
        ],
    )
    assert literals_result == {
        AddressWithOrigin(
            Address("demo", relative_file_path="f.txt", target_name="not_me"),
            AddressLiteralSpec("demo/f.txt", "not_me"),
        ),
        AddressWithOrigin(
            Address("demo", target_name="not_me"), AddressLiteralSpec("demo", "not_me")
        ),
    }
def test_address_specs_filter_by_tag(address_specs_rule_runner: RuleRunner) -> None:
    """Only targets carrying the `+`-included tag survive --tag filtering."""
    address_specs_rule_runner.set_options(["--tag=+integration"])
    address_specs_rule_runner.create_file("demo/f.txt")
    address_specs_rule_runner.add_to_build_file(
        "demo",
        dedent(
            """\
            mock_tgt(name="a", sources=["f.txt"])
            mock_tgt(name="b", sources=["f.txt"], tags=["integration"])
            mock_tgt(name="c", sources=["f.txt"], tags=["ignore"])
            """
        ),
    )

    # Only `b` carries the `integration` tag, so sibling resolution keeps just `b`.
    assert resolve_address_specs(address_specs_rule_runner, [SiblingAddresses("demo")]) == {
        AddressWithOrigin(Address("demo", target_name="b"), SiblingAddresses("demo"))
    }

    # The same filtering should work when given literal addresses, including file addresses.
    # For file addresses, we look up the `tags` field of the original base target.
    literals_result = resolve_address_specs(
        address_specs_rule_runner,
        [
            AddressLiteralSpec("demo", "a"),
            AddressLiteralSpec("demo", "b"),
            AddressLiteralSpec("demo", "c"),
            AddressLiteralSpec("demo/f.txt", "a"),
            AddressLiteralSpec("demo/f.txt", "b"),
            AddressLiteralSpec("demo/f.txt", "c"),
        ],
    )
    assert literals_result == {
        AddressWithOrigin(
            Address("demo", relative_file_path="f.txt", target_name="b"),
            AddressLiteralSpec("demo/f.txt", "b"),
        ),
        AddressWithOrigin(Address("demo", target_name="b"), AddressLiteralSpec("demo", "b")),
    }
def test_filter_targets(rule_runner: RuleRunner) -> None:
    """TargetsWithSources keeps only targets that have a Sources field matching real files."""

    class MockTarget(Target):
        alias = "target"
        core_fields = (Sources,)

    class MockTargetWithNoSourcesField(Target):
        alias = "no_sources"
        core_fields = ()

    rule_runner.create_file("f1.txt")
    # Has a Sources field and it matches an existing file -> should be kept.
    valid_tgt = MockTarget({Sources.alias: ["f1.txt"]}, address=Address("", target_name="valid"))
    # Has a Sources field but no sources value -> should be filtered out.
    empty_tgt = MockTarget({}, address=Address("", target_name="empty"))
    # Lacks a Sources field entirely -> should be filtered out.
    invalid_tgt = MockTargetWithNoSourcesField({}, address=Address("", target_name="invalid"))

    result = rule_runner.request(
        TargetsWithSources,
        [TargetsWithSourcesRequest([valid_tgt, empty_tgt, invalid_tgt])],
    )
    assert tuple(result) == (valid_tgt,)
def test_transitive_dependencies(rule_runner: RuleRunner) -> None:
    """MyPy sees transitive deps: a type error inside a dependency-of-a-dependency is reported."""
    rule_runner.create_file(f"{PACKAGE}/util/__init__.py")
    rule_runner.create_file(
        f"{PACKAGE}/util/lib.py",
        dedent(
            """\
            def capitalize(v: str) -> str:
                return v.capitalize()
            """
        ),
    )
    rule_runner.add_to_build_file(f"{PACKAGE}/util", "python_library()")

    rule_runner.create_file(f"{PACKAGE}/math/__init__.py")
    rule_runner.create_file(
        f"{PACKAGE}/math/add.py",
        dedent(
            """\
            from project.util.lib import capitalize

            def add(x: int, y: int) -> str:
                sum = x + y
                return capitalize(sum)  # This is the wrong type.
            """
        ),
    )
    rule_runner.add_to_build_file(
        f"{PACKAGE}/math",
        "python_library()",
    )

    sources_content = [
        FileContent(
            f"{PACKAGE}/app.py",
            dedent(
                """\
                from project.math.add import add

                print(add(2, 4))
                """
            ).encode(),
        ),
        FileContent(f"{PACKAGE}/__init__.py", b""),
    ]
    target = make_target(rule_runner, sources_content)
    result = run_mypy(rule_runner, [target])
    assert len(result) == 1
    assert result[0].exit_code == 1
    # The bad call in add.py must land on line 5 (import, blank, def, sum, return).
    assert f"{PACKAGE}/math/add.py:5" in result[0].stdout
def test_binary_shorthand(chroot_rule_runner: RuleRunner) -> None:
    """A ':bin' console_scripts value resolves to the referenced pex_binary's entry_point."""
    chroot_rule_runner.create_file("src/python/project/app.py")
    chroot_rule_runner.add_to_build_file(
        "src/python/project",
        textwrap.dedent(
            """
            python_library()
            pex_binary(name='bin', entry_point='app.py:func')
            python_distribution(
                name='dist',
                provides=setup_py(
                    name='bin',
                    version='1.1.1'
                ),
                entry_points={
                    "console_scripts":{
                        "foo": ":bin",
                    },
                },
            )
            """
        ),
    )
    assert_chroot(
        chroot_rule_runner,
        ["project/app.py", "setup.py", "MANIFEST.in"],
        "setup.py",
        {
            "name": "bin",
            "version": "1.1.1",
            # NOTE(review): "plugin_demo" is not set in this BUILD file — presumably injected
            # by a setup-kwargs plugin registered in the fixture; confirm against the fixture.
            "plugin_demo": "hello world",
            "packages": ("project",),
            "namespace_packages": (),
            "install_requires": (),
            "package_data": {},
            # The ':bin' shorthand expanded to the pex_binary's 'app.py:func' entry point.
            "entry_points": {"console_scripts": ["foo = project.app:func"]},
        },
        Address("src/python/project", target_name="dist"),
    )
def test_infer_python_inits() -> None:
    """With --python-infer-inits, a source file depends on every ancestor __init__.py."""
    rule_runner = RuleRunner(
        rules=[
            *ancestor_files.rules(),
            *target_types_rules.rules(),
            infer_python_init_dependencies,
            SubsystemRule(PythonInferSubsystem),
            QueryRule(InferredDependencies, (InferInitDependencies,)),
        ],
        target_types=[PythonSourcesGeneratorTarget],
    )
    rule_runner.set_options(
        [
            "--backend-packages=pants.backend.python",
            "--python-infer-inits",
            "--source-root-patterns=src/python",
        ],
        env_inherit={"PATH", "PYENV_ROOT", "HOME"},
    )
    # Three nested packages, each with its own __init__.py and owning target.
    rule_runner.create_file("src/python/root/__init__.py")
    rule_runner.add_to_build_file("src/python/root", "python_sources()")
    rule_runner.create_file("src/python/root/mid/__init__.py")
    rule_runner.add_to_build_file("src/python/root/mid", "python_sources()")
    rule_runner.create_file("src/python/root/mid/leaf/__init__.py")
    rule_runner.create_file("src/python/root/mid/leaf/f.py")
    rule_runner.add_to_build_file("src/python/root/mid/leaf", "python_sources()")

    def run_dep_inference(address: Address) -> InferredDependencies:
        # Resolve the target, then ask the engine for its inferred __init__ deps.
        target = rule_runner.get_target(address)
        return rule_runner.request(
            InferredDependencies,
            [InferInitDependencies(target[PythonSourceField])],
        )

    assert run_dep_inference(
        Address("src/python/root/mid/leaf", relative_file_path="f.py")
    ) == InferredDependencies(
        [
            Address("src/python/root", relative_file_path="__init__.py"),
            Address("src/python/root/mid", relative_file_path="__init__.py"),
            Address("src/python/root/mid/leaf", relative_file_path="__init__.py"),
        ],
    )
def test_infer_python_conftests() -> None:
    """A test file infers dependencies on conftest.py files in all ancestor directories."""
    rule_runner = RuleRunner(
        rules=[
            *ancestor_files.rules(),
            *target_types_rules.rules(),
            infer_python_conftest_dependencies,
            SubsystemRule(PythonInferSubsystem),
            QueryRule(InferredDependencies, (InferConftestDependencies,)),
        ],
        target_types=[PythonTestsGeneratorTarget, PythonTestUtilsGeneratorTarget],
    )
    rule_runner.set_options(
        ["--source-root-patterns=src/python"],
        env_inherit={"PATH", "PYENV_ROOT", "HOME"},
    )
    # conftest.py files at three nesting levels, each owned by python_test_utils().
    rule_runner.create_file("src/python/root/conftest.py")
    rule_runner.add_to_build_file("src/python/root", "python_test_utils()")
    rule_runner.create_file("src/python/root/mid/conftest.py")
    rule_runner.add_to_build_file("src/python/root/mid", "python_test_utils()")
    rule_runner.create_file("src/python/root/mid/leaf/conftest.py")
    rule_runner.create_file("src/python/root/mid/leaf/this_is_a_test.py")
    rule_runner.add_to_build_file(
        "src/python/root/mid/leaf", "python_test_utils()\npython_tests(name='tests')"
    )

    def run_dep_inference(address: Address) -> InferredDependencies:
        target = rule_runner.get_target(address)
        return rule_runner.request(
            InferredDependencies,
            [InferConftestDependencies(target[PythonSourceField])],
        )

    # The test file depends on its own dir's conftest.py plus both ancestors'.
    assert run_dep_inference(
        Address(
            "src/python/root/mid/leaf",
            target_name="tests",
            relative_file_path="this_is_a_test.py",
        )
    ) == InferredDependencies(
        [
            Address("src/python/root", relative_file_path="conftest.py"),
            Address("src/python/root/mid", relative_file_path="conftest.py"),
            Address("src/python/root/mid/leaf", relative_file_path="conftest.py"),
        ],
    )
def test_expand_interpreter_search_paths(rule_runner: RuleRunner) -> None:
    """Each special `<...>` token in the search-path list expands to its concrete directories.

    `<PATH>` expands to $PATH entries, `<PEXRC>` to PEX_PYTHON_PATH from ~/.pexrc, `<PYENV>` to
    all pyenv versions, and `<PYENV_LOCAL>` to the version pinned in .python-version. Plain
    paths pass through unchanged and in order.
    """
    pinned_version = "3.5.5"
    rule_runner.create_file(".python-version", f"{pinned_version}\n")
    with environment_as(PATH="/env/path1:/env/path2"), setup_pexrc_with_pex_python_path(
        ["/pexrc/path1:/pexrc/path2"]
    ), fake_pyenv_root(["2.7.14", pinned_version], pinned_version) as (
        pyenv_root,
        expected_pyenv_paths,
        expected_pyenv_local_paths,
    ):
        search_paths = [
            "/foo",
            "<PATH>",
            "/bar",
            "<PEXRC>",
            "/baz",
            "<PYENV>",
            "<PYENV_LOCAL>",
            "/qux",
        ]
        actual = PythonSetup.expand_interpreter_search_paths(
            search_paths, pyenv_root_func=lambda: pyenv_root
        )
        # Literal paths are preserved in place; each token expands to its directories.
        assert actual == [
            "/foo",
            "/env/path1",
            "/env/path2",
            "/bar",
            "/pexrc/path1",
            "/pexrc/path2",
            "/baz",
            *expected_pyenv_paths,
            *expected_pyenv_local_paths,
            "/qux",
        ]
def test_map_first_party_modules_to_addresses(rule_runner: RuleRunner) -> None:
    """Module mapping covers file subtargets, ambiguous owners, packages, stubs, and plugins."""
    rule_runner.set_options(
        ["--source-root-patterns=['src/python', 'tests/python', 'build-support']"]
    )

    # Two modules belonging to the same target. We should generate subtargets for each file.
    rule_runner.create_files("src/python/project/util", ["dirutil.py", "tarutil.py"])
    rule_runner.add_to_build_file("src/python/project/util", "python_library()")

    # A module with two owners, meaning that neither should be resolved.
    rule_runner.create_file("src/python/two_owners.py")
    rule_runner.add_to_build_file("src/python", "python_library()")
    rule_runner.create_file("build-support/two_owners.py")
    rule_runner.add_to_build_file("build-support", "python_library()")

    # A package module. Because there's only one source file belonging to the target, we should
    # not generate subtargets.
    rule_runner.create_file("tests/python/project_test/demo_test/__init__.py")
    rule_runner.add_to_build_file("tests/python/project_test/demo_test", "python_library()")

    # A module with both an implementation and a type stub.
    rule_runner.create_files("src/python/stubs", ["stub.py", "stub.pyi"])
    rule_runner.add_to_build_file("src/python/stubs", "python_library()")

    # Check that plugin mappings work. Note that we duplicate one of the files with a normal
    # python_library(), which means neither the Protobuf nor Python targets should be used.
    rule_runner.create_files("src/python/protos", ["f1.proto", "f2.proto", "f2_pb2.py"])
    rule_runner.add_to_build_file(
        "src/python/protos",
        dedent(
            """\
            protobuf_library(name='protos')
            python_library(name='py')
            """
        ),
    )

    result = rule_runner.request(FirstPartyPythonModuleMapping, [])
    # Note the absence of `two_owners` and of `protos.f2_pb2` (both ambiguous).
    assert result == FirstPartyPythonModuleMapping(
        {
            "project.util.dirutil": (
                Address("src/python/project/util", relative_file_path="dirutil.py"),
            ),
            "project.util.tarutil": (
                Address("src/python/project/util", relative_file_path="tarutil.py"),
            ),
            "project_test.demo_test": (
                Address("tests/python/project_test/demo_test", relative_file_path="__init__.py"),
            ),
            "protos.f1_pb2": (
                Address("src/python/protos", relative_file_path="f1.proto", target_name="protos"),
            ),
            "stubs.stub": (
                Address("src/python/stubs", relative_file_path="stub.py"),
                Address("src/python/stubs", relative_file_path="stub.pyi"),
            ),
        }
    )
def test_filter_field_sets(rule_runner: RuleRunner) -> None:
    """FieldSetsWithSources drops field sets whose Sources field matches no files."""

    @dataclass(frozen=True)
    class MockFieldSet(FieldSet):
        sources: Sources
        # Another field to demo that we will preserve the whole FieldSet data structure.
        tags: Tags

    rule_runner.create_file("f1.txt")
    valid_addr = Address("", target_name="valid")
    valid_field_set = MockFieldSet(
        valid_addr, Sources(["f1.txt"], address=valid_addr), Tags(None, address=valid_addr)
    )
    # Sources value of None -> no files -> should be filtered out.
    empty_addr = Address("", target_name="empty")
    empty_field_set = MockFieldSet(
        empty_addr, Sources(None, address=empty_addr), Tags(None, address=empty_addr)
    )
    result = rule_runner.request(
        FieldSetsWithSources,
        [FieldSetsWithSourcesRequest([valid_field_set, empty_field_set])],
    )
    assert tuple(result) == (valid_field_set,)
def create_python_library(
    rule_runner: RuleRunner,
    source_files: List[FileContent],
    *,
    name: str = "library",
    dependencies: Optional[List[str]] = None,
) -> None:
    """Materialize `source_files` on disk and register a python_library owning them.

    The target is written into PACKAGE's BUILD file, with `sources` listing each file's
    basename; an empty __init__.py is also created under PACKAGE.
    """
    basenames = []
    for fc in source_files:
        rule_runner.create_file(fc.path, fc.content.decode())
        basenames.append(PurePath(fc.path).name)
    dep_list = list(dependencies) if dependencies else []
    build_content = dedent(
        f"""\
        python_library(
            name={repr(name)},
            sources={basenames},
            dependencies={dep_list},
        )
        """
    )
    rule_runner.add_to_build_file(PACKAGE, build_content)
    rule_runner.create_file(os.path.join(PACKAGE, "__init__.py"))
def create_test_target(
    rule_runner: RuleRunner,
    source_files: List[FileContent],
    *,
    name: str = "tests",
    dependencies: Optional[List[str]] = None,
    interpreter_constraints: Optional[str] = None,
) -> PythonTests:
    """Write `source_files` plus a python_tests target into PACKAGE and return the target.

    `interpreter_constraints`, when given, becomes the single entry of the target's
    `compatibility` list.
    """
    for fc in source_files:
        rule_runner.create_file(fc.path, fc.content.decode())
    compatibility = [interpreter_constraints] if interpreter_constraints else []
    rule_runner.add_to_build_file(
        relpath=PACKAGE,
        target=dedent(
            f"""\
            python_tests(
                name={repr(name)},
                dependencies={dependencies or []},
                compatibility={compatibility},
            )
            """
        ),
    )
    target = rule_runner.get_target(Address(PACKAGE, target_name=name))
    assert isinstance(target, PythonTests)
    return target
def test_infer_python_conftests() -> None:
    """A python_tests target infers deps on conftest.py files in ancestor directories."""
    rule_runner = RuleRunner(
        rules=[
            *ancestor_files.rules(),
            infer_python_conftest_dependencies,
            SubsystemRule(PythonInferSubsystem),
            QueryRule(InferredDependencies, (InferConftestDependencies,)),
        ],
        target_types=[PythonTests],
    )
    rule_runner.set_options(
        [
            "--backend-packages=pants.backend.python",
            "--source-root-patterns=src/python",
        ],
        env_inherit={"PATH", "PYENV_ROOT", "HOME"},
    )
    # conftest.py files at three nesting levels.
    rule_runner.create_file("src/python/root/conftest.py")
    rule_runner.add_to_build_file("src/python/root", "python_tests()")
    rule_runner.create_file("src/python/root/mid/conftest.py")
    rule_runner.add_to_build_file("src/python/root/mid", "python_tests()")
    rule_runner.create_file("src/python/root/mid/leaf/conftest.py")
    rule_runner.create_file("src/python/root/mid/leaf/this_is_a_test.py")
    rule_runner.add_to_build_file("src/python/root/mid/leaf", "python_tests()")

    def run_dep_inference(address: Address) -> InferredDependencies:
        target = rule_runner.get_target(address)
        return rule_runner.request(
            InferredDependencies,
            [InferConftestDependencies(target[PythonSources])],
        )

    # Only the two ancestors' conftests are inferred here (the leaf target owns its own
    # conftest.py directly), and sibling deps are marked not-inferrable.
    assert run_dep_inference(Address("src/python/root/mid/leaf")) == InferredDependencies(
        [
            Address("src/python/root", relative_file_path="conftest.py", target_name="root"),
            Address("src/python/root/mid", relative_file_path="conftest.py", target_name="mid"),
        ],
        sibling_dependencies_inferrable=False,
    )
def run_black(
    rule_runner: RuleRunner,
    targets: list[Target],
    *,
    config: str | None = None,
    passthrough_args: str | None = None,
    skip: bool = False,
    version: str | None = None,
) -> Tuple[Sequence[LintResult], FmtResult]:
    """Configure Black from the keyword options, then run both lint and fmt over `targets`.

    Returns the lint results alongside the fmt result so callers can assert on either.
    """
    options = ["--backend-packages=pants.backend.python.lint.black"]
    if config is not None:
        rule_runner.create_file(relpath="pyproject.toml", contents=config)
        options.append("--black-config=pyproject.toml")
    if passthrough_args:
        options.append(f"--black-args='{passthrough_args}'")
    if skip:
        options.append("--black-skip")
    if version:
        options.append(f"--black-version={version}")
    rule_runner.set_options(options)

    field_sets = [BlackFieldSet.create(t) for t in targets]
    lint_results = rule_runner.request(LintResults, [BlackRequest(field_sets)])
    # Feed the targets' current sources in as the "prior formatter" snapshot for fmt.
    input_sources = rule_runner.request(
        SourceFiles,
        [SourceFilesRequest(fs.sources for fs in field_sets)],
    )
    fmt_result = rule_runner.request(
        FmtResult,
        [BlackRequest(field_sets, prior_formatter_result=input_sources.snapshot)],
    )
    return lint_results.results, fmt_result
def test_runtime_package_dependency(rule_runner: RuleRunner) -> None:
    """runtime_package_dependencies exposes the built pex to the test — but not its sources."""
    create_pex_binary_target(rule_runner, BINARY_SOURCE)
    rule_runner.create_file(
        f"{PACKAGE}/test_binary_call.py",
        dedent(
            f"""\
            import os.path
            import subprocess

            def test_embedded_binary():
                assert b"Hello, test!" in subprocess.check_output(args=['./bin.pex'])

                # Ensure that we didn't accidentally pull in the binary's sources. This is a
                # special type of dependency that should not be included with the rest of the
                # normal dependencies.
                assert os.path.exists("{BINARY_SOURCE.path}") is False
            """
        ),
    )
    rule_runner.add_to_build_file(PACKAGE, "python_tests(runtime_package_dependencies=[':bin'])")
    tgt = rule_runner.get_target(Address(PACKAGE, relative_file_path="test_binary_call.py"))
    assert isinstance(tgt, PythonTests)
    result = run_pytest(rule_runner, tgt, passthrough_args="-s")
    assert result.exit_code == 0
def test_mypy_shadows_requirements(rule_runner: RuleRunner) -> None:
    """Test the behavior of a MyPy requirement shadowing a user's requirement.

    The way we load requirements is complex. We want to ensure that things still work properly in
    this edge case.
    """
    rule_runner.create_file("app.py", "import typed_ast\n")
    rule_runner.add_to_build_file(
        "",
        dedent(
            """\
            python_requirement_library(
                name='typed-ast',
                requirements=['typed-ast==1.4.1'],
            )

            python_library(name="lib")
            """
        ),
    )
    tgt = rule_runner.get_target(Address("", target_name="lib"))
    # Pin a mypy version that itself ships typed-ast, so it shadows the user's pin above.
    result = run_mypy(rule_runner, [tgt], additional_args=["--mypy-version=mypy==0.782"])
    assert len(result) == 1
    assert result[0].exit_code == 0
    assert "Success: no issues found" in result[0].stdout
def assert_imports_parsed(
    rule_runner: RuleRunner,
    content: Optional[str],
    *,
    expected_explicit: List[str],
    expected_string: List[str],
    filename: str = "project/foo.py",
    constraints: str = ">=3.6",
):
    """Parse imports from a single source file and assert both import categories.

    When `content` is None no file is created at all (exercising the no-sources path);
    an empty string creates an empty file.
    """
    # Fix: the original used `if content:`, which conflated an explicitly-passed empty
    # string (a valid, empty file) with None (no file). Compare against None explicitly.
    if content is not None:
        rule_runner.create_file(filename, content)
    rule_runner.set_options([], env_inherit={"PATH", "PYENV_ROOT", "HOME"})
    rule_runner.add_to_build_file("project", "python_library(sources=['**/*.py'])")
    tgt = rule_runner.get_target(Address("project"))
    imports = rule_runner.request(
        ParsedPythonImports,
        [
            ParsePythonImportsRequest(tgt[PythonSources], PexInterpreterConstraints([constraints]))
        ],
    )
    # Compare as sets: ordering of parsed imports is not part of the contract under test.
    assert set(imports.explicit_imports) == set(expected_explicit)
    assert set(imports.string_imports) == set(expected_string)
def test_fingerprint_dir(rule_runner: RuleRunner) -> None:
    """Directory-option fingerprints are deterministic and sensitive to order and content."""
    d1 = rule_runner.create_dir("a")
    d2 = rule_runner.create_dir("b")
    d3 = rule_runner.create_dir("c")

    for f, c in [
        ("a/bar/bar.config", "blah blah blah"),
        ("a/foo/foo.config", "meow meow meow"),
        ("b/foo/foo.config", "meow meow meow"),
        ("b/bar/bar.config", "blah blah blah"),
        ("c/bar/bar.config", "blah meow blah"),
    ]:
        rule_runner.create_file(f, contents=c)

    dp1 = OptionsFingerprinter().fingerprint(dir_option, [d1])
    dp2 = OptionsFingerprinter().fingerprint(dir_option, [d1, d2])
    dp3 = OptionsFingerprinter().fingerprint(dir_option, [d2, d1])
    dp4 = OptionsFingerprinter().fingerprint(dir_option, [d3])

    # Determinism: recomputing the fingerprint of the same value yields the same result.
    # (Fix: the original asserted `dp1 == dp1` and `dp2 == dp2`, which is vacuously true
    # and tested nothing.)
    assert dp1 == OptionsFingerprinter().fingerprint(dir_option, [d1])
    assert dp2 == OptionsFingerprinter().fingerprint(dir_option, [d1, d2])
    # Different dir lists — whether by ordering or by content — fingerprint differently.
    assert dp1 != dp3
    assert dp1 != dp4
    assert dp2 != dp3
def test_fingerprint_dict_with_file_content_change(rule_runner: RuleRunner) -> None:
    """Changing a referenced file's content changes a dict_with_files_option fingerprint."""
    # NOTE(review): both tuples use the same relpath "foo/bar.config", so f1 and f2 refer to
    # the same file (the second create overwrites the first). The test still passes, but
    # verify whether the second entry was meant to be a distinct path.
    f1, f2 = (
        rule_runner.create_file(f, contents=c)
        for (f, c) in (
            ("foo/bar.config", "blah blah blah"),
            ("foo/bar.config", "meow meow meow"),
        )
    )
    fp1 = OptionsFingerprinter().fingerprint(dict_with_files_option, {"properties": f"{f1},{f2}"})

    # Mutate the referenced file's content; the option value string itself is unchanged.
    with open(f1, "w") as f:
        f.write("123")

    fp2 = OptionsFingerprinter().fingerprint(dict_with_files_option, {"properties": f"{f1},{f2}"})
    assert fp1 != fp2
def test_warn_files_targets(rule_runner: RuleRunner, caplog) -> None:
    """Depending on files/relocated_files from a python_awslambda warns but still builds."""
    rule_runner.create_file("assets/f.txt")
    rule_runner.add_to_build_file(
        "assets",
        dedent(
            """\
            files(name='files', sources=['f.txt'])
            relocated_files(
                name='relocated',
                files_targets=[':files'],
                src='assets',
                dest='new_assets',
            )

            # Resources are fine.
            resources(name='resources', sources=['f.txt'])
            """
        ),
    )
    rule_runner.create_file("src/py/project/__init__.py")
    rule_runner.create_file(
        "src/py/project/app.py",
        """\
def handler(event, context):
    print('Hello, World!')
""",
    )
    rule_runner.add_to_build_file(
        "src/py/project",
        dedent(
            """\
            python_library(
                name='lib',
                dependencies=['assets:files', 'assets:relocated', 'assets:resources'],
            )

            python_awslambda(
                name='lambda',
                dependencies=[':lib'],
                handler='foo.bar.hello_world:handler',
                runtime='python3.7',
            )
            """
        ),
    )

    assert not caplog.records
    zip_file_relpath, _ = create_python_awslambda(
        rule_runner, Address("src/py/project", target_name="lambda")
    )
    # The build succeeds; the files-target dependencies only produce a warning.
    assert caplog.records
    assert "src.py.project/lambda.zip" == zip_file_relpath
    assert (
        "The python_awslambda target src/py/project:lambda transitively depends on"
        in caplog.text
    )
    assert "assets/f.txt:files" in caplog.text
    assert "assets:relocated" in caplog.text
    # resources targets are fine to depend on, so they must not be mentioned in the warning.
    assert "assets:resources" not in caplog.text
def test_infer_python_inits() -> None:
    """With --python-infer-inits, a target infers deps on ancestor __init__.py files."""
    rule_runner = RuleRunner(
        rules=[
            *ancestor_files.rules(),
            infer_python_init_dependencies,
            SubsystemRule(PythonInference),
            QueryRule(InferredDependencies, (InferInitDependencies,)),
        ],
        target_types=[PythonLibrary],
    )
    rule_runner.set_options(
        [
            "--backend-packages=pants.backend.python",
            "--python-infer-inits",
            "--source-root-patterns=src/python",
        ]
    )
    # Three nested packages, each with its own __init__.py and owning python_library.
    rule_runner.create_file("src/python/root/__init__.py")
    rule_runner.add_to_build_file("src/python/root", "python_library()")
    rule_runner.create_file("src/python/root/mid/__init__.py")
    rule_runner.add_to_build_file("src/python/root/mid", "python_library()")
    rule_runner.create_file("src/python/root/mid/leaf/__init__.py")
    rule_runner.add_to_build_file("src/python/root/mid/leaf", "python_library()")

    def run_dep_inference(address: Address) -> InferredDependencies:
        target = rule_runner.get_target(address)
        return rule_runner.request(
            InferredDependencies,
            [InferInitDependencies(target[PythonSources])],
        )

    # Only the two ancestors' __init__.py files are inferred (the leaf owns its own),
    # and sibling deps are marked not-inferrable.
    assert run_dep_inference(Address("src/python/root/mid/leaf")) == InferredDependencies(
        [
            Address("src/python/root", relative_file_path="__init__.py", target_name="root"),
            Address("src/python/root/mid", relative_file_path="__init__.py", target_name="mid"),
        ],
        sibling_dependencies_inferrable=False,
    )
def test_warn_files_targets(rule_runner: RuleRunner, caplog) -> None:
    """Depending on files/relocated_files from a pex_binary warns but still builds the pex."""
    rule_runner.set_options([], env_inherit={"PATH", "PYENV_ROOT", "HOME"})
    rule_runner.create_file("assets/f.txt")
    rule_runner.add_to_build_file(
        "assets",
        dedent(
            """\
            files(name='files', sources=['f.txt'])
            relocated_files(
                name='relocated',
                files_targets=[':files'],
                src='assets',
                dest='new_assets',
            )

            # Resources are fine.
            resources(name='resources', sources=['f.txt'])
            """
        ),
    )
    rule_runner.create_file("src/py/project/__init__.py")
    rule_runner.create_file("src/py/project/app.py", "print('hello')")
    rule_runner.add_to_build_file(
        "src/py/project",
        dedent(
            """\
            pex_binary(
                dependencies=['assets:files', 'assets:relocated', 'assets:resources'],
                entry_point="none",
            )
            """
        ),
    )
    tgt = rule_runner.get_target(Address("src/py/project"))
    field_set = PexBinaryFieldSet.create(tgt)

    assert not caplog.records
    result = rule_runner.request(BuiltPackage, [field_set])
    # Packaging succeeds; the files-target dependencies only produce a warning.
    assert caplog.records
    assert f"The pex_binary target {tgt.address} transitively depends on" in caplog.text
    assert "assets/f.txt:files" in caplog.text
    assert "assets:relocated" in caplog.text
    # resources targets are fine to depend on, so they must not appear in the warning.
    assert "assets:resources" not in caplog.text

    assert len(result.artifacts) == 1
    assert result.artifacts[0].relpath == "src.py.project/project.pex"
def test_map_first_party_modules_to_addresses(rule_runner: RuleRunner) -> None: rule_runner.set_options( ["--source-root-patterns=['root1', 'root2', 'root3']"]) # Two proto files belonging to the same target. We should use two file addresses. rule_runner.create_files("root1/protos", ["f1.proto", "f2.proto"]) rule_runner.add_to_build_file("root1/protos", "protobuf_library()") # These protos would result in the same module name, so neither should be used. rule_runner.create_file("root1/two_owners/f.proto") rule_runner.add_to_build_file("root1/two_owners", "protobuf_library()") rule_runner.create_file("root2/two_owners/f.proto") rule_runner.add_to_build_file("root2/two_owners", "protobuf_library()") # A file with grpc. This also uses the `python_source_root` mechanism, which should be # irrelevant to the module mapping because we strip source roots. rule_runner.create_file("root1/tests/f.proto") rule_runner.add_to_build_file( "root1/tests", "protobuf_library(grpc=True, python_source_root='root3')") result = rule_runner.request(FirstPartyPythonMappingImpl, [PythonProtobufMappingMarker()]) assert result == FirstPartyPythonMappingImpl( mapping=FrozenDict({ "protos.f1_pb2": (Address("root1/protos", relative_file_path="f1.proto"), ), "protos.f2_pb2": (Address("root1/protos", relative_file_path="f2.proto"), ), "tests.f_pb2": (Address("root1/tests", relative_file_path="f.proto"), ), "tests.f_pb2_grpc": (Address("root1/tests", relative_file_path="f.proto"), ), }), ambiguous_modules=FrozenDict({ "two_owners.f_pb2": ( Address("root1/two_owners", relative_file_path="f.proto"), Address("root2/two_owners", relative_file_path="f.proto"), ) }), )
def test_dependency_inference(rule_runner: RuleRunner) -> None:
    """Protobuf dep inference follows `import` statements, at target and file granularity."""
    rule_runner.create_file(
        "src/protos/project/f1.proto",
        dedent(
            """\
            import 'tests/f.proto';
            import 'unrelated_path/foo.proto';
            """
        ),
    )
    # Fix: the second import above originally read `import 'unrelated_path/foo.proto";`
    # with mismatched quotes, which is not valid proto import syntax. The path is still
    # deliberately unowned, so it must resolve to no dependency either way.
    rule_runner.create_file("src/protos/project/f2.proto", "import 'project/f1.proto';")
    rule_runner.add_to_build_file("src/protos/project", "protobuf_library()")
    rule_runner.create_file("src/protos/tests/f.proto")
    rule_runner.add_to_build_file("src/protos/tests", "protobuf_library()")

    def run_dep_inference(address: Address) -> InferredDependencies:
        rule_runner.set_options(
            [
                "--backend-packages=pants.backend.codegen.protobuf.python",
                "--source-root-patterns=['src/protos']",
            ]
        )
        target = rule_runner.get_target(address)
        return rule_runner.request(
            InferredDependencies, [InferProtobufDependencies(target[ProtobufSources])]
        )

    # At target granularity, deps from all files in the target are merged.
    build_address = Address("src/protos/project")
    assert run_dep_inference(build_address) == InferredDependencies(
        [
            Address("src/protos/tests", relative_file_path="f.proto"),
            Address("src/protos/project", relative_file_path="f1.proto"),
        ],
        sibling_dependencies_inferrable=True,
    )

    # At file granularity, only f1.proto's own imports count.
    file_address = Address("src/protos/project", relative_file_path="f1.proto")
    assert run_dep_inference(file_address) == InferredDependencies(
        [Address("src/protos/tests", relative_file_path="f.proto")],
        sibling_dependencies_inferrable=True,
    )
def test_strip_source_file_names(rule_runner: RuleRunner) -> None:
    """StrippedSourceFileNames removes the configured source root prefix from each file path."""

    def assert_stripped_source_file_names(
        address: Address, *, source_root: str, expected: List[str]
    ) -> None:
        rule_runner.set_options([f"--source-root-patterns=['{source_root}']"])
        tgt = rule_runner.get_target(address)
        result = rule_runner.request(StrippedSourceFileNames, [SourcesPathsRequest(tgt[Sources])])
        assert set(result) == set(expected)

    rule_runner.create_file("src/java/com/project/example.java")
    rule_runner.add_to_build_file("src/java/com/project", "target(sources=['*.java'])")
    assert_stripped_source_file_names(
        Address("src/java/com/project"),
        source_root="src/java",
        expected=["com/project/example.java"],
    )

    rule_runner.create_file("src/python/script.py")
    rule_runner.add_to_build_file("src/python", "target(sources=['*.py'])")
    assert_stripped_source_file_names(
        Address("src/python"), source_root="src/python", expected=["script.py"]
    )

    # Test a source root at the repo root. We have performance optimizations for this case
    # because there is nothing to strip.
    rule_runner.create_file("data.json")
    rule_runner.add_to_build_file("", "target(name='json', sources=['*.json'])\n")
    assert_stripped_source_file_names(
        Address("", target_name="json"), source_root="/", expected=["data.json"]
    )

    # Gracefully handle an empty sources field.
    rule_runner.add_to_build_file("", "target(name='empty', sources=[])")
    assert_stripped_source_file_names(
        Address("", target_name="empty"), source_root="/", expected=[]
    )
def test_map_first_party_modules_to_addresses(rule_runner: RuleRunner) -> None: rule_runner.set_options([ "--source-root-patterns=['src/python', 'tests/python', 'build-support']" ]) # Two modules belonging to the same target. We should generate subtargets for each file. rule_runner.create_files("src/python/project/util", ["dirutil.py", "tarutil.py"]) rule_runner.add_to_build_file("src/python/project/util", "python_library()") # A module with two owners, meaning that neither should be resolved. rule_runner.create_file("src/python/two_owners.py") rule_runner.add_to_build_file("src/python", "python_library()") rule_runner.create_file("build-support/two_owners.py") rule_runner.add_to_build_file("build-support", "python_library()") # A package module. Because there's only one source file belonging to the target, we should # not generate subtargets. rule_runner.create_file("tests/python/project_test/demo_test/__init__.py") rule_runner.add_to_build_file("tests/python/project_test/demo_test", "python_library()") # A module with both an implementation and a type stub. rule_runner.create_files("src/python/stubs", ["stub.py", "stub.pyi"]) rule_runner.add_to_build_file("src/python/stubs", "python_library()") result = rule_runner.request(FirstPartyModuleToAddressMapping, []) assert result.mapping == FrozenDict({ "project.util.dirutil": (Address("src/python/project/util", relative_file_path="dirutil.py"), ), "project.util.tarutil": (Address("src/python/project/util", relative_file_path="tarutil.py"), ), "project_test.demo_test": (Address("tests/python/project_test/demo_test", relative_file_path="__init__.py"), ), "stubs.stub": ( Address("src/python/stubs", relative_file_path="stub.py"), Address("src/python/stubs", relative_file_path="stub.pyi"), ), })
def test_infer_python_imports(caplog) -> None:
    """Import dep inference: 3rdparty requirements, stubs, string imports, and ambiguity."""
    rule_runner = RuleRunner(
        rules=[
            *import_rules(),
            *target_types_rules.rules(),
            QueryRule(InferredDependencies, [InferPythonImportDependencies]),
        ],
        target_types=[PythonSourcesGeneratorTarget, PythonRequirementTarget],
    )
    rule_runner.add_to_build_file(
        "3rdparty/python",
        dedent(
            """\
            python_requirement(
                name='Django',
                requirements=['Django==1.21'],
            )
            """
        ),
    )

    # If there's a `.py` and `.pyi` file for the same module, we should infer a dependency on both.
    rule_runner.create_file("src/python/str_import/subdir/f.py")
    rule_runner.create_file("src/python/str_import/subdir/f.pyi")
    rule_runner.add_to_build_file("src/python/str_import/subdir", "python_sources()")

    rule_runner.create_file("src/python/util/dep.py")
    rule_runner.add_to_build_file("src/python/util", "python_sources()")

    rule_runner.create_file(
        "src/python/app.py",
        dedent(
            """\
            import django
            import unrecognized.module

            from util.dep import Demo
            from util import dep
            """
        ),
    )
    rule_runner.create_file(
        "src/python/f2.py",
        dedent(
            """\
            import typing
            # Import from another file in the same target.
            from app import main
            # Dynamic string import.
            importlib.import_module('str_import.subdir.f')
            """
        ),
    )
    rule_runner.add_to_build_file("src/python", "python_sources()")

    def run_dep_inference(
        address: Address, *, enable_string_imports: bool = False
    ) -> InferredDependencies:
        args = [
            "--backend-packages=pants.backend.python",
            "--source-root-patterns=src/python",
        ]
        if enable_string_imports:
            args.append("--python-infer-string-imports")
        rule_runner.set_options(args, env_inherit={"PATH", "PYENV_ROOT", "HOME"})
        target = rule_runner.get_target(address)
        return rule_runner.request(
            InferredDependencies, [InferPythonImportDependencies(target[PythonSourceField])]
        )

    # `django` resolves to the requirement target; `unrecognized.module` has no owner and
    # yields nothing; both `util.dep` imports collapse to one file dep.
    assert run_dep_inference(
        Address("src/python", relative_file_path="app.py")
    ) == InferredDependencies(
        [
            Address("3rdparty/python", target_name="Django"),
            Address("src/python/util", relative_file_path="dep.py"),
        ],
    )

    addr = Address("src/python", relative_file_path="f2.py")
    # Without string imports, only the literal `from app import main` is inferred.
    assert run_dep_inference(addr) == InferredDependencies(
        [Address("src/python", relative_file_path="app.py")]
    )
    # With string imports on, the importlib string resolves to both the .py and .pyi stubs.
    assert run_dep_inference(addr, enable_string_imports=True) == InferredDependencies(
        [
            Address("src/python", relative_file_path="app.py"),
            Address("src/python/str_import/subdir", relative_file_path="f.py"),
            Address("src/python/str_import/subdir", relative_file_path="f.pyi"),
        ],
    )

    # Test handling of ambiguous imports. We should warn on the ambiguous dependency, but not warn
    # on the disambiguated one and should infer a dep.
    caplog.clear()
    rule_runner.create_files("src/python/ambiguous", ["dep.py", "disambiguated_via_ignores.py"])
    rule_runner.create_file(
        "src/python/ambiguous/main.py",
        "import ambiguous.dep\nimport ambiguous.disambiguated_via_ignores\n",
    )
    rule_runner.add_to_build_file(
        "src/python/ambiguous",
        dedent(
            """\
            python_sources(name='dep1', sources=['dep.py', 'disambiguated_via_ignores.py'])
            python_sources(name='dep2', sources=['dep.py', 'disambiguated_via_ignores.py'])
            python_sources(
                name='main',
                sources=['main.py'],
                dependencies=['!./disambiguated_via_ignores.py:dep2'],
            )
            """
        ),
    )
    assert run_dep_inference(
        Address("src/python/ambiguous", target_name="main", relative_file_path="main.py")
    ) == InferredDependencies(
        [
            Address(
                "src/python/ambiguous",
                target_name="dep1",
                relative_file_path="disambiguated_via_ignores.py",
            )
        ],
    )
    assert len(caplog.records) == 1
    assert "The target src/python/ambiguous/main.py:main imports `ambiguous.dep`" in caplog.text
    assert "['src/python/ambiguous/dep.py:dep1', 'src/python/ambiguous/dep.py:dep2']" in caplog.text
    assert "disambiguated_via_ignores.py" not in caplog.text
def test_infer_python_strict(caplog) -> None:
    """Exercise `--python-infer-unowned-dependency-behavior` for an unowned import.

    An import with no owning target should warn or error depending on the option
    value, and every mode should be silent once the module gains an owner via an
    explicit requirement, a requirements.txt, or a first-party source file.
    """
    runner = RuleRunner(
        rules=[
            *import_rules(),
            *target_types_rules.rules(),
            QueryRule(InferredDependencies, [InferPythonImportDependencies]),
        ],
        target_types=[
            PythonSourcesGeneratorTarget,
            PythonRequirementTarget,
            PythonRequirementsFile,
        ],
        context_aware_object_factories={"python_requirements": PythonRequirementsCAOF},
    )
    runner.create_file(
        "src/python/cheesey.py",
        "import venezuelan_beaver_cheese",
    )
    runner.add_to_build_file("src/python", "python_sources()")

    cheesey_address = Address("src/python", relative_file_path="cheesey.py")

    def infer(address: Address, behavior: str) -> InferredDependencies:
        # Options are (re)set on every call so each case controls the behavior flag.
        runner.set_options(
            [
                "--backend-packages=pants.backend.python",
                f"--python-infer-unowned-dependency-behavior={behavior}",
                "--source-root-patterns=src/python",
            ],
            env_inherit={"PATH", "PYENV_ROOT", "HOME"},
        )
        target = runner.get_target(address)
        return runner.request(
            InferredDependencies,
            [InferPythonImportDependencies(target[PythonSourceField])],
        )

    def assert_all_modes_silent() -> None:
        # Once the module is owned, no mode should log anything or raise.
        for usage in UnownedDependencyUsage:
            infer(cheesey_address, usage.value)
            assert not caplog.records

    # First test with "warning": inference succeeds but logs the unowned import.
    infer(cheesey_address, "warning")
    assert len(caplog.records) == 1
    assert "The following imports in src/python/cheesey.py have no owners:" in caplog.text
    assert " * venezuelan_beaver_cheese" in caplog.text

    # Now test with "error": inference fails with UnownedDependencyError.
    caplog.clear()
    with pytest.raises(ExecutionError) as exc:
        infer(cheesey_address, "error")
    assert isinstance(exc.value.wrapped_exceptions[0], UnownedDependencyError)
    # one for the error being raised and one for our message
    assert len(caplog.records) == 2
    assert "The following imports in src/python/cheesey.py have no owners:" in caplog.text
    assert " * venezuelan_beaver_cheese" in caplog.text

    caplog.clear()
    # All modes should be fine if the module is explicitly declared as a requirement.
    runner.add_to_build_file(
        "src/python",
        dedent(
            """\
            python_requirement(
                name="venezuelan_beaver_cheese",
                modules=["venezuelan_beaver_cheese"],
                requirements=["venezuelan_beaver_cheese==1.0.0"],
            )

            python_sources(dependencies=[":venezuelan_beaver_cheese"])
            """
        ),
        overwrite=True,
    )
    assert_all_modes_silent()
    runner.add_to_build_file("src/python", "python_sources()", overwrite=True)  # Cleanup

    # All modes should be fine if the module is implicitly found via requirements.txt.
    runner.create_file("src/python/requirements.txt", "venezuelan_beaver_cheese==1.0.0")
    runner.add_to_build_file(
        "src/python",
        dedent(
            """\
            python_requirements()
            python_sources()
            """
        ),
        overwrite=True,
    )
    assert_all_modes_silent()

    # All modes should be fine if the module is owned by a first party source.
    runner.create_file("src/python/venezuelan_beaver_cheese.py")
    runner.add_to_build_file("src/python", "python_sources()", overwrite=True)
    assert_all_modes_silent()
def test_generate_chroot(chroot_rule_runner: RuleRunner) -> None:
    """Build the setup.py chroot for `foo-dist` and check its files and kwargs.

    The dist pulls in first-party sources (including a `.pyi` stub), a dependent
    distribution (expected to show up as an install_requires pin rather than as
    sources), `resources` (as package_data), loose `files`, and a
    `.with_binaries` console script.
    """
    runner = chroot_rule_runner

    # A second distribution that `foo` depends on.
    runner.add_to_build_file(
        "src/python/foo/bar/baz",
        textwrap.dedent(
            """
            python_distribution(
                name="baz-dist",
                dependencies=[':baz'],
                provides=setup_py(
                    name='baz',
                    version='1.1.1'
                )
            )

            python_library()
            """
        ),
    )
    runner.create_file("src/python/foo/bar/baz/baz.py")

    # A library plus the pex_binary backing the `foo_main` console script.
    runner.add_to_build_file(
        "src/python/foo/qux",
        textwrap.dedent(
            """
            python_library()

            pex_binary(name="bin", entry_point="foo.qux.bin:main")
            """
        ),
    )
    runner.create_file("src/python/foo/qux/__init__.py")
    runner.create_file("src/python/foo/qux/qux.py")
    # Add a `.pyi` stub file to ensure we include it in the final result.
    runner.create_file("src/python/foo/qux/qux.pyi")

    # Non-Python assets: resources and loose files.
    runner.add_to_build_file("src/python/foo/resources", 'resources(sources=["js/code.js"])')
    runner.create_file("src/python/foo/resources/js/code.js")
    runner.add_to_build_file("files", 'files(sources=["README.txt"])')
    runner.create_file("files/README.txt")

    # The distribution under test.
    runner.add_to_build_file(
        "src/python/foo",
        textwrap.dedent(
            """
            python_distribution(
                name='foo-dist',
                dependencies=[
                    ':foo',
                ],
                provides=setup_py(
                    name='foo',
                    version='1.2.3'
                ).with_binaries(
                    foo_main='src/python/foo/qux:bin'
                )
            )

            python_library(
                dependencies=[
                    'src/python/foo/bar/baz',
                    'src/python/foo/qux',
                    'src/python/foo/resources',
                    'files',
                ]
            )
            """
        ),
    )
    runner.create_file("src/python/foo/__init__.py", _namespace_decl)
    runner.create_file("src/python/foo/foo.py")

    expected_files = [
        "src/files/README.txt",
        "src/foo/qux/__init__.py",
        "src/foo/qux/qux.py",
        "src/foo/qux/qux.pyi",
        "src/foo/resources/js/code.js",
        "src/foo/__init__.py",
        "src/foo/foo.py",
        "setup.py",
        "MANIFEST.in",
    ]
    expected_setup_kwargs = {
        "name": "foo",
        "version": "1.2.3",
        "plugin_demo": "hello world",
        "package_dir": {"": "src"},
        "packages": ("foo", "foo.qux"),
        "namespace_packages": ("foo",),
        "package_data": {"foo": ("resources/js/code.js",)},
        "install_requires": ("baz==1.1.1",),
        "entry_points": {"console_scripts": ["foo_main=foo.qux.bin:main"]},
    }
    assert_chroot(
        runner,
        expected_files,
        expected_setup_kwargs,
        Address("src/python/foo", target_name="foo-dist"),
    )
def test_fingerprint_file_outside_buildroot(tmp_path: Path, rule_runner: RuleRunner) -> None:
    """Fingerprinting a file option that points outside the build root must fail."""
    external = tmp_path / "foobar"
    created_path = rule_runner.create_file(external.as_posix(), contents="foobar")
    # The fingerprinter rejects paths that are not inside the build root.
    with pytest.raises(ValueError):
        OptionsFingerprinter().fingerprint(file_option, created_path)