def test_requirements_txt(rule_runner: RuleRunner) -> None:
    """This tests that we correctly create a new python_requirement for each entry in a
    requirements.txt file, where each dependency is unique.

    Some edge cases:
    * We ignore comments and options (values that start with `--`).
    * If a module_mapping is given, and the project is in the map, we copy over a subset of the
      mapping to the created target. It works regardless of capitalization.
    * Projects get normalized thanks to Requirement.parse().
    """
    assert_python_requirements(
        rule_runner,
        # BUILD file content: the macro invocation under test.
        dedent(
            """\
            python_requirements(
                module_mapping={'ansiCOLORS': ['colors']},
                type_stubs_module_mapping={'Django-types': ['django']},
            )
            """
        ),
        # requirements.txt content, including comments, options, markers, and a VCS requirement.
        dedent(
            """\
            # Comment.
            --find-links=https://duckduckgo.com
            ansicolors>=1.18.0
            Django==3.2 ; python_version>'3'
            Django-types
            Un-Normalized-PROJECT  # Inline comment.
            pip@ git+https://github.com/pypa/pip.git
            """
        ),
        expected_file_dep=PythonRequirementsFile(
            {"sources": ["requirements.txt"]},
            Address("", target_name="requirements.txt"),
        ),
        expected_targets=[
            # `module_mapping` entry applied despite the 'ansiCOLORS' capitalization mismatch.
            PythonRequirementTarget(
                {
                    "dependencies": [":requirements.txt"],
                    "requirements": [PipRequirement.parse("ansicolors>=1.18.0")],
                    "modules": ["colors"],
                },
                Address("", target_name="ansicolors"),
            ),
            # Environment marker is preserved in the parsed requirement.
            PythonRequirementTarget(
                {
                    "dependencies": [":requirements.txt"],
                    "requirements": [PipRequirement.parse("Django==3.2 ; python_version>'3'")],
                },
                Address("", target_name="Django"),
            ),
            # `type_stubs_module_mapping` entry carried over.
            PythonRequirementTarget(
                {
                    "dependencies": [":requirements.txt"],
                    "requirements": [PipRequirement.parse("Django-types")],
                    "type_stub_modules": ["django"],
                },
                Address("", target_name="Django-types"),
            ),
            # Dashes are normalized to underscores in the requirement, not in the target name.
            PythonRequirementTarget(
                {
                    "dependencies": [":requirements.txt"],
                    "requirements": [PipRequirement.parse("Un_Normalized_PROJECT")],
                },
                Address("", target_name="Un-Normalized-PROJECT"),
            ),
            # Direct-URL (VCS) requirement.
            PythonRequirementTarget(
                {
                    "dependencies": [":requirements.txt"],
                    "requirements": [PipRequirement.parse("pip@ git+https://github.com/pypa/pip.git")],
                },
                Address("", target_name="pip"),
            ),
        ],
    )
def make_target_with_origin(self, source_files: List[FileContent]) -> Target:
    """Materialize `source_files` on disk and return a `python_library` target at `//:target`.

    NOTE(review): the returned target's `sources` field is not populated here; presumably the
    target type's defaults cover the written files — confirm against callers.
    """
    for source_file in source_files:
        # `source_file.path` is already a str; the original `f"{source_file.path}"` wrapper
        # was a redundant no-op f-string.
        self.create_file(source_file.path, source_file.content.decode())
    return PythonLibrary({}, address=Address.parse(":target"))
def test_source_plugin(rule_runner: RuleRunner) -> None:
    """Run MyPy with a first-party source plugin that has its own (transitive) dependencies."""
    # NB: We make this source plugin fairly complex by having it use transitive dependencies.
    # This is to ensure that we can correctly support plugins with dependencies.
    # The plugin changes the return type of functions ending in `__overridden_by_plugin` to have a
    # return type of `None`.
    plugin_file = dedent(
        """\
        from typing import Callable, Optional, Type

        from mypy.plugin import FunctionContext, Plugin
        from mypy.types import NoneType, Type as MyPyType

        from plugins.subdir.dep import is_overridable_function
        from project.subdir.util import noop

        noop()

        class ChangeReturnTypePlugin(Plugin):
            def get_function_hook(
                self, fullname: str
            ) -> Optional[Callable[[FunctionContext], MyPyType]]:
                return hook if is_overridable_function(fullname) else None

        def hook(ctx: FunctionContext) -> MyPyType:
            return NoneType()

        def plugin(_version: str) -> Type[Plugin]:
            return ChangeReturnTypePlugin
        """
    )
    rule_runner.write_files(
        {
            "BUILD": dedent(
                f"""\
                python_requirement(name='mypy', requirements=['{MyPy.default_version}'])
                python_requirement(name="more-itertools", requirements=["more-itertools==8.4.0"])
                """
            ),
            "pants-plugins/plugins/subdir/__init__.py": "",
            "pants-plugins/plugins/subdir/dep.py": dedent(
                """\
                from more_itertools import flatten

                def is_overridable_function(name: str) -> bool:
                    assert list(flatten([[1, 2], [3, 4]])) == [1, 2, 3, 4]
                    return name.endswith("__overridden_by_plugin")
                """
            ),
            "pants-plugins/plugins/subdir/BUILD": "python_sources()",
            # The plugin can depend on code located anywhere in the project; its dependencies need
            # not be in the same directory.
            f"{PACKAGE}/subdir/__init__.py": "",
            f"{PACKAGE}/subdir/util.py": "def noop() -> None:\n pass\n",
            f"{PACKAGE}/subdir/BUILD": "python_sources()",
            "pants-plugins/plugins/__init__.py": "",
            "pants-plugins/plugins/change_return_type.py": plugin_file,
            "pants-plugins/plugins/BUILD": "python_sources()",
            f"{PACKAGE}/__init__.py": "",
            f"{PACKAGE}/f.py": dedent(
                """\
                def add(x: int, y: int) -> int:
                    return x + y

                def add__overridden_by_plugin(x: int, y: int) -> int:
                    return x + y

                result = add__overridden_by_plugin(1, 1)
                assert add(result, 2) == 4
                """
            ),
            f"{PACKAGE}/BUILD": "python_sources()",
            "mypy.ini": dedent(
                """\
                [mypy]
                plugins = plugins.change_return_type
                """
            ),
        }
    )

    def run_mypy_with_plugin(tgt: Target) -> CheckResult:
        # The plugin lives outside the normal source roots, so both the plugin option and the
        # source-root patterns must be set; the lockfile is disabled for this ad-hoc setup.
        result = run_mypy(
            rule_runner,
            [tgt],
            extra_args=[
                "--mypy-source-plugins=['pants-plugins/plugins']",
                "--mypy-lockfile=<none>",
                "--source-root-patterns=['pants-plugins', 'src/py']",
            ],
        )
        assert len(result) == 1
        return result[0]

    tgt = rule_runner.get_target(Address(PACKAGE, relative_file_path="f.py"))
    result = run_mypy_with_plugin(tgt)
    assert result.exit_code == 1
    # Line 8 is where `result` (now typed None by the plugin) is passed to `add`.
    assert f"{PACKAGE}/f.py:8" in result.stdout
    # Ensure we don't accidentally check the source plugin itself.
    assert "(checked 1 source file)" in result.stdout

    # Ensure that running MyPy on the plugin itself still works.
    plugin_tgt = rule_runner.get_target(
        Address("pants-plugins/plugins", relative_file_path="change_return_type.py")
    )
    result = run_mypy_with_plugin(plugin_tgt)
    assert result.exit_code == 0
    assert "Success: no issues found in 1 source file" in result.stdout
def test_passing_source(rule_runner: RuleRunner) -> None:
    """A known-good file should pass the check without failures."""
    files = {
        f"{PACKAGE}/f.py": GOOD_FILE,
        f"{PACKAGE}/BUILD": "python_library()",
    }
    rule_runner.write_files(files)
    target = rule_runner.get_target(Address(PACKAGE, relative_file_path="f.py"))
    assert_success(rule_runner, target)
def test_generates_python(rule_runner: RuleRunner) -> None:
    """Generate `_pb2.py` files from protobuf sources across multiple source roots."""
    # This tests a few things:
    #  * We generate the correct file names.
    #  * Protobuf files can import other protobuf files, and those can import others
    #    (transitive dependencies). We'll only generate the requested target, though.
    #  * We can handle multiple source roots, which need to be preserved in the final output.
    rule_runner.write_files(
        {
            "src/protobuf/dir1/f.proto": dedent(
                """\
                syntax = "proto3";

                package dir1;

                message Person {
                  string name = 1;
                  int32 id = 2;
                  string email = 3;
                }
                """
            ),
            "src/protobuf/dir1/f2.proto": dedent(
                """\
                syntax = "proto3";

                package dir1;
                """
            ),
            "src/protobuf/dir1/BUILD": "protobuf_sources()",
            "src/protobuf/dir2/f.proto": dedent(
                """\
                syntax = "proto3";

                package dir2;

                import "dir1/f.proto";
                """
            ),
            # `python_source_root` redirects dir2's generated code to a different source root.
            "src/protobuf/dir2/BUILD": (
                "protobuf_sources(dependencies=['src/protobuf/dir1'], "
                "python_source_root='src/python')"
            ),
            # Test another source root.
            "tests/protobuf/test_protos/f.proto": dedent(
                """\
                syntax = "proto3";

                package test_protos;

                import "dir2/f.proto";
                """
            ),
            "tests/protobuf/test_protos/BUILD": (
                "protobuf_sources(dependencies=['src/protobuf/dir2'])"
            ),
        }
    )

    def assert_gen(addr: Address, expected: str) -> None:
        assert_files_generated(
            rule_runner,
            addr,
            source_roots=["src/python", "/src/protobuf", "/tests/protobuf"],
            expected_files=[expected],
        )

    assert_gen(
        Address("src/protobuf/dir1", relative_file_path="f.proto"), "src/protobuf/dir1/f_pb2.py"
    )
    assert_gen(
        Address("src/protobuf/dir1", relative_file_path="f2.proto"), "src/protobuf/dir1/f2_pb2.py"
    )
    # dir2's output lands under src/python because of `python_source_root`.
    assert_gen(
        Address("src/protobuf/dir2", relative_file_path="f.proto"), "src/python/dir2/f_pb2.py"
    )
    assert_gen(
        Address("tests/protobuf/test_protos", relative_file_path="f.proto"),
        "tests/protobuf/test_protos/f_pb2.py",
    )
def test_generate_source_and_test_targets() -> None:
    """Generator targets expand into one per-file target each, with per-file overrides applied."""
    rule_runner = RuleRunner(
        rules=[
            *target_types.rules(),
            QueryRule(GeneratedTargets, [GenerateTargetsFromShunit2Tests]),
            QueryRule(GeneratedTargets, [GenerateTargetsFromShellSources]),
        ],
        target_types=[Shunit2TestsGeneratorTarget, ShellSourcesGeneratorTarget],
    )
    rule_runner.write_files(
        {
            "src/sh/BUILD": dedent(
                """\
                shell_sources(
                    name='lib',
                    sources=['**/*.sh', '!**/*_test.sh'],
                    overrides={'f1.sh': {'tags': ['overridden']}},
                )
                shunit2_tests(
                    name='tests',
                    sources=['**/*_test.sh'],
                    overrides={'f1_test.sh': {'tags': ['overridden']}},
                )
                """
            ),
            "src/sh/f1.sh": "",
            "src/sh/f1_test.sh": "",
            "src/sh/f2.sh": "",
            "src/sh/f2_test.sh": "",
            "src/sh/subdir/f.sh": "",
            "src/sh/subdir/f_test.sh": "",
        }
    )
    sources_generator = rule_runner.get_target(Address("src/sh", target_name="lib"))
    tests_generator = rule_runner.get_target(Address("src/sh", target_name="tests"))

    def gen_source_tgt(rel_fp: str, tags: list[str] | None = None) -> ShellSourceTarget:
        # Expected generated `shell_source` target for one file, keyed off the generator's address.
        return ShellSourceTarget(
            {SingleSourceField.alias: rel_fp, Tags.alias: tags},
            Address("src/sh", target_name="lib", relative_file_path=rel_fp),
            residence_dir=os.path.dirname(os.path.join("src/sh", rel_fp)),
        )

    def gen_test_tgt(rel_fp: str, tags: list[str] | None = None) -> Shunit2TestTarget:
        # Expected generated `shunit2_test` target for one file.
        return Shunit2TestTarget(
            {SingleSourceField.alias: rel_fp, Tags.alias: tags},
            Address("src/sh", target_name="tests", relative_file_path=rel_fp),
            residence_dir=os.path.dirname(os.path.join("src/sh", rel_fp)),
        )

    sources_generated = rule_runner.request(
        GeneratedTargets, [GenerateTargetsFromShellSources(sources_generator)]
    )
    tests_generated = rule_runner.request(
        GeneratedTargets, [GenerateTargetsFromShunit2Tests(tests_generator)]
    )

    assert sources_generated == GeneratedTargets(
        sources_generator,
        {
            # Only f1.sh picks up the `tags` override.
            gen_source_tgt("f1.sh", tags=["overridden"]),
            gen_source_tgt("f2.sh"),
            gen_source_tgt("subdir/f.sh"),
        },
    )
    assert tests_generated == GeneratedTargets(
        tests_generator,
        {
            gen_test_tgt("f1_test.sh", tags=["overridden"]),
            gen_test_tgt("f2_test.sh"),
            gen_test_tgt("subdir/f_test.sh"),
        },
    )
def test_respects_passthrough_args(rule_runner: RuleRunner) -> None:
    """Forwarded `--pylint-args` should let the otherwise-failing file pass."""
    files = {
        f"{PACKAGE}/f.py": BAD_FILE,
        f"{PACKAGE}/BUILD": "python_library()",
    }
    rule_runner.write_files(files)
    target = rule_runner.get_target(Address(PACKAGE, relative_file_path="f.py"))
    assert_success(rule_runner, target, extra_args=["--pylint-args='--disable=C0103'"])
def is_applicable(tgt_name: str, fp: str) -> bool:
    """Return whether `PythonTestFieldSet` applies to the generated target at this address."""
    address = Address(PACKAGE, target_name=tgt_name, relative_file_path=fp)
    return PythonTestFieldSet.is_applicable(rule_runner.get_target(address))
def test_dependency_inference(rule_runner: RuleRunner, caplog) -> None:
    """Infer protobuf deps from `import` statements.

    Unresolvable import paths are ignored; ambiguous owners produce a warning and no inferred
    dep, unless disambiguated via a `!`-ignore in the consumer's `dependencies`.
    """
    rule_runner.set_options(["--source-root-patterns=['src/protos']"])
    rule_runner.write_files(
        {
            "src/protos/project/f1.proto": dedent(
                """\
                import 'tests/f.proto';
                import 'unrelated_path/foo.proto';
                """
                # NOTE: the second import's quotes were mismatched (`'...";`) in the original;
                # fixed to balanced single quotes so the fixture is valid proto-style syntax.
            ),
            "src/protos/project/f2.proto": "import 'project/f1.proto';",
            "src/protos/project/BUILD": "protobuf_library()",
            "src/protos/tests/f.proto": "",
            "src/protos/tests/BUILD": "protobuf_library()",
            # Test handling of ambiguous imports. We should warn on the ambiguous dependency, but
            # not warn on the disambiguated one and should infer a dep.
            "src/protos/ambiguous/dep.proto": "",
            "src/protos/ambiguous/disambiguated.proto": "",
            "src/protos/ambiguous/main.proto": dedent(
                """\
                import 'ambiguous/dep.proto';
                import 'ambiguous/disambiguated.proto';
                """
                # Same quote-mismatch fix as above for the second import.
            ),
            "src/protos/ambiguous/BUILD": dedent(
                """\
                protobuf_library(name='dep1', sources=['dep.proto', 'disambiguated.proto'])
                protobuf_library(name='dep2', sources=['dep.proto', 'disambiguated.proto'])
                protobuf_library(
                    name='main',
                    sources=['main.proto'],
                    dependencies=['!./disambiguated.proto:dep2'],
                )
                """
            ),
        }
    )

    def run_dep_inference(address: Address) -> InferredDependencies:
        tgt = rule_runner.get_target(address)
        return rule_runner.request(
            InferredDependencies, [InferProtobufDependencies(tgt[ProtobufSources])]
        )

    build_address = Address("src/protos/project")
    assert run_dep_inference(build_address) == InferredDependencies(
        [
            Address("src/protos/tests", relative_file_path="f.proto"),
            Address("src/protos/project", relative_file_path="f1.proto"),
        ],
        sibling_dependencies_inferrable=True,
    )

    file_address = Address("src/protos/project", relative_file_path="f1.proto")
    assert run_dep_inference(file_address) == InferredDependencies(
        [Address("src/protos/tests", relative_file_path="f.proto")],
        sibling_dependencies_inferrable=True,
    )

    caplog.clear()
    # The disambiguated file is inferred (owner resolves to dep1 after the `!` ignore), while the
    # ambiguous `dep.proto` import only warns.
    assert run_dep_inference(
        Address("src/protos/ambiguous", target_name="main")
    ) == InferredDependencies(
        [
            Address(
                "src/protos/ambiguous",
                target_name="dep1",
                relative_file_path="disambiguated.proto",
            )
        ],
        sibling_dependencies_inferrable=True,
    )
    assert len(caplog.records) == 1
    assert "The target src/protos/ambiguous:main imports `ambiguous/dep.proto`" in caplog.text
    assert (
        "['src/protos/ambiguous/dep.proto:dep1', 'src/protos/ambiguous/dep.proto:dep2']"
        in caplog.text
    )
    assert "disambiguated.proto" not in caplog.text
def test_owned_dependencies() -> None:
    """An exported target owns itself plus the dependencies not claimed by a closer exporter."""
    rule_runner = create_setup_py_rule_runner(
        rules=[
            get_owned_dependencies,
            get_exporting_owner,
            QueryRule(OwnedDependencies, (DependencyOwner,)),
        ]
    )
    rule_runner.add_to_build_file(
        "src/python/foo/bar/baz",
        textwrap.dedent(
            """
            python_library(name='baz1', sources=[])
            python_library(name='baz2', sources=[])
            """
        ),
    )
    rule_runner.add_to_build_file(
        "src/python/foo/bar",
        textwrap.dedent(
            """
            python_distribution(
                name='bar1-dist',
                dependencies=[':bar1'],
                provides=setup_py(name='bar1', version='1.1.1'),
            )

            python_library(
                name='bar1',
                sources=[],
                dependencies=['src/python/foo/bar/baz:baz1'],
            )

            python_library(
                name='bar2',
                sources=[],
                dependencies=[':bar-resources', 'src/python/foo/bar/baz:baz2'],
            )

            resources(name='bar-resources', sources=[])
            """
        ),
    )
    rule_runner.add_to_build_file(
        "src/python/foo",
        textwrap.dedent(
            """
            python_distribution(
                name='foo-dist',
                dependencies=[':foo'],
                provides=setup_py(name='foo', version='3.4.5'),
            )

            python_library(
                sources=[],
                dependencies=['src/python/foo/bar:bar1', 'src/python/foo/bar:bar2'],
            )
            """
        ),
    )

    def assert_owned(owned: Iterable[str], exported: Address):
        # Compare address specs order-insensitively.
        tgt = rule_runner.get_target(exported)
        assert sorted(owned) == sorted(
            od.target.address.spec
            for od in rule_runner.request(
                OwnedDependencies,
                [DependencyOwner(ExportedTarget(tgt))],
            )
        )

    assert_owned(
        ["src/python/foo/bar:bar1", "src/python/foo/bar:bar1-dist", "src/python/foo/bar/baz:baz1"],
        Address("src/python/foo/bar", target_name="bar1-dist"),
    )
    # foo-dist does NOT own bar1/baz1: those are owned by the closer bar1-dist.
    assert_owned(
        [
            "src/python/foo",
            "src/python/foo:foo-dist",
            "src/python/foo/bar:bar2",
            "src/python/foo/bar:bar-resources",
            "src/python/foo/bar/baz:baz2",
        ],
        Address("src/python/foo", target_name="foo-dist"),
    )
def test_get_owner_simple(exporting_owner_rule_runner: RuleRunner) -> None:
    """Resolve each target's exporting owner; unreachable targets have none, and targets
    reachable from two exporters are ambiguous."""
    exporting_owner_rule_runner.add_to_build_file(
        "src/python/foo/bar/baz",
        textwrap.dedent(
            """
            python_library(name='baz1', sources=[])
            python_library(name='baz2', sources=[])
            """
        ),
    )
    exporting_owner_rule_runner.add_to_build_file(
        "src/python/foo/bar",
        textwrap.dedent(
            """
            python_distribution(
                name='bar1',
                dependencies=['src/python/foo/bar/baz:baz1'],
                provides=setup_py(name='bar1', version='1.1.1'),
            )
            python_library(
                name='bar2',
                sources=[],
                dependencies=[':bar-resources', 'src/python/foo/bar/baz:baz2'],
            )
            resources(name='bar-resources', sources=[])
            """
        ),
    )
    exporting_owner_rule_runner.add_to_build_file(
        "src/python/foo",
        textwrap.dedent(
            """
            python_distribution(
                name='foo1',
                dependencies=['src/python/foo/bar/baz:baz2'],
                provides=setup_py(name='foo1', version='0.1.2'),
            )
            python_library(name='foo2', sources=[])
            python_distribution(
                name='foo3',
                dependencies=['src/python/foo/bar:bar2'],
                provides=setup_py(name='foo3', version='3.4.5'),
            )
            """
        ),
    )

    # A distribution owns itself...
    assert_is_owner(
        exporting_owner_rule_runner,
        "src/python/foo/bar:bar1",
        Address("src/python/foo/bar", target_name="bar1"),
    )
    # ...and its direct dependency.
    assert_is_owner(
        exporting_owner_rule_runner,
        "src/python/foo/bar:bar1",
        Address("src/python/foo/bar/baz", target_name="baz1"),
    )
    assert_is_owner(
        exporting_owner_rule_runner,
        "src/python/foo:foo1",
        Address("src/python/foo", target_name="foo1"),
    )
    assert_is_owner(
        exporting_owner_rule_runner,
        "src/python/foo:foo3",
        Address("src/python/foo", target_name="foo3"),
    )
    # Transitive dependencies of foo3 resolve to foo3.
    assert_is_owner(
        exporting_owner_rule_runner,
        "src/python/foo:foo3",
        Address("src/python/foo/bar", target_name="bar2"),
    )
    assert_is_owner(
        exporting_owner_rule_runner,
        "src/python/foo:foo3",
        Address("src/python/foo/bar", target_name="bar-resources"),
    )

    # foo2 is depended on by no distribution; baz2 is reachable from both foo1 and foo3.
    assert_no_owner(exporting_owner_rule_runner, Address("src/python/foo", target_name="foo2"))
    assert_ambiguous_owner(
        exporting_owner_rule_runner, Address("src/python/foo/bar/baz", target_name="baz2")
    )
def test_get_requirements() -> None:
    """Compute a distribution's install_requires: 3rdparty reqs plus other exported 1stparty
    distributions, with the first-party version pin controlled by the version scheme."""
    rule_runner = create_setup_py_rule_runner(
        rules=[
            determine_setup_kwargs,
            get_requirements,
            get_owned_dependencies,
            get_exporting_owner,
            SubsystemRule(SetupPyGeneration),
            QueryRule(ExportedTargetRequirements, (DependencyOwner,)),
        ]
    )
    rule_runner.add_to_build_file(
        "3rdparty",
        textwrap.dedent(
            """
            python_requirement_library(
                name='ext1',
                requirements=['ext1==1.22.333'],
            )
            python_requirement_library(
                name='ext2',
                requirements=['ext2==4.5.6'],
            )
            python_requirement_library(
                name='ext3',
                requirements=['ext3==0.0.1'],
            )
            """
        ),
    )
    rule_runner.add_to_build_file(
        "src/python/foo/bar/baz",
        "python_library(dependencies=['3rdparty:ext1'], sources=[])",
    )
    rule_runner.add_to_build_file(
        "src/python/foo/bar/qux",
        "python_library(dependencies=['3rdparty:ext2', 'src/python/foo/bar/baz'], sources=[])",
    )
    rule_runner.add_to_build_file(
        "src/python/foo/bar",
        textwrap.dedent(
            """
            python_distribution(
                name='bar-dist',
                dependencies=[':bar'],
                provides=setup_py(name='bar', version='9.8.7'),
            )

            python_library(
                sources=[],
                dependencies=['src/python/foo/bar/baz', 'src/python/foo/bar/qux'],
            )
            """
        ),
    )
    rule_runner.add_to_build_file(
        "src/python/foo/corge",
        textwrap.dedent(
            """
            python_distribution(
                name='corge-dist',
                # Tests having a 3rdparty requirement directly on a python_distribution.
                dependencies=[':corge', '3rdparty:ext3'],
                provides=setup_py(name='corge', version='2.2.2'),
            )

            python_library(
                sources=[],
                dependencies=['src/python/foo/bar'],
            )
            """
        ),
    )

    # bar-dist picks up both transitive 3rdparty requirements.
    assert_requirements(
        rule_runner,
        ["ext1==1.22.333", "ext2==4.5.6"],
        Address("src/python/foo/bar", target_name="bar-dist"),
    )
    # corge-dist depends on the bar distribution, not bar's internals, so it requires `bar==...`.
    assert_requirements(
        rule_runner,
        ["ext3==0.0.1", "bar==9.8.7"],
        Address("src/python/foo/corge", target_name="corge-dist"),
    )

    # First-party pin style follows the configured version scheme.
    assert_requirements(
        rule_runner,
        ["ext3==0.0.1", "bar~=9.8.7"],
        Address("src/python/foo/corge", target_name="corge-dist"),
        version_scheme=FirstPartyDependencyVersionScheme.COMPATIBLE,
    )
    assert_requirements(
        rule_runner,
        ["ext3==0.0.1", "bar"],
        Address("src/python/foo/corge", target_name="corge-dist"),
        version_scheme=FirstPartyDependencyVersionScheme.ANY,
    )
def test_get_sources() -> None:
    """Collect chroot sources for setup.py: files, packages, namespace packages, package_data."""
    rule_runner = create_setup_py_rule_runner(
        rules=[
            get_sources,
            *python_sources.rules(),
            QueryRule(SetupPySources, (SetupPySourcesRequest,)),
        ]
    )
    rule_runner.add_to_build_file(
        "src/python/foo/bar/baz",
        textwrap.dedent(
            """
            python_library(name='baz1', sources=['baz1.py'])
            python_library(name='baz2', sources=['baz2.py'])
            """
        ),
    )
    rule_runner.create_file("src/python/foo/bar/baz/baz1.py")
    rule_runner.create_file("src/python/foo/bar/baz/baz2.py")
    # foo/bar is a namespace package (its __init__.py holds the pkg_resources declaration).
    rule_runner.create_file("src/python/foo/bar/__init__.py", _namespace_decl)
    rule_runner.add_to_build_file("src/python/foo/qux", "python_library()")
    rule_runner.create_file("src/python/foo/qux/__init__.py")
    rule_runner.create_file("src/python/foo/qux/qux.py")
    rule_runner.add_to_build_file("src/python/foo/resources", 'resources(sources=["js/code.js"])')
    rule_runner.create_file("src/python/foo/resources/js/code.js")
    rule_runner.create_file("src/python/foo/__init__.py")

    def assert_sources(
        expected_files,
        expected_packages,
        expected_namespace_packages,
        expected_package_data,
        addrs,
    ):
        targets = Targets(rule_runner.get_target(addr) for addr in addrs)
        srcs = rule_runner.request(
            SetupPySources,
            [SetupPySourcesRequest(targets, py2=False)],
        )
        chroot_snapshot = rule_runner.request(Snapshot, [srcs.digest])

        assert sorted(expected_files) == sorted(chroot_snapshot.files)
        assert sorted(expected_packages) == sorted(srcs.packages)
        assert sorted(expected_namespace_packages) == sorted(srcs.namespace_packages)
        assert expected_package_data == dict(srcs.package_data)

    assert_sources(
        expected_files=["foo/bar/baz/baz1.py", "foo/bar/__init__.py", "foo/__init__.py"],
        expected_packages=["foo", "foo.bar", "foo.bar.baz"],
        expected_namespace_packages=["foo.bar"],
        expected_package_data={},
        addrs=[Address("src/python/foo/bar/baz", target_name="baz1")],
    )

    assert_sources(
        expected_files=["foo/bar/baz/baz2.py", "foo/bar/__init__.py", "foo/__init__.py"],
        expected_packages=["foo", "foo.bar", "foo.bar.baz"],
        expected_namespace_packages=["foo.bar"],
        expected_package_data={},
        addrs=[Address("src/python/foo/bar/baz", target_name="baz2")],
    )

    assert_sources(
        expected_files=["foo/qux/qux.py", "foo/qux/__init__.py", "foo/__init__.py"],
        expected_packages=["foo", "foo.qux"],
        expected_namespace_packages=[],
        expected_package_data={},
        addrs=[Address("src/python/foo/qux")],
    )

    # Resources become package_data on the nearest enclosing package (`foo`).
    assert_sources(
        expected_files=[
            "foo/bar/baz/baz1.py",
            "foo/bar/__init__.py",
            "foo/qux/qux.py",
            "foo/qux/__init__.py",
            "foo/__init__.py",
            "foo/resources/js/code.js",
        ],
        expected_packages=["foo", "foo.bar", "foo.bar.baz", "foo.qux"],
        expected_namespace_packages=["foo.bar"],
        expected_package_data={"foo": ("resources/js/code.js",)},
        addrs=[
            Address("src/python/foo/bar/baz", target_name="baz1"),
            Address("src/python/foo/qux"),
            Address("src/python/foo/resources"),
        ],
    )

    assert_sources(
        expected_files=[
            "foo/bar/baz/baz1.py",
            "foo/bar/baz/baz2.py",
            "foo/bar/__init__.py",
            "foo/qux/qux.py",
            "foo/qux/__init__.py",
            "foo/__init__.py",
            "foo/resources/js/code.js",
        ],
        expected_packages=["foo", "foo.bar", "foo.bar.baz", "foo.qux"],
        expected_namespace_packages=["foo.bar"],
        expected_package_data={"foo": ("resources/js/code.js",)},
        addrs=[
            Address("src/python/foo/bar/baz", target_name="baz1"),
            Address("src/python/foo/bar/baz", target_name="baz2"),
            Address("src/python/foo/qux"),
            Address("src/python/foo/resources"),
        ],
    )
def test_generate_chroot(chroot_rule_runner: RuleRunner) -> None:
    """Assemble the full setup.py chroot for `foo-dist`: sources, resources, loose files,
    setup kwargs, install_requires on the sibling dist, and with_binaries entry points."""
    chroot_rule_runner.add_to_build_file(
        "src/python/foo/bar/baz",
        textwrap.dedent(
            """
            python_distribution(
                name="baz-dist",
                dependencies=[':baz'],
                provides=setup_py(
                    name='baz',
                    version='1.1.1'
                )
            )

            python_library()
            """
        ),
    )
    chroot_rule_runner.create_file("src/python/foo/bar/baz/baz.py")
    chroot_rule_runner.add_to_build_file(
        "src/python/foo/qux",
        textwrap.dedent(
            """
            python_library()

            pex_binary(name="bin", entry_point="foo.qux.bin:main")
            """
        ),
    )
    chroot_rule_runner.create_file("src/python/foo/qux/__init__.py")
    chroot_rule_runner.create_file("src/python/foo/qux/qux.py")
    # Add a `.pyi` stub file to ensure we include it in the final result.
    chroot_rule_runner.create_file("src/python/foo/qux/qux.pyi")
    chroot_rule_runner.add_to_build_file(
        "src/python/foo/resources", 'resources(sources=["js/code.js"])'
    )
    chroot_rule_runner.create_file("src/python/foo/resources/js/code.js")
    chroot_rule_runner.add_to_build_file("files", 'files(sources=["README.txt"])')
    chroot_rule_runner.create_file("files/README.txt")
    chroot_rule_runner.add_to_build_file(
        "src/python/foo",
        textwrap.dedent(
            """
            python_distribution(
                name='foo-dist',
                dependencies=[
                    ':foo',
                ],
                provides=setup_py(
                    name='foo', version='1.2.3'
                ).with_binaries(
                    foo_main='src/python/foo/qux:bin'
                )
            )

            python_library(
                dependencies=[
                    'src/python/foo/bar/baz',
                    'src/python/foo/qux',
                    'src/python/foo/resources',
                    'files',
                ]
            )
            """
        ),
    )
    chroot_rule_runner.create_file("src/python/foo/__init__.py", _namespace_decl)
    chroot_rule_runner.create_file("src/python/foo/foo.py")
    assert_chroot(
        chroot_rule_runner,
        [
            # Loose `files` land under src/ but are not packaged as python sources.
            "src/files/README.txt",
            "src/foo/qux/__init__.py",
            "src/foo/qux/qux.py",
            "src/foo/qux/qux.pyi",
            "src/foo/resources/js/code.js",
            "src/foo/__init__.py",
            "src/foo/foo.py",
            "setup.py",
            "MANIFEST.in",
        ],
        {
            "name": "foo",
            "version": "1.2.3",
            # NOTE(review): presumably injected by a test SetupKwargs plugin registered in the
            # `chroot_rule_runner` fixture — confirm against the fixture definition.
            "plugin_demo": "hello world",
            "package_dir": {"": "src"},
            "packages": ("foo", "foo.qux"),
            "namespace_packages": ("foo",),
            "package_data": {"foo": ("resources/js/code.js",)},
            # baz is owned by baz-dist, so it appears as a requirement rather than as sources.
            "install_requires": ("baz==1.1.1",),
            "entry_points": {"console_scripts": ["foo_main=foo.qux.bin:main"]},
        },
        Address("src/python/foo", target_name="foo-dist"),
    )
def create_for_test(cls, address: str, compat: Optional[str]) -> "MockFieldSet":
    """Build a field set for the given address spec and interpreter-compatibility string."""
    parsed_address = Address.parse(address)
    compatibility_field = PythonInterpreterCompatibility(compat, address=parsed_address)
    return cls(address=parsed_address, compatibility=compatibility_field)
def inject_smalltalk_deps(_: InjectSmalltalkDependencies) -> InjectedDependencies:
    """Rule stub: unconditionally inject a dependency on `//:injected`."""
    injected = Address.parse("//:injected")
    return InjectedDependencies([injected])
def test_failed_run(rule_runner: RuleRunner) -> None:
    """A program that raises should surface a non-zero exit code from `run`."""
    script = b'#!/usr/bin/python\nraise RuntimeError("foo")'
    outcome = single_target_run(rule_runner, Address("some/addr"), program_text=script)
    assert outcome.exit_code == 1
def inject_custom_smalltalk_deps(_: InjectCustomSmalltalkDependencies) -> InjectedDependencies:
    """Rule stub: unconditionally inject a dependency on `//:custom_injected`."""
    injected = Address.parse("//:custom_injected")
    return InjectedDependencies([injected])
def get_archive(target_name: str) -> FileContent:
    """Package the named archive target and return its single output file's contents."""
    target = rule_runner.get_target(Address("", target_name=target_name))
    package = rule_runner.request(BuiltPackage, [ArchiveFieldSet.create(target)])
    contents = rule_runner.request(DigestContents, [package.digest])
    assert len(contents) == 1
    return contents[0]
def test_find_valid_field_sets(self) -> None:
    """Exercise TargetsToValidFieldSetsRequest: matching, single-field-set enforcement, and
    the no-valid-targets error path."""
    origin = FilesystemLiteralSpec("f.txt")
    valid_tgt = FortranTarget({}, address=Address.parse(":valid"))
    valid_tgt_with_origin = TargetWithOrigin(valid_tgt, origin)
    invalid_tgt = self.InvalidTarget({}, address=Address.parse(":invalid"))
    invalid_tgt_with_origin = TargetWithOrigin(invalid_tgt, origin)

    def find_valid_field_sets(
        superclass: Type,
        targets_with_origins: Iterable[TargetWithOrigin],
        *,
        error_if_no_valid_targets: bool = False,
        expect_single_config: bool = False,
    ) -> TargetsToValidFieldSets:
        request = TargetsToValidFieldSetsRequest(
            superclass,
            goal_description="fake",
            error_if_no_valid_targets=error_if_no_valid_targets,
            expect_single_field_set=expect_single_config,
        )
        return self.request_single_product(
            TargetsToValidFieldSets,
            Params(
                request,
                TargetsWithOrigins(targets_with_origins),
            ),
        )

    # The invalid target is silently filtered out; both subclasses match the valid target.
    valid = find_valid_field_sets(
        self.FieldSetSuperclass, [valid_tgt_with_origin, invalid_tgt_with_origin]
    )
    assert valid.targets == (valid_tgt,)
    assert valid.targets_with_origins == (valid_tgt_with_origin,)
    assert valid.field_sets == (
        self.FieldSetSubclass1.create(valid_tgt),
        self.FieldSetSubclass2.create(valid_tgt),
    )

    # One target, two matching field-set subclasses -> ambiguous when a single one is expected.
    with pytest.raises(ExecutionError) as exc:
        find_valid_field_sets(
            self.FieldSetSuperclass, [valid_tgt_with_origin], expect_single_config=True
        )
    assert AmbiguousImplementationsException.__name__ in str(exc.value)

    # Two valid targets -> too many when a single field set is expected.
    with pytest.raises(ExecutionError) as exc:
        find_valid_field_sets(
            self.FieldSetSuperclass,
            [
                valid_tgt_with_origin,
                TargetWithOrigin(FortranTarget({}, address=Address.parse(":valid2")), origin),
            ],
            expect_single_config=True,
        )
    assert TooManyTargetsException.__name__ in str(exc.value)

    # No valid targets: empty result by default...
    no_valid_targets = find_valid_field_sets(self.FieldSetSuperclass, [invalid_tgt_with_origin])
    assert no_valid_targets.targets == ()
    assert no_valid_targets.targets_with_origins == ()
    assert no_valid_targets.field_sets == ()

    # ...but an error when the caller opts in.
    with pytest.raises(ExecutionError) as exc:
        find_valid_field_sets(
            self.FieldSetSuperclass, [invalid_tgt_with_origin], error_if_no_valid_targets=True
        )
    assert NoValidTargetsException.__name__ in str(exc.value)

    # The origin-aware superclass builds its field set from the (target, origin) pair.
    valid_with_origin = find_valid_field_sets(
        self.FieldSetSuperclassWithOrigin, [valid_tgt_with_origin, invalid_tgt_with_origin]
    )
    assert valid_with_origin.targets == (valid_tgt,)
    assert valid_with_origin.targets_with_origins == (valid_tgt_with_origin,)
    assert valid_with_origin.field_sets == (
        self.FieldSetSubclassWithOrigin.create(valid_tgt_with_origin),
    )
def test_source_plugin(rule_runner: RuleRunner) -> None:
    """Run Pylint with a first-party checker plugin that has its own transitive dependencies."""
    # NB: We make this source plugin fairly complex by having it use transitive dependencies.
    # This is to ensure that we can correctly support plugins with dependencies.
    # The plugin bans `print()`.
    rule_runner.write_files(
        {
            "BUILD": dedent(
                """\
                python_requirement_library(
                    name='pylint',
                    requirements=['pylint>=2.4.4,<2.5'],
                )

                python_requirement_library(
                    name='colors',
                    requirements=['ansicolors'],
                )
                """
            ),
            "pants-plugins/plugins/subdir/dep.py": dedent(
                """\
                from colors import red

                def is_print(node):
                    _ = red("Test that transitive deps are loaded.")
                    return hasattr(node.func, "name") and node.func.name == "print"
                """
            ),
            "pants-plugins/plugins/subdir/BUILD": "python_library(dependencies=['//:colors'])",
            "pants-plugins/plugins/print_plugin.py": dedent(
                """\
                '''Docstring.'''

                from pylint.checkers import BaseChecker
                from pylint.interfaces import IAstroidChecker

                from subdir.dep import is_print

                class PrintChecker(BaseChecker):
                    '''Docstring.'''

                    __implements__ = IAstroidChecker
                    name = "print_plugin"
                    msgs = {
                        "C9871": ("`print` statements are banned", "print-statement-used", ""),
                    }

                    def visit_call(self, node):
                        '''Docstring.'''
                        if is_print(node):
                            self.add_message("print-statement-used", node=node)

                def register(linter):
                    '''Docstring.'''
                    linter.register_checker(PrintChecker(linter))
                """
            ),
            "pants-plugins/plugins/BUILD": (
                "python_library(dependencies=['//:pylint', 'pants-plugins/plugins/subdir'])"
            ),
            "pylintrc": dedent(
                """\
                [MASTER]
                load-plugins=print_plugin
                """
            ),
            f"{PACKAGE}/f.py": "'''Docstring.'''\nprint()\n",
            f"{PACKAGE}/BUILD": "python_library()",
        }
    )

    def run_pylint_with_plugin(tgt: Target) -> LintResult:
        res = run_pylint(
            rule_runner,
            [tgt],
            extra_args=[
                "--pylint-source-plugins=['pants-plugins/plugins']",
                f"--source-root-patterns=['pants-plugins/plugins', '{PACKAGE}']",
                "--pylint-config=pylintrc",
            ],
        )
        assert len(res) == 1
        return res[0]

    tgt = rule_runner.get_target(Address(PACKAGE, relative_file_path="f.py"))
    result = run_pylint_with_plugin(tgt)
    assert result.exit_code == PYLINT_FAILURE_RETURN_CODE
    # The custom C9871 message fires on the `print()` at line 2 of f.py.
    assert f"{PACKAGE}/f.py:2:0: C9871" in result.stdout

    # Ensure that running Pylint on the plugin itself still works.
    plugin_tgt = rule_runner.get_target(
        Address("pants-plugins/plugins", relative_file_path="print_plugin.py")
    )
    result = run_pylint_with_plugin(plugin_tgt)
    assert result.exit_code == 0
    assert "Your code has been rated at 10.00/10" in result.stdout
), pytest.param( [ TargetData( FilesGeneratorTarget( { "sources": ["*.txt"], # Regression test that we can handle a dict with `tuple[str, ...]` as # key. "overrides": { ("foo.txt", ): { "tags": ["overridden"] } }, }, Address("example", target_name="files_target"), ), ("foo.txt", "bar.txt"), tuple(), ) ], True, dedent("""\ [ { "address": "example:files_target", "target_type": "files", "dependencies": [], "overrides": { "('foo.txt',)": { "tags": [
def test_expand(
    expected: list[tuple[str, dict[str, Any]]], fields: dict[str, Any | Parametrize]
) -> None:
    """Expanding `fields` under address `a` yields exactly the expected (spec, fields) pairs."""
    actual = [
        (addr.spec, resolved_fields)
        for addr, resolved_fields in Parametrize.expand(Address("a"), fields)
    ]
    assert sorted(expected) == sorted(actual)
def make_target(address: Optional[Address] = None) -> Target:
    """Create an empty MockTarget, defaulting its address to `//:tests`."""
    resolved = Address("", target_name="tests") if address is None else address
    return MockTarget({}, resolved)
def make_target(rule_runner: RuleRunner, source_files: List[FileContent]) -> Target:
    """Materialize `source_files` in the build root and return a `python_library` target at
    `//:target`."""
    for source_file in source_files:
        # `source_file.path` is already a str; the original `f"{source_file.path}"` wrapper
        # was a redundant no-op f-string.
        rule_runner.create_file(source_file.path, source_file.content.decode())
    return PythonLibrary({}, address=Address("", target_name="target"))
def test_invalid_fields_rejected() -> None:
    """Constructing a target with an unknown field raises InvalidFieldException naming both
    the offending field and the target address."""
    with pytest.raises(InvalidFieldException) as exc:
        FortranTarget({"invalid_field": True}, address=Address("", target_name="lib"))
    message = str(exc)
    assert "Unrecognized field `invalid_field=True`" in message
    assert "//:lib" in message
def test_skip(rule_runner: RuleRunner) -> None:
    """With `--mypy-skip`, no check results should be produced at all."""
    files = {
        f"{PACKAGE}/f.py": BAD_FILE,
        f"{PACKAGE}/BUILD": "python_sources()",
    }
    rule_runner.write_files(files)
    target = rule_runner.get_target(Address(PACKAGE, relative_file_path="f.py"))
    assert not run_mypy(rule_runner, [target], extra_args=["--mypy-skip"])
def test_override_preexisting_field_via_new_target() -> None:
    """Verify that a core field's behavior can be customized by subclassing it on a new
    target type, and that `Target.get()` / `Target.has_fields()` honor the subclass."""
    # To change the behavior of a pre-existing field, you must create a new target as it would not
    # be safe to allow plugin authors to change the behavior of core target types.
    #
    # Because the Target API does not care about the actual target type and we only check that the
    # target has the required fields via Target.has_fields(), it is safe to create a new target
    # that still works where the original target was expected.
    #
    # However, this means that we must ensure `Target.get()` and `Target.has_fields()` will work
    # with subclasses of the original `Field`s.

    class CustomFortranExtensions(FortranExtensions):
        banned_extensions = ("FortranBannedExt",)
        default_extensions = ("FortranCustomExt",)

        @classmethod
        def compute_value(
            cls, raw_value: Optional[Iterable[str]], *, address: Address
        ) -> Tuple[str, ...]:
            # Ensure that we avoid certain problematic extensions and always use some defaults.
            specified_extensions = super().compute_value(raw_value, address=address)
            banned = [
                extension
                for extension in specified_extensions
                if extension in cls.banned_extensions
            ]
            if banned:
                raise InvalidFieldException(
                    f"The {repr(cls.alias)} field in target {address} is using banned "
                    f"extensions: {banned}"
                )
            return (*specified_extensions, *cls.default_extensions)

    class CustomFortranTarget(Target):
        alias = "custom_fortran"
        # Swap the core field for its subclass on the new target type.
        core_fields = tuple(
            {*FortranTarget.core_fields, CustomFortranExtensions} - {FortranExtensions}
        )

    custom_tgt = CustomFortranTarget(
        {FortranExtensions.alias: ["FortranExt1"]}, address=Address("", target_name="custom")
    )

    # Looking up the superclass field must succeed and resolve to the subclass.
    assert custom_tgt.has_field(FortranExtensions) is True
    assert custom_tgt.has_field(CustomFortranExtensions) is True
    assert custom_tgt.has_fields([FortranExtensions, CustomFortranExtensions]) is True
    assert (
        CustomFortranTarget.class_get_field(FortranExtensions, union_membership=UnionMembership({}))
        is CustomFortranExtensions
    )

    # Ensure that subclasses not defined on a target are not accepted. This allows us to, for
    # example, filter every target with `PythonSources` (or a subclass) and to ignore targets with
    # only `Sources`.
    normal_tgt = FortranTarget({}, address=Address("", target_name="normal"))
    assert normal_tgt.has_field(FortranExtensions) is True
    assert normal_tgt.has_field(CustomFortranExtensions) is False

    # The subclass's compute_value() applies: defaults are appended to the specified value.
    assert custom_tgt[FortranExtensions] == custom_tgt[CustomFortranExtensions]
    assert custom_tgt[FortranExtensions].value == (
        "FortranExt1",
        *CustomFortranExtensions.default_extensions,
    )

    # Check custom default value
    assert (
        CustomFortranTarget({}, address=Address("", target_name="default"))[FortranExtensions].value
        == CustomFortranExtensions.default_extensions
    )

    # Custom validation
    with pytest.raises(InvalidFieldException) as exc:
        CustomFortranTarget(
            {FortranExtensions.alias: CustomFortranExtensions.banned_extensions},
            address=Address("", target_name="invalid"),
        )
    assert str(list(CustomFortranExtensions.banned_extensions)) in str(exc)
    assert "//:invalid" in str(exc)
def test_partition_targets(rule_runner: RuleRunner) -> None:
    """MyPy partitioning should group roots by (resolve, interpreter constraints):
    two roots in resolve 'a' with different interpreters get separate partitions,
    while two roots in resolve 'b' sharing an interpreter are merged into one."""

    def create_folder(folder: str, resolve: str, interpreter: str) -> dict[str, str]:
        # Each folder holds a root target depending on a sibling dep, pinned to a
        # resolve and an interpreter constraint.
        # NOTE(review): the BUILD-file string layout below is reconstructed — the original
        # formatting was lost; dedent() strips the common leading whitespace at runtime.
        return {
            f"{folder}/dep.py": "",
            f"{folder}/root.py": "",
            f"{folder}/BUILD": dedent(
                f"""\
                python_source(
                    name='dep',
                    source='dep.py',
                    resolve='{resolve}',
                    interpreter_constraints=['=={interpreter}.*'],
                )
                python_source(
                    name='root',
                    source='root.py',
                    resolve='{resolve}',
                    interpreter_constraints=['=={interpreter}.*'],
                    dependencies=[':dep'],
                )
                """
            ),
        }

    files = {
        **create_folder("resolveA_py38", "a", "3.8"),
        **create_folder("resolveA_py39", "a", "3.9"),
        **create_folder("resolveB_1", "b", "3.9"),
        **create_folder("resolveB_2", "b", "3.9"),
    }
    rule_runner.write_files(files)  # type: ignore[arg-type]
    rule_runner.set_options(
        ["--python-resolves={'a': '', 'b': ''}", "--python-enable-resolves"],
        env_inherit={"PATH", "PYENV_ROOT", "HOME"},
    )
    resolve_a_py38_dep = rule_runner.get_target(Address("resolveA_py38", target_name="dep"))
    resolve_a_py38_root = rule_runner.get_target(Address("resolveA_py38", target_name="root"))
    resolve_a_py39_dep = rule_runner.get_target(Address("resolveA_py39", target_name="dep"))
    resolve_a_py39_root = rule_runner.get_target(Address("resolveA_py39", target_name="root"))
    resolve_b_dep1 = rule_runner.get_target(Address("resolveB_1", target_name="dep"))
    resolve_b_root1 = rule_runner.get_target(Address("resolveB_1", target_name="root"))
    resolve_b_dep2 = rule_runner.get_target(Address("resolveB_2", target_name="dep"))
    resolve_b_root2 = rule_runner.get_target(Address("resolveB_2", target_name="root"))

    # Only the roots are requested; deps must show up via each partition's closure.
    request = MyPyRequest(
        MyPyFieldSet.create(t)
        for t in (
            resolve_a_py38_root,
            resolve_a_py39_root,
            resolve_b_root1,
            resolve_b_root2,
        )
    )
    partitions = rule_runner.request(MyPyPartitions, [request])
    assert len(partitions) == 3

    def assert_partition(
        partition: MyPyPartition, roots: list[Target], deps: list[Target], interpreter: str
    ) -> None:
        # A partition's closure is its roots plus their transitive deps.
        root_addresses = {t.address for t in roots}
        assert {t.address for t in partition.root_targets} == root_addresses
        assert {t.address for t in partition.closure} == {
            *root_addresses,
            *(t.address for t in deps),
        }
        assert partition.interpreter_constraints == InterpreterConstraints([f"=={interpreter}.*"])

    assert_partition(partitions[0], [resolve_a_py38_root], [resolve_a_py38_dep], "3.8")
    assert_partition(partitions[1], [resolve_a_py39_root], [resolve_a_py39_dep], "3.9")
    # Same resolve + same interpreter -> merged into a single partition.
    assert_partition(
        partitions[2], [resolve_b_root1, resolve_b_root2], [resolve_b_dep1, resolve_b_dep2], "3.9"
    )
def make_target(address: Optional[Address] = None) -> Target:
    """Return an empty MockTarget, falling back to the `//:tests` address."""
    # `or` preserved from the original: any falsy address also triggers the default.
    resolved = address or Address("", target_name="tests")
    return MockTarget({}, address=resolved)