def test_address_specs_do_not_exist(address_specs_rule_runner: RuleRunner) -> None:
    """Check each AddressSpec type's error behavior when the referenced directory, BUILD
    file, or target does not exist."""
    address_specs_rule_runner.create_file("real/f.txt")
    address_specs_rule_runner.add_to_build_file("real", "mock_tgt(sources=['f.txt'])")
    address_specs_rule_runner.add_to_build_file("empty", "# empty")

    def assert_resolve_error(specs: Iterable[AddressSpec], *, expected: str) -> None:
        # Resolution failures surface as ExecutionError; match on the message text.
        with pytest.raises(ExecutionError) as exc:
            resolve_address_specs(address_specs_rule_runner, specs)
        assert expected in str(exc.value)

    # Literal addresses require both a BUILD file to exist and for a target to be resolved.
    assert_resolve_error(
        [AddressLiteralSpec("fake", "tgt")], expected="'fake' does not exist on disk"
    )
    assert_resolve_error(
        [AddressLiteralSpec("fake/f.txt", "tgt")],
        expected="'fake/f.txt' does not exist on disk",
    )
    did_you_mean = ResolveError.did_you_mean(
        bad_name="fake_tgt", known_names=["real"], namespace="real"
    )
    assert_resolve_error([AddressLiteralSpec("real", "fake_tgt")], expected=str(did_you_mean))
    assert_resolve_error(
        [AddressLiteralSpec("real/f.txt", "fake_tgt")], expected=str(did_you_mean)
    )

    # SiblingAddresses require the BUILD file to exist, but are okay if no targets are resolved.
    assert_resolve_error(
        [SiblingAddresses("fake")],
        expected=(
            "'fake' does not contain any BUILD files, but 'fake:' expected matching targets "
            "there."
        ),
    )
    assert not resolve_address_specs(address_specs_rule_runner, [SiblingAddresses("empty")])

    # DescendantAddresses requires at least one match, even if BUILD files exist.
    assert_resolve_error(
        [DescendantAddresses("fake"), DescendantAddresses("empty")],
        expected="Address spec 'fake::' does not match any targets",
    )

    # AscendantAddresses does not require any matches or BUILD files.
    assert not resolve_address_specs(
        address_specs_rule_runner, [AscendantAddresses("fake"), AscendantAddresses("empty")]
    )
def test_all_owned_sources(rule_runner: RuleRunner) -> None:
    """Only files matched by some target's `sources` field count as owned."""
    owned = ["dir/a.f90", "dir/b.f90", "dir/a_test.f90"]
    unowned = ["dir/unowned.txt", "unowned.txt", "unowned.f90"]
    for fp in (*owned, *unowned):
        rule_runner.create_file(fp)
    rule_runner.add_to_build_file("dir", "fortran_library()\nfortran_tests(name='tests')")
    result = rule_runner.request(AllOwnedSources, [])
    assert result == AllOwnedSources(owned)
def test_transitive_dependencies(rule_runner: RuleRunner) -> None:
    """MyPy should type-check against transitive dependencies, so a type error caused by an
    indirect dependency is caught when checking the root target."""
    # Indirect dependency: provides `capitalize`, which only accepts `str`.
    rule_runner.create_file(f"{PACKAGE}/util/__init__.py")
    rule_runner.create_file(
        f"{PACKAGE}/util/lib.py",
        dedent(
            """\
            def capitalize(v: str) -> str:
                return v.capitalize()
            """
        ),
    )
    rule_runner.add_to_build_file(f"{PACKAGE}/util", "python_library()")

    # Direct dependency: misuses `capitalize` by passing an int (the deliberate error).
    rule_runner.create_file(f"{PACKAGE}/math/__init__.py")
    rule_runner.create_file(
        f"{PACKAGE}/math/add.py",
        dedent(
            """\
            from project.util.lib import capitalize

            def add(x: int, y: int) -> str:
                sum = x + y
                return capitalize(sum)  # This is the wrong type.
            """
        ),
    )
    rule_runner.add_to_build_file(
        f"{PACKAGE}/math",
        "python_library()",
    )

    # The root target's own sources are type-correct.
    sources_content = [
        FileContent(
            f"{PACKAGE}/app.py",
            dedent(
                """\
                from project.math.add import add

                print(add(2, 4))
                """
            ).encode(),
        ),
        FileContent(f"{PACKAGE}/__init__.py", b""),
    ]
    target = make_target(rule_runner, sources_content)
    result = run_mypy(rule_runner, [target])
    assert len(result) == 1
    assert result[0].exit_code == 1
    # The reported error lives in the dependency, not the root target's sources.
    assert f"{PACKAGE}/math/add.py:5" in result[0].stdout
def assert_pants_requirement(
    rule_runner: RuleRunner,
    build_file_entry: str,
    *,
    expected_target_name: str,
    expected_dist: str = "pantsbuild.pants",
    expected_module: str = "pants",
) -> None:
    """Materialize `build_file_entry` under 3rdparty/python and verify the resulting
    python_requirement target pins the dist to the running Pants version."""
    rule_runner.add_to_build_file("3rdparty/python", f"{build_file_entry}\n")
    addr = Address("3rdparty/python", target_name=expected_target_name)
    tgt = rule_runner.get_target(addr)
    assert isinstance(tgt, PythonRequirementTarget)
    expected_req = PipRequirement.parse(f"{expected_dist}=={pants_version()}")
    assert tgt[PythonRequirementsField].value == (expected_req,)
    assert tgt[PythonRequirementModulesField].value == (expected_module,)
def test_grpc(rule_runner: RuleRunner) -> None:
    """A gRPC-enabled protobuf_library should also generate the *_pb2_grpc.py stub."""
    proto_dir = "src/protobuf/dir1"
    rule_runner.create_file(f"{proto_dir}/f.proto", dedent(GRPC_PROTO_STANZA))
    rule_runner.add_to_build_file(proto_dir, "protobuf_library(grpc=True)")
    assert_files_generated(
        rule_runner,
        proto_dir,
        source_roots=["src/protobuf"],
        expected_files=[f"{proto_dir}/f_pb2.py", f"{proto_dir}/f_pb2_grpc.py"],
    )
def test_works_with_python27(rule_runner: RuleRunner) -> None:
    """A regression test that we can properly handle Python 2-only third-party dependencies.

    There was a bug that this would cause the runner PEX to fail to execute because it did
    not have Python 3 distributions of the requirements.
    """
    rule_runner.add_to_build_file(
        "",
        dedent(
            """\
            # Both requirements are a) typed and b) compatible with Py2 and Py3. However, `x690`
            # has a distinct wheel for Py2 vs. Py3, whereas libumi has a universal wheel. We expect
            # both to be usable, even though libumi is not compatible with Py3.

            python_requirement_library(
                name="libumi",
                requirements=["libumi==0.0.2"],
            )

            python_requirement_library(
                name="x690",
                requirements=["x690==0.2.0"],
            )
            """
        ),
    )
    # Py2-only syntax (print statements), plus a genuine type error on line 5.
    source_file = FileContent(
        f"{PACKAGE}/py2.py",
        dedent(
            """\
            from libumi import hello_world
            from x690 import types

            print "Blast from the past!"
            print hello_world() - 21  # MyPy should fail. You can't subtract an `int` from `bytes`.
            """
        ).encode(),
    )
    target = make_target(rule_runner, [source_file], interpreter_constraints="==2.7.*")
    result = run_mypy(rule_runner, [target], passthrough_args="--py2")
    assert len(result) == 1
    assert result[0].exit_code == 1
    # The runner PEX executed, and both requirements' stubs resolved — so the only error
    # MyPy reports is the real one in the source file.
    assert "Failed to execute PEX file" not in result[0].stderr
    assert (
        "Cannot find implementation or library stub for module named 'x690'"
        not in result[0].stdout
    )
    assert (
        "Cannot find implementation or library stub for module named 'libumi'"
        not in result[0].stdout
    )
    assert f"{PACKAGE}/py2.py:5: error: Unsupported operand types" in result[0].stdout
def test_target_adaptor_not_found(target_adaptor_rule_runner: RuleRunner) -> None:
    """Requesting a TargetAdaptor errors helpfully for a missing BUILD file or target."""
    # Case 1: no BUILD file exists in the directory at all.
    with pytest.raises(ExecutionError) as exc:
        target_adaptor_rule_runner.request(TargetAdaptor, [Address("helloworld")])
    assert "Directory \\'helloworld\\' does not contain any BUILD files" in str(exc)

    # Case 2: a BUILD file exists but lacks the default-named target; the error should
    # suggest the target that does exist.
    target_adaptor_rule_runner.add_to_build_file("helloworld", "mock_tgt(name='other_tgt')")
    expected_rx_str = re.escape(
        "'helloworld' was not found in namespace 'helloworld'. Did you mean one of:\n  :other_tgt"
    )
    with pytest.raises(ExecutionError, match=expected_rx_str):
        target_adaptor_rule_runner.request(TargetAdaptor, [Address("helloworld")])
def assert_python_requirements(
    rule_runner: RuleRunner,
    build_file_entry: str,
    requirements_txt: str,
    *,
    expected_file_dep: PythonRequirementsFile,
    expected_targets: Iterable[PythonRequirementLibrary],
    requirements_txt_relpath: str = "requirements.txt",
) -> None:
    """Write the BUILD entry plus a requirements file, then compare the generated targets
    against the expected file dependency and requirement libraries."""
    rule_runner.add_to_build_file("", f"{build_file_entry}\n")
    rule_runner.create_file(requirements_txt_relpath, requirements_txt)
    all_specs = Specs(AddressSpecs([DescendantAddresses("")]), FilesystemSpecs([]))
    resolved = rule_runner.request(Targets, [all_specs])
    expected = {expected_file_dep, *expected_targets}
    assert expected == set(resolved)
def test_python_protobuf(rule_runner: RuleRunner) -> None:
    """Generated Python code respects each protobuf_library's `python_source_root`."""
    rule_runner.create_file(
        "src/protobuf/dir/f.proto",
        dedent(
            """\
            syntax = "proto2";
            package dir;
            """
        ),
    )
    rule_runner.create_file(
        "src/protobuf/other_dir/f.proto",
        dedent(
            """\
            syntax = "proto2";
            package other_dir;
            """
        ),
    )
    rule_runner.add_to_build_file("src/protobuf/dir", "protobuf_library()")
    # `other_dir` re-roots its generated Python under src/python.
    rule_runner.add_to_build_file(
        "src/protobuf/other_dir", "protobuf_library(python_source_root='src/python')"
    )
    targets = [
        ProtobufLibrary({}, address=Address("src/protobuf/dir")),
        ProtobufLibrary({}, address=Address("src/protobuf/other_dir")),
    ]
    backend_args = ["--backend-packages=pants.backend.codegen.protobuf.python"]

    # Stripped sources drop the source-root prefix entirely.
    stripped_result = get_stripped_sources(
        rule_runner,
        targets,
        source_roots=["src/protobuf", "src/python"],
        extra_args=backend_args,
    )
    assert stripped_result.stripped_source_files.snapshot.files == (
        "dir/f_pb2.py",
        "other_dir/f_pb2.py",
    )

    # Unstripped sources keep the root, with `other_dir` placed under its custom root.
    unstripped_result = get_unstripped_sources(
        rule_runner,
        targets,
        source_roots=["src/protobuf", "src/python"],
        extra_args=backend_args,
    )
    assert unstripped_result.source_files.snapshot.files == (
        "src/protobuf/dir/f_pb2.py",
        "src/python/other_dir/f_pb2.py",
    )
    assert unstripped_result.source_roots == ("src/protobuf", "src/python")
def assert_pants_requirement(
    rule_runner: RuleRunner,
    build_file_entry: str,
    *,
    expected_target_name: str,
    expected_dist: str = "pantsbuild.pants",
    expected_module: str = "pants",
) -> None:
    """Materialize `build_file_entry` under 3rdparty/python and verify the resulting
    requirement target's pinned version and module mapping."""
    rule_runner.add_to_build_file("3rdparty/python", f"{build_file_entry}\n")
    tgt = rule_runner.get_target(Address("3rdparty/python", target_name=expected_target_name))
    assert isinstance(tgt, PythonRequirementLibrary)
    expected_req = Requirement.parse(f"{expected_dist}=={pants_version()}")
    assert tgt[PythonRequirementsField].value == (expected_req,)
    module_mapping = tgt[ModuleMappingField].value
    assert isinstance(module_mapping, FrozenDict)
    assert module_mapping.get(expected_dist) == (expected_module,)
def assert_pipenv_requirements(
    rule_runner: RuleRunner,
    build_file_entry: str,
    pipfile_lock: dict,
    *,
    expected_file_dep: PythonRequirementsFile,
    expected_targets: Iterable[PythonRequirementLibrary],
    pipfile_lock_relpath: str = "Pipfile.lock",
) -> None:
    """Write the BUILD entry plus a serialized Pipfile.lock, then compare the generated
    targets against the expected file dependency and requirement libraries."""
    rule_runner.add_to_build_file("", f"{build_file_entry}\n")
    rule_runner.create_file(pipfile_lock_relpath, dumps(pipfile_lock))
    all_specs = Specs(AddressSpecs([DescendantAddresses("")]), FilesystemSpecs([]))
    resolved = rule_runner.request(Targets, [all_specs])
    assert {expected_file_dep, *expected_targets} == set(resolved)
def test_invalid_binary(chroot_rule_runner: RuleRunner) -> None:
    """`setup_py(...).with_binaries()` must reference a pex_binary whose entry point resolves
    to owned code; anything else should raise InvalidEntryPoint."""
    chroot_rule_runner.create_files("src/python/invalid_binary", ["app1.py", "app2.py"])
    chroot_rule_runner.add_to_build_file(
        "src/python/invalid_binary",
        textwrap.dedent(
            """
            python_library(name='not_a_binary', sources=[])

            pex_binary(name='invalid_entrypoint_unowned1', entry_point='app1.py')

            pex_binary(name='invalid_entrypoint_unowned2', entry_point='invalid_binary.app2')

            python_distribution(
                name='invalid_bin1',
                provides=setup_py(
                    name='invalid_bin1', version='1.1.1'
                ).with_binaries(foo=':not_a_binary')
            )

            python_distribution(
                name='invalid_bin2',
                provides=setup_py(
                    name='invalid_bin2', version='1.1.1'
                ).with_binaries(foo=':invalid_entrypoint_unowned1')
            )

            python_distribution(
                name='invalid_bin3',
                provides=setup_py(
                    name='invalid_bin3', version='1.1.1'
                ).with_binaries(foo=':invalid_entrypoint_unowned2')
            )
            """
        ),
    )
    # with_binaries pointing at a target that is not a pex_binary at all.
    assert_chroot_error(
        chroot_rule_runner,
        Address("src/python/invalid_binary", target_name="invalid_bin1"),
        InvalidEntryPoint,
    )
    # Entry points (file form and module form) whose code is not owned by any target.
    assert_chroot_error(
        chroot_rule_runner,
        Address("src/python/invalid_binary", target_name="invalid_bin2"),
        InvalidEntryPoint,
    )
    assert_chroot_error(
        chroot_rule_runner,
        Address("src/python/invalid_binary", target_name="invalid_bin3"),
        InvalidEntryPoint,
    )
def test_top_level_python_source_root(rule_runner: RuleRunner) -> None:
    """python_source_root='.' maps generated Python to the repository-root source root."""
    pkg = "src/proto/protos"
    rule_runner.create_file(
        f"{pkg}/f.proto",
        dedent(
            """\
            syntax = "proto3";
            package protos;
            """
        ),
    )
    rule_runner.add_to_build_file(pkg, "protobuf_library(python_source_root='.')")
    assert_files_generated(
        rule_runner,
        pkg,
        source_roots=["/", "src/proto"],
        expected_files=["protos/f_pb2.py"],
    )
def test_address_specs_file_does_not_belong_to_target(
    address_specs_rule_runner: RuleRunner,
) -> None:
    """Even if a file's address file exists and target exist, we should validate that the file
    actually belongs to that target."""
    runner = address_specs_rule_runner
    runner.create_file("demo/f.txt")
    runner.add_to_build_file(
        "demo",
        dedent(
            """\
            mock_tgt(name='owner', sources=['f.txt'])
            mock_tgt(name='not_owner')
            """
        ),
    )
    # `not_owner` exists but does not own f.txt, so resolution must fail.
    bad_spec = AddressLiteralSpec("demo/f.txt", "not_owner")
    with pytest.raises(ExecutionError) as exc:
        resolve_address_specs(runner, [bad_spec])
    assert "does not match a file demo/f.txt" in str(exc.value)
def make_target(
    rule_runner: RuleRunner,
    source_files: List[FileContent],
    *,
    import_path: str,
    dependencies: Iterable[Address] = (),
    target_name: str = "target",
) -> Target:
    """Write `source_files` to disk and wrap them in a go_package target at the build root."""
    for fc in source_files:
        rule_runner.create_file(fc.path, fc.content.decode())
    sources_list = ", ".join(f'"{fc.path}"' for fc in source_files)
    deps_list = ", ".join(f'"{addr.spec}"' for addr in dependencies)
    build_entry = (
        f"go_package(name='{target_name}', import_path='{import_path}', "
        f"sources=[{sources_list}], dependencies=[{deps_list}])\n"
    )
    rule_runner.add_to_build_file("", build_entry)
    return rule_runner.get_target(Address("", target_name=target_name))
def test_runtime_package_dependency(rule_runner: RuleRunner) -> None:
    """`runtime_package_dependencies` should build the referenced package and make the built
    artifact (bin.pex) available in the test's chroot."""
    create_python_binary_target(rule_runner, BINARY_SOURCE)
    rule_runner.create_file(
        f"{PACKAGE}/test_binary_call.py",
        dedent(
            """\
            import subprocess

            def test_embedded_binary():
                assert b"Hello, test!" in subprocess.check_output(args=['./bin.pex'])
            """
        ),
    )
    rule_runner.add_to_build_file(
        PACKAGE, "python_tests(runtime_package_dependencies=[':bin'])"
    )
    tgt = rule_runner.get_target(Address(PACKAGE, relative_file_path="test_binary_call.py"))
    assert isinstance(tgt, PythonTests)
    # `-s` disables Pytest output capturing so the subprocess runs unimpeded.
    result = run_pytest(rule_runner, tgt, passthrough_args="-s")
    assert result.exit_code == 0
def test_address_specs_deduplication(address_specs_rule_runner: RuleRunner) -> None:
    """When multiple specs cover the same address, we should deduplicate to one single
    Address."""
    address_specs_rule_runner.create_file("demo/f.txt")
    address_specs_rule_runner.add_to_build_file("demo", "mock_tgt(sources=['f.txt'])")
    # We also include a file address to ensure that that is included in the result.
    overlapping_specs = [
        AddressLiteralSpec("demo", "demo"),
        AddressLiteralSpec("demo/f.txt", "demo"),
        SiblingAddresses("demo"),
        DescendantAddresses("demo"),
        AscendantAddresses("demo"),
    ]
    resolved = resolve_address_specs(address_specs_rule_runner, overlapping_specs)
    assert resolved == {Address("demo"), Address("demo", relative_file_path="f.txt")}
def test_conftest_handling(rule_runner: RuleRunner) -> None:
    """Tests that we a) inject a dependency on conftest.py and b) skip running directly on
    conftest.py."""
    tgt = create_test_target(rule_runner, [GOOD_SOURCE])
    # A conftest hook that prints so we can observe it ran.
    rule_runner.create_file(
        f"{SOURCE_ROOT}/conftest.py",
        "def pytest_runtest_setup(item):\n print('In conftest!')\n",
    )
    rule_runner.add_to_build_file(SOURCE_ROOT, "python_tests()")
    conftest_tgt = rule_runner.get_target(
        Address(SOURCE_ROOT, relative_file_path="conftest.py")
    )
    assert isinstance(conftest_tgt, PythonTests)

    # a) The hook's output appears, proving the conftest dependency was injected.
    result = run_pytest(rule_runner, tgt, passthrough_args="-s")
    assert result.exit_code == 0
    assert f"{PACKAGE}/test_good.py In conftest!\n." in result.stdout

    # b) Running directly on the conftest target is skipped entirely (no exit code).
    result = run_pytest(rule_runner, conftest_tgt)
    assert result.exit_code is None
def test_get_owner_siblings(exporting_owner_rule_runner: RuleRunner) -> None:
    """A python_distribution owns both itself and a sibling library it depends on."""
    runner = exporting_owner_rule_runner
    runner.add_to_build_file(
        "src/python/siblings",
        textwrap.dedent(
            """
            python_library(name='sibling1', sources=[])
            python_distribution(
                name='sibling2',
                dependencies=['src/python/siblings:sibling1'],
                provides=setup_py(name='siblings', version='2.2.2'),
            )
            """
        ),
    )
    for owned in ("src/python/siblings:sibling1", "src/python/siblings:sibling2"):
        assert_is_owner(runner, "src/python/siblings:sibling2", owned)
def test_address_specs_filter_by_tag(address_specs_rule_runner: RuleRunner) -> None:
    """`--tag` filtering applies to resolved address specs, including file addresses (which
    inherit the tags of their owning base target)."""
    address_specs_rule_runner.create_file("demo/f.txt")
    address_specs_rule_runner.add_to_build_file(
        "demo",
        dedent(
            """\
            mock_tgt(name="a", sources=["f.txt"])
            mock_tgt(name="b", sources=["f.txt"], tags=["integration"])
            mock_tgt(name="c", sources=["f.txt"], tags=["ignore"])
            """
        ),
    )
    # `+integration` keeps only targets carrying that tag, i.e. only `b`.
    bootstrapper = create_options_bootstrapper(args=["--tag=+integration"])
    assert resolve_address_specs(
        address_specs_rule_runner, [SiblingAddresses("demo")], bootstrapper=bootstrapper
    ) == {AddressWithOrigin(Address("demo", target_name="b"), SiblingAddresses("demo"))}

    # The same filtering should work when given literal addresses, including file addresses.
    # For file addresses, we look up the `tags` field of the original base target.
    literals_result = resolve_address_specs(
        address_specs_rule_runner,
        [
            AddressLiteralSpec("demo", "a"),
            AddressLiteralSpec("demo", "b"),
            AddressLiteralSpec("demo", "c"),
            AddressLiteralSpec("demo/f.txt", "a"),
            AddressLiteralSpec("demo/f.txt", "b"),
            AddressLiteralSpec("demo/f.txt", "c"),
        ],
        bootstrapper=bootstrapper,
    )
    assert literals_result == {
        AddressWithOrigin(
            Address("demo", relative_file_path="f.txt", target_name="b"),
            AddressLiteralSpec("demo/f.txt", "b"),
        ),
        AddressWithOrigin(Address("demo", target_name="b"), AddressLiteralSpec("demo", "b")),
    }
def make_target(
    rule_runner: RuleRunner,
    source_files: List[FileContent],
    *,
    name: str = "target",
    interpreter_constraints: Optional[str] = None,
) -> Target:
    """Write `source_files` to disk and wrap them in a python_library at the build root."""
    for fc in source_files:
        rule_runner.create_file(fc.path, fc.content.decode())
    # The BUILD field takes a list of constraint strings, or None to use the default.
    constraints_value = [interpreter_constraints] if interpreter_constraints else None
    rule_runner.add_to_build_file(
        "",
        dedent(
            f"""\
            python_library(
                name='{name}',
                interpreter_constraints={constraints_value},
            )
            """
        ),
    )
    return rule_runner.get_target(Address("", target_name=name))
def test_bad_python_source_root(rule_runner: RuleRunner) -> None:
    """A python_source_root that is not a configured source root should raise
    NoSourceRootError during code generation."""
    rule_runner.create_file(
        "src/protobuf/dir1/f.proto",
        dedent(
            """\
            syntax = "proto3";
            package dir1;
            """
        ),
    )
    rule_runner.add_to_build_file(
        "src/protobuf/dir1", "protobuf_library(python_source_root='notasourceroot')"
    )
    with pytest.raises(ExecutionError) as exc:
        assert_files_generated(
            rule_runner, "src/protobuf/dir1", source_roots=["src/protobuf"], expected_files=[]
        )
    wrapped = exc.value.wrapped_exceptions
    assert len(wrapped) == 1
    assert isinstance(wrapped[0], NoSourceRootError)
def test_address_specs_filter_by_exclude_pattern(
    address_specs_rule_runner: RuleRunner,
) -> None:
    """`--exclude-target-regexp` filtering applies to resolved address specs, including file
    addresses."""
    address_specs_rule_runner.create_file("demo/f.txt")
    address_specs_rule_runner.add_to_build_file(
        "demo",
        dedent(
            """\
            mock_tgt(name="exclude_me", sources=["f.txt"])
            mock_tgt(name="not_me", sources=["f.txt"])
            """
        ),
    )
    bootstrapper = create_options_bootstrapper(
        args=["--exclude-target-regexp=exclude_me.*"]
    )
    assert resolve_address_specs(
        address_specs_rule_runner, [SiblingAddresses("demo")], bootstrapper=bootstrapper
    ) == {AddressWithOrigin(Address("demo", target_name="not_me"), SiblingAddresses("demo"))}

    # The same filtering should work when given literal addresses, including file addresses.
    # The filtering will operate against the normalized Address.spec.
    literals_result = resolve_address_specs(
        address_specs_rule_runner,
        [
            AddressLiteralSpec("demo", "exclude_me"),
            AddressLiteralSpec("demo", "not_me"),
            AddressLiteralSpec("demo/f.txt", "exclude_me"),
            AddressLiteralSpec("demo/f.txt", "not_me"),
        ],
        bootstrapper=bootstrapper,
    )
    assert literals_result == {
        AddressWithOrigin(
            Address("demo", relative_file_path="f.txt", target_name="not_me"),
            AddressLiteralSpec("demo/f.txt", "not_me"),
        ),
        AddressWithOrigin(
            Address("demo", target_name="not_me"), AddressLiteralSpec("demo", "not_me")
        ),
    }
def test_warn_files_targets(rule_runner: RuleRunner, caplog) -> None:
    """Packaging a pex_binary that transitively depends on `files`/`relocated_files` targets
    should log a warning (loose files are not embedded in a PEX); `resources` targets are
    fine and must not be mentioned."""
    rule_runner.set_options([], env_inherit={"PATH", "PYENV_ROOT", "HOME"})
    rule_runner.create_file("assets/f.txt")
    rule_runner.add_to_build_file(
        "assets",
        dedent(
            """\
            files(name='files', sources=['f.txt'])
            relocated_files(
                name='relocated',
                files_targets=[':files'],
                src='assets',
                dest='new_assets',
            )

            # Resources are fine.
            resources(name='resources', sources=['f.txt'])
            """
        ),
    )
    rule_runner.create_file("src/py/project/__init__.py")
    rule_runner.create_file("src/py/project/app.py", "print('hello')")
    rule_runner.add_to_build_file(
        "src/py/project",
        dedent(
            """\
            pex_binary(
                dependencies=['assets:files', 'assets:relocated', 'assets:resources'],
                entry_point="none",
            )
            """
        ),
    )
    tgt = rule_runner.get_target(Address("src/py/project"))
    field_set = PexBinaryFieldSet.create(tgt)

    # Sanity check that nothing was logged before packaging.
    assert not caplog.records
    result = rule_runner.request(BuiltPackage, [field_set])
    assert caplog.records
    assert f"The pex_binary target {tgt.address} transitively depends on" in caplog.text
    assert "assets/f.txt:files" in caplog.text
    assert "assets:relocated" in caplog.text
    # Resources are packaged into the PEX, so they should not be warned about.
    assert "assets:resources" not in caplog.text

    assert len(result.artifacts) == 1
    assert result.artifacts[0].relpath == "src.py.project/project.pex"
def test_grpc_pre_v2_mypy_plugin(rule_runner: RuleRunner) -> None:
    """A pre-2.0 mypy-protobuf plugin should still produce .pyi stubs alongside the gRPC
    output."""
    proto_dir = "src/protobuf/dir1"
    rule_runner.create_file(f"{proto_dir}/f.proto", dedent(GRPC_PROTO_STANZA))
    rule_runner.add_to_build_file(proto_dir, "protobuf_library(grpc=True)")
    assert_files_generated(
        rule_runner,
        proto_dir,
        source_roots=["src/protobuf"],
        mypy=True,
        mypy_plugin_version="mypy-protobuf==1.24",
        expected_files=[
            f"{proto_dir}/f_pb2.py",
            f"{proto_dir}/f_pb2.pyi",
            f"{proto_dir}/f_pb2_grpc.py",
        ],
    )
def test_raw_output_two_build_files(rule_runner: RuleRunner) -> None:
    """`peek --output=raw` should print each BUILD file's contents verbatim under a header."""
    rule_runner.add_to_build_file("project1", "# A comment\nfiles(sources=[])")
    rule_runner.add_to_build_file("project2", "# Another comment\nfiles(sources=[])")
    result = rule_runner.run_goal_rule(Peek, args=["--output=raw", "project1", "project2"])
    # NOTE(review): exact blank-line separation between sections reconstructed from the
    # mangled source — confirm against the goal's actual raw renderer.
    expected_output = dedent(
        """\
        --------------
        project1/BUILD
        --------------
        # A comment
        files(sources=[])

        --------------
        project2/BUILD
        --------------
        # Another comment
        files(sources=[])
        """
    )
    assert result.stdout == expected_output
def test_map_third_party_modules_to_addresses(rule_runner: RuleRunner) -> None:
    """Third-party requirements map to modules via name normalization, explicit
    `module_mapping`, and direct references; a module claimed by two owners is omitted."""
    rule_runner.add_to_build_file(
        "3rdparty/python",
        dedent(
            """\
            python_requirement_library(
                name='ansicolors',
                requirements=['ansicolors==1.21'],
                module_mapping={'ansicolors': ['colors']},
            )

            python_requirement_library(
                name='req1',
                requirements=['req1', 'two_owners'],
            )

            python_requirement_library(
                name='un_normalized',
                requirements=['Un-Normalized-Project>3', 'two_owners'],
            )

            python_requirement_library(
                name='direct_references',
                requirements=[
                    'pip@ git+https://github.com/pypa/pip.git',
                    'local_dist@ file:///path/to/dist.whl',
                ],
            )
            """
        ),
    )
    result = rule_runner.request(ThirdPartyModuleToAddressMapping, [])
    # `two_owners` is claimed by both req1 and un_normalized, so it does not appear.
    assert result.mapping == FrozenDict(
        {
            "colors": Address("3rdparty/python", target_name="ansicolors"),
            "local_dist": Address("3rdparty/python", target_name="direct_references"),
            "pip": Address("3rdparty/python", target_name="direct_references"),
            "req1": Address("3rdparty/python", target_name="req1"),
            "un_normalized_project": Address("3rdparty/python", target_name="un_normalized"),
        }
    )
def assert_pants_requirement(
    rule_runner: RuleRunner,
    build_file_entry: str,
    *,
    expected_target_name: str,
    expected_dist: str = "pantsbuild.pants",
    expected_module: str = "pants",
) -> None:
    """Materialize `build_file_entry` under 3rdparty/python and verify the resulting target.

    Checks that the target pins `expected_dist` to the running Pants version and maps the
    dist to `expected_module`.
    """
    rule_runner.add_to_build_file("3rdparty/python", f"{build_file_entry}\n")
    target = rule_runner.request_product(
        WrappedTarget,
        [
            Address("3rdparty/python", target_name=expected_target_name),
            create_options_bootstrapper(),
        ],
    ).target
    assert isinstance(target, PythonRequirementLibrary)
    assert target[PythonRequirementsField].value == (
        Requirement.parse(f"{expected_dist}=={pants_version()}"),
    )
    assert target[ModuleMappingField].value == FrozenDict(
        {expected_dist: (expected_module,)}
    )
def test_get_owner_multiple_ancestor_generations(
    exporting_owner_rule_runner: RuleRunner,
) -> None:
    """With distributions at several ancestor levels, the closest one that depends on a
    target is its owner."""
    runner = exporting_owner_rule_runner
    runner.add_to_build_file(
        "src/python/aaa/bbb/ccc",
        textwrap.dedent(
            """
            python_library(name='ccc', sources=[])
            """
        ),
    )
    runner.add_to_build_file(
        "src/python/aaa/bbb",
        textwrap.dedent(
            """
            python_distribution(
                name='bbb',
                dependencies=['src/python/aaa/bbb/ccc'],
                provides=setup_py(name='bbb', version='1.1.1'),
            )
            """
        ),
    )
    runner.add_to_build_file(
        "src/python/aaa",
        textwrap.dedent(
            """
            python_distribution(
                name='aaa',
                dependencies=['src/python/aaa/bbb/ccc'],
                provides=setup_py(name='aaa', version='2.2.2'),
            )
            """
        ),
    )
    expectations = [
        ("src/python/aaa/bbb", Address("src/python/aaa/bbb/ccc")),
        ("src/python/aaa/bbb", Address("src/python/aaa/bbb")),
        ("src/python/aaa", Address("src/python/aaa")),
    ]
    for owner, owned in expectations:
        assert_is_owner(runner, owner, owned)
def test_uses_correct_python_version(rule_runner: RuleRunner) -> None:
    """We set `--python-version` automatically for the user, and also batch based on
    interpreter constraints.

    This batching must consider transitive dependencies, so we use a more complex setup
    where the dependencies are what have specific constraints that influence the batching.
    """
    # Py2-only dependency: type comment + print statement only parse under 2.7.
    rule_runner.create_file(f"{PACKAGE}/py2/__init__.py")
    rule_runner.create_file(
        f"{PACKAGE}/py2/lib.py",
        dedent(
            """\
            def add(x, y):
                # type: (int, int) -> int
                print "old school"
                return x + y
            """
        ),
    )
    rule_runner.add_to_build_file(
        f"{PACKAGE}/py2", "python_library(interpreter_constraints=['==2.7.*'])"
    )

    # Py3-only dependency, using annotations.
    rule_runner.create_file(f"{PACKAGE}/py3/__init__.py")
    rule_runner.create_file(
        f"{PACKAGE}/py3/lib.py",
        dedent(
            """\
            def add(x: int, y: int) -> int:
                return x + y
            """
        ),
    )
    rule_runner.add_to_build_file(
        f"{PACKAGE}/py3", "python_library(interpreter_constraints=['>=3.6'])"
    )

    # Our input files belong to the same target, which is compatible with both Py2 and Py3.
    rule_runner.create_file(f"{PACKAGE}/__init__.py")
    rule_runner.create_file(
        f"{PACKAGE}/uses_py2.py", "from project.py2.lib import add\nassert add(2, 2) == 4\n"
    )
    rule_runner.create_file(
        f"{PACKAGE}/uses_py3.py", "from project.py3.lib import add\nassert add(2, 2) == 4\n"
    )
    rule_runner.add_to_build_file(
        PACKAGE, "python_library(interpreter_constraints=['==2.7.*', '>=3.6'])"
    )
    py2_target = rule_runner.get_target(Address(PACKAGE, relative_file_path="uses_py2.py"))
    py3_target = rule_runner.get_target(Address(PACKAGE, relative_file_path="uses_py3.py"))

    result = run_mypy(rule_runner, [py2_target, py3_target])
    assert len(result) == 2
    # Sort by partition description so the Py2 partition comes first deterministically.
    py2_result, py3_result = sorted(
        result, key=lambda res: res.partition_description or ""
    )

    assert py2_result.exit_code == 0
    assert py2_result.partition_description == "['CPython==2.7.*', 'CPython==2.7.*,>=3.6']"
    # Fix: this assertion previously checked `py3_result.stdout` (duplicating the check
    # below), so the Py2 partition's output was never actually verified.
    assert "Success: no issues found" in py2_result.stdout

    assert py3_result.exit_code == 0
    assert py3_result.partition_description == "['CPython==2.7.*,>=3.6', 'CPython>=3.6']"
    assert "Success: no issues found" in py3_result.stdout.strip()