def test_lockfile_validation(rule_runner: RuleRunner) -> None:
    """Check that we properly load and validate lockfile metadata for both types of locks.

    Note that we don't exhaustively test every source of lockfile failure nor the different
    options for `--invalid-lockfile-behavior`, as those are already tested in
    pex_requirements_test.py.
    """
    # We create a lockfile that claims it works with no requirements. It should fail when we try
    # to build a PEX with a requirement.
    # NOTE(review): `delimeter` is the spelling of the project API's keyword — do not "fix" it
    # here without changing the declaring function.
    lock_content = PythonLockfileMetadata.new(
        InterpreterConstraints(), set()
    ).add_header_to_lockfile(b"", regenerate_command="regen", delimeter="#")
    rule_runner.write_files({"lock.txt": lock_content.decode()})

    lockfile = Lockfile(
        "lock.txt",
        file_path_description_of_origin="a test",
        resolve_name="a",
        # Fix: wrap in a list. `FrozenOrderedSet("ansicolors")` iterates the string and
        # produces a set of single characters, not the requirement string itself.
        req_strings=FrozenOrderedSet(["ansicolors"]),
    )
    with engine_error(InvalidLockfileError):
        create_pex_and_get_all_data(rule_runner, requirements=lockfile)

    lockfile_content = LockfileContent(
        FileContent("lock.txt", lock_content),
        resolve_name="a",
        # Same fix as above: one requirement string, not its characters.
        req_strings=FrozenOrderedSet(["ansicolors"]),
    )
    with engine_error(InvalidLockfileError):
        create_pex_and_get_all_data(rule_runner, requirements=lockfile_content)
def test_find_thrift_python_requirement(rule_runner: RuleRunner) -> None:
    """Check how the `thrift` runtime requirement is found for generated Python code.

    The matching `python_requirement` must live in the same resolve as the thrift target;
    zero matches and multiple matches within that resolve are both errors.
    """
    rule_runner.write_files({"codegen/dir/f.thrift": "", "codegen/dir/BUILD": "thrift_sources()"})
    rule_runner.set_options(
        ["--python-resolves={'python-default': '', 'another': ''}", "--python-enable-resolves"]
    )
    thrift_tgt = rule_runner.get_target(Address("codegen/dir", relative_file_path="f.thrift"))
    request = InjectApacheThriftPythonDependencies(thrift_tgt[Dependencies])

    # Start with no relevant requirements.
    with engine_error(MissingPythonCodegenRuntimeLibrary):
        rule_runner.request(InjectedDependencies, [request])

    # If exactly one, match it.
    rule_runner.write_files({"reqs1/BUILD": "python_requirement(requirements=['thrift'])"})
    assert rule_runner.request(InjectedDependencies, [request]) == InjectedDependencies(
        [Address("reqs1")]
    )

    # Multiple is fine if from other resolve.
    rule_runner.write_files(
        {"another_resolve/BUILD": "python_requirement(requirements=['thrift'], resolve='another')"}
    )
    assert rule_runner.request(InjectedDependencies, [request]) == InjectedDependencies(
        [Address("reqs1")]
    )

    # If multiple from the same resolve, error.
    rule_runner.write_files({"reqs2/BUILD": "python_requirement(requirements=['thrift'])"})
    with engine_error(
        AmbiguousPythonCodegenRuntimeLibrary, contains="['reqs1:reqs1', 'reqs2:reqs2']"
    ):
        rule_runner.request(InjectedDependencies, [request])
def test_resolve_handler(rule_runner: RuleRunner) -> None:
    """Check that an AWS Lambda handler resolves in both module-path and file-path forms."""

    def assert_resolved(handler: str, *, expected: str, is_file: bool) -> None:
        # The source files are (re)written on each call so every resolution sees them.
        addr = Address("src/python/project")
        rule_runner.write_files(
            {"src/python/project/lambda.py": "", "src/python/project/f2.py": ""}
        )
        field = PythonAwsLambdaHandlerField(handler, addr)
        result = rule_runner.request(
            ResolvedPythonAwsHandler, [ResolvePythonAwsHandlerRequest(field)]
        )
        assert result.val == expected
        assert result.file_name_used == is_file

    # Module-path handlers pass through unchanged; file-path handlers map to module paths.
    assert_resolved("path.to.lambda:func", expected="path.to.lambda:func", is_file=False)
    assert_resolved("lambda.py:func", expected="project.lambda:func", is_file=True)

    # A file-path handler matching no file is an error.
    with engine_error(contains="Unmatched glob"):
        assert_resolved("doesnt_exist.py:func", expected="doesnt matter", is_file=True)
    # Resolving >1 file is an error.
    with engine_error(InvalidFieldException):
        assert_resolved("*.py:func", expected="doesnt matter", is_file=True)
def test_choose_compatible_resolve(rule_runner: RuleRunner) -> None:
    """Choosing a resolve succeeds only when the roots and their dependencies agree on one."""

    def create_target_files(
        directory: str, *, req_resolve: str, source_resolve: str, test_resolve: str
    ) -> dict[str | PurePath, str | bytes]:
        # A python_test depending on a python_source and a python_requirement, each with a
        # configurable resolve, so callers can create compatible or conflicting setups.
        return {
            f"{directory}/BUILD": dedent(
                f"""\
                python_source(name="dep", source="dep.py", resolve="{source_resolve}")
                python_requirement(
                    name="req", requirements=[], resolve="{req_resolve}"
                )
                python_test(
                    name="test",
                    source="tests.py",
                    dependencies=[":dep", ":req"],
                    resolve="{test_resolve}",
                )
                """
            ),
            f"{directory}/tests.py": "",
            f"{directory}/dep.py": "",
        }

    rule_runner.set_options(
        ["--python-resolves={'a': '', 'b': ''}", "--python-enable-resolves"], env_inherit={"PATH"}
    )
    rule_runner.write_files(
        {
            # Note that each of these BUILD files are entirely self-contained.
            **create_target_files("valid", req_resolve="a", source_resolve="a", test_resolve="a"),
            **create_target_files(
                "invalid",
                req_resolve="a",
                source_resolve="a",
                test_resolve="b",
            ),
        }
    )

    def choose_resolve(addresses: list[Address]) -> str:
        return rule_runner.request(
            ChosenPythonResolve, [ChosenPythonResolveRequest(Addresses(addresses))]
        ).name

    # All targets in `valid` share resolve "a".
    assert choose_resolve([Address("valid", target_name="test")]) == "a"
    assert choose_resolve([Address("valid", target_name="dep")]) == "a"
    assert choose_resolve([Address("valid", target_name="req")]) == "a"

    # In `invalid`, the test's resolve "b" conflicts with its dependencies' resolve "a".
    with engine_error(NoCompatibleResolveException, contains="its dependencies are not compatible"):
        choose_resolve([Address("invalid", target_name="test")])
    with engine_error(NoCompatibleResolveException, contains="its dependencies are not compatible"):
        choose_resolve([Address("invalid", target_name="dep")])

    with engine_error(
        NoCompatibleResolveException, contains="input targets did not have a resolve"
    ):
        choose_resolve(
            [Address("invalid", target_name="req"), Address("invalid", target_name="dep")]
        )
def test_determine_main_pkg_for_go_binary(rule_runner: RuleRunner) -> None:
    """Resolve a `go_binary`'s main package explicitly, by inference, and in error cases."""
    rule_runner.write_files(
        {
            "go.mod": dedent(
                """\
                module example.com/foo
                go 1.17
                """
            ),
            "BUILD": "go_mod(name='mod')",
            "explicit/f.go": "",
            "explicit/BUILD": "go_binary(main=':pkg')\ngo_package(name='pkg')",
            "inferred/f.go": "",
            "inferred/BUILD": "go_binary()\ngo_package(name='pkg')",
            "ambiguous/f.go": "",
            "ambiguous/BUILD": "go_binary()\ngo_package(name='pkg1')\ngo_package(name='pkg2')",
            # Note there are no `.go` files in this dir.
            "missing/BUILD": "go_binary()",
            "explicit_wrong_type/BUILD": dedent(
                """\
                target(name='dep')
                go_binary(main=':dep')
                """
            ),
        }
    )

    def get_main(addr: Address) -> Address:
        # Resolve the main package, and also check that dependency injection points at the
        # same address.
        tgt = rule_runner.get_target(addr)
        main_addr = rule_runner.request(
            GoBinaryMainPackage, [GoBinaryMainPackageRequest(tgt[GoBinaryMainPackageField])]
        ).address
        injected_addresses = rule_runner.request(
            InjectedDependencies, [InjectGoBinaryMainDependencyRequest(tgt[Dependencies])]
        )
        assert [main_addr] == list(injected_addresses)
        return main_addr

    assert get_main(Address("explicit")) == Address("explicit", target_name="pkg")
    assert get_main(Address("inferred")) == Address("inferred", target_name="pkg")

    with engine_error(ResolveError, contains="none were found"):
        get_main(Address("missing"))
    with engine_error(ResolveError, contains="There are multiple `go_package` targets"):
        get_main(Address("ambiguous"))
    with engine_error(InvalidFieldException, contains="must point to a `go_package` target"):
        get_main(Address("explicit_wrong_type"))
def test_conflicting_scala_library_triggers_error(rule_runner: RuleRunner) -> None:
    """A user-declared `jvm_artifact` for the Scala runtime conflicts with the injected one."""
    rule_runner.write_files(
        {
            "foo/BUILD": textwrap.dedent(
                """\
                scala_sources(resolve='foo')
                jvm_artifact(
                    name="org.scala-lang_scala-library_2.13.1",
                    group="org.scala-lang",
                    artifact="scala-library",
                    version="2.13.1",
                    resolve="foo",
                )
                """
            ),
            "foo/Foo.scala": "package foo",
        }
    )
    with engine_error(
        ValueError,
        contains="The JVM resolve `foo` contains a `jvm_artifact` for version 2.13.1 of the Scala runtime",
    ):
        _ = rule_runner.request(
            UserGenerateLockfiles,
            [RequestedJVMUserResolveNames(["foo"])],
        )
def test_cgo_not_supported(rule_runner: RuleRunner) -> None:
    """Analyzing a first-party Go package that uses cgo (`import "C"`) raises NotImplementedError."""
    rule_runner.write_files(
        {
            "BUILD": "go_mod(name='mod')\n",
            "go.mod": dedent(
                """\
                module go.example.com/foo
                go 1.17
                """
            ),
            # The C snippet in the comment block plus `import "C"` is the canonical cgo form.
            "hello.go": dedent(
                """\
                package main

                // int fortytwo()
                // {
                //     return 42;
                // }
                import "C"
                import "fmt"

                func main() {
                    f := C.intFunc(C.fortytwo)
                    fmt.Println(C.intFunc(C.fortytwo))
                }
                """
            ),
        }
    )
    with engine_error(NotImplementedError):
        rule_runner.request(
            FallibleFirstPartyPkgInfo,
            [FirstPartyPkgInfoRequest(Address("", target_name="mod", generated_name="./"))],
        )
def test_filter_by_target_type(rule_runner: RuleRunner, caplog) -> None:
    """Filter targets by target type, including +/- prefixes and deprecated aliases."""
    rule_runner.write_files(
        {
            "BUILD": dedent(
                """\
                tgt(name="tgt")
                nonfile_generator(name="nonfile")
                """
            ),
        }
    )
    assert_targets(rule_runner, {"//:tgt"}, target_type=["tgt"])
    assert_targets(rule_runner, {"//:nonfile"}, target_type=["+nonfile_generator"])
    assert_targets(rule_runner, {"//:tgt", "//:nonfile#gen"}, target_type=["-nonfile_generator"])
    # The comma is inside the string, so these are ORed.
    assert_targets(rule_runner, {"//:tgt", "//:nonfile"}, target_type=["tgt,nonfile_generator"])
    # A target can only have one type, so this output should be empty.
    assert_targets(rule_runner, set(), target_type=["tgt", "nonfile_generator"])

    # Deprecated aliases works too, with a deprecation warning logged.
    caplog.clear()
    assert_targets(rule_runner, {"//:tgt"}, target_type=["deprecated_tgt"])
    assert caplog.records
    assert "`--filter-target-type=deprecated_tgt`" in caplog.text

    with engine_error(UnrecognizedTargetTypeException):
        assert_targets(rule_runner, set(), target_type=["invalid"])
def test_user_repo_order_is_respected(rule_runner: RuleRunner) -> None:
    """Tests that the repo resolution order issue found in #14577 is avoided."""
    jai_core = Coordinate(group="javax.media", artifact="jai_core", version="1.1.3")

    # `repo1.maven.org` has a bogus POM that Coursier hits first
    # `repo.osgeo.org` has a valid POM and should succeed
    rule_runner.set_options(
        args=[
            """--coursier-repos=['https://repo1.maven.org/maven2', 'https://repo.osgeo.org/repository/release']"""
        ],
        env_inherit=PYTHON_BOOTSTRAP_ENV,
    )
    # Bad repo listed first: resolution fails.
    with engine_error(ProcessExecutionFailure):
        rule_runner.request(
            CoursierResolvedLockfile,
            [
                ArtifactRequirements.from_coordinates([jai_core]),
            ],
        )

    # Good repo listed first: resolution succeeds.
    rule_runner.set_options(
        args=[
            """--coursier-repos=['https://repo.osgeo.org/repository/release', 'https://repo1.maven.org/maven2']"""
        ],
        env_inherit=PYTHON_BOOTSTRAP_ENV,
    )
    rule_runner.request(
        CoursierResolvedLockfile,
        [
            ArtifactRequirements.from_coordinates([jai_core]),
        ],
    )
def test_raw_specs_with_only_file_owners_nonexistent_file(rule_runner: RuleRunner) -> None:
    """A literal spec for a missing file errors unless nonexistent files are ignored."""
    missing_file_spec = FileLiteralSpec("demo/fake.txt")

    # Without the escape hatch, the unmatched glob surfaces as an engine error.
    with engine_error(contains='Unmatched glob from tests: "demo/fake.txt"'):
        resolve_raw_specs_with_only_file_owners(rule_runner, [missing_file_spec])

    # With ignore_nonexistent=True, we simply get no owners back.
    owners = resolve_raw_specs_with_only_file_owners(
        rule_runner, [missing_file_spec], ignore_nonexistent=True
    )
    assert not owners
def test_pkg_missing(rule_runner: RuleRunner) -> None:
    """Requesting info for a package absent from the download digest raises an error."""
    digest = set_up_go_mod(rule_runner, GO_MOD, GO_SUM)
    expected_message = "The package `another_project.org/foo` was not downloaded"
    with engine_error(AssertionError, contains=expected_message):
        rule_runner.request(
            ThirdPartyPkgInfo,
            [ThirdPartyPkgInfoRequest("another_project.org/foo", digest)],
        )
def test_sources_expected_num_files(sources_rule_runner: RuleRunner) -> None:
    """`DebianSources` must resolve to at least one file, all from a single directory."""
    sources_rule_runner.write_files(
        {
            f: ""
            for f in [
                "f1.txt",
                "f2.txt",
                "dirA/f3.txt",
                "dirB/f4.txt",
                "dirC/f5.txt",
                "dirC/f6.txt",
            ]
        }
    )

    def hydrate(sources_cls: Type[DebianSources], sources: Iterable[str]) -> HydratedSources:
        # Hydrate a sources field of the given class with the given raw globs.
        return sources_rule_runner.request(
            HydratedSources,
            [
                HydrateSourcesRequest(sources_cls(sources, Address("", target_name="example"))),
            ],
        )

    with engine_error(contains="must resolve to at least one file"):
        hydrate(DebianSources, [])
    with engine_error(contains="must resolve to at least one file"):
        hydrate(DebianSources, ["non-existing-dir/*"])
    with engine_error(contains="Individual files were found"):
        hydrate(DebianSources, ["f1.txt", "f2.txt"])
    with engine_error(contains="Multiple directories were found"):
        hydrate(DebianSources, ["dirA/f3.txt", "dirB/f4.txt"])

    # Also check that we support valid sources declarations.
    assert hydrate(DebianSources, ["dirC/f5.txt", "dirC/f6.txt"]).snapshot.files == (
        "dirC/f5.txt",
        "dirC/f6.txt",
    )
    assert hydrate(DebianSources, ["dirC/*"]).snapshot.files == ("dirC/f5.txt", "dirC/f6.txt")
def test_invalid_req(rule_runner: RuleRunner) -> None:
    """Test that we give a nice error message."""
    build_file = "python_requirements(name='reqs')"

    def expect_error(requirements_txt: str, error_snippet: str) -> None:
        # Run the macro over the given requirements.txt content and expect a specific error.
        with engine_error(contains=error_snippet):
            assert_python_requirements(
                rule_runner, build_file, requirements_txt, expected_targets=set()
            )

    # A malformed requirement reports its file and line number.
    expect_error(
        "\n\nNot A Valid Req == 3.7",
        "Invalid requirement 'Not A Valid Req == 3.7' in requirements.txt at line 3",
    )
    # Give a nice error message if it looks like they're using pip VCS-style requirements.
    expect_error(
        "git+https://github.com/pypa/pip.git#egg=pip",
        "It looks like you're trying to use a pip VCS-style requirement?",
    )
def test_infer_python_strict_multiple_resolves(imports_rule_runner: RuleRunner) -> None:
    """With strict unowned-dependency behavior, imports owned only in other resolves error.

    The error lists, per unowned import, which resolves do provide an owner.
    """
    imports_rule_runner.write_files(
        {
            "project/base.py": "",
            "project/utils.py": "",
            "project/app.py": "import project.base\nimport project.utils",
            "project/BUILD": dedent(
                """\
                python_source(
                    name="base",
                    source="base.py",
                    resolve="a",
                )

                python_source(
                    name="utils",
                    source="utils.py",
                    resolve=parametrize("a", "b"),
                )

                python_source(
                    name="app",
                    source="app.py",
                    resolve="z",
                )
                """
            ),
        }
    )
    imports_rule_runner.set_options(
        [
            "--python-infer-unowned-dependency-behavior=error",
            "--python-enable-resolves",
            "--python-resolves={'a': '', 'b': '', 'z': ''}",
        ],
        env_inherit=PYTHON_BOOTSTRAP_ENV,
    )

    tgt = imports_rule_runner.get_target(Address("project", target_name="app"))
    expected_error = softwrap(
        """
        These imports are not in the resolve used by the target (`z`), but they were present in other resolves:

          * project.base: 'a' from project:base
          * project.utils: 'a' from project:utils@resolve=a, 'b' from project:utils@resolve=b
        """
    )
    with engine_error(UnownedDependencyError, contains=expected_error):
        imports_rule_runner.request(
            InferredDependencies, [InferPythonImportDependencies(tgt[PythonSourceField])]
        )
def test_bad_dict_format(rule_runner: RuleRunner) -> None:
    """A malformed dict-style Poetry dependency gives a helpful parse error."""
    with engine_error(contains="not formatted correctly; at"):
        assert_poetry_requirements(
            rule_runner,
            "poetry_requirements(name='reqs')",
            """
            [tool.poetry.dependencies]
            foo = {bad_req = "test"}
            [tool.poetry.dev-dependencies]
            """,
            expected_targets=set(),
        )
def test_bad_req_type(rule_runner: RuleRunner) -> None:
    """A Poetry dependency whose value has an unsupported type (int) gives a helpful error."""
    with engine_error(contains="was of type int"):
        assert_poetry_requirements(
            rule_runner,
            "poetry_requirements(name='reqs')",
            """
            [tool.poetry.dependencies]
            foo = 4
            [tool.poetry.dev-dependencies]
            """,
            expected_targets=set(),
        )
def test_invalid_req(rule_runner: RuleRunner) -> None:
    """Test that we give a nice error message."""
    # NOTE(review): a test with this exact name appears earlier in this source. If both live
    # in the same module, this definition shadows the first and pytest collects only one —
    # confirm and rename one of them if so.
    with engine_error(
        contains=
        "Invalid requirement 'Not A Valid Req == 3.7' in requirements.txt at line 3"
    ):
        assert_python_requirements(
            rule_runner,
            "python_requirements(name='reqs')",
            "\n\nNot A Valid Req == 3.7",
            expected_targets=set(),
        )
def test_missing_scala_library_triggers_error(rule_runner: RuleRunner) -> None:
    """Generating a lockfile for a Scala resolve with no scala-library requirement errors."""
    project_files = {
        "foo/BUILD": "scala_sources(resolve='foo')",
        "foo/Foo.scala": "package foo",
    }
    rule_runner.write_files(project_files)

    expected_snippet = "does not contain a requirement for the Scala runtime"
    with engine_error(ValueError, contains=expected_snippet):
        rule_runner.request(
            UserGenerateLockfiles, [RequestedJVMUserResolveNames(["foo"])]
        )
def test_non_pep440_error(rule_runner: RuleRunner) -> None:
    """A Poetry version spec that cannot be parsed as a requirement gives a helpful error."""
    with engine_error(
        contains=
        'Failed to parse requirement foo = "~r62b" in pyproject.toml'
    ):
        assert_poetry_requirements(
            rule_runner,
            "poetry_requirements(name='reqs')",
            """
            [tool.poetry.dependencies]
            foo = "~r62b"
            [tool.poetry.dev-dependencies]
            """,
            expected_targets=set(),
        )
def test_resolve_conflicting(rule_runner: RuleRunner) -> None:
    """Two conflicting versions of the same artifact cause a Coursier resolution failure."""
    with engine_error(
        ProcessExecutionFailure,
        contains="Resolution error: Unsatisfied rule Strict(junit:junit)",
    ):
        rule_runner.request(
            CoursierResolvedLockfile,
            [
                ArtifactRequirements(
                    [
                        Coordinate(group="junit", artifact="junit", version="4.8.1"),
                        Coordinate(group="junit", artifact="junit", version="4.8.2"),
                    ]
                ),
            ],
        )
def test_invalid_go_sum(rule_runner: RuleRunner) -> None:
    """`go.sum` hashes that don't match the downloaded module trigger a SECURITY ERROR."""
    digest = set_up_go_mod(
        rule_runner,
        dedent(
            """\
            module example.com/third-party-module
            go 1.17
            require github.com/google/uuid v1.3.0
            """
        ),
        # Both hashes are deliberately corrupted (prefixes zeroed out).
        dedent(
            """\
            github.com/google/uuid v1.3.0 h1:00000gmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I=
            github.com/google/uuid v1.3.0/go.mod h1:00000e4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
            """
        ),
    )
    with engine_error(ProcessExecutionFailure, contains="SECURITY ERROR"):
        rule_runner.request(AllThirdPartyPackages, [AllThirdPartyPackagesRequest(digest, "go.mod")])
def test_bad_python_source_root(rule_runner: RuleRunner) -> None:
    """Pointing `python_source_root` at a non-source-root directory raises NoSourceRootError."""
    rule_runner.write_files(
        {
            "src/protobuf/dir1/f.proto": dedent(
                """\
                syntax = "proto3";
                package dir1;
                """
            ),
            "src/protobuf/dir1/BUILD": "protobuf_sources(python_source_root='notasourceroot')",
        }
    )
    with engine_error(NoSourceRootError):
        assert_files_generated(
            rule_runner,
            Address("src/protobuf/dir1", relative_file_path="f.proto"),
            source_roots=["src/protobuf"],
            expected_files=[],
        )
def test_missing_go_sum(rule_runner: RuleRunner) -> None:
    """A required module with no entry in `go.sum` fails with a "missing go.sum entry" error."""
    digest = set_up_go_mod(
        rule_runner,
        dedent(
            """\
            module example.com/third-party-module
            go 1.17
            require github.com/google/uuid v1.3.0
            """
        ),
        # `go.sum` is for a different module.
        dedent(
            """\
            cloud.google.com/go v0.26.0 h1:e0WKqKTd5BnrG8aKH3J3h+QvEIQtSUcf2n5UZ5ZgLtQ=
            cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
            """
        ),
    )
    # Fix: the expected string had been mangled to "[email protected]" (email-obfuscation
    # artifact). Go reports missing go.sum entries as `module@version`, matching the
    # `require github.com/google/uuid v1.3.0` declared above.
    with engine_error(contains="github.com/google/uuid@v1.3.0: missing go.sum entry"):
        rule_runner.request(AllThirdPartyPackages, [AllThirdPartyPackagesRequest(digest, "go.mod")])
def test_eagerly_validate_roots_have_common_resolve(rule_runner: RuleRunner) -> None:
    """REPL roots with incompatible resolves fail eagerly, pointing the user at `./pants peek`."""
    rule_runner.write_files(
        {
            "BUILD": dedent(
                """\
                python_requirement(name='t1', requirements=[], resolve='a')
                python_source(name='t2', source='f.py', resolve='b')
                """
            )
        }
    )
    with engine_error(NoCompatibleResolveException, contains="./pants peek"):
        run_repl(
            rule_runner,
            ["//:t1", "//:t2"],
            global_args=[
                "--python-resolves={'a': '', 'b': ''}",
                "--python-enable-resolves",
            ],
        )
def test_third_party_dep_inference_with_incorrect_provides(rule_runner: RuleRunner) -> None:
    """A first-party `experimental_provides_types` clashing with the third-party mapping errors."""
    rule_runner.set_options(
        [
            "--java-infer-third-party-import-mapping={'org.joda.time.**':'joda-time:joda-time', 'org.joda.time.DateTime':'joda-time:joda-time-2'}",
        ],
        env_inherit=PYTHON_BOOTSTRAP_ENV,
    )
    rule_runner.write_files(
        {
            "BUILD": dedent(
                """\
                jvm_artifact(
                    name = "joda-time_joda-time",
                    group = "joda-time",
                    artifact = "joda-time",
                    version = "2.10.10",
                )

                java_sources(
                    name = 'lib',
                    experimental_provides_types = ['org.joda.time.DateTime', ],
                )
                """
            ),
            "PrintDate.java": dedent(
                """\
                package org.pantsbuild.example;

                import org.joda.time.DateTime;

                public class PrintDate {
                    public static void main(String[] args) {
                        DateTime dt = new DateTime();
                        System.out.println(dt.toString());
                    }
                }
                """
            ),
        }
    )
    lib1 = rule_runner.get_target(
        Address("", target_name="lib", relative_file_path="PrintDate.java")
    )
    # `lib` claims to provide org.joda.time.DateTime, which the option also maps third-party.
    with engine_error(JvmFirstPartyPackageMappingException):
        rule_runner.request(Addresses, [DependenciesRequest(lib1[Dependencies])])
def test_filter_by_address_regex(rule_runner: RuleRunner) -> None:
    """Filter targets by regexes on their addresses, including +/- prefixes."""
    rule_runner.write_files(
        {
            "dir1/BUILD": "tgt(name='lib')",
            "dir2/BUILD": "tgt(name='lib')",
            "common/BUILD": "tgt(name='tests')",
        }
    )
    assert_targets(rule_runner, {"dir1:lib", "dir2:lib"}, address_regex=[r"^dir"])
    assert_targets(rule_runner, {"dir1:lib"}, address_regex=[r"+dir1:lib$"])
    assert_targets(rule_runner, {"common:tests"}, address_regex=["-dir"])
    # The comma ORs the regex.
    assert_targets(rule_runner, {"common:tests", "dir1:lib"}, address_regex=["dir1,common"])
    # This ANDs the regex.
    assert_targets(rule_runner, {"dir2:lib"}, address_regex=[r"^dir", "2:lib$"])

    # Invalid regex.
    with engine_error(re.error):
        assert_targets(rule_runner, set(), address_regex=["("])
def test_stale_go_mod(rule_runner: RuleRunner) -> None:
    """A `go.mod` missing required indirect dependencies fails with "updates to go.mod needed"."""
    digest = set_up_go_mod(
        rule_runner,
        # Go 1.17+ expects indirect dependencies to be included in the `go.mod`, i.e.
        # `golang.org/x/xerrors `.
        dedent(
            """\
            module example.com/third-party-module
            go 1.17
            require github.com/google/go-cmp v0.5.6
            """
        ),
        dedent(
            """\
            github.com/google/go-cmp v0.5.6 h1:BKbKCqvP6I+rmFHt06ZmyQtvB8xAkWdhFyr0ZUNZcxQ=
            github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
            golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4=
            golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
            """
        ),
    )
    with engine_error(ProcessExecutionFailure, contains="updates to go.mod needed"):
        rule_runner.request(AllThirdPartyPackages, [AllThirdPartyPackagesRequest(digest, "go.mod")])
def test_filter_by_tag_regex(rule_runner: RuleRunner) -> None:
    """Filter targets by regexes on their tags, including +/- prefixes."""
    rule_runner.write_files(
        {
            "BUILD": dedent(
                """\
                tgt(name="no-tags")
                tgt(name="t1", tags=["tag1"])
                tgt(name="t2", tags=["tag2"])
                tgt(name="both", tags=["tag1", "tag2"])
                """
            ),
        }
    )
    assert_targets(rule_runner, {"//:both", "//:t2"}, tag_regex=[r"t.?g2$"])
    assert_targets(rule_runner, {"//:both", "//:t1"}, tag_regex=["+tag1"])
    assert_targets(rule_runner, {"//:no-tags"}, tag_regex=["-tag"])
    # The comma ORs the regex.
    assert_targets(rule_runner, {"//:both", "//:t1", "//:t2"}, tag_regex=[r"t.?g2$,tag1"])
    # This ANDs the regex.
    assert_targets(rule_runner, {"//:both"}, tag_regex=[r"t.?g2$", "tag1"])

    # Invalid regex.
    with engine_error(re.error):
        assert_targets(rule_runner, set(), tag_regex=["("])
def test_find_protobuf_python_requirement() -> None:
    """Check how the `protobuf` and `grpcio` runtime requirements are found for generated code.

    Matches must come from the same resolve as the protobuf target; zero matches or multiple
    matches within that resolve are errors.
    """
    rule_runner = RuleRunner(
        rules=[
            *python_protobuf_subsystem.rules(),
            *target_types.rules(),
            *module_mapper.rules(),
            *stripped_source_files.rules(),
            QueryRule(InjectedDependencies, (InjectPythonProtobufDependencies,)),
        ],
        target_types=[ProtobufSourcesGeneratorTarget, PythonRequirementTarget],
    )
    rule_runner.write_files(
        {"codegen/dir/f.proto": "", "codegen/dir/BUILD": "protobuf_sources(grpc=True)"}
    )
    rule_runner.set_options(
        ["--python-resolves={'python-default': '', 'another': ''}", "--python-enable-resolves"]
    )
    proto_tgt = rule_runner.get_target(Address("codegen/dir", relative_file_path="f.proto"))
    request = InjectPythonProtobufDependencies(proto_tgt[Dependencies])

    # Start with no relevant requirements.
    with engine_error(MissingPythonCodegenRuntimeLibrary, contains="protobuf"):
        rule_runner.request(InjectedDependencies, [request])
    # With protobuf present, grpc=True still requires a grpcio runtime library.
    rule_runner.write_files({"proto1/BUILD": "python_requirement(requirements=['protobuf'])"})
    with engine_error(MissingPythonCodegenRuntimeLibrary, contains="grpcio"):
        rule_runner.request(InjectedDependencies, [request])

    # If exactly one, match it.
    rule_runner.write_files({"grpc1/BUILD": "python_requirement(requirements=['grpc'])"})
    assert rule_runner.request(InjectedDependencies, [request]) == InjectedDependencies(
        [Address("proto1"), Address("grpc1")]
    )

    # Multiple is fine if from other resolve.
    rule_runner.write_files(
        {
            "another_resolve/BUILD": (
                "python_requirement(name='r1', requirements=['protobuf'], resolve='another')\n"
                "python_requirement(name='r2', requirements=['grpc'], resolve='another')\n"
            )
        }
    )
    assert rule_runner.request(InjectedDependencies, [request]) == InjectedDependencies(
        [Address("proto1"), Address("grpc1")]
    )

    # If multiple from the same resolve, error.
    rule_runner.write_files({"grpc2/BUILD": "python_requirement(requirements=['grpc'])"})
    with engine_error(
        AmbiguousPythonCodegenRuntimeLibrary, contains="['grpc1:grpc1', 'grpc2:grpc2']"
    ):
        rule_runner.request(InjectedDependencies, [request])
    rule_runner.write_files({"proto2/BUILD": "python_requirement(requirements=['protobuf'])"})
    with engine_error(
        AmbiguousPythonCodegenRuntimeLibrary, contains="['proto1:proto1', 'proto2:proto2']"
    ):
        rule_runner.request(InjectedDependencies, [request])
def assert_resolve_error(specs: Iterable[AddressSpec], *, expected: str) -> None:
    """Assert that resolving `specs` raises an engine error containing `expected`."""
    # `address_specs_rule_runner` is taken from the enclosing scope.
    error_expectation = engine_error(contains=expected)
    with error_expectation:
        resolve_address_specs(address_specs_rule_runner, specs)