def test_generate_request() -> None:
    """FooBar should build the expected download request for each platform/version,
    and raise UnknownVersion for a version with no known entry."""

    def do_test(
        expected_url: str,
        expected_length: int,
        expected_sha256: str,
        plat: Platform,
        version: str,
    ) -> None:
        foobar = create_subsystem(
            FooBar, version=version, known_versions=FooBar.default_known_versions
        )
        assert foobar.get_request(plat) == ExternalToolRequest(
            DownloadFile(
                url=expected_url,
                expected_digest=FileDigest(expected_sha256, expected_length),
            ),
            f"foobar-{version}/bin/foobar",
        )

    do_test(
        "https://foobar.org/bin/v3.2.0/foobar-3.2.0-osx-x86_64.tgz",
        123346,
        "1102324cdaacd589e50b8b7770595f220f54e18a1d76ee3c445198f80ab865b8",
        Platform.darwin,
        "3.2.0",
    )
    do_test(
        "https://foobar.org/bin/v3.4.7/foobar-3.4.7-linux-x86_64.tgz",
        134213,
        "a019dfc4b32d63c1392aa264aed2253c1e0c2fb09216f8e2cc269bbfb8bb49b5",
        Platform.linux,
        "3.4.7",
    )
    # Requesting an unknown version must fail loudly.
    with pytest.raises(UnknownVersion):
        create_subsystem(
            FooBar, version="9.9.9", known_versions=FooBar.default_known_versions
        ).get_request(Platform.darwin)
def test_all_roots_with_root_at_buildroot() -> None:
    """A root pattern of "/" should yield the buildroot itself as the sole source root."""
    source_root_config = create_subsystem(
        SourceRootConfig,
        root_patterns=["/"],
        marker_filenames=[],
    )

    # Mocks out reading real directories off the file system.
    def provider_rule(_: PathGlobs) -> Snapshot:
        dirs = ("foo",)  # A python package at the buildroot.
        return Snapshot(Digest("abcdef", 10), (), dirs)

    output = run_rule_with_mocks(
        all_roots,
        rule_args=[source_root_config],
        mock_gets=[
            MockGet(product_type=Snapshot, subject_type=PathGlobs, mock=provider_rule),
            MockGet(
                product_type=OptionalSourceRoot,
                subject_type=SourceRootRequest,
                mock=lambda req: OptionalSourceRoot(SourceRoot(".")),
            ),
        ],
    )
    assert set(output) == {SourceRoot(".")}
def validate(extra_requirements: list[str] | None = None) -> None:
    """Run PyTest's pytest-cov validation with the given extra requirements.

    `None` means "use the hardcoded defaults" (Rank.HARDCODED); an explicit list is
    treated as config-supplied (Rank.CONFIG).
    """
    if extra_requirements is None:
        extra_reqs_rv = RankedValue(Rank.HARDCODED, PyTest.default_extra_requirements)
    else:
        extra_reqs_rv = RankedValue(Rank.CONFIG, extra_requirements)
    pytest = create_subsystem(PyTest, extra_requirements=extra_reqs_rv)
    pytest.validate_pytest_cov_included()
def run_create_coverage_config_rule(coverage_config: Optional[str]) -> str:
    """Run `create_coverage_config` with mocked digests and return the resolved config text."""
    coverage = create_subsystem(
        CoverageSubsystem, config="some_file" if coverage_config else None
    )
    resolved_config: List[str] = []

    def mock_handle_config(request: CreateDigest) -> Digest:
        assert len(request) == 1
        assert request[0].path == ".coveragerc"
        assert request[0].is_executable is False
        resolved_config.append(request[0].content.decode())
        return Digest("jerry", 30)

    def mock_read_config(_: PathGlobs) -> DigestContents:
        # This shouldn't be called if no config file provided.
        assert coverage_config is not None
        return DigestContents(
            [FileContent(path="/dev/null/prelude", content=coverage_config.encode())]
        )

    mock_gets = [
        MockGet(product_type=DigestContents, subject_type=PathGlobs, mock=mock_read_config),
        MockGet(product_type=Digest, subject_type=CreateDigest, mock=mock_handle_config),
    ]
    result = run_rule_with_mocks(
        create_coverage_config, rule_args=[coverage], mock_gets=mock_gets
    )
    assert result.digest.fingerprint == "jerry"
    assert len(resolved_config) == 1
    return resolved_config[0]
def run_typecheck_rule(
    *,
    request_types: Sequence[Type[CheckRequest]],
    targets: list[Target],
    only: list[str] | None = None,
) -> Tuple[int, str]:
    """Run the `check` goal rule with mocked CheckResults; return (exit_code, stderr)."""
    union_membership = UnionMembership({CheckRequest: request_types})
    check_subsystem = create_subsystem(CheckSubsystem, only=only or [])
    with mock_console(create_options_bootstrapper()) as (console, stdio_reader):
        rule_runner = RuleRunner()
        result: Check = run_rule_with_mocks(
            check,
            rule_args=[
                console,
                Workspace(rule_runner.scheduler, _enforce_effects=False),
                Targets(targets),
                DistDir(relpath=Path("dist")),
                union_membership,
                check_subsystem,
            ],
            mock_gets=[
                MockGet(
                    output_type=CheckResults,
                    input_type=CheckRequest,
                    # Each mocked field-set collection carries its own results.
                    mock=lambda field_set_collection: field_set_collection.check_results,
                ),
            ],
            union_membership=union_membership,
        )
        # The check goal writes only to stderr.
        assert not stdio_reader.get_stdout()
        return result.exit_code, stdio_reader.get_stderr()
def test_group_field_sets_by_constraints_with_unsorted_inputs() -> None:
    """Grouping must preserve input order within a constraint bucket even when the
    inputs are interleaved with other constraints."""
    py3_fs = [
        MockFieldSet.create_for_test(
            Address("src/python/a_dir/path.py", target_name="test"), "==3.6.*"
        ),
        MockFieldSet.create_for_test(
            Address("src/python/b_dir/path.py", target_name="test"), ">2.7,<3"
        ),
        MockFieldSet.create_for_test(
            Address("src/python/c_dir/path.py", target_name="test"), "==3.6.*"
        ),
    ]
    ic_36 = PexInterpreterConstraints([Requirement.parse("CPython==3.6.*")])
    output = PexInterpreterConstraints.group_field_sets_by_constraints(
        py3_fs,
        python_setup=create_subsystem(PythonSetup, interpreter_constraints=[]),
    )
    assert output[ic_36] == (
        MockFieldSet.create_for_test(
            Address("src/python/a_dir/path.py", target_name="test"), "==3.6.*"
        ),
        MockFieldSet.create_for_test(
            Address("src/python/c_dir/path.py", target_name="test"), "==3.6.*"
        ),
    )
def test_no_compatible_resolve_error() -> None:
    """The exception message should list targets grouped by their resolve."""
    python_setup = create_subsystem(
        PythonSetup, resolves={"a": "", "b": ""}, enable_resolves=True
    )
    targets = [
        PythonRequirementTarget(
            {PythonRequirementsField.alias: [], PythonResolveField.alias: "a"},
            Address("", target_name="t1"),
        ),
        PythonSourceTarget(
            {PythonSourceField.alias: "f.py", PythonResolveField.alias: "a"},
            Address("", target_name="t2"),
        ),
        PythonSourceTarget(
            {PythonSourceField.alias: "f.py", PythonResolveField.alias: "b"},
            Address("", target_name="t3"),
        ),
    ]
    assert str(NoCompatibleResolveException(python_setup, "Prefix", targets)).startswith(
        dedent(
            """\
            Prefix:

            a:
              * //:t1
              * //:t2

            b:
              * //:t3
            """
        )
    )
def create_python_setup(
    behavior: InvalidLockfileBehavior, *, enable_resolves: bool = True
) -> PythonSetup:
    """Build a PythonSetup subsystem configured for lockfile-validation tests."""
    return create_subsystem(
        PythonSetup,
        invalid_lockfile_behavior=behavior,
        resolves_generate_lockfiles=enable_resolves,
        interpreter_versions_universe=PythonSetup.default_interpreter_universe,
    )
def single_target_run(
    rule_runner: RuleRunner,
    address: Address,
    *,
    program_text: bytes,
) -> Run:
    """Run the `run` goal rule against a synthetic binary target with all Gets mocked."""
    workspace = Workspace(rule_runner.scheduler, _enforce_effects=False)

    class TestRunFieldSet(RunFieldSet):
        required_fields = ()

    class TestBinaryTarget(Target):
        alias = "binary"
        core_fields = ()

    target = TestBinaryTarget({}, address)
    field_set = TestRunFieldSet.create(target)
    with mock_console(rule_runner.options_bootstrapper) as (console, _):
        res = run_rule_with_mocks(
            run,
            rule_args=[
                create_goal_subsystem(RunSubsystem, args=[], cleanup=True),
                create_subsystem(
                    GlobalOptions,
                    pants_workdir=rule_runner.pants_workdir,
                    process_cleanup=True,
                ),
                workspace,
                BuildRoot(),
                rule_runner.environment,
            ],
            mock_gets=[
                MockGet(
                    output_type=TargetRootsToFieldSets,
                    input_type=TargetRootsToFieldSetsRequest,
                    mock=lambda _: TargetRootsToFieldSets({target: [field_set]}),
                ),
                MockGet(
                    output_type=WrappedTarget,
                    input_type=WrappedTargetRequest,
                    mock=lambda _: WrappedTarget(target),
                ),
                MockGet(
                    output_type=RunRequest,
                    input_type=TestRunFieldSet,
                    mock=lambda _: create_mock_run_request(rule_runner, program_text),
                ),
                MockEffect(
                    output_type=InteractiveProcessResult,
                    input_type=InteractiveProcess,
                    mock=rule_runner.run_interactive_process,
                ),
            ],
        )
    return cast(Run, res)
def do_test(
    expected_url: str,
    expected_length: int,
    expected_sha256: str,
    plat: Platform,
    version: str,
) -> None:
    """Assert FooBar produces the expected ExternalToolRequest for one platform/version."""
    foobar = create_subsystem(
        FooBar, version=version, known_versions=FooBar.default_known_versions
    )
    assert foobar.get_request(plat) == ExternalToolRequest(
        DownloadFile(
            url=expected_url,
            expected_digest=FileDigest(expected_sha256, expected_length),
        ),
        f"foobar-{version}/bin/foobar",
    )
def run_publish(
    rule_runner: RuleRunner, address: Address, options: dict | None = None
) -> tuple[PublishProcesses, DockerBinary]:
    """Build and publish the docker image target at `address`; return the publish
    processes and the resolved docker binary.

    NOTE: `options`, when provided, is mutated in place via setdefault.
    """
    opts = options or {}
    opts.setdefault("registries", {})
    opts.setdefault("default_repository", "{directory}/{name}")
    docker_options = create_subsystem(DockerOptions, **opts)
    tgt = cast(DockerImageTarget, rule_runner.get_target(address))
    fs = PublishDockerImageFieldSet.create(tgt)
    packages = build(tgt, docker_options)
    result = rule_runner.request(PublishProcesses, [fs._request(packages)])
    docker = rule_runner.request(DockerBinary, [])
    return result, docker
def do_test(
    expected_url: str,
    expected_length: int,
    expected_sha256: str,
    plat: Platform,
    version: str,
) -> None:
    """Plain and URL-templated tool subsystems must produce identical requests."""
    foobar = create_subsystem(
        FooBar,
        version=version,
        known_versions=FooBar.default_known_versions,
    )
    templated_foobar = create_subsystem(
        TemplatedFooBar,
        version=version,
        known_versions=TemplatedFooBar.default_known_versions,
        url_template=TemplatedFooBar.default_url_template,
        url_platform_mapping=TemplatedFooBar.default_url_platform_mapping,
    )
    expected = ExternalToolRequest(
        DownloadFile(
            url=expected_url,
            expected_digest=FileDigest(expected_sha256, expected_length),
        ),
        f"foobar-{version}/bin/foobar",
    )
    assert expected == foobar.get_request(plat)
    assert expected == templated_foobar.get_request(plat)
def test_version_constraints(caplog, version, action, assert_expectation, expect_logged) -> None:
    """Exercise `use_unsupported_version` handling and verify the emitted log records."""
    caplog.set_level(logging.DEBUG)
    caplog.clear()
    with assert_expectation:
        create_subsystem(
            ConstrainedTool,
            version=version,
            use_unsupported_version=action,
            known_versions=ConstrainedTool.default_known_versions,
            url_template=ConstrainedTool.default_url_template,
            url_platform_mapping=ConstrainedTool.default_url_platform_mapping,
        ).get_request(Platform.macos_x86_64)
    if not expect_logged:
        assert not caplog.records
    else:
        assert len(caplog.records) == len(expect_logged)
        for log_record, (lvl, msg) in zip(caplog.records, expect_logged):
            assert msg in log_record.message
            assert lvl == log_record.levelno
def single_target_run(
    self,
    *,
    console: MockConsole,
    program_text: bytes,
    address_spec: str,
) -> Run:
    """Run the `run` goal rule against a synthetic binary target (legacy API variant)."""
    workspace = Workspace(self.scheduler)
    interactive_runner = InteractiveRunner(self.scheduler)

    class TestRunFieldSet(RunFieldSet):
        required_fields = ()

    class TestBinaryTarget(Target):
        alias = "binary"
        core_fields = ()

    address = Address.parse(address_spec)
    target = TestBinaryTarget({}, address=address)
    target_with_origin = TargetWithOrigin(
        target, AddressLiteralSpec(address.spec_path, address.target_name)
    )
    field_set = TestRunFieldSet.create(target)
    res = run_rule_with_mocks(
        run,
        rule_args=[
            create_goal_subsystem(RunSubsystem, args=[]),
            create_subsystem(GlobalOptions, pants_workdir=self.pants_workdir),
            console,
            interactive_runner,
            workspace,
            BuildRoot(),
        ],
        mock_gets=[
            MockGet(
                product_type=TargetsToValidFieldSets,
                subject_type=TargetsToValidFieldSetsRequest,
                mock=lambda _: TargetsToValidFieldSets({target_with_origin: [field_set]}),
            ),
            MockGet(
                product_type=RunRequest,
                subject_type=TestRunFieldSet,
                mock=lambda _: self.create_mock_run_request(program_text),
            ),
        ],
    )
    return cast(Run, res)
def assert_timeout_calculated(
    *,
    field_value: int | None,
    expected: int | None,
    global_default: int | None = None,
    global_max: int | None = None,
    timeouts_enabled: bool = True,
) -> None:
    """Assert the effective pytest timeout computed from field + global options."""
    field = PythonTestsTimeoutField(field_value, Address("", target_name="tests"))
    pytest = create_subsystem(
        PyTest,
        timeouts=timeouts_enabled,
        timeout_default=global_default,
        timeout_maximum=global_max,
    )
    assert field.calculate_from_global_options(pytest) == expected
def test_group_field_sets_by_constraints() -> None:
    """Field sets should bucket by interpreter constraints; no constraints -> empty key."""
    py2_fs = MockFieldSet.create_for_test("//:py2", ">=2.7,<3")
    py3_fs = [
        MockFieldSet.create_for_test("//:py3", "==3.6.*"),
        MockFieldSet.create_for_test("//:py3_second", "==3.6.*"),
    ]
    no_constraints_fs = MockFieldSet.create_for_test("//:no_constraints", None)
    grouped = PexInterpreterConstraints.group_field_sets_by_constraints(
        [py2_fs, *py3_fs, no_constraints_fs],
        python_setup=create_subsystem(PythonSetup, interpreter_constraints=[]),
    )
    assert grouped == FrozenDict(
        {
            PexInterpreterConstraints(): (no_constraints_fs,),
            PexInterpreterConstraints(["CPython>=2.7,<3"]): (py2_fs,),
            PexInterpreterConstraints(["CPython==3.6.*"]): tuple(py3_fs),
        }
    )
def resolve_config(path: str | None, content: str | None) -> str:
    """Run `create_or_update_coverage_config` with mocked file-system Gets and return
    the final .coveragerc text that the rule would write.

    `path`/`content` describe a pre-existing user config file; both None means the
    rule must generate a config from scratch.
    """
    coverage_subsystem = create_subsystem(CoverageSubsystem, config=path, config_discovery=True)
    resolved_config: list[str] = []

    def mock_find_existing_config(request: ConfigFilesRequest) -> ConfigFiles:
        if request.specified:
            assert path is not None
            snapshot = RuleRunner().make_snapshot_of_empty_files([path])
        else:
            snapshot = EMPTY_SNAPSHOT
        return ConfigFiles(snapshot)

    def mock_read_existing_config(_: Digest) -> DigestContents:
        # This shouldn't be called if no config file provided.
        assert path is not None
        assert content is not None
        return DigestContents([FileContent(path, content.encode())])

    def mock_create_final_config(request: CreateDigest) -> Digest:
        assert len(request) == 1
        assert isinstance(request[0], FileContent)
        # BUGFIX: the conditional expression must be parenthesized. Without parens,
        # `a == b if cond else "x"` parses as `(a == b) if cond else "x"`, so when
        # `path is None` the assert was on the truthy literal ".coveragerc" and
        # always passed, never checking the generated file's path.
        assert request[0].path == (path if path is not None else ".coveragerc")
        assert request[0].is_executable is False
        resolved_config.append(request[0].content.decode())
        return EMPTY_DIGEST

    mock_gets = [
        MockGet(
            output_type=ConfigFiles,
            input_type=ConfigFilesRequest,
            mock=mock_find_existing_config,
        ),
        MockGet(
            output_type=DigestContents,
            input_type=Digest,
            mock=mock_read_existing_config,
        ),
        MockGet(output_type=Digest, input_type=CreateDigest, mock=mock_create_final_config),
    ]
    result = run_rule_with_mocks(
        create_or_update_coverage_config, rule_args=[coverage_subsystem], mock_gets=mock_gets
    )
    assert result.digest == EMPTY_DIGEST
    assert len(resolved_config) == 1
    return resolved_config[0]
def assert_timeout_calculated(
    *,
    field_value: Optional[int],
    expected: Optional[int],
    global_default: Optional[int] = None,
    global_max: Optional[int] = None,
    timeouts_enabled: bool = True,
) -> None:
    """Assert the effective pytest timeout computed from field + global options
    (legacy `Address.parse` variant)."""
    field = PythonTestsTimeout(field_value, address=Address.parse(":tests"))
    pytest = create_subsystem(
        PyTest,
        timeouts=timeouts_enabled,
        timeout_default=global_default,
        timeout_maximum=global_max,
    )
    assert field.calculate_from_global_options(pytest) == expected
def single_target_run(
    rule_runner: RuleRunner,
    address: Address,
    console: MockConsole,
    *,
    program_text: bytes,
) -> Run:
    """Run the `run` goal rule against a synthetic binary target with mocked Gets."""
    workspace = Workspace(rule_runner.scheduler)
    interactive_runner = InteractiveRunner(rule_runner.scheduler)

    class TestRunFieldSet(RunFieldSet):
        required_fields = ()

    class TestBinaryTarget(Target):
        alias = "binary"
        core_fields = ()

    target = TestBinaryTarget({}, address=address)
    field_set = TestRunFieldSet.create(target)
    res = run_rule_with_mocks(
        run,
        rule_args=[
            create_goal_subsystem(RunSubsystem, args=[]),
            create_subsystem(GlobalOptions, pants_workdir=rule_runner.pants_workdir),
            console,
            interactive_runner,
            workspace,
            BuildRoot(),
        ],
        mock_gets=[
            MockGet(
                output_type=TargetRootsToFieldSets,
                input_type=TargetRootsToFieldSetsRequest,
                mock=lambda _: TargetRootsToFieldSets({target: [field_set]}),
            ),
            MockGet(
                output_type=RunRequest,
                input_type=TestRunFieldSet,
                mock=lambda _: create_mock_run_request(rule_runner, program_text),
            ),
        ],
    )
    return cast(Run, res)
def run_create_coverage_config_rule(coverage_config: Optional[str]) -> str:
    """Run `create_coverage_config` (config-discovery variant) and return the
    resolved config text."""
    coverage = create_subsystem(
        CoverageSubsystem, config="some_file" if coverage_config else None
    )
    resolved_config: List[str] = []

    def mock_find_existing_config(request: ConfigFilesRequest) -> ConfigFiles:
        if request.specified:
            snapshot = RuleRunner().make_snapshot_of_empty_files([".coveragerc"])
        else:
            snapshot = EMPTY_SNAPSHOT
        return ConfigFiles(snapshot)

    def mock_read_existing_config(_: Digest) -> DigestContents:
        # This shouldn't be called if no config file provided.
        assert coverage_config is not None
        return DigestContents(
            [FileContent(path="/dev/null/prelude", content=coverage_config.encode())]
        )

    def mock_create_final_config(request: CreateDigest) -> Digest:
        assert len(request) == 1
        assert request[0].path == ".coveragerc"
        assert isinstance(request[0], FileContent)
        assert request[0].is_executable is False
        resolved_config.append(request[0].content.decode())
        return EMPTY_DIGEST

    mock_gets = [
        MockGet(
            output_type=ConfigFiles,
            input_type=ConfigFilesRequest,
            mock=mock_find_existing_config,
        ),
        MockGet(
            output_type=DigestContents,
            input_type=Digest,
            mock=mock_read_existing_config,
        ),
        MockGet(output_type=Digest, input_type=CreateDigest, mock=mock_create_final_config),
    ]
    result = run_rule_with_mocks(
        create_coverage_config, rule_args=[coverage], mock_gets=mock_gets
    )
    assert result.digest.fingerprint == EMPTY_DIGEST.fingerprint
    assert len(resolved_config) == 1
    return resolved_config[0]
def test_group_field_sets_by_constraints() -> None:
    """Field sets should bucket by interpreter constraints; no constraints -> empty key."""
    py2_fs = MockFieldSet.create_for_test(Address("", target_name="py2"), ">=2.7,<3")
    py3_fs = [
        MockFieldSet.create_for_test(Address("", target_name="py3"), "==3.6.*"),
        MockFieldSet.create_for_test(Address("", target_name="py3_second"), "==3.6.*"),
    ]
    no_constraints_fs = MockFieldSet.create_for_test(
        Address("", target_name="no_constraints"), None
    )
    grouped = InterpreterConstraints.group_field_sets_by_constraints(
        [py2_fs, *py3_fs, no_constraints_fs],
        python_setup=create_subsystem(PythonSetup, interpreter_constraints=[]),
    )
    assert grouped == FrozenDict(
        {
            InterpreterConstraints(): (no_constraints_fs,),
            InterpreterConstraints(["CPython>=2.7,<3"]): (py2_fs,),
            InterpreterConstraints(["CPython==3.6.*"]): tuple(py3_fs),
        }
    )
def _find_root(
    path: str,
    patterns: Iterable[str],
    marker_filenames: Optional[Iterable[str]] = None,
    existing_marker_files: Optional[Iterable[str]] = None,
) -> Optional[str]:
    """Resolve the source root of `path` against the given patterns/markers, with the
    file system mocked; return the root path or None if there is no root."""
    source_root_config = create_subsystem(
        SourceRootConfig,
        root_patterns=list(patterns),
        marker_filenames=list(marker_filenames or []),
    )

    # Pretends a glob matched iff it names one of the declared marker files.
    def _mock_fs_check(pathglobs: PathGlobs) -> Snapshot:
        for glob in pathglobs.globs:
            if glob in (existing_marker_files or []):
                d, f = os.path.split(pathglobs.globs[0])
                return Snapshot(digest=Digest("111", 111), files=(f,), dirs=(d,))
        return Snapshot(digest=Digest("000", 0), files=tuple(), dirs=tuple())

    # Passed as the mock callable so the rule can recurse into itself.
    def _do_find_root(src_root_req: SourceRootRequest) -> OptionalSourceRoot:
        return cast(
            OptionalSourceRoot,
            run_rule_with_mocks(
                get_optional_source_root,
                rule_args=[src_root_req, source_root_config],
                mock_gets=[
                    MockGet(
                        product_type=OptionalSourceRoot,
                        subject_type=SourceRootRequest,
                        mock=_do_find_root,
                    ),
                    MockGet(
                        product_type=Snapshot, subject_type=PathGlobs, mock=_mock_fs_check
                    ),
                ],
            ),
        )

    source_root = _do_find_root(SourceRootRequest(PurePath(path))).source_root
    return source_root.path if source_root is not None else None
def run_prelude_parsing_rule(prelude_content: str) -> BuildFilePreludeSymbols:
    """Evaluate `prelude_content` as a BUILD-file prelude and return its symbols."""
    symbols = run_rule_with_mocks(
        evaluate_preludes,
        rule_args=[create_subsystem(GlobalOptions, build_file_prelude_globs=["prelude"])],
        mock_gets=[
            MockGet(
                output_type=DigestContents,
                input_type=PathGlobs,
                mock=lambda _: DigestContents(
                    [
                        FileContent(
                            path="/dev/null/prelude", content=prelude_content.encode()
                        )
                    ]
                ),
            ),
        ],
    )
    return cast(BuildFilePreludeSymbols, symbols)
def test_get_context_root(
    context_root: str | None,
    default_context_root: str,
    expected_context_root: str | ContextManager,
) -> None:
    """Check context-root resolution; a non-str expectation is an exception context."""
    if isinstance(expected_context_root, str):
        raises = cast("ContextManager", no_exception())
    else:
        # Expectation is itself the raises-context; nothing to compare afterwards.
        raises = expected_context_root
        expected_context_root = ""
    with raises:
        docker_options = create_subsystem(
            DockerOptions,
            default_context_root=default_context_root,
        )
        address = Address("src/docker", target_name="image")
        tgt = DockerImageTarget({"context_root": context_root}, address)
        fs = DockerFieldSet.create(tgt)
        actual_context_root = fs.get_context_root(docker_options.default_context_root)
        if expected_context_root:
            assert actual_context_root == expected_context_root
def assert_ics(
    lockfile: str,
    expected: list[str],
    *,
    ics: RankedValue = RankedValue(Rank.HARDCODED, Black.default_interpreter_constraints),
    # NOTE: this default is evaluated once at definition time; callers share the
    # same metadata object across invocations.
    metadata: PythonLockfileMetadata | None = PythonLockfileMetadata.new(
        InterpreterConstraints(["==2.7.*"]), set()
    ),
) -> None:
    """Assert the interpreter constraints extracted from a (mocked) loaded lockfile."""
    black = create_subsystem(
        Black,
        lockfile=lockfile,
        interpreter_constraints=ics,
        version="v",
        extra_requirements=[],
    )
    loaded_lock = LoadedLockfile(
        EMPTY_DIGEST,
        "black.lock",
        metadata=metadata,
        requirement_estimate=1,
        is_pex_native=True,
        constraints_strings=None,
        original_lockfile=Lockfile(
            "black.lock", file_path_description_of_origin="foo", resolve_name="black"
        ),
    )
    result = run_rule_with_mocks(
        _find_python_interpreter_constraints_from_lockfile,
        rule_args=[black],
        mock_gets=[
            MockGet(
                output_type=LoadedLockfile,
                input_type=LoadedLockfileRequest,
                mock=lambda _: loaded_lock,
            )
        ],
    )
    assert result == InterpreterConstraints(expected)
def test_parse_address_family_empty() -> None:
    """Test that parsing an empty BUILD file results in an empty AddressFamily."""
    af = run_rule_with_mocks(
        parse_address_family,
        rule_args=[
            Parser(target_type_aliases=[], object_aliases=BuildFileAliases()),
            create_subsystem(GlobalOptions, build_patterns=["BUILD"], build_ignore=[]),
            BuildFilePreludeSymbols(FrozenDict()),
            AddressFamilyDir("/dev/null"),
        ],
        mock_gets=[
            MockGet(
                output_type=DigestContents,
                input_type=PathGlobs,
                mock=lambda _: DigestContents(
                    [FileContent(path="/dev/null/BUILD", content=b"")]
                ),
            ),
        ],
    )
    assert len(af.name_to_target_adaptors) == 0
def create(resolves_to_ics: dict[str, list[str]]) -> dict[str, tuple[str, ...]]:
    """Return the normalized resolves->interpreter-constraints mapping that
    PythonSetup computes from the given raw option value."""
    python_setup = create_subsystem(
        PythonSetup,
        resolves={"a": "a.lock"},
        resolves_to_interpreter_constraints=resolves_to_ics,
    )
    return python_setup.resolves_to_interpreter_constraints
def test_all_roots() -> None:
    """All configured root patterns that match an existing directory should be found."""
    dirs = (
        "contrib/go/examples/src/go/src",
        "src/java",
        "src/python",
        "src/python/subdir/src/python",  # We allow source roots under source roots.
        "src/kotlin",
        "my/project/src/java",
        "src/example/java",
        "src/example/python",
        "fixed/root/jvm",
    )
    source_root_config = create_subsystem(
        SourceRootConfig,
        root_patterns=[
            "src/*",
            "src/example/*",
            "contrib/go/examples/src/go/src",
            # Dir does not exist, should not be listed as a root.
            "java",
            "fixed/root/jvm",
        ],
        marker_filenames=[],
    )

    # Mocks out reading real directories off the file system.
    def provider_rule(_: PathGlobs) -> Snapshot:
        return Snapshot(Digest("abcdef", 10), (), dirs)

    def source_root_mock_rule(req: SourceRootRequest) -> OptionalSourceRoot:
        for d in dirs:
            if str(req.path).startswith(d):
                return OptionalSourceRoot(SourceRoot(str(req.path)))
        return OptionalSourceRoot(None)

    output = run_rule_with_mocks(
        all_roots,
        rule_args=[source_root_config],
        mock_gets=[
            MockGet(product_type=Snapshot, subject_type=PathGlobs, mock=provider_rule),
            MockGet(
                product_type=OptionalSourceRoot,
                subject_type=SourceRootRequest,
                mock=source_root_mock_rule,
            ),
        ],
    )
    assert set(output) == {
        SourceRoot("contrib/go/examples/src/go/src"),
        SourceRoot("src/java"),
        SourceRoot("src/python"),
        SourceRoot("src/python/subdir/src/python"),
        SourceRoot("src/kotlin"),
        SourceRoot("src/example/java"),
        SourceRoot("src/example/python"),
        SourceRoot("my/project/src/java"),
        SourceRoot("fixed/root/jvm"),
    }
def test_no_compatible_resolve_error() -> None:
    """Verify the three NoCompatibleResolveException message variants: incompatible
    roots, one bad dependency root, and multiple roots with bad dependencies."""
    python_setup = create_subsystem(
        PythonSetup, resolves={"a": "", "b": ""}, enable_resolves=True
    )
    t1 = PythonRequirementTarget(
        {
            PythonRequirementsField.alias: [],
            PythonResolveField.alias: "a",
            Dependencies.alias: ["//:t3"],
        },
        Address("", target_name="t1"),
    )
    t2 = PythonSourceTarget(
        {
            PythonSourceField.alias: "f.py",
            PythonResolveField.alias: "a",
            Dependencies.alias: ["//:t3"],
        },
        Address("", target_name="t2"),
    )
    t3 = PythonSourceTarget(
        {PythonSourceField.alias: "f.py", PythonResolveField.alias: "b"},
        Address("", target_name="t3"),
    )

    def maybe_get_resolve(t: Target) -> str | None:
        if not t.has_field(PythonResolveField):
            return None
        return t[PythonResolveField].normalized_value(python_setup)

    bad_roots_err = str(
        NoCompatibleResolveException.bad_input_roots(
            [t2, t3],
            maybe_get_resolve=maybe_get_resolve,
            doc_url_slug="",
            workaround=None,
        )
    )
    assert bad_roots_err.startswith(
        softwrap(
            """
            The input targets did not have a resolve in common.

            a:
              * //:t2

            b:
              * //:t3

            Targets used together must use the same resolve, set by the `resolve` field.
            """
        )
    )

    bad_single_dep_err = str(
        NoCompatibleResolveException.bad_dependencies(
            maybe_get_resolve=maybe_get_resolve,
            doc_url_slug="",
            root_targets=[t1],
            root_resolve="a",
            dependencies=[t3],
        )
    )
    assert bad_single_dep_err.startswith(
        softwrap(
            """
            The target //:t1 uses the `resolve` `a`, but some of its
            dependencies are not compatible with that resolve:

              * //:t3 (b)

            All dependencies must work with the same `resolve`. To fix this, either change
            the `resolve=` field on those dependencies to `a`, or change
            the `resolve=` of the target //:t1.
            """
        )
    )

    bad_multiple_deps_err = str(
        NoCompatibleResolveException.bad_dependencies(
            maybe_get_resolve=maybe_get_resolve,
            doc_url_slug="",
            root_targets=[t1, t2],
            root_resolve="a",
            dependencies=[t3],
        )
    )
    assert bad_multiple_deps_err.startswith(
        softwrap(
            """
            The input targets use the `resolve` `a`, but some of their
            dependencies are not compatible with that resolve.

            Input targets:

              * //:t1
              * //:t2

            Bad dependencies:

              * //:t3 (b)

            All dependencies must work with the same `resolve`. To fix this, either change
            the `resolve=` field on those dependencies to `a`, or change
            the `resolve=` of the input targets.
            """
        )
    )
def assert_build(
    rule_runner: RuleRunner,
    address: Address,
    *extra_log_lines: str,
    options: dict | None = None,
    process_assertions: Callable[[Process], None] | None = None,
) -> None:
    """Run `build_docker_image` for the target at `address` with mocked context and
    process execution, then assert on the produced artifact.

    NOTE: `options`, when provided, is mutated in place via setdefault.
    """
    tgt = rule_runner.get_target(address)

    def build_context_mock(request: DockerBuildContextRequest) -> DockerBuildContext:
        return DockerBuildContext.create(
            digest=EMPTY_DIGEST,
            dockerfile_info=DockerfileInfo(source="docker/test/Dockerfile"),
            build_args=rule_runner.request(DockerBuildArgs, [DockerBuildArgsRequest(tgt)]),
            env=rule_runner.request(
                DockerBuildEnvironment, [DockerBuildEnvironmentRequest(tgt)]
            ),
        )

    def run_process_mock(process: Process) -> ProcessResult:
        # Give the caller a hook to inspect the docker invocation.
        if process_assertions:
            process_assertions(process)
        return ProcessResult(
            stdout=b"stdout",
            stdout_digest=EMPTY_FILE_DIGEST,
            stderr=b"stderr",
            stderr_digest=EMPTY_FILE_DIGEST,
            output_digest=EMPTY_DIGEST,
            platform=Platform.current,
            metadata=ProcessResultMetadata(0, "ran_locally", 0),
        )

    if options:
        opts = options
        opts.setdefault("registries", {})
        opts.setdefault("default_repository", "{directory}/{name}")
        opts.setdefault("build_args", [])
        opts.setdefault("env_vars", [])
        docker_options = create_subsystem(
            DockerOptions,
            **opts,
        )
    else:
        docker_options = rule_runner.request(DockerOptions, [])

    result = run_rule_with_mocks(
        build_docker_image,
        rule_args=[
            DockerFieldSet.create(tgt),
            docker_options,
            DockerBinary("/dummy/docker"),
        ],
        mock_gets=[
            MockGet(
                output_type=DockerBuildContext,
                input_type=DockerBuildContextRequest,
                mock=build_context_mock,
            ),
            MockGet(
                output_type=ProcessResult,
                input_type=Process,
                mock=run_process_mock,
            ),
        ],
    )
    assert result.digest == EMPTY_DIGEST
    assert len(result.artifacts) == 1
    assert result.artifacts[0].relpath is None
    for log_line in extra_log_lines:
        assert log_line in result.artifacts[0].extra_log_lines