def test_go_mod_info(rule_runner: RuleRunner) -> None:
    """GoModInfo exposes the module's import path plus full and stripped digests."""
    mod_content = dedent(
        """\
        module go.example.com/foo
        go 1.17
        require github.com/golang/protobuf v1.4.2
        """
    )
    sum_content = "does not matter"
    rule_runner.write_files(
        {
            "foo/go.mod": mod_content,
            "foo/go.sum": sum_content,
            "foo/BUILD": "go_mod()",
        }
    )

    go_mod_info = rule_runner.request(GoModInfo, [GoModInfoRequest(Address("foo"))])

    assert go_mod_info.import_path == "go.example.com/foo"
    # The full digest keeps the `foo/` spec-path prefix...
    full_snapshot = rule_runner.make_snapshot(
        {"foo/go.mod": mod_content, "foo/go.sum": sum_content}
    )
    assert go_mod_info.digest == full_snapshot.digest
    # ...while the stripped digest roots the files at the top level.
    stripped_snapshot = rule_runner.make_snapshot(
        {"go.mod": mod_content, "go.sum": sum_content}
    )
    assert go_mod_info.stripped_digest == stripped_snapshot.digest
def test_snapshot_diff(
    rule_runner: RuleRunner,
    before: Dict[str, str],
    after: Dict[str, str],
    expected_diff: SnapshotDiff,
) -> None:
    """SnapshotDiff reports unique files/dirs and changed files, symmetrically."""
    forward = SnapshotDiff.from_snapshots(
        rule_runner.make_snapshot(before), rule_runner.make_snapshot(after)
    )
    assert forward.our_unique_files == expected_diff.our_unique_files
    assert forward.our_unique_dirs == expected_diff.our_unique_dirs
    assert forward.their_unique_files == expected_diff.their_unique_files
    assert forward.their_unique_dirs == expected_diff.their_unique_dirs
    assert forward.changed_files == expected_diff.changed_files

    # With the arguments reversed, "ours" and "theirs" swap roles while the set
    # of changed files stays the same.
    backward = SnapshotDiff.from_snapshots(
        rule_runner.make_snapshot(after), rule_runner.make_snapshot(before)
    )
    assert backward.our_unique_files == expected_diff.their_unique_files
    assert backward.our_unique_dirs == expected_diff.their_unique_dirs
    assert backward.their_unique_files == expected_diff.our_unique_files
    assert backward.their_unique_dirs == expected_diff.our_unique_dirs
    assert backward.changed_files == expected_diff.changed_files
def test_maybe_constraints_file() -> None:
    """Constraints resolution honors both the file option and the target option."""
    rule_runner = RuleRunner(
        rules=[
            resolve_requirements_constraints_file,
            SubsystemRule(PythonSetup),
            QueryRule(MaybeConstraintsFile, []),
        ],
        target_types=[PythonRequirementConstraints],
    )
    constraints = ["c1==1.1.1", "c2==2.2.2"]
    constraints_file = "\n".join(constraints)
    rule_runner.create_file("constraints.txt", constraints_file)
    rule_runner.add_to_build_file(
        "", f"_python_constraints(name='constraints', constraints={repr(constraints)})"
    )

    def get_constraints(arg: str | None) -> MaybeConstraintsFile:
        # Only set options when a flag was supplied; `None` exercises the defaults.
        if arg:
            rule_runner.set_options([arg])
        return rule_runner.request(MaybeConstraintsFile, [])

    # No option set: no constraints file and an empty digest.
    assert get_constraints(None) == MaybeConstraintsFile(None, EMPTY_DIGEST)

    # Pointing at a real file uses that file verbatim.
    expected_digest = rule_runner.make_snapshot({"constraints.txt": constraints_file}).digest
    assert get_constraints(
        "--python-setup-requirement-constraints=constraints.txt"
    ) == MaybeConstraintsFile("constraints.txt", expected_digest)

    # Pointing at a target generates a constraints file from its entries.
    expected_digest = rule_runner.make_snapshot(
        {"constraints.generated.txt": constraints_file}
    ).digest
    assert get_constraints(
        "--python-setup-requirement-constraints-target=//:constraints"
    ) == MaybeConstraintsFile("constraints.generated.txt", expected_digest)
def test_duplicate_test_mains_different_files(rule_runner: RuleRunner) -> None:
    """Two files in one package each defining TestMain is rejected."""
    # NOTE(review): another function with this exact name appears later in this
    # file; if both live in the same module, this definition is shadowed and
    # never collected by pytest — confirm and rename one of them.
    test_main = dedent(
        """
        package foo
        func TestMain(m *testing.M) {
        }
        """
    )
    input_digest = rule_runner.make_snapshot(
        {"foo_test.go": test_main, "bar_test.go": test_main}
    ).digest

    with pytest.raises(ExecutionError) as exc_info:
        rule_runner.request(
            GeneratedTestMain,
            [
                GenerateTestMainRequest(
                    input_digest,
                    FrozenOrderedSet(["foo_test.go", "bar_test.go"]),
                    FrozenOrderedSet(),
                    "foo",
                )
            ],
        )
    assert "multiple definitions of TestMain" in str(exc_info.value)
def test_duplicate_test_mains_different_files(rule_runner: RuleRunner) -> None:
    """Two files in one package each defining TestMain fails with exit code 1."""
    test_main = dedent(
        """
        package foo
        func TestMain(m *testing.M) {
        }
        """
    )
    input_digest = rule_runner.make_snapshot(
        {"foo_test.go": test_main, "bar_test.go": test_main}
    ).digest

    result = rule_runner.request(
        GeneratedTestMain,
        [
            GenerateTestMainRequest(
                input_digest,
                FrozenOrderedSet(["foo_test.go", "bar_test.go"]),
                FrozenOrderedSet(),
                "foo",
                Address("foo"),
            )
        ],
    )

    # The failure is reported as data rather than raised as an exception.
    assert result.failed_exit_code_and_stderr is not None
    exit_code, stderr = result.failed_exit_code_and_stderr
    assert exit_code == 1
    assert "multiple definitions of TestMain" in stderr
def test_create_tar_archive(rule_runner: RuleRunner, format: ArchiveFormat) -> None:
    """Creating a tar archive (any compression) packs FILES and round-trips via extraction.

    Fix: removed a stray debug `print(tf.getmembers())` that was left in the test
    and polluted captured output on every run.
    """
    output_filename = f"demo/a.{format.value}"
    input_snapshot = rule_runner.make_snapshot(FILES)
    created_digest = rule_runner.request(
        Digest,
        [CreateArchive(input_snapshot, output_filename=output_filename, format=format)],
    )

    digest_contents = rule_runner.request(DigestContents, [created_digest])
    assert len(digest_contents) == 1
    io = BytesIO()
    io.write(digest_contents[0].content)
    io.seek(0)
    # `format.value` is e.g. "tar.gz"; strip the leading "tar." to get the
    # tarfile mode suffix (empty for an uncompressed tar).
    compression = "" if format == ArchiveFormat.TAR else f"{format.value[4:]}"  # Strip `tar.`.
    with tarfile.open(fileobj=io, mode=f"r:{compression}") as tf:
        assert set(tf.getnames()) == set(FILES.keys())

    # We also use Pants to extract the created archive, which checks for idempotency.
    extracted_archive = rule_runner.request(ExtractedArchive, [created_digest])
    digest_contents = rule_runner.request(DigestContents, [extracted_archive.digest])
    assert digest_contents == EXPECTED_DIGEST_CONTENTS
def assert_injected(
    rule_runner: RuleRunner,
    *,
    source_roots: List[str],
    original_declared_files: List[str],
    original_undeclared_files: List[str],
    expected_discovered: List[str],
) -> None:
    """Assert that AncestorFiles discovers exactly `expected_discovered` __init__.py
    files, and that declared files keep their content while discovered ones carry
    the on-disk (undeclared) content."""
    rule_runner.set_options([f"--source-root-patterns={source_roots}"])
    for undeclared in original_undeclared_files:
        rule_runner.create_file(undeclared, "# undeclared")

    request = AncestorFilesRequest(
        "__init__.py",
        rule_runner.make_snapshot({fp: "# declared" for fp in original_declared_files}),
    )
    result = rule_runner.request(AncestorFiles, [request]).snapshot
    assert list(result.files) == sorted(expected_discovered)

    materialized_result = rule_runner.request(DigestContents, [result.digest])
    for file_content in materialized_result:
        path = file_content.path
        if not path.endswith("__init__.py"):
            continue
        assert path in original_declared_files or path in expected_discovered
        # Declared files were snapshotted with "# declared"; everything else was
        # written to disk with "# undeclared".
        if path in original_declared_files:
            assert file_content.content == b"# declared"
        else:
            assert file_content.content == b"# undeclared"
def test_warn_if_python_version_configured(rule_runner: RuleRunner, caplog) -> None:
    """A warning is logged whenever `python_version` is set via config or args,
    and autosetting only happens when nothing configured it."""
    config = {"mypy.ini": "[mypy]\npython_version = 3.6"}
    rule_runner.write_files(config)  # type: ignore[arg-type]
    config_digest = rule_runner.make_snapshot(config).digest

    def maybe_assert_configured(*, has_config: bool, args: list[str], warning: str = "") -> None:
        rule_runner.set_options(
            [f"--mypy-args={repr(args)}", f"--mypy-config-discovery={has_config}"],
            env_inherit={"PATH", "PYENV_ROOT", "HOME"},
        )
        result = rule_runner.request(MyPyConfigFile, [])

        assert result.digest == (config_digest if has_config else EMPTY_DIGEST)
        should_be_configured = has_config or bool(args)
        assert result._python_version_configured == should_be_configured

        autoset_python_version = result.python_version_to_autoset(
            InterpreterConstraints([">=3.6"]), ["2.7", "3.6", "3.7", "3.8"]
        )
        if should_be_configured:
            # Already configured: nothing to autoset, but a warning must fire.
            assert autoset_python_version is None
            assert caplog.records
            assert warning in caplog.text
            caplog.clear()
        else:
            assert autoset_python_version == "3.6"
            assert not caplog.records

    maybe_assert_configured(
        has_config=True, args=[], warning="You set `python_version` in mypy.ini"
    )
    maybe_assert_configured(
        has_config=False, args=["--py2"], warning="You set `--py2` in the `--mypy-args` option"
    )
    maybe_assert_configured(
        has_config=False,
        args=["--python-version=3.6"],
        warning="You set `--python-version` in the `--mypy-args` option",
    )
    maybe_assert_configured(
        has_config=True,
        args=["--py2", "--python-version=3.6"],
        warning=softwrap(
            """
            You set `python_version` in mypy.ini (which is used because of either config
            discovery or the `[mypy].config` option) and you set `--py2` in the
            `--mypy-args` option and you set `--python-version` in the `--mypy-args`
            option.
            """
        ),
    )
    maybe_assert_configured(has_config=False, args=[])
def test_build_invalid_pkg(rule_runner: RuleRunner) -> None:
    """A compile failure surfaces via FallibleBuiltGoPackage both when the broken
    package is requested directly and when it is only a dependency."""
    invalid_dep = BuildGoPackageRequest(
        import_path="example.com/foo/dep",
        subpath="dep",
        go_file_names=("f.go",),
        digest=rule_runner.make_snapshot({"dep/f.go": "invalid!!!"}).digest,
        s_file_names=(),
        direct_dependencies=(),
        minimum_go_version=None,
    )
    main = BuildGoPackageRequest(
        import_path="example.com/foo",
        subpath="",
        go_file_names=("f.go",),
        digest=rule_runner.make_snapshot(
            {
                "f.go": dedent(
                    """\
                    package foo
                    import "example.com/foo/dep"
                    func main() {
                        dep.Quote("Hello world!")
                    }
                    """
                )
            }
        ).digest,
        s_file_names=(),
        direct_dependencies=(invalid_dep,),
        minimum_go_version=None,
    )

    # Both requests should fail identically with the same compiler diagnostic.
    expected_error = "./dep/f.go:1:1: syntax error: package statement must be first\n"

    invalid_direct_result = rule_runner.request(FallibleBuiltGoPackage, [invalid_dep])
    assert invalid_direct_result.output is None
    assert invalid_direct_result.exit_code == 1
    assert invalid_direct_result.stdout == expected_error

    invalid_dep_result = rule_runner.request(FallibleBuiltGoPackage, [main])
    assert invalid_dep_result.output is None
    assert invalid_dep_result.exit_code == 1
    assert invalid_dep_result.stdout == expected_error
def test_first_party_plugins(rule_runner: RuleRunner) -> None:
    """First-party flake8 plugins collect requirements, interpreter constraints,
    and a prefixed digest of all transitive plugin sources."""
    rule_runner.write_files(
        {
            "BUILD": dedent(
                """\
                python_requirement(name='flake8', requirements=['flake8==2.11.1'])
                python_requirement(name='colors', requirements=['ansicolors'])
                """
            ),
            "flake8-plugins/subdir1/util.py": "",
            "flake8-plugins/subdir1/BUILD": dedent(
                """\
                python_sources(
                    interpreter_constraints=['==3.9.*'],
                    dependencies=['flake8-plugins/subdir2']
                )
                """
            ),
            "flake8-plugins/subdir2/another_util.py": "",
            "flake8-plugins/subdir2/BUILD": "python_sources(interpreter_constraints=['==3.8.*'])",
            "flake8-plugins/plugin.py": "",
            "flake8-plugins/BUILD": dedent(
                """\
                python_sources(
                    dependencies=['//:flake8', '//:colors', "flake8-plugins/subdir1"]
                )
                """
            ),
        }
    )
    rule_runner.set_options(
        [
            "--source-root-patterns=flake8-plugins",
            "--flake8-source-plugins=flake8-plugins/plugin.py",
        ],
        env_inherit={"PATH", "PYENV_ROOT", "HOME"},
    )

    first_party_plugins = rule_runner.request(Flake8FirstPartyPlugins, [])

    assert first_party_plugins.requirement_strings == FrozenOrderedSet(
        ["ansicolors", "flake8==2.11.1"]
    )
    # Constraints from the whole transitive closure, including the unset default.
    assert first_party_plugins.interpreter_constraints_fields == FrozenOrderedSet(
        InterpreterConstraintsField(ic, Address("", target_name="tgt"))
        for ic in (None, ["==3.9.*"], ["==3.8.*"])
    )
    expected_sources = rule_runner.make_snapshot(
        {
            f"{Flake8FirstPartyPlugins.PREFIX}/plugin.py": "",
            f"{Flake8FirstPartyPlugins.PREFIX}/subdir1/util.py": "",
            f"{Flake8FirstPartyPlugins.PREFIX}/subdir2/another_util.py": "",
        }
    )
    assert first_party_plugins.sources_digest == expected_sources.digest
def test_extract_non_archive(rule_runner: RuleRunner) -> None:
    """A file that is not an archive passes through extraction unchanged."""
    input_snapshot = rule_runner.make_snapshot({"test.sh": b"# A shell script"})
    extracted_archive = rule_runner.request(ExtractedArchive, [input_snapshot.digest])
    digest_contents = rule_runner.request(DigestContents, [extracted_archive.digest])
    assert digest_contents == DigestContents([FileContent("test.sh", b"# A shell script")])
def test_first_party_plugins(rule_runner: RuleRunner) -> None:
    """First-party pylint plugins collect requirements, interpreter constraints,
    and a prefixed digest of all transitive plugin sources."""
    rule_runner.write_files(
        {
            "BUILD": dedent(
                """\
                python_requirement_library(name='pylint', requirements=['pylint==2.6.2'])
                python_requirement_library(name='colors', requirements=['ansicolors'])
                """
            ),
            "pylint-plugins/subdir1/util.py": "",
            "pylint-plugins/subdir1/BUILD": dedent(
                """\
                python_library(
                    interpreter_constraints=['==3.5.*'],
                    dependencies=['pylint-plugins/subdir2']
                )
                """
            ),
            "pylint-plugins/subdir2/another_util.py": "",
            "pylint-plugins/subdir2/BUILD": "python_library(interpreter_constraints=['==3.4.*'])",
            "pylint-plugins/plugin.py": "",
            "pylint-plugins/BUILD": dedent(
                """\
                python_library(
                    dependencies=['//:pylint', '//:colors', "pylint-plugins/subdir1"]
                )
                """
            ),
        }
    )
    rule_runner.set_options(
        [
            "--source-root-patterns=pylint-plugins",
            "--pylint-source-plugins=pylint-plugins/plugin.py",
        ]
    )

    first_party_plugins = rule_runner.request(PylintFirstPartyPlugins, [])

    assert first_party_plugins.requirement_strings == FrozenOrderedSet(
        ["ansicolors", "pylint==2.6.2"]
    )
    # Constraints from the whole transitive closure, including the unset default.
    assert first_party_plugins.interpreter_constraints_fields == FrozenOrderedSet(
        InterpreterConstraintsField(ic, Address("", target_name="tgt"))
        for ic in (None, ["==3.5.*"], ["==3.4.*"])
    )
    expected_sources = rule_runner.make_snapshot(
        {
            f"{PylintFirstPartyPlugins.PREFIX}/plugin.py": "",
            f"{PylintFirstPartyPlugins.PREFIX}/subdir1/util.py": "",
            f"{PylintFirstPartyPlugins.PREFIX}/subdir2/another_util.py": "",
        }
    )
    assert first_party_plugins.sources_digest == expected_sources.digest
def test_non_archive(rule_runner: RuleRunner) -> None:
    """A file that is not extractable is returned as-is."""
    snapshot = rule_runner.make_snapshot({"test.sh": b"# A shell script"})
    extracted = rule_runner.request_product(
        ExtractedDigest, [MaybeExtractable(snapshot.digest)]
    )
    contents = rule_runner.request_product(DigestContents, [extracted.digest])
    assert contents == DigestContents([FileContent("test.sh", b"# A shell script")])
def test_extract_zip(rule_runner: RuleRunner, compression: int) -> None:
    """Zip archives extract to FILES regardless of the compression method."""
    buf = BytesIO()
    with zipfile.ZipFile(buf, "w", compression=compression) as zf:
        for name, content in FILES.items():
            zf.writestr(name, content)
    buf.flush()

    input_snapshot = rule_runner.make_snapshot({"test.zip": buf.getvalue()})
    extracted_archive = rule_runner.request(ExtractedArchive, [input_snapshot.digest])
    digest_contents = rule_runner.request(DigestContents, [extracted_archive.digest])
    assert digest_contents == EXPECTED_DIGEST_CONTENTS
def test_extract_tar(rule_runner: RuleRunner, compression: str) -> None:
    """Tar archives extract to FILES for every supported compression suffix."""
    buf = BytesIO()
    write_mode = f"w:{compression}" if compression else "w"
    with tarfile.open(mode=write_mode, fileobj=buf) as tf:
        for name, content in FILES.items():
            tarinfo = tarfile.TarInfo(name)
            tarinfo.size = len(content)
            tf.addfile(tarinfo, BytesIO(content))

    # The extension must match the compression so extraction picks the right tool.
    ext = f"tar.{compression}" if compression else "tar"
    input_snapshot = rule_runner.make_snapshot({f"test.{ext}": buf.getvalue()})
    extracted_archive = rule_runner.request(ExtractedArchive, [input_snapshot.digest])
    digest_contents = rule_runner.request(DigestContents, [extracted_archive.digest])
    assert digest_contents == EXPECTED_DIGEST_CONTENTS
def test_extract_gz(rule_runner: RuleRunner) -> None:
    """Decompressing a `.gz` yields the single file with the suffix dropped."""
    # NB: `gz` files are only compressed, and are not archives: they represent a single file.
    name = "test"
    content = b"Hello world!\n"
    buf = BytesIO()
    with gzip.GzipFile(fileobj=buf, mode="w") as gzf:
        gzf.write(content)
    buf.flush()

    input_snapshot = rule_runner.make_snapshot({f"{name}.gz": buf.getvalue()})
    rule_runner.set_options(args=[], env_inherit={"PATH", "PYENV_ROOT", "HOME"})
    extracted_archive = rule_runner.request(ExtractedArchive, [input_snapshot.digest])
    digest_contents = rule_runner.request(DigestContents, [extracted_archive.digest])
    assert digest_contents == DigestContents([FileContent(name, content)])
def test_first_party_plugins(rule_runner: RuleRunner) -> None:
    """First-party mypy plugins collect requirements, sources, and source roots."""
    rule_runner.write_files(
        {
            "BUILD": dedent(
                """\
                python_requirement(name='mypy', requirements=['mypy==0.81'])
                python_requirement(name='colors', requirements=['ansicolors'])
                """
            ),
            "mypy-plugins/subdir1/util.py": "",
            "mypy-plugins/subdir1/BUILD": "python_sources(dependencies=['mypy-plugins/subdir2'])",
            "mypy-plugins/subdir2/another_util.py": "",
            "mypy-plugins/subdir2/BUILD": "python_sources()",
            "mypy-plugins/plugin.py": "",
            "mypy-plugins/BUILD": dedent(
                """\
                python_sources(
                    dependencies=['//:mypy', '//:colors', "mypy-plugins/subdir1"]
                )
                """
            ),
        }
    )
    rule_runner.set_options(
        [
            "--source-root-patterns=mypy-plugins",
            "--mypy-source-plugins=mypy-plugins/plugin.py",
        ],
        env_inherit={"PATH", "PYENV_ROOT", "HOME"},
    )

    first_party_plugins = rule_runner.request(MyPyFirstPartyPlugins, [])

    assert first_party_plugins.requirement_strings == FrozenOrderedSet(
        ["ansicolors", "mypy==0.81"]
    )
    expected_sources = rule_runner.make_snapshot(
        {
            "mypy-plugins/plugin.py": "",
            "mypy-plugins/subdir1/util.py": "",
            "mypy-plugins/subdir2/another_util.py": "",
        }
    )
    assert first_party_plugins.sources_digest == expected_sources.digest
    assert first_party_plugins.source_roots == ("mypy-plugins",)
def test_create_zip_archive(rule_runner: RuleRunner) -> None:
    """Creating a zip archive packs FILES and round-trips via Pants extraction."""
    output_filename = "demo/a.zip"
    input_snapshot = rule_runner.make_snapshot(FILES)
    created_digest = rule_runner.request(
        Digest,
        [
            CreateArchive(
                input_snapshot, output_filename=output_filename, format=ArchiveFormat.ZIP
            )
        ],
    )

    digest_contents = rule_runner.request(DigestContents, [created_digest])
    assert len(digest_contents) == 1
    buf = BytesIO()
    buf.write(digest_contents[0].content)
    with zipfile.ZipFile(buf) as zf:
        assert set(zf.namelist()) == set(FILES.keys())

    # We also use Pants to extract the created archive, which checks for idempotency.
    extracted_archive = rule_runner.request(ExtractedArchive, [created_digest])
    digest_contents = rule_runner.request(DigestContents, [extracted_archive.digest])
    assert digest_contents == EXPECTED_DIGEST_CONTENTS
def test_incorrect_signatures(rule_runner: RuleRunner) -> None:
    """Test/benchmark functions with malformed signatures fail with a clear error."""
    test_cases = [
        ("TestFoo(t *testing.T, a int)", "wrong signature for TestFoo"),
        ("TestFoo()", "wrong signature for TestFoo"),
        ("TestFoo(t *testing.B)", "wrong signature for TestFoo"),
        ("TestFoo(t *testing.M)", "wrong signature for TestFoo"),
        ("TestFoo(a int)", "wrong signature for TestFoo"),
        ("BenchmarkFoo(t *testing.B, a int)", "wrong signature for BenchmarkFoo"),
        ("BenchmarkFoo()", "wrong signature for BenchmarkFoo"),
        ("BenchmarkFoo(t *testing.T)", "wrong signature for BenchmarkFoo"),
        ("BenchmarkFoo(t *testing.M)", "wrong signature for BenchmarkFoo"),
        ("BenchmarkFoo(a int)", "wrong signature for BenchmarkFoo"),
    ]
    for test_sig, err_msg in test_cases:
        input_digest = rule_runner.make_snapshot(
            {
                "foo_test.go": dedent(
                    f"""
                    package foo
                    func {test_sig} {{
                    }}
                    """
                ),
            },
        ).digest
        result = rule_runner.request(
            GeneratedTestMain,
            [
                GenerateTestMainRequest(
                    input_digest,
                    FrozenOrderedSet(["foo_test.go"]),
                    FrozenOrderedSet(),
                    "foo",
                    Address("foo"),
                )
            ],
        )
        # Each bad signature must produce a non-zero exit and the expected message.
        assert result.failed_exit_code_and_stderr is not None
        exit_code, stderr = result.failed_exit_code_and_stderr
        assert exit_code == 1
        assert err_msg in stderr
def test_basic_test_analysis(rule_runner: RuleRunner) -> None:
    """Analysis detects both in-package tests and external (xtest) benchmarks."""
    input_digest = rule_runner.make_snapshot(
        {
            "foo_test.go": dedent(
                """
                package foo
                func TestThisIsATest(t *testing.T) {
                }
                func Test(t *testing.T) {
                }
                """
            ),
            "bar_test.go": dedent(
                """
                package foo_test
                func BenchmarkThisIsABenchmark(b *testing.B) {
                }
                func Benchmark(b *testing.B) {
                }
                """
            ),
        },
    ).digest

    metadata = rule_runner.request(
        GeneratedTestMain,
        [
            GenerateTestMainRequest(
                input_digest,
                FrozenOrderedSet(["foo_test.go"]),
                FrozenOrderedSet(["bar_test.go"]),
                "foo",
                Address("foo"),
            )
        ],
    )
    assert metadata.digest != EMPTY_DIGEST
    assert metadata.has_tests
    assert metadata.has_xtests
def test_collect_examples(rule_runner: RuleRunner) -> None:
    """Example functions with `// Output:` comments count as tests."""
    input_digest = rule_runner.make_snapshot(
        {
            "foo_test.go": dedent(
                """
                package foo
                func ExampleEmptyOutputExpected() {
                    // Output:
                }
                // This does not have an `Output` comment and will be skipped.
                func ExampleEmptyOutputAndNoOutputDirective() {
                }
                func ExampleSomeOutput() {
                    fmt.Println("foo")
                    // Output: foo
                }
                func ExampleAnotherOne() {
                    fmt.Println("foo\\nbar\\n")
                    // Output:
                    // foo
                    // bar
                }
                """
            ),
        },
    ).digest

    metadata = rule_runner.request(
        GeneratedTestMain,
        [
            GenerateTestMainRequest(
                input_digest,
                FrozenOrderedSet(["foo_test.go"]),
                FrozenOrderedSet(),
                "foo",
                Address("foo"),
            )
        ],
    )
    assert metadata.digest != EMPTY_DIGEST
    assert metadata.has_tests
    assert not metadata.has_xtests
def test_build_invalid_package(rule_runner: RuleRunner) -> None:
    """Broken assembly files produce a failed build with the assembler's error."""
    request = BuildGoPackageRequest(
        import_path="example.com/assembly",
        subpath="",
        go_file_names=("add_amd64.go", "add_arm64.go"),
        digest=rule_runner.make_snapshot(
            {
                "add_amd64.go": "package main\nfunc add(x, y int64) int64",
                "add_arm64.go": "package main\nfunc add(x, y int64) int64",
                "add_amd64.s": "INVALID!!!",
                "add_arm64.s": "INVALID!!!",
            }
        ).digest,
        s_file_names=("add_amd64.s", "add_arm64.s"),
        direct_dependencies=(),
        minimum_go_version=None,
    )
    result = rule_runner.request(FallibleBuiltGoPackage, [request])
    assert result.output is None
    assert result.exit_code == 1
    assert (
        result.stdout
        == ".//add_amd64.s:1: unexpected EOF\nasm: assembly of .//add_amd64.s failed\n"
    )
def test_incorrect_signatures(rule_runner: RuleRunner) -> None:
    """Test/benchmark functions with malformed signatures raise an ExecutionError.

    Fix: the final assertion was `assert "" in str(exc_info.value)`, which is
    vacuously true for any string, and `err_msg` was computed but never used.
    The assertion now checks the expected per-case message.
    """
    test_cases = [
        ("TestFoo(t *testing.T, a int)", "wrong signature for TestFoo"),
        ("TestFoo()", "wrong signature for TestFoo"),
        ("TestFoo(t *testing.B)", "wrong signature for TestFoo"),
        ("TestFoo(t *testing.M)", "wrong signature for TestFoo"),
        ("TestFoo(a int)", "wrong signature for TestFoo"),
        ("BenchmarkFoo(t *testing.B, a int)", "wrong signature for BenchmarkFoo"),
        ("BenchmarkFoo()", "wrong signature for BenchmarkFoo"),
        ("BenchmarkFoo(t *testing.T)", "wrong signature for BenchmarkFoo"),
        ("BenchmarkFoo(t *testing.M)", "wrong signature for BenchmarkFoo"),
        ("BenchmarkFoo(a int)", "wrong signature for BenchmarkFoo"),
    ]
    for test_sig, err_msg in test_cases:
        input_digest = rule_runner.make_snapshot(
            {
                "foo_test.go": dedent(
                    f"""
                    package foo
                    func {test_sig} {{
                    }}
                    """
                ),
            },
        ).digest
        with pytest.raises(ExecutionError) as exc_info:
            rule_runner.request(
                GeneratedTestMain,
                [
                    GenerateTestMainRequest(
                        input_digest,
                        FrozenOrderedSet(["foo_test.go"]),
                        FrozenOrderedSet(),
                        "foo",
                    )
                ],
            )
        assert err_msg in str(exc_info.value)
def test_embeds_supported(rule_runner: RuleRunner) -> None:
    """`//go:embed` patterns are analyzed per scope (package, test, xtest) and the
    embedded resources are materialized under `__resources__/`."""
    go_sources = {
        "foo.go": dedent(
            """\
            package foo
            import _ "embed"
            //go:embed grok.txt
            var message
            """
        ),
        "foo_test.go": dedent(
            """\
            package foo
            import _ "embed"
            //go:embed test_grok.txt
            var testMessage
            """
        ),
        "bar_test.go": dedent(
            """\
            package foo_test
            import _ "embed"
            //go:embed xtest_grok.txt
            var testMessage
            """
        ),
    }
    resources = {
        "grok.txt": "This will be embedded in a Go binary.",
        "test_grok.txt": "This will be embedded in a Go binary.",
        "xtest_grok.txt": "This will be embedded in a Go binary.",
    }
    rule_runner.write_files(
        {
            "BUILD": dedent(
                """
                go_mod(name='mod')
                go_package(name='pkg', dependencies=[":resources"])
                resources(
                    name="resources",
                    sources=["*.txt"],
                )
                """
            ),
            "go.mod": dedent(
                """\
                module go.example.com/foo
                go 1.17
                """
            ),
            **resources,  # type: ignore[arg-type]
            **go_sources,  # type: ignore[arg-type]
        }
    )

    # Analysis: each scope reports only its own embed patterns.
    maybe_analysis = rule_runner.request(
        FallibleFirstPartyPkgAnalysis,
        [FirstPartyPkgAnalysisRequest(Address("", target_name="pkg"))],
    )
    assert maybe_analysis.analysis is not None
    analysis = maybe_analysis.analysis
    assert analysis.embed_patterns == ("grok.txt",)
    assert analysis.test_embed_patterns == ("test_grok.txt",)
    assert analysis.xtest_embed_patterns == ("xtest_grok.txt",)

    # Digest: sources plus resources relocated under __resources__/.
    maybe_digest = rule_runner.request(
        FallibleFirstPartyPkgDigest,
        [FirstPartyPkgDigestRequest(Address("", target_name="pkg"))],
    )
    assert maybe_digest.pkg_digest is not None
    pkg_digest = maybe_digest.pkg_digest

    actual_snapshot = rule_runner.request(Snapshot, [pkg_digest.digest])
    expected_snapshot = rule_runner.make_snapshot(
        {
            **go_sources,
            **{os.path.join("__resources__", f): content for f, content in resources.items()},
        }
    )
    assert actual_snapshot == expected_snapshot

    assert pkg_digest.embed_config == EmbedConfig(
        {"grok.txt": ["grok.txt"]}, {"grok.txt": "__resources__/grok.txt"}
    )
    # The test config also includes the package-scope embeds.
    assert pkg_digest.test_embed_config == EmbedConfig(
        {"grok.txt": ["grok.txt"], "test_grok.txt": ["test_grok.txt"]},
        {
            "grok.txt": "__resources__/grok.txt",
            "test_grok.txt": "__resources__/test_grok.txt",
        },
    )
    assert pkg_digest.xtest_embed_config == EmbedConfig(
        {"xtest_grok.txt": ["xtest_grok.txt"]},
        {"xtest_grok.txt": "__resources__/xtest_grok.txt"},
    )
def set_up_go_mod(rule_runner: RuleRunner, go_mod: str, go_sum: str) -> Digest:
    """Snapshot the given go.mod/go.sum contents and return the resulting digest."""
    files = {"go.mod": go_mod, "go.sum": go_sum}
    return rule_runner.make_snapshot(files).digest
def test_download_and_analyze_all_packages(rule_runner: RuleRunner) -> None:
    """Downloading third-party modules discovers every package and its metadata."""
    input_digest = rule_runner.make_snapshot({"go.mod": GO_MOD, "go.sum": GO_SUM}).digest
    all_packages = rule_runner.request(
        AllThirdPartyPackages, [AllThirdPartyPackagesRequest(input_digest, "go.mod")]
    )

    # Every package reachable through the go.mod requirements must be discovered.
    assert set(all_packages.import_paths_to_pkg_info.keys()) == {
        "golang.org/x/text/encoding/japanese",
        "golang.org/x/text/message/catalog",
        "golang.org/x/text/internal/testtext",
        "golang.org/x/text/encoding/ianaindex",
        "golang.org/x/text/cmd/gotext",
        "golang.org/x/text/width",
        "golang.org/x/text/internal/format",
        "rsc.io/sampler",
        "golang.org/x/text/internal/tag",
        "golang.org/x/text/unicode/norm",
        "golang.org/x/text/number",
        "golang.org/x/text/transform",
        "golang.org/x/text/internal",
        "golang.org/x/text/internal/utf8internal",
        "golang.org/x/text/language/display",
        "golang.org/x/text/internal/stringset",
        "golang.org/x/text/encoding/korean",
        "golang.org/x/text/internal/triegen",
        "golang.org/x/text/secure/bidirule",
        "golang.org/x/text/secure/precis",
        "golang.org/x/text/language",
        "golang.org/x/text/encoding/unicode/utf32",
        "golang.org/x/text/internal/colltab",
        "golang.org/x/text/unicode/rangetable",
        "golang.org/x/text/encoding/htmlindex",
        "golang.org/x/text/internal/export/idna",
        "golang.org/x/text/encoding/charmap",
        "golang.org/x/text/unicode/cldr",
        "golang.org/x/text/secure",
        "golang.org/x/text/internal/ucd",
        "golang.org/x/text/feature/plural",
        "golang.org/x/text/unicode",
        "golang.org/x/text/encoding/traditionalchinese",
        "golang.org/x/text/runes",
        "golang.org/x/text/internal/catmsg",
        "rsc.io/quote/buggy",
        "golang.org/x/text/encoding/simplifiedchinese",
        "golang.org/x/text/cases",
        "golang.org/x/text/encoding/internal",
        "github.com/google/uuid",
        "golang.org/x/text/encoding/internal/enctest",
        "golang.org/x/text/collate/build",
        "golang.org/x/text",
        "golang.org/x/text/unicode/bidi",
        "golang.org/x/text/search",
        "golang.org/x/text/unicode/runenames",
        "golang.org/x/text/message",
        "golang.org/x/text/encoding",
        "golang.org/x/text/encoding/unicode",
        "rsc.io/quote",
        "golang.org/x/text/currency",
        "golang.org/x/text/internal/number",
        "golang.org/x/text/collate/tools/colcmp",
        "golang.org/x/text/encoding/internal/identifier",
        "golang.org/x/text/collate",
        "golang.org/x/text/internal/gen",
    }

    def assert_pkg_info(
        import_path: str,
        dir_path: str,
        imports: tuple[str, ...],
        go_files: tuple[str, ...],
        extra_files: tuple[str, ...],
        minimum_go_version: str | None,
    ) -> None:
        # Check one package's analyzed metadata and its digest's file listing.
        assert import_path in all_packages.import_paths_to_pkg_info
        pkg_info = all_packages.import_paths_to_pkg_info[import_path]
        assert pkg_info.import_path == import_path
        assert pkg_info.dir_path == dir_path
        assert pkg_info.imports == imports
        assert pkg_info.go_files == go_files
        assert not pkg_info.s_files
        snapshot = rule_runner.request(Snapshot, [pkg_info.digest])
        assert set(snapshot.files) == {
            os.path.join(dir_path, file_name) for file_name in (*go_files, *extra_files)
        }
        assert pkg_info.minimum_go_version == minimum_go_version

    assert_pkg_info(
        import_path="github.com/google/uuid",
        dir_path="gopath/pkg/mod/github.com/google/[email protected]",
        imports=(
            "bytes",
            "crypto/md5",
            "crypto/rand",
            "crypto/sha1",
            "database/sql/driver",
            "encoding/binary",
            "encoding/hex",
            "encoding/json",
            "errors",
            "fmt",
            "hash",
            "io",
            "net",
            "os",
            "strings",
            "sync",
            "time",
        ),
        go_files=(
            "dce.go",
            "doc.go",
            "hash.go",
            "marshal.go",
            "node.go",
            "node_net.go",
            "null.go",
            "sql.go",
            "time.go",
            "util.go",
            "uuid.go",
            "version1.go",
            "version4.go",
        ),
        extra_files=(
            ".travis.yml",
            "CONTRIBUTING.md",
            "CONTRIBUTORS",
            "LICENSE",
            "README.md",
            "go.mod",
            "json_test.go",
            "node_js.go",
            "null_test.go",
            "seq_test.go",
            "sql_test.go",
            "uuid_test.go",
        ),
        minimum_go_version=None,
    )
    assert_pkg_info(
        import_path="golang.org/x/text/unicode/bidi",
        dir_path="gopath/pkg/mod/golang.org/x/[email protected]/unicode/bidi",
        imports=("container/list", "fmt", "log", "sort", "unicode/utf8"),
        go_files=("bidi.go", "bracket.go", "core.go", "prop.go", "tables.go", "trieval.go"),
        extra_files=(
            "core_test.go",
            "gen.go",
            "gen_ranges.go",
            "gen_trieval.go",
            "ranges_test.go",
            "tables_test.go",
        ),
        minimum_go_version=None,
    )
def test_build_pkg(rule_runner: RuleRunner) -> None:
    """Packages build with their direct and transitive dependencies resolved."""
    transitive_dep = BuildGoPackageRequest(
        import_path="example.com/foo/dep/transitive",
        subpath="dep/transitive",
        go_file_names=("f.go",),
        digest=rule_runner.make_snapshot(
            {
                "dep/transitive/f.go": dedent(
                    """\
                    package transitive
                    import "fmt"
                    func Quote(s string) string {
                        return fmt.Sprintf(">> %s <<", s)
                    }
                    """
                )
            }
        ).digest,
        s_file_names=(),
        direct_dependencies=(),
        minimum_go_version=None,
    )
    direct_dep = BuildGoPackageRequest(
        import_path="example.com/foo/dep",
        subpath="dep",
        go_file_names=("f.go",),
        digest=rule_runner.make_snapshot(
            {
                "dep/f.go": dedent(
                    """\
                    package dep
                    import "example.com/foo/dep/transitive"
                    func Quote(s string) string {
                        return transitive.Quote(s)
                    }
                    """
                )
            }
        ).digest,
        s_file_names=(),
        direct_dependencies=(transitive_dep,),
        minimum_go_version=None,
    )
    main = BuildGoPackageRequest(
        import_path="example.com/foo",
        subpath="",
        go_file_names=("f.go",),
        digest=rule_runner.make_snapshot(
            {
                "f.go": dedent(
                    """\
                    package foo
                    import "example.com/foo/dep"
                    import "fmt"
                    func main() {
                        fmt.Println(dep.Quote("Hello world!"))
                    }
                    """
                )
            }
        ).digest,
        s_file_names=(),
        direct_dependencies=(direct_dep,),
        minimum_go_version=None,
    )

    # Each build's import config must include the package itself plus its closure.
    assert_built(
        rule_runner, transitive_dep, expected_import_paths=["example.com/foo/dep/transitive"]
    )
    assert_built(
        rule_runner,
        direct_dep,
        expected_import_paths=["example.com/foo/dep", "example.com/foo/dep/transitive"],
    )
    assert_built(
        rule_runner,
        main,
        expected_import_paths=[
            "example.com/foo",
            "example.com/foo/dep",
            "example.com/foo/dep/transitive",
        ],
    )
def merged_digest(rule_runner: RuleRunner) -> Digest:
    """Digest containing both the Fortran and Smalltalk example files."""
    files = {fc.path: fc.content.decode() for fc in (FORTRAN_FILE, SMALLTALK_FILE)}
    return rule_runner.make_snapshot(files).digest
def fortran_digest(rule_runner: RuleRunner) -> Digest:
    """Digest containing only the Fortran example file."""
    return rule_runner.make_snapshot(
        {FORTRAN_FILE.path: FORTRAN_FILE.content.decode()}
    ).digest