def test_output_digest(rule_runner: RuleRunner, working_directory) -> None:
    # Test that the output files are relative to the working directory, both in how
    # they're specified, and their paths in the output_digest.
    input_digest = (
        rule_runner.request(Digest, [CreateDigest([Directory(working_directory)])])
        if working_directory
        else EMPTY_DIGEST
    )
    process = Process(
        input_digest=input_digest,
        argv=("/bin/bash", "-c", "echo -n 'European Burmese' > roland"),
        description="echo roland",
        output_files=("roland",),
        working_directory=working_directory,
    )
    result = rule_runner.request(ProcessResult, [process])
    assert result.output_digest == Digest(
        fingerprint="63949aa823baf765eff07b946050d76ec0033144c785a94d3ebd82baa931cd16",
        serialized_bytes_length=80,
    )
    digest_contents = rule_runner.request(DigestContents, [result.output_digest])
    assert digest_contents == DigestContents([FileContent("roland", b"European Burmese", False)])
def mock_read_config(_: PathGlobs) -> DigestContents:
    # This shouldn't be called if no config file provided.
    assert coverage_config is not None
    return DigestContents(
        [FileContent(path="/dev/null/prelude", content=coverage_config.encode())]
    )
def test_non_archive(self) -> None:
    input_snapshot = self.make_snapshot({"test.sh": b"# A shell script"})
    extracted_digest = self.request_single_product(
        ExtractedDigest, Params(MaybeExtractable(input_snapshot.digest))
    )
    digest_contents = self.request_single_product(
        DigestContents, Params(extracted_digest.digest)
    )
    assert DigestContents([FileContent("test.sh", b"# A shell script")]) == digest_contents
def test_extract_non_archive(rule_runner: RuleRunner) -> None:
    input_snapshot = rule_runner.make_snapshot({"test.sh": b"# A shell script"})
    extracted_archive = rule_runner.request(ExtractedArchive, [input_snapshot.digest])
    digest_contents = rule_runner.request(DigestContents, [extracted_archive.digest])
    assert DigestContents([FileContent("test.sh", b"# A shell script")]) == digest_contents
def test_non_archive(rule_runner: RuleRunner) -> None:
    input_snapshot = rule_runner.make_snapshot({"test.sh": b"# A shell script"})
    extracted_digest = rule_runner.request_product(
        ExtractedDigest, [MaybeExtractable(input_snapshot.digest)]
    )
    digest_contents = rule_runner.request_product(DigestContents, [extracted_digest.digest])
    assert DigestContents([FileContent("test.sh", b"# A shell script")]) == digest_contents
def test_extract_gz(extract_from_file_info: ExtractorFixtureT, rule_runner: RuleRunner) -> None:
    # NB: `gz` files are only compressed, and are not archives: they represent a single file.
    name = "test"
    content = b"Hello world!\n"
    io = BytesIO()
    with gzip.GzipFile(fileobj=io, mode="w") as gzf:
        gzf.write(content)
    io.flush()
    rule_runner.set_options(args=[], env_inherit={"PATH", "PYENV_ROOT", "HOME"})
    digest_contents = extract_from_file_info(".gz", io.getvalue())
    assert digest_contents == DigestContents([FileContent(name, content)])
def test_extract_tarlz4(extract_from_file_info: ExtractorFixtureT):
    if subprocess.run(["lz4", "--help"], check=False).returncode != 0:
        pytest.skip(reason="lz4 not on PATH")
    archive_content = base64.b64decode(
        b"BCJNGGRAp9MAAACfdG1wL21zZy8AAQBI+AAwMDAwNzc1ADAwMDE3NTEIAAQCAP8HADE0MjMxNTUzMjAxADAxNDQwM"
        b"gAgNZQASAUCAPUFdXN0YXIgIABqb3NodWFjYW5ub24dAAcCAA8gAA0PAgCkBAACf3R4dC50eHTGAEIA5QE4NjY0+"
        b"AEECAIDAgAUNQAC7zQwNzAAMDE1NzYyACAwjgBCCwIADwAC7FtwYW50cxMBDwIA"
        b"///////////////////////////////////////////////3UAAAAAAAAAAAABhrfd0="
    )
    digest_contents = extract_from_file_info(".tar.lz4", archive_content)
    assert digest_contents == DigestContents([FileContent("tmp/msg/txt.txt", b"pants")])
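# The `extract_from_file_info` fixture used by the two tests above is not shown in this
# section. Below is a minimal sketch of what such a fixture could look like, inferred from
# the inline logic of `test_extract_gz(rule_runner)` further down; the fixture body and the
# `ExtractorFixtureT` alias are assumptions, not the actual Pants definitions.
from typing import Callable

ExtractorFixtureT = Callable[[str, bytes], DigestContents]


@pytest.fixture
def extract_from_file_info(rule_runner: RuleRunner) -> ExtractorFixtureT:
    def extract(suffix: str, content: bytes) -> DigestContents:
        # Snapshot a single file named with the given suffix, run the extraction rule,
        # and return the contents of the resulting digest.
        snapshot = rule_runner.make_snapshot({f"test{suffix}": content})
        extracted_archive = rule_runner.request(ExtractedArchive, [snapshot.digest])
        return rule_runner.request(DigestContents, [extracted_archive.digest])

    return extract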
def test_extract_gz(rule_runner: RuleRunner) -> None:
    # NB: `gz` files are only compressed, and are not archives: they represent a single file.
    name = "test"
    content = b"Hello world!\n"
    io = BytesIO()
    with gzip.GzipFile(fileobj=io, mode="w") as gzf:
        gzf.write(content)
    io.flush()
    input_snapshot = rule_runner.make_snapshot({f"{name}.gz": io.getvalue()})
    rule_runner.set_options(args=[], env_inherit={"PATH", "PYENV_ROOT", "HOME"})
    extracted_archive = rule_runner.request(ExtractedArchive, [input_snapshot.digest])
    digest_contents = rule_runner.request(DigestContents, [extracted_archive.digest])
    assert digest_contents == DigestContents([FileContent(name, content)])
def run_prelude_parsing_rule(prelude_content: str) -> BuildFilePreludeSymbols:
    symbols = run_rule_with_mocks(
        evaluate_preludes,
        rule_args=[BuildFileOptions((), prelude_globs=("prelude",))],
        mock_gets=[
            MockGet(
                output_type=DigestContents,
                input_type=PathGlobs,
                mock=lambda _: DigestContents(
                    [FileContent(path="/dev/null/prelude", content=prelude_content.encode())]
                ),
            ),
        ],
    )
    return cast(BuildFilePreludeSymbols, symbols)
def test_output_digest(rule_runner: RuleRunner) -> None:
    process = Process(
        argv=("/bin/bash", "-c", "echo -n 'European Burmese' > roland"),
        description="echo roland",
        output_files=("roland",),
    )
    result = rule_runner.request(ProcessResult, [process])
    assert result.output_digest == Digest(
        fingerprint="63949aa823baf765eff07b946050d76ec0033144c785a94d3ebd82baa931cd16",
        serialized_bytes_length=80,
    )
    digest_contents = rule_runner.request(DigestContents, [result.output_digest])
    assert digest_contents == DigestContents([FileContent("roland", b"European Burmese", False)])
def run_prelude_parsing_rule(prelude_content: str) -> BuildFilePreludeSymbols:
    address_mapper = unittest.mock.Mock()
    address_mapper.prelude_glob_patterns = ("prelude",)
    symbols = run_rule(
        evaluate_preludes,
        rule_args=[address_mapper],
        mock_gets=[
            MockGet(
                product_type=DigestContents,
                subject_type=PathGlobs,
                mock=lambda _: DigestContents(
                    [FileContent(path="/dev/null/prelude", content=prelude_content.encode())]
                ),
            ),
        ],
    )
    return cast(BuildFilePreludeSymbols, symbols)
def run_prelude_parsing_rule(prelude_content: str) -> BuildFilePreludeSymbols:
    symbols = run_rule_with_mocks(
        evaluate_preludes,
        rule_args=[create_subsystem(GlobalOptions, build_file_prelude_globs=["prelude"])],
        mock_gets=[
            MockGet(
                product_type=DigestContents,
                subject_type=PathGlobs,
                mock=lambda _: DigestContents(
                    [FileContent(path="/dev/null/prelude", content=prelude_content.encode())]
                ),
            ),
        ],
    )
    return cast(BuildFilePreludeSymbols, symbols)
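# Hypothetical usage sketch for the `run_prelude_parsing_rule` helpers above: a test that
# feeds a tiny prelude through the helper and checks the parsed symbol. The symbol-access
# pattern (`result.symbols["foo"]`) is an assumption about `BuildFilePreludeSymbols`, not
# something shown in this section.
def test_prelude_parsing_good() -> None:
    result = run_prelude_parsing_rule("def foo(): return 1")
    assert result.symbols["foo"]() == 1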
def test_parse_address_family_empty() -> None:
    """Test that parsing an empty BUILD file results in an empty AddressFamily."""
    af = run_rule_with_mocks(
        parse_address_family,
        rule_args=[
            Parser(build_root="", target_type_aliases=[], object_aliases=BuildFileAliases()),
            BuildFileOptions(("BUILD",)),
            BuildFilePreludeSymbols(FrozenDict()),
            AddressFamilyDir("/dev/null"),
        ],
        mock_gets=[
            MockGet(
                output_type=DigestContents,
                input_type=PathGlobs,
                mock=lambda _: DigestContents([FileContent(path="/dev/null/BUILD", content=b"")]),
            ),
        ],
    )
    assert len(af.name_to_target_adaptors) == 0
def test_parse_address_family_empty() -> None:
    """Test that parsing an empty BUILD file results in an empty AddressFamily."""
    address_mapper = AddressMapper(
        parser=Parser(target_type_aliases=[], object_aliases=BuildFileAliases())
    )
    af = run_rule(
        parse_address_family,
        rule_args=[address_mapper, BuildFilePreludeSymbols(FrozenDict()), Dir("/dev/null")],
        mock_gets=[
            MockGet(
                product_type=DigestContents,
                subject_type=PathGlobs,
                mock=lambda _: DigestContents([FileContent(path="/dev/null/BUILD", content=b"")]),
            ),
        ],
    )
    assert len(af.name_to_target_adaptors) == 0
def test_write_file(self):
    request = Process(
        argv=("/bin/bash", "-c", "echo -n 'European Burmese' > roland"),
        description="echo roland",
        output_files=("roland",),
    )
    process_result = self.request_product(ProcessResult, [request])
    self.assertEqual(
        process_result.output_digest,
        Digest(
            fingerprint="63949aa823baf765eff07b946050d76ec0033144c785a94d3ebd82baa931cd16",
            serialized_bytes_length=80,
        ),
    )
    digest_contents = self.request_product(DigestContents, [process_result.output_digest])
    assert digest_contents == DigestContents([FileContent("roland", b"European Burmese", False)])
def test_parse_address_family_empty() -> None:
    """Test that parsing an empty BUILD file results in an empty AddressFamily."""
    af = run_rule_with_mocks(
        parse_address_family,
        rule_args=[
            Parser(target_type_aliases=[], object_aliases=BuildFileAliases()),
            create_subsystem(GlobalOptions, build_patterns=["BUILD"], build_ignore=[]),
            BuildFilePreludeSymbols(FrozenDict()),
            Dir("/dev/null"),
        ],
        mock_gets=[
            MockGet(
                product_type=DigestContents,
                subject_type=PathGlobs,
                mock=lambda _: DigestContents([FileContent(path="/dev/null/BUILD", content=b"")]),
            ),
        ],
    )
    assert len(af.name_to_target_adaptors) == 0
@pytest.fixture
def rule_runner() -> RuleRunner:
    return RuleRunner(
        rules=[
            *archive_rules(),
            QueryRule(Digest, [CreateArchive]),
            QueryRule(ExtractedArchive, [Digest]),
        ],
    )


FILES = {"foo": b"bar", "hello/world": b"Hello, World!"}
EXPECTED_DIGEST_CONTENTS = DigestContents(
    [FileContent(name, content) for name, content in FILES.items()]
)


@pytest.mark.parametrize("compression", [zipfile.ZIP_STORED, zipfile.ZIP_DEFLATED])
def test_extract_zip(rule_runner: RuleRunner, compression: int) -> None:
    io = BytesIO()
    with zipfile.ZipFile(io, "w", compression=compression) as zf:
        for name, content in FILES.items():
            zf.writestr(name, content)
    io.flush()
    input_snapshot = rule_runner.make_snapshot({"test.zip": io.getvalue()})
    extracted_archive = rule_runner.request(ExtractedArchive, [input_snapshot.digest])
    digest_contents = rule_runner.request(DigestContents, [extracted_archive.digest])
    assert digest_contents == EXPECTED_DIGEST_CONTENTS
class ArchiveTest(TestBase):
    files = {"foo": b"bar", "hello/world": b"Hello, World!"}
    expected_digest_contents = DigestContents(
        [FileContent(name, content) for name, content in files.items()]
    )

    @classmethod
    def rules(cls):
        return (*super().rules(), *archive_rules(), RootRule(Snapshot))

    # TODO: Figure out a way to run these tests without a TestBase subclass, and use
    # pytest.mark.parametrize.
    def _do_test_extract_zip(self, compression) -> None:
        io = BytesIO()
        with zipfile.ZipFile(io, "w", compression=compression) as zf:
            for name, content in self.files.items():
                zf.writestr(name, content)
        io.flush()
        input_snapshot = self.make_snapshot({"test.zip": io.getvalue()})
        extracted_digest = self.request_single_product(
            ExtractedDigest, Params(MaybeExtractable(input_snapshot.digest))
        )
        digest_contents = self.request_single_product(
            DigestContents, Params(extracted_digest.digest)
        )
        assert self.expected_digest_contents == digest_contents

    def test_extract_zip_stored(self) -> None:
        self._do_test_extract_zip(zipfile.ZIP_STORED)

    def test_extract_zip_deflated(self) -> None:
        self._do_test_extract_zip(zipfile.ZIP_DEFLATED)

    # TODO: Figure out a way to run these tests without a TestBase subclass, and use
    # pytest.mark.parametrize.
    def _do_test_extract_tar(self, compression) -> None:
        io = BytesIO()
        mode = f"w:{compression}" if compression else "w"
        with tarfile.open(mode=mode, fileobj=io) as tf:
            for name, content in self.files.items():
                tarinfo = tarfile.TarInfo(name)
                tarinfo.size = len(content)
                tf.addfile(tarinfo, BytesIO(content))
        ext = f"tar.{compression}" if compression else "tar"
        input_snapshot = self.make_snapshot({f"test.{ext}": io.getvalue()})
        extracted_digest = self.request_single_product(
            ExtractedDigest, Params(MaybeExtractable(input_snapshot.digest))
        )
        digest_contents = self.request_single_product(
            DigestContents, Params(extracted_digest.digest)
        )
        assert self.expected_digest_contents == digest_contents

    def test_extract_tar(self) -> None:
        self._do_test_extract_tar("")

    def test_extract_tar_gz(self) -> None:
        self._do_test_extract_tar("gz")

    def test_extract_tar_bz2(self) -> None:
        self._do_test_extract_tar("bz2")

    def test_extract_tar_xz(self) -> None:
        self._do_test_extract_tar("xz")

    def test_non_archive(self) -> None:
        input_snapshot = self.make_snapshot({"test.sh": b"# A shell script"})
        extracted_digest = self.request_single_product(
            ExtractedDigest, Params(MaybeExtractable(input_snapshot.digest))
        )
        digest_contents = self.request_single_product(
            DigestContents, Params(extracted_digest.digest)
        )
        assert DigestContents([FileContent("test.sh", b"# A shell script")]) == digest_contents
def mock_read_existing_config(_: Digest) -> DigestContents:
    # This shouldn't be called if no config file provided.
    assert path is not None
    assert content is not None
    return DigestContents([FileContent(path, content.encode())])