def test_file_content_invalidated_after_parent_deletion(self) -> None:
    """Test that FileContent is invalidated after deleting a file's parent directory."""
    with self.mk_project_tree() as project_tree:
        scheduler = self.mk_scheduler(
            rules=[*fs_rules(), QueryRule(Snapshot, (PathGlobs,))],
            project_tree=project_tree,
        )
        fname = "a/b/1.txt"
        # Read the original file so we have nodes to invalidate.
        original_content = self.read_digest_contents(scheduler, [fname])
        self.assertIn(fname, original_content)

        path_to_parent_dir = os.path.join(project_tree.build_root, "a/b/")
        shutil.rmtree(path_to_parent_dir)

        # Annotated -> bool for consistency with the sibling assertion_fn helpers.
        def assertion_fn() -> bool:
            # Once invalidation has propagated, the deleted file must not be readable.
            new_content = self.read_digest_contents(scheduler, [fname])
            return new_content.get(fname) is None

        if not self.try_with_backoff(assertion_fn):
            raise AssertionError(
                "Deleted the parent dir, but could still read the file from the original snapshot."
            )
def assert_mutated_digest(
    self, mutation_function: Callable[[FileSystemProjectTree, str], Exception]
) -> None:
    """Apply a filesystem mutation and require that the directory snapshot's digest changes.

    `mutation_function` performs the mutation and returns the AssertionError to raise
    if the digest never changes within the backoff window.
    """
    with self.mk_project_tree() as project_tree:
        scheduler = self.mk_scheduler(
            rules=[*fs_rules(), QueryRule(Snapshot, (PathGlobs,))],
            project_tree=project_tree,
        )
        dir_path = "a/"
        dir_glob = f"{dir_path}/*"

        def snapshot_of_dir() -> Snapshot:
            # Capture the directory's snapshot; it must never be empty in this test.
            snap = self.execute_expecting_one_result(
                scheduler, Snapshot, PathGlobs([dir_glob])
            ).value
            assert snap != EMPTY_SNAPSHOT
            return snap

        initial_snapshot = snapshot_of_dir()
        assertion_error = mutation_function(project_tree, dir_path)

        def assertion_fn() -> bool:
            # A differing digest means the mutation successfully invalidated the snapshot.
            return snapshot_of_dir().digest != initial_snapshot.digest

        if not self.try_with_backoff(assertion_fn):
            raise assertion_error
def test_multiple_snapshots_from_outside_buildroot(self) -> None:
    """Capture several snapshots rooted outside the build root in a single call."""
    with temporary_dir() as temp_dir:
        # Two real files; the third glob matches nothing and should yield an empty snapshot.
        Path(temp_dir, "roland").write_text("European Burmese")
        Path(temp_dir, "susannah").write_text("I don't know")
        scheduler = self.mk_scheduler(rules=fs_rules())

        requests = tuple(
            PathGlobsAndRoot(PathGlobs([glob]), temp_dir)
            for glob in ("roland", "susannah", "doesnotexist")
        )
        snapshots = scheduler.capture_snapshots(requests)

        assert len(snapshots) == 3
        self.assert_snapshot_equals(
            snapshots[0],
            ["roland"],
            Digest(
                "63949aa823baf765eff07b946050d76ec0033144c785a94d3ebd82baa931cd16", 80),
        )
        self.assert_snapshot_equals(
            snapshots[1],
            ["susannah"],
            Digest(
                "d3539cfc21eb4bab328ca9173144a8e932c515b1b9e26695454eeedbc5a95f6f", 82),
        )
        self.assert_snapshot_equals(snapshots[2], [], EMPTY_DIGEST)
def test_file_content_invalidated(self) -> None:
    """Test that we can update files and have the native engine invalidate previous
    operations on those files."""
    with self.mk_project_tree() as project_tree:
        scheduler = self.mk_scheduler(
            rules=[*fs_rules(), QueryRule(Snapshot, (PathGlobs,))],
            project_tree=project_tree,
        )
        fname = "4.txt"
        new_data = "rouf"
        # Read the original file so we have a cached value.
        self.read_digest_contents(scheduler, [fname])
        path_to_fname = os.path.join(project_tree.build_root, fname)
        with open(path_to_fname, "w") as f:
            f.write(new_data)

        # Captured by assertion_fn so the failure message can report the last read;
        # the original code referenced a variable that was out of scope at the raise.
        last_content: dict = {}

        def assertion_fn() -> bool:
            nonlocal last_content
            last_content = self.read_digest_contents(scheduler, [fname])
            # True once the engine has invalidated the old content and served the new data.
            return last_content[fname].decode() == new_data

        if not self.try_with_backoff(assertion_fn):
            # Fixed: second literal was missing its `f` prefix, so the placeholders
            # were printed verbatim instead of being interpolated.
            raise AssertionError(
                f"New content {new_data} was not found in the FilesContent of the "
                f"modified file {path_to_fname}, instead we found {last_content[fname]}"
            )
def assert_digest(self, filespecs_or_globs, expected_files):
    """Snapshot the given globs and check the digested file set and fingerprint."""
    with self.mk_project_tree() as project_tree:
        scheduler = self.mk_scheduler(rules=fs_rules(), project_tree=project_tree)
        snapshot = self.execute(
            scheduler, Snapshot, self.path_globs(filespecs_or_globs)
        )[0]
        # Confirm all expected files were digested.
        assert set(snapshot.files) == set(expected_files)
        assert snapshot.digest.fingerprint is not None
def test_download_https(self) -> None:
    """Download a file over HTTPS from a local TLS server.

    Note that this also tests that the custom certs functionality works.
    """
    with temporary_dir() as temp_dir:

        def write_resource(name: str) -> Path:
            # Materialize a packaged TLS fixture (cert/key/chain) into temp_dir.
            path = Path(temp_dir) / name
            data = pkgutil.get_data("pants.engine.internals", f"tls_testing/rsa/{name}")
            assert data is not None
            path.write_bytes(data)
            return path

        server_cert = write_resource("server.crt")
        server_key = write_resource("server.key")
        cert_chain = write_resource("server.chain")

        scheduler = self.mk_scheduler(
            rules=[*fs_rules(), QueryRule(Snapshot, (DownloadFile,))],
            ca_certs_path=str(cert_chain),
        )
        with self.isolated_local_store():
            # This context configures the server side of the handshake, so use
            # PROTOCOL_TLS_SERVER; the no-arg SSLContext() form is deprecated.
            ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
            ssl_context.load_cert_chain(certfile=str(server_cert), keyfile=str(server_key))

            with http_server(StubHandler, ssl_context=ssl_context) as port:
                snapshot = self.execute(
                    scheduler,
                    Snapshot,
                    DownloadFile(f"https://localhost:{port}/file.txt", self.file_digest),
                )[0]
                self.assert_snapshot_equals(
                    snapshot, ["file.txt"], self.expected_snapshot_digest
                )
def assert_content(self, filespecs_or_globs, expected_content):
    """Read the digested contents for the given globs and compare against expectations."""
    with self.mk_project_tree() as project_tree:
        scheduler = self.mk_scheduler(
            rules=[*fs_rules(), QueryRule(Snapshot, (PathGlobs,))],
            project_tree=project_tree,
        )
        actual_content = self.read_digest_contents(scheduler, filespecs_or_globs)
        assert actual_content == expected_content
def assert_walk_snapshot(
    self, field, filespecs_or_globs, paths, ignore_patterns=None, prepare=None
):
    """Walk the project tree via a Snapshot and compare one of its fields to `paths`.

    `prepare`, when given, mutates the project tree before the snapshot is taken.
    """
    with self.mk_project_tree(ignore_patterns=ignore_patterns) as project_tree:
        scheduler = self.mk_scheduler(rules=fs_rules(), project_tree=project_tree)
        if prepare:
            prepare(project_tree)
        snapshot = self.execute(
            scheduler, Snapshot, self.path_globs(filespecs_or_globs)
        )[0]
        # Order-insensitive comparison: both sides are sorted before comparing.
        assert sorted(getattr(snapshot, field)) == sorted(paths)
def assert_content(self, filespecs_or_globs, expected_content):
    """Read the digested contents for the given globs and compare against expectations.

    Unlike the QueryRule-registering variant elsewhere in this file, this one relies
    on the default fs_rules() alone.
    """
    with self.mk_project_tree() as project_tree:
        scheduler = self.mk_scheduler(rules=fs_rules(), project_tree=project_tree)
        actual_content = self.read_digest_contents(scheduler, filespecs_or_globs)
        assert actual_content == expected_content