def test_save_cache(self, cli_fs_runner):
    # Seed an empty cache file, record a found secret, then save.
    with open(".cache_ggshield", "w") as file:
        json.dump({}, file)

    cache = Cache()
    cache.update_cache(**{"last_found_secrets": {"XXX"}})
    cache.save()

    with open(".cache_ggshield", "r") as file:
        file_content = json.load(file)

    # The set of found secrets is serialized as a list in the JSON cache file.
    assert file_content == {"last_found_secrets": ["XXX"]}
def scan(
    self,
    client: GGClient,
    cache: Cache,
    matches_ignore: Iterable[str],
    all_policies: bool,
    verbose: bool,
) -> List[Result]:
    cache.purge()
    scannable_list = self.scannable_list
    results = []

    # Split the scannables into chunks no larger than the API's
    # multi-document limit, so each request stays within bounds.
    chunks = []
    for i in range(0, len(scannable_list), MULTI_DOCUMENT_LIMIT):
        chunks.append(scannable_list[i : i + MULTI_DOCUMENT_LIMIT])

    # Scan chunks concurrently, bounded by the CPU count (capped at 4).
    with concurrent.futures.ThreadPoolExecutor(
        max_workers=min(CPU_COUNT, 4), thread_name_prefix="content_scan"
    ) as executor:
        future_to_scan = {
            executor.submit(client.multi_content_scan, chunk): chunk
            for chunk in chunks
        }

        for future in concurrent.futures.as_completed(future_to_scan):
            chunk = future_to_scan[future]
            scan = future.result()
            if not scan.success:
                handle_scan_error(scan, chunk)
                continue

            for index, scanned in enumerate(scan.scan_results):
                remove_ignored_from_result(scanned, all_policies, matches_ignore)
                if scanned.has_policy_breaks:
                    # Record each policy break in the cache, then keep the
                    # scanned document as a reportable result.
                    for policy_break in scanned.policy_breaks:
                        cache.add_found_policy_break(
                            policy_break, chunk[index]["filename"]
                        )
                    results.append(
                        Result(
                            content=chunk[index]["document"],
                            scan=scanned,
                            filemode=chunk[index]["filemode"],
                            filename=chunk[index]["filename"],
                        )
                    )

    cache.save()
    return results
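# Hedged illustration (not part of the original source): scan() above follows a
# chunk-then-fan-out pattern -- slice the work into API-sized chunks, submit each
# to a bounded thread pool, and collect results as they complete. This
# self-contained sketch shows the same shape with a stand-in worker instead of
# client.multi_content_scan; the names `fake_scan`, `scan_in_chunks`, and
# `CHUNK_SIZE` are hypothetical, not ggshield APIs.
import concurrent.futures
import os

CHUNK_SIZE = 20  # stand-in for MULTI_DOCUMENT_LIMIT


def fake_scan(chunk):
    # Stand-in for the remote scan call; just reports the chunk's size.
    return len(chunk)


def scan_in_chunks(items):
    chunks = [items[i : i + CHUNK_SIZE] for i in range(0, len(items), CHUNK_SIZE)]
    results = []
    with concurrent.futures.ThreadPoolExecutor(
        max_workers=min(os.cpu_count() or 1, 4)
    ) as executor:
        future_to_chunk = {executor.submit(fake_scan, c): c for c in chunks}
        for future in concurrent.futures.as_completed(future_to_chunk):
            results.append(future.result())
    return results


if __name__ == "__main__":
    # 95 items in chunks of 20 -> sizes 20, 20, 20, 20, 15, in completion order.
    print(scan_in_chunks(list(range(95))))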
def test_save_cache_first_time(self, cli_fs_runner):
    # With no pre-existing cache file, save() should create one.
    os.remove(".cache_ggshield")
    cache = Cache()
    cache.save()
    assert os.path.isfile(".cache_ggshield")
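# Hedged sketch (not the real ggshield implementation): a minimal cache class
# with just the behavior the two tests above rely on -- update_cache() stores
# attributes, save() writes them to .cache_ggshield, and the set of found
# secrets is serialized as a list because JSON has no set type. The name
# `MinimalCache` is hypothetical; the real Cache also supports purge() and
# add_found_policy_break(), which are omitted here.
import json


class MinimalCache:
    CACHE_FILENAME = ".cache_ggshield"

    def __init__(self):
        self.last_found_secrets = set()

    def update_cache(self, **kwargs):
        for key, value in kwargs.items():
            setattr(self, key, value)

    def save(self):
        with open(self.CACHE_FILENAME, "w") as file:
            # json.dump cannot encode a set, so convert it to a sorted list.
            json.dump(
                {"last_found_secrets": sorted(self.last_found_secrets)}, file
            )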