def testProcessCollectedArtifacts(self):
  """Test downloading files from artifacts."""
  # Fixture: a Windows 6.2 client so Windows artifacts are applicable.
  client = aff4.FACTORY.Open(self.client_id, token=self.token, mode="rw")
  client.Set(client.Schema.SYSTEM("Windows"))
  client.Set(client.Schema.OS_VERSION("6.2"))
  client.Flush()

  # Serve REGISTRY and OS pathspecs from fake in-memory VFS handlers.
  # NOTE(review): this mutates the global handler table without restoring
  # it -- presumably the test base class resets it; confirm.
  vfs.VFS_HANDLERS[
      rdfvalue.PathSpec.PathType.REGISTRY] = test_lib.FakeRegistryVFSHandler
  vfs.VFS_HANDLERS[
      rdfvalue.PathSpec.PathType.OS] = test_lib.FakeFullVFSHandler

  client_mock = action_mocks.ActionMock("TransferBuffer", "StatFile", "Find",
                                        "HashBuffer", "FingerprintFile",
                                        "ListDirectory")

  # Get KB initialized
  for _ in test_lib.TestFlowHelper("KnowledgeBaseInitializationFlow",
                                   client_mock, client_id=self.client_id,
                                   token=self.token):
    pass

  artifact_list = ["WindowsPersistenceMechanismFiles"]
  with test_lib.Instrument(transfer.MultiGetFile,
                           "Start") as getfile_instrument:
    for _ in test_lib.TestFlowHelper("ArtifactCollectorFlow", client_mock,
                                     artifact_list=artifact_list,
                                     token=self.token,
                                     client_id=self.client_id,
                                     output="analysis/{p}/{u}-{t}",
                                     split_output_by_artifact=True):
      pass

    # Check MultiGetFile got called for our runkey files
    # TODO(user): RunKeys for S-1-5-20 are not found because users.sid only
    # expands to users with profiles.
    pathspecs = getfile_instrument.args[0][0].args.pathspecs
    self.assertItemsEqual([x.path for x in pathspecs],
                          [u"C:\\Windows\\TEMP\\A.exe"])

  artifact_list = ["BadPathspecArtifact"]
  with test_lib.Instrument(transfer.MultiGetFile,
                           "Start") as getfile_instrument:
    for _ in test_lib.TestFlowHelper("ArtifactCollectorFlow", client_mock,
                                     artifact_list=artifact_list,
                                     token=self.token,
                                     client_id=self.client_id,
                                     output="analysis/{p}/{u}-{t}",
                                     split_output_by_artifact=True):
      pass

    # An artifact yielding an invalid pathspec must not trigger downloads.
    self.assertFalse(getfile_instrument.args)
def testDiskVolumeInfoWindows(self):
  """DiskVolumeInfo on Windows reports only drives that actually exist."""
  client = aff4.FACTORY.Open(self.client_id, mode="rw", token=self.token)
  client.Set(client.Schema.SYSTEM("Windows"))
  client.Flush()

  # Registry reads (drive enumeration / systemroot) come from the fake
  # handler.
  vfs.VFS_HANDLERS[
      rdfvalue.PathSpec.PathType.REGISTRY] = test_lib.FakeRegistryVFSHandler

  client_mock = action_mocks.WindowsVolumeClientMock("StatFile",
                                                     "ListDirectory")

  with test_lib.Instrument(flow.GRRFlow, "SendReply") as send_reply:
    for _ in test_lib.TestFlowHelper(
        "DiskVolumeInfo", client_mock, client_id=self.client_id,
        token=self.token, path_list=[r"D:\temp\something", r"/var/tmp"]):
      pass

    # Keep only Volume replies sent by the DiskVolumeInfo flow itself.
    results = []
    for cls, reply in send_reply.args:
      if isinstance(cls, filesystem.DiskVolumeInfo) and isinstance(
          reply, rdfvalue.Volume):
        results.append(reply)

    # We asked for D and we guessed systemroot (C) for "/var/tmp", but only C
    # and Z are present, so we should just get C.
    self.assertItemsEqual([x.windows.drive_letter for x in results], ["C:"])
    self.assertEqual(len(results), 1)

  with test_lib.Instrument(flow.GRRFlow, "SendReply") as send_reply:
    for _ in test_lib.TestFlowHelper("DiskVolumeInfo", client_mock,
                                     client_id=self.client_id,
                                     token=self.token,
                                     path_list=[r"Z:\blah"]):
      pass

    results = []
    for cls, reply in send_reply.args:
      if isinstance(cls, filesystem.DiskVolumeInfo) and isinstance(
          reply, rdfvalue.Volume):
        results.append(reply)

    self.assertItemsEqual([x.windows.drive_letter for x in results], ["Z:"])
    self.assertEqual(len(results), 1)
def testDiskVolumeInfoWindows(self):
  """DiskVolumeInfo on Windows reports only drives that actually exist."""
  self.SetupClients(1, system="Windows")

  # Scoped override: restores the REGISTRY VFS handler when the block exits.
  with test_lib.VFSOverrider(rdf_paths.PathSpec.PathType.REGISTRY,
                             test_lib.FakeRegistryVFSHandler):
    client_mock = action_mocks.WindowsVolumeClientMock("StatFile",
                                                       "ListDirectory")

    with test_lib.Instrument(flow.GRRFlow, "SendReply") as send_reply:
      for _ in test_lib.TestFlowHelper(
          "DiskVolumeInfo", client_mock, client_id=self.client_id,
          token=self.token, path_list=[r"D:\temp\something", r"/var/tmp"]):
        pass

      # Keep only Volume replies sent by the DiskVolumeInfo flow itself.
      results = []
      for cls, reply in send_reply.args:
        if isinstance(cls, filesystem.DiskVolumeInfo) and isinstance(
            reply, rdf_client.Volume):
          results.append(reply)

      # We asked for D and we guessed systemroot (C) for "/var/tmp", but only
      # C and Z are present, so we should just get C.
      self.assertItemsEqual(
          [x.windowsvolume.drive_letter for x in results], ["C:"])
      self.assertEqual(len(results), 1)

    with test_lib.Instrument(flow.GRRFlow, "SendReply") as send_reply:
      for _ in test_lib.TestFlowHelper("DiskVolumeInfo", client_mock,
                                       client_id=self.client_id,
                                       token=self.token,
                                       path_list=[r"Z:\blah"]):
        pass

      results = []
      for cls, reply in send_reply.args:
        if isinstance(cls, filesystem.DiskVolumeInfo) and isinstance(
            reply, rdf_client.Volume):
          results.append(reply)

      self.assertItemsEqual(
          [x.windowsvolume.drive_letter for x in results], ["Z:"])
      self.assertEqual(len(results), 1)
def testRunSuccess(self):
  """EndToEndTestFlow reports one successful result per deduped test name."""
  # The first name is listed twice on purpose: the flow should dedup it.
  args = rdfvalue.EndToEndTestFlowArgs(test_names=[
      "TestListDirectoryOSLinuxDarwin", "MockEndToEndTest",
      "TestListDirectoryOSLinuxDarwin"
  ])

  with test_lib.Instrument(flow.GRRFlow, "SendReply") as send_reply:
    for _ in test_lib.TestFlowHelper("EndToEndTestFlow", self.client_mock,
                                     client_id=self.client_id,
                                     token=self.token, args=args):
      pass

    results = []
    for _, reply in send_reply.args:
      if not isinstance(reply, rdfvalue.EndToEndTestResult):
        continue
      results.append(reply)
      # Every result must be a clean pass with an empty log.
      self.assertTrue(reply.success)
      self.assertTrue(reply.test_class_name in
                      ["TestListDirectoryOSLinuxDarwin", "MockEndToEndTest"])
      self.assertFalse(reply.log)

    # We only expect 2 results because we dedup test names
    self.assertEqual(len(results), 2)
def testRunSuccessAndFail(self):
  """A passing and a failing test are both reported with correct status."""
  args = rdfvalue.EndToEndTestFlowArgs()

  # Restrict the registry of runnable tests to exactly these two classes.
  stubbed_classes = {
      "MockEndToEndTest": MockEndToEndTest,
      "TestFailure": TestFailure
  }
  with utils.Stubber(base.AutomatedTest, "classes", stubbed_classes):
    with test_lib.Instrument(flow.GRRFlow, "SendReply") as send_reply:
      for _ in test_lib.TestFlowHelper("EndToEndTestFlow", self.client_mock,
                                       client_id=self.client_id,
                                       token=self.token, args=args):
        pass

      results = []
      for _, reply in send_reply.args:
        if not isinstance(reply, rdfvalue.EndToEndTestResult):
          continue
        results.append(reply)
        if reply.test_class_name == "MockEndToEndTest":
          # The mock test passes silently.
          self.assertTrue(reply.success)
          self.assertFalse(reply.log)
        elif reply.test_class_name == "TestFailure":
          # The failure test fails and carries its log output.
          self.assertFalse(reply.success)
          self.assertTrue("This should be logged" in reply.log)

      self.assertItemsEqual([x.test_class_name for x in results],
                            ["MockEndToEndTest", "TestFailure"])
      self.assertEqual(len(results), 2)
def testMultiGetFile(self):
  """Test MultiGetFile.

  Requests the same pathspec twice and verifies that MultiGetFile dedups
  the work (StoreStat runs once) and that the resulting AFF4 object has
  the same size as the file on disk.
  """
  client_mock = action_mocks.ActionMock("TransferBuffer", "HashFile",
                                        "StatFile", "HashBuffer")

  pathspec = rdf_paths.PathSpec(
      pathtype=rdf_paths.PathSpec.PathType.OS,
      path=os.path.join(self.base_path, "test_img.dd"))

  # Deliberately pass the identical pathspec twice.
  args = transfer.MultiGetFileArgs(pathspecs=[pathspec, pathspec])
  with test_lib.Instrument(transfer.MultiGetFile,
                           "StoreStat") as storestat_instrument:
    for _ in test_lib.TestFlowHelper("MultiGetFile", client_mock,
                                     token=self.token,
                                     client_id=self.client_id, args=args):
      pass

    # We should only have called StoreStat once because the two paths
    # requested were identical.
    self.assertEqual(len(storestat_instrument.args), 1)

  # Fix path for Windows testing.
  pathspec.path = pathspec.path.replace("\\", "/")

  # Test the AFF4 file that was created.
  urn = aff4.AFF4Object.VFSGRRClient.PathspecToURN(pathspec, self.client_id)
  fd1 = aff4.FACTORY.Open(urn, token=self.token)
  # Use a context manager so the local file handle is closed even when an
  # assertion fails (the previous version leaked the descriptor).
  with open(pathspec.path) as fd2:
    # Seek to EOF so tell() yields the on-disk file size.
    fd2.seek(0, 2)

    self.assertEqual(fd2.tell(), int(fd1.Get(fd1.Schema.SIZE)))
    self.CompareFDs(fd1, fd2)
def _testProcessCollectedArtifacts(self): client_mock = action_mocks.ActionMock("TransferBuffer", "StatFile", "Find", "HashBuffer", "HashFile", "FingerprintFile", "ListDirectory") # Get KB initialized for _ in test_lib.TestFlowHelper( "KnowledgeBaseInitializationFlow", client_mock, client_id=self.client_id, token=self.token): pass artifact_list = ["WindowsPersistenceMechanismFiles"] with test_lib.Instrument(transfer.MultiGetFile, "Start") as getfile_instrument: for _ in test_lib.TestFlowHelper( "ArtifactCollectorFlow", client_mock, artifact_list=artifact_list, token=self.token, client_id=self.client_id, split_output_by_artifact=True): pass # Check MultiGetFile got called for our runkey files # TODO(user): RunKeys for S-1-5-20 are not found because users.sid only # expands to users with profiles. pathspecs = getfile_instrument.args[0][0].args.pathspecs self.assertItemsEqual([x.path for x in pathspecs], [u"C:\\Windows\\TEMP\\A.exe"]) artifact_list = ["BadPathspecArtifact"] with test_lib.Instrument(transfer.MultiGetFile, "Start") as getfile_instrument: for _ in test_lib.TestFlowHelper( "ArtifactCollectorFlow", client_mock, artifact_list=artifact_list, token=self.token, client_id=self.client_id, split_output_by_artifact=True): pass self.assertFalse(getfile_instrument.args)
def testProgressThrottling(self):
  """Repeated Progress() calls forward KeepAlive at a throttled rate."""
  action = actions.ActionPlugin.classes["ProgressAction"]()

  # Pairs of (fake clock value, cumulative expected KeepAlive count):
  # three calls within the same window only produce one KeepAlive.
  schedule = [(100, 1), (101, 1), (102, 1), (103, 2), (104, 2), (105, 2),
              (106, 3)]

  with test_lib.Instrument(client_utils, "KeepAlive") as instrument:
    # Renamed from "time" to avoid shadowing the stdlib module name.
    for fake_time, expected_count in schedule:
      with test_lib.FakeTime(fake_time):
        action.Progress()
        self.assertEqual(instrument.call_count, expected_count)
def testFetchFilesGlobFlow(self):
  """FetchFiles with glob patterns batches its filestore hash checks."""
  # Very small chunks to stress test this flow.
  with test_lib.MultiStubber(
      (transfer.MultiGetFile, "CHUNK_SIZE", self.chunk_size),
      (transfer.MultiGetFile, "MIN_CALL_TO_FILE_STORE", 3)):
    with test_lib.Instrument(filestore.FileStore,
                             "CheckHashes") as check_hashes_instrument:
      # Base pathspec: a TSK view rooted inside the test disk image.
      self.base_pathspec = rdfvalue.PathSpec(
          path=os.path.join(self.base_path, "winexec_img.dd"),
          pathtype=rdfvalue.PathSpec.PathType.OS)
      self.base_pathspec.Append(path="/",
                                pathtype=rdfvalue.PathSpec.PathType.TSK)

      # Pre-create a VFS object with dummy content so the flow must update
      # it rather than create it.
      inspect_path = self.base_pathspec.Copy()
      inspect_path.AppendPath("Ext2IFS_1_10b.exe")

      urn = aff4.AFF4Object.VFSGRRClient.PathspecToURN(inspect_path,
                                                       self.client_id)

      fd = aff4.FACTORY.Create(urn, "AFF4MemoryStream", token=self.token)
      fd.Write("hello")
      fd.Close()

      # Now run the fetch all files.
      client_mock = test_lib.ActionMock("TransferBuffer", "StatFile", "Find",
                                        "HashFile", "HashBuffer")

      for _ in test_lib.TestFlowHelper(
          "FetchFiles", client_mock, token=self.token,
          paths=["*.exe", "*.sys"], root_path=self.base_pathspec,
          pathtype=rdfvalue.PathSpec.PathType.OS, client_id=self.client_id):
        pass

      self.CheckFindExeFiles()
      self.CheckPresenceOfSignedData()
      self.CheckIndexLookup()

      pathlist = ["pciide.sys"]
      self.CheckExistingFile(pathlist)

      # In this test we limit the maximum number of times the filestore check
      # hashes is called to 3. There are 7 hits in the test data, so we
      # expect 3 calls, of 3, 3, and 1:
      self.assertEqual(len(check_hashes_instrument.args), 3)
      self.assertEqual(len(check_hashes_instrument.args[0][1]), 3)
      self.assertEqual(len(check_hashes_instrument.args[1][1]), 3)
      self.assertEqual(len(check_hashes_instrument.args[2][1]), 1)
def RunFlow(self):
  """Run CheckRunner; return the session and CheckResults keyed by check id."""
  session_id = None
  with test_lib.Instrument(flow.GRRFlow, "SendReply") as send_reply:
    for session_id in test_lib.TestFlowHelper(
        "CheckRunner", client_mock=self.client_mock,
        client_id=self.client_id, token=self.token):
      pass

    session = aff4.FACTORY.Open(session_id, token=self.token)

    # Index the captured CheckResult replies by their check id.
    results = {}
    for _, reply in send_reply.args:
      if isinstance(reply, checks.CheckResult):
        results[reply.check_id] = reply

  return session, results
def RunFlow(self):
  """Run CheckRunner; return the session and the raw SendReply capture."""
  session_id = None
  with test_lib.Instrument(flow.GRRFlow, "SendReply") as send_reply:
    for session_id in test_lib.TestFlowHelper("CheckRunner",
                                              self.client_mock,
                                              token=self.token,
                                              client_id=self.client_id):
      pass

    session = aff4.FACTORY.Open(session_id, token=self.token)

  # Hand the whole instrument back so callers can inspect all replies.
  return session, send_reply
def testFileOutput(self):
  """Tests that a file can be written by a plugin and retrieved."""
  request = rdf_rekall_types.RekallRequest()
  # Run procdump to create one file.
  procdump = rdf_rekall_types.PluginRequest(plugin="procdump",
                                            args={"pid": 2860})
  request.plugins = [procdump]

  with test_lib.Instrument(transfer.MultiGetFile,
                           "StoreStat") as storestat_instrument:
    self.LaunchRekallPlugin(request)
    # Expect one file to be downloaded.
    self.assertEqual(storestat_instrument.call_count, 1)
def RunFlow(self, paths=None, conditions=None,
            action=file_finder.FileFinderAction.Action.STAT):
  """Run FileFinder with the given settings; return the captured replies."""
  with test_lib.Instrument(flow.GRRFlow, "SendReply") as send_reply:
    flow_action = file_finder.FileFinderAction(action_type=action)
    for _ in test_lib.TestFlowHelper(
        "FileFinder", self.client_mock, client_id=self.client_id,
        paths=paths or [self.path],
        pathtype=rdfvalue.PathSpec.PathType.OS, action=flow_action,
        conditions=conditions, token=self.token, output=self.output_path):
      pass

  return send_reply.args
def testListing(self):
  """Listing 100 items should need only two Scan calls in total."""
  test_urn = "aff4:/sequential_collection/testIndexedListing"
  collection = self._TestCollection(test_urn)
  timestamps = [collection.Add(rdfvalue.RDFInteger(i)) for i in range(100)]

  with test_lib.Instrument(sequential_collection.SequentialCollection,
                           "Scan") as scan:
    self.assertEqual(len(list(collection)), 100)
    # Listing should be done using a single scan but there is another one
    # for calculating the length.
    self.assertEqual(scan.call_count, 2)
def testFingerprintPresence(self):
  """FingerprintFile stores hashes and signed-data info for a PE driver."""
  path = os.path.join(self.base_path, "winexec_img.dd")
  # Pathspec into the TSK view of the image, pointing at a signed driver.
  pathspec = rdfvalue.PathSpec(pathtype=rdfvalue.PathSpec.PathType.OS,
                               path=path)
  pathspec.Append(path="/winpmem-amd64.sys",
                  pathtype=rdfvalue.PathSpec.PathType.TSK)

  client_mock = action_mocks.ActionMock("FingerprintFile")
  with test_lib.Instrument(flow.GRRFlow, "SendReply") as send_reply:
    for _ in test_lib.TestFlowHelper("FingerprintFile", client_mock,
                                     token=self.token,
                                     client_id=self.client_id,
                                     pathspec=pathspec):
      pass

  # Exactly one reply, carrying the known-good digests of the test file.
  self.assertEqual(len(send_reply.args), 1)
  for _, reply in send_reply.args:
    self.assertTrue(isinstance(reply, rdfvalue.FingerprintFileResult))
    self.assertTrue(str(reply.file_urn).endswith(
        "test_data/winexec_img.dd/winpmem-amd64.sys"))

    self.assertEqual(
        str(reply.hash_entry.sha256),
        "40ac571d6d85d669a9a19d498d9f926525481430056ff65746f"
        "baf36bee8855f")
    self.assertEqual(str(reply.hash_entry.sha1),
                     "6e17df1a1020a152f2bf4445d1004b192ae8e42d")
    self.assertEqual(str(reply.hash_entry.md5),
                     "12be1109aa3d3b46c9398972af2008e1")

  urn = aff4.AFF4Object.VFSGRRClient.PathspecToURN(pathspec, self.client_id)
  fd = aff4.FACTORY.Open(urn, token=self.token)
  self.assertEqual(fd.__class__, aff4_grr.VFSFile)

  # FINGERPRINT is deprecated in favour of HASH, check it anyway
  fingerprint = fd.Get(fd.Schema.FINGERPRINT)
  pecoff = fingerprint.GetFingerprint("pecoff")["sha1"].encode("hex")
  self.assertEqual(pecoff, "1f32fa4eedfba023653c094143d90999f6b9bc4f")

  hash_obj = fd.Get(fd.Schema.HASH)
  self.assertEqual(hash_obj.pecoff_sha1,
                   "1f32fa4eedfba023653c094143d90999f6b9bc4f")
  self.assertEqual(hash_obj.signed_data[0].revision, 512)
def RunFlow(self, paths=None, conditions=None, action=None):
  """Run FileFinder, record the session id, and return captured replies."""
  with test_lib.Instrument(flow.GRRFlow, "SendReply") as send_reply:
    for session_id in test_lib.TestFlowHelper(
        file_finder.FileFinder.__name__, self.client_mock,
        client_id=self.client_id, paths=paths or [self.path],
        pathtype=rdf_paths.PathSpec.PathType.OS, action=action,
        conditions=conditions, token=self.token):
      # Keep the most recent session id for later inspection by the caller.
      self.last_session_id = session_id

  return send_reply.args
def testDiskVolumeInfoOSXLinux(self):
  """DiskVolumeInfo resolves the mount points that cover the given paths."""
  client_mock = action_mocks.UnixVolumeClientMock("StatFile",
                                                  "ListDirectory")
  with test_lib.Instrument(flow.GRRFlow, "SendReply") as send_reply:
    for _ in test_lib.TestFlowHelper(
        "DiskVolumeInfo", client_mock, client_id=self.client_id,
        token=self.token, path_list=["/usr/local", "/home"]):
      pass

    # Collect only the Volume replies.
    results = [reply for _, reply in send_reply.args
               if isinstance(reply, rdf_client.Volume)]

    self.assertItemsEqual([x.unixvolume.mount_point for x in results],
                          ["/", "/usr"])
    self.assertEqual(len(results), 2)
def testCollectRunKeyBinaries(self):
  """Read Run key from the client_fixtures to test parsing and storage."""
  test_lib.ClientFixture(self.client_id, token=self.token)

  # Fixture: a Windows 6.2 client.
  client = aff4.FACTORY.Open(self.client_id, token=self.token, mode="rw")
  client.Set(client.Schema.SYSTEM("Windows"))
  client.Set(client.Schema.OS_VERSION("6.2"))
  client.Flush()

  # Serve REGISTRY and OS pathspecs from fake in-memory VFS handlers.
  vfs.VFS_HANDLERS[
      rdf_paths.PathSpec.PathType.REGISTRY] = test_lib.FakeRegistryVFSHandler
  vfs.VFS_HANDLERS[
      rdf_paths.PathSpec.PathType.OS] = test_lib.FakeFullVFSHandler

  client_mock = action_mocks.ActionMock("TransferBuffer", "StatFile", "Find",
                                        "HashBuffer", "FingerprintFile",
                                        "ListDirectory")

  # Get KB initialized
  for _ in test_lib.TestFlowHelper("KnowledgeBaseInitializationFlow",
                                   client_mock, client_id=self.client_id,
                                   token=self.token):
    pass

  with test_lib.Instrument(transfer.MultiGetFile,
                           "Start") as getfile_instrument:
    # Run the flow in the emulated way.
    for _ in test_lib.TestFlowHelper("CollectRunKeyBinaries", client_mock,
                                     client_id=self.client_id,
                                     token=self.token):
      pass

    # Check MultiGetFile got called for our runkey file
    download_requested = False
    for pathspec in getfile_instrument.args[0][0].args.pathspecs:
      if pathspec.path == u"C:\\Windows\\TEMP\\A.exe":
        download_requested = True

    self.assertTrue(download_requested)
def RunFlowAndCheckResults(self, conditions=None,
                           action=rdfvalue.FileFinderAction.Action.STAT,
                           expected_files=None, non_expected_files=None):
  """Run FileFinder and verify replies plus per-action side effects.

  Args:
    conditions: Optional list of FileFinder conditions to apply.
    action: FileFinderAction.Action value; selects which download/hash
      checks are performed afterwards.
    expected_files: Filenames the flow is expected to match.
    non_expected_files: Filenames the flow must not match.
  """
  conditions = conditions or []
  expected_files = expected_files or []
  non_expected_files = non_expected_files or []

  # Delete any leftover AFF4 objects so the "not downloaded" checks below
  # cannot pass on stale state.
  for fname in expected_files + non_expected_files:
    aff4.FACTORY.Delete(self.FileNameToURN(fname), token=self.token)

  with test_lib.Instrument(flow.GRRFlow, "SendReply") as send_reply:
    for _ in test_lib.TestFlowHelper(
        "FileFinder", self.client_mock, client_id=self.client_id,
        paths=[self.path], pathtype=rdfvalue.PathSpec.PathType.OS,
        action=rdfvalue.FileFinderAction(action_type=action),
        conditions=conditions, token=self.token, output=self.output_path):
      pass

  self.CheckReplies(send_reply.args, action, expected_files)

  self.CheckFilesInCollection(expected_files)

  if action == rdfvalue.FileFinderAction.Action.STAT:
    # STAT must neither download nor hash any file.
    self.CheckFilesNotDownloaded(expected_files + non_expected_files)
    self.CheckFilesNotHashed(expected_files + non_expected_files)
  elif action == rdfvalue.FileFinderAction.Action.DOWNLOAD:
    self.CheckFilesDownloaded(expected_files)
    self.CheckFilesNotDownloaded(non_expected_files)
    # Downloaded files are hashed to allow for deduping.
  elif action == rdfvalue.FileFinderAction.Action.HASH:
    # HASH computes digests without transferring file contents.
    self.CheckFilesNotDownloaded(expected_files + non_expected_files)
    self.CheckFilesHashed(expected_files)
    self.CheckFilesNotHashed(non_expected_files)
def testEndToEndTestFailure(self):
  """A failing test is reported with success=False and its log output."""
  args = rdfvalue.EndToEndTestFlowArgs(test_names=["TestFailure"])

  with test_lib.Instrument(flow.GRRFlow, "SendReply") as send_reply:
    for _ in test_lib.TestFlowHelper("EndToEndTestFlow", self.client_mock,
                                     client_id=self.client_id,
                                     token=self.token, args=args):
      pass

    results = []
    for _, reply in send_reply.args:
      if not isinstance(reply, rdfvalue.EndToEndTestResult):
        continue
      results.append(reply)
      # The failure must be reported, named, and carry its log message.
      self.assertFalse(reply.success)
      self.assertEqual(reply.test_class_name, "TestFailure")
      self.assertTrue("This should be logged" in reply.log)

    self.assertEqual(len(results), 1)
def testUpload(self):
  """Uploads are rejected until a valid, HMAC-signed, current policy is set.

  Walks through the failure modes in order (no HMAC, no policy, bad
  signature, expired policy) and checks the server log message for each,
  then performs one successful upload and verifies the stored content.
  """
  magic_string = "Hello world"

  test_file = os.path.join(self.temp_dir, "sample.txt")
  with open(test_file, "wb") as fd:
    fd.write(magic_string)

  args = rdf_client.UploadFileRequest()
  args.pathspec.path = test_file
  args.pathspec.pathtype = "OS"

  # Errors are logged on the server but not always provided to the client. We
  # check the server logs for the errors we inject.
  with test_lib.Instrument(logging, "error") as logger:
    # First do not provide a hmac at all.
    with self.assertRaises(IOError):
      self._UploadFile(args)

    # BUG FIX: assertRegexpMatches(text, regexp) -- the captured log output
    # is the text under test and the expected message is the pattern. The
    # arguments were previously reversed, so these checks never tested the
    # log contents.
    self.assertRegexpMatches(str(logger.args), "HMAC not provided")
    logger.args[:] = []

    # Now pass a rubbish HMAC but forget to give a policy.
    args.hmac = transfer.GetHMAC().HMAC("This is the wrong filename")
    with self.assertRaises(IOError):
      self._UploadFile(args)

    self.assertRegexpMatches(str(logger.args), "Policy not provided")
    logger.args[:] = []

    # Ok - lets make an expired policy, Still wrong HMAC.
    policy = rdf_client.UploadPolicy(
        client_id=self.client_id,
        filename=args.pathspec.CollapsePath(),
        expires=1000)
    args.policy = policy.SerializeToString()

    with self.assertRaises(IOError):
      self._UploadFile(args)

    self.assertRegexpMatches(str(logger.args),
                             "Signature did not match digest")
    logger.args[:] = []

    # Ok lets hmac the policy now, but its still too old.
    args.hmac = transfer.GetHMAC().HMAC(args.policy)
    with self.assertRaises(IOError):
      self._UploadFile(args)

    # Make sure the file is not written yet.
    rootdir = config_lib.CONFIG["FileUploadFileStore.root_dir"]
    target_filename = os.path.join(
        rootdir,
        self.client_id.Add(test_file).Path().lstrip(os.path.sep))

    self.assertNotEqual(target_filename, test_file)
    with self.assertRaises(IOError):
      open(target_filename)

    self.assertRegexpMatches(str(logger.args),
                             "Client upload policy is too old")
    logger.args[:] = []

    # Lets expire the policy in the future.
    policy.expires = rdfvalue.RDFDatetime.Now() + 1000
    args.policy = policy.SerializeToString()
    args.hmac = transfer.GetHMAC().HMAC(args.policy)
    self._UploadFile(args)

    # Make sure the file was uploaded correctly.
    with open(target_filename) as fd:
      data = fd.read()

    # The stored data is actually gzip compressed.
    uncompressed_data = gzip.GzipFile(
        fileobj=StringIO.StringIO(data)).read()
    self.assertEqual(uncompressed_data, magic_string)
def testFetchFilesFlow(self):
  """End-to-end FetchFiles run over a test image with batched hash checks."""
  # Very small chunks to stress test this flow.
  with test_lib.MultiStubber(
      (transfer.MultiGetFile, "CHUNK_SIZE", self.chunk_size),
      (transfer.MultiGetFile, "MIN_CALL_TO_FILE_STORE", 10)):
    with test_lib.Instrument(filestore.FileStore,
                             "CheckHashes") as check_hashes_instrument:
      # Find .exe/.sys files inside a TSK view of the test disk image.
      path = os.path.join(self.base_path, "winexec_img.dd")
      self.findspec = rdfvalue.FindSpec(path_regex=r"\.(exe|sys)$")
      self.findspec.pathspec.path = path
      self.findspec.pathspec.pathtype = rdfvalue.PathSpec.PathType.OS
      self.findspec.pathspec.Append(path="/",
                                    pathtype=rdfvalue.PathSpec.PathType.TSK)

      self.base_pathspec = self.findspec.pathspec.Copy()

      # First create some existing files in the VFS so we can ensure they get
      # updated.
      inspect_path = self.base_pathspec.Copy()
      inspect_path.AppendPath("Ext2IFS_1_10b.exe")

      urn = aff4.AFF4Object.VFSGRRClient.PathspecToURN(inspect_path,
                                                       self.client_id)

      fd = aff4.FACTORY.Create(urn, "AFF4MemoryStream", token=self.token)
      fd.Write("hello")
      fd.Close()

      # Now run the fetch all files.
      client_mock = test_lib.ActionMock("TransferBuffer", "StatFile", "Find",
                                        "HashFile", "HashBuffer")

      for _ in test_lib.TestFlowHelper("FetchFiles", client_mock,
                                       token=self.token,
                                       client_id=self.client_id,
                                       findspec=self.findspec):
        pass

      self.CheckFindExeFiles()
      self.CheckPresenceOfSignedData()
      self.CheckIndexLookup()

      pathlist = [
          "/a/b/c/g/f/pciide.sys", "pciide.sys", "/a/b/c/g/h/pciide.sys",
          "/a/b/c/g/pciide.sys"
      ]
      self.CheckExistingFile(pathlist)

      # In this test we limit the maximum number of times the filestore check
      # hashes is called to 10. There are 23 hits in the test data, so we
      # expect 3 calls, of 10, 10, and 3:
      self.assertEqual(len(check_hashes_instrument.args), 3)
      self.assertEqual(len(check_hashes_instrument.args[0][1]), 10)
      self.assertEqual(len(check_hashes_instrument.args[1][1]), 10)
      self.assertEqual(len(check_hashes_instrument.args[2][1]), 3)

      fd = aff4.FACTORY.Open(self.client_id.Add("analysis/FetchFiles"),
                             token=self.token)
      collection = list(fd.OpenChildren())[0]
      self.assertEqual(len(collection), 23)