def testExecutePythonHack(self):
  """Uploads a signed python blob and verifies ExecutePythonHack runs it."""
  hack_mock = action_mocks.ActionMock("ExecutePython")

  # This is the code we test. If this runs on the client mock we can check for
  # this attribute. Start from a known False state so the final assertion
  # proves the uploaded code really executed.
  sys.test_code_ran_here = False
  code = """
import sys
sys.test_code_ran_here = True
"""
  maintenance_utils.UploadSignedConfigBlob(
      code, aff4_path="aff4:/config/python_hacks/test", token=self.token)

  for _ in test_lib.TestFlowHelper("ExecutePythonHack",
                                   hack_mock,
                                   client_id=self.client_id,
                                   hack_name="test",
                                   token=self.token):
    pass

  self.assertTrue(sys.test_code_ran_here)
def testGlobDirectory(self):
  """Test that glob expands directories."""
  # Add some usernames we can interpolate later.
  client = aff4.FACTORY.Open(self.client_id, mode="rw", token=self.token)
  kb = client.Get(client.Schema.KNOWLEDGE_BASE)
  kb.MergeOrAddUser(
      rdf_client.User(username="******", appdata="test_data/index.dat"))
  kb.MergeOrAddUser(
      rdf_client.User(username="******", appdata="test_data/History"))
  # This is a record which means something to the interpolation system. We
  # should not process this especially.
  kb.MergeOrAddUser(rdf_client.User(username="******", appdata="%%PATH%%"))
  client.Set(kb)
  client.Close()

  client_mock = action_mocks.ActionMock("Find", "StatFile")

  # This glob selects all files which start with the username on this system.
  path = os.path.join(os.path.dirname(self.base_path), "%%users.appdata%%")

  # Run the flow.
  for _ in test_lib.TestFlowHelper("Glob",
                                   client_mock,
                                   client_id=self.client_id,
                                   paths=[path],
                                   token=self.token):
    pass

  # The first user's appdata file should have been expanded and created.
  path = self.client_id.Add("fs/os").Add(self.base_path).Add("index.dat")
  aff4.FACTORY.Open(path, aff4_type=aff4_grr.VFSFile, token=self.token)

  # BUG FIX: this check previously re-opened "index.dat" a second time; it
  # should verify the second user's appdata file ("History") was expanded too.
  path = self.client_id.Add("fs/os").Add(self.base_path).Add("History")
  aff4.FACTORY.Open(path, aff4_type=aff4_grr.VFSFile, token=self.token)
def testCollectRunKeyBinaries(self):
  """Read Run key from the client_fixtures to test parsing and storage."""
  test_lib.ClientFixture(self.client_id, token=self.token)

  # Mark the client as Windows 6.2 so the Windows-specific artifact paths
  # are exercised by the flow.
  client = aff4.FACTORY.Open(self.client_id, token=self.token, mode="rw")
  client.Set(client.Schema.SYSTEM("Windows"))
  client.Set(client.Schema.OS_VERSION("6.2"))
  client.Flush()

  with test_lib.VFSOverrider(rdf_paths.PathSpec.PathType.OS,
                             test_lib.FakeFullVFSHandler):

    client_mock = action_mocks.ActionMock(
        file_fingerprint.FingerprintFile,
        searching.Find,
        standard.StatFile,)

    # Get KB initialized
    for _ in test_lib.TestFlowHelper("KnowledgeBaseInitializationFlow",
                                     client_mock,
                                     client_id=self.client_id,
                                     token=self.token):
      pass

    # Instrument MultiGetFile.Start so we can inspect which pathspecs the
    # flow asked to download without actually fetching them.
    with test_lib.Instrument(transfer.MultiGetFile,
                             "Start") as getfile_instrument:
      # Run the flow in the emulated way.
      for _ in test_lib.TestFlowHelper("CollectRunKeyBinaries",
                                       client_mock,
                                       client_id=self.client_id,
                                       token=self.token):
        pass

      # Check MultiGetFile got called for our runkey file
      download_requested = False
      for pathspec in getfile_instrument.args[0][0].args.pathspecs:
        if pathspec.path == u"C:\\Windows\\TEMP\\A.exe":
          download_requested = True

      self.assertTrue(download_requested)
def testEndToEndTests(self):
  """Runs MockEndToEndTest via the EndToEndTests flow and checks its hunt."""
  self.client_ids = [
      "aff4:/C.6000000000000000", "aff4:/C.6000000000000001",
      "aff4:/C.6000000000000002"
  ]
  for clientid in self.client_ids:
    self._SetSummaries(clientid)

  self.client_mock = action_mocks.ActionMock("ListDirectory", "StatFile")

  config_lib.CONFIG.Set("Test.end_to_end_client_ids", self.client_ids)
  # Restrict the registered end-to-end tests to the mock test only, and set
  # the hunt lifetime to 0 so it completes immediately in the test harness.
  with utils.MultiStubber(
      (base.AutomatedTest, "classes",
       {"MockEndToEndTest": endtoend_test.MockEndToEndTest}),
      (system.EndToEndTests, "lifetime", 0)):

    # The test harness doesn't understand the callstate at a later time that
    # this flow is doing, so we need to disable check_flow_errors.
    for _ in test_lib.TestFlowHelper("EndToEndTests",
                                     self.client_mock,
                                     client_id=self.client_id,
                                     check_flow_errors=False,
                                     token=self.token):
      pass

  test_lib.TestHuntHelperWithMultipleMocks({},
                                           check_flow_errors=False,
                                           token=self.token)
  hunt_ids = list(
      aff4.FACTORY.Open("aff4:/hunts", token=self.token).ListChildren())
  # We have only created one hunt, and we should have started with clean aff4
  # space.
  self.assertEqual(len(hunt_ids), 1)

  hunt_obj = aff4.FACTORY.Open(hunt_ids[0],
                               token=self.token,
                               age=aff4.ALL_TIMES)
  self.assertItemsEqual(sorted(hunt_obj.GetClients()),
                        sorted(self.client_ids))
def testKnowledgeBaseRetrievalLinuxPasswd(self):
  """Check we can retrieve a Linux kb."""
  test_lib.ClientFixture(self.client_id, token=self.token)
  client_mock = action_mocks.ActionMock("TransferBuffer", "StatFile", "Find",
                                        "HashBuffer", "ListDirectory",
                                        "FingerprintFile", "Grep")
  self.SetLinuxClient()

  # FIX: use scoped overriders instead of mutating the global
  # vfs.VFS_HANDLERS dict and config_lib.CONFIG in place — the original left
  # both mutated after the test, leaking state into later tests. This also
  # matches the style of testKnowledgeBaseRetrievalLinux.
  with test_lib.ConfigOverrider({
      "Artifacts.knowledge_base": ["LinuxWtmp", "LinuxPasswdHomedirs",
                                   "LinuxRelease"],
      "Artifacts.knowledge_base_additions": [],
      "Artifacts.knowledge_base_skip": []
  }):
    with test_lib.VFSOverrider(rdfvalue.PathSpec.PathType.OS,
                               test_lib.FakeTestDataVFSHandler):
      for _ in test_lib.TestFlowHelper("KnowledgeBaseInitializationFlow",
                                       client_mock,
                                       client_id=self.client_id,
                                       token=self.token):
        pass

      client = aff4.FACTORY.Open(self.client_id, token=self.token, mode="rw")
      kb = artifact.GetArtifactKnowledgeBase(client)
      self.assertEqual(kb.os_major_version, 14)
      self.assertEqual(kb.os_minor_version, 4)
      # user 1,2,3 from wtmp.
      # Bert and Ernie not present (Users fixture overriden by kb).
      self.assertItemsEqual([x.username for x in kb.users],
                            ["user1", "user2", "user3"])

      user = kb.GetUser(username="******")
      self.assertEqual(user.last_logon.AsSecondsFromEpoch(), 1296552099)
      self.assertEqual(user.homedir, "/home/user1")

      user = kb.GetUser(username="******")
      self.assertEqual(user.last_logon.AsSecondsFromEpoch(), 1296552102)
      self.assertEqual(user.homedir, "/home/user2")

      self.assertFalse(kb.GetUser(username="******"))
def testDownloadDirectory(self):
  """Test a FileFinder flow with depth=1."""
  with test_lib.VFSOverrider(rdf_paths.PathSpec.PathType.OS,
                             test_lib.ClientVFSHandlerFixture):
    # Mock the client actions FileFinder uses.
    finder_mock = action_mocks.ActionMock("FingerprintFile", "HashBuffer",
                                          "HashFile", "StatFile", "Find",
                                          "TransferBuffer")

    download_action = file_finder.FileFinderAction(
        action_type=file_finder.FileFinderAction.Action.DOWNLOAD)
    for _ in test_lib.TestFlowHelper("FileFinder",
                                     finder_mock,
                                     client_id=self.client_id,
                                     paths=["/c/Downloads/*"],
                                     action=download_action,
                                     token=self.token):
      pass

    # Check if the base path was created
    downloads_urn = self.client_id.Add("fs/os/c/Downloads")
    downloads_fd = aff4.FACTORY.Open(downloads_urn, token=self.token)
    children = list(downloads_fd.OpenChildren())

    # There should be 6 children:
    expected_children = u"a.txt b.txt c.txt d.txt sub1 中国新闻网新闻中.txt"
    self.assertEqual(len(children), 6)
    self.assertEqual(expected_children.split(),
                     sorted(child.urn.Basename() for child in children))

    # Find the child named: a.txt
    for child in children:
      if child.urn.Basename() == "a.txt":
        break

    # Check the AFF4 type of the child, it should have changed
    # from VFSFile to VFSBlobImage
    self.assertEqual(child.__class__.__name__, "VFSBlobImage")
def testGlobWithStarStarRootPath(self):
  """Test ** expressions with root_path."""
  # Add some usernames we can interpolate later.
  client = aff4.FACTORY.Open(self.client_id, mode="rw", token=self.token)
  kb = client.Get(client.Schema.KNOWLEDGE_BASE)
  for user in (rdf_client.User(username="******"),
               rdf_client.User(username="******")):
    kb.MergeOrAddUser(user)
  client.Set(kb)
  client.Close()

  glob_mock = action_mocks.ActionMock("Find", "StatFile")

  # Glob for foo at a depth of 4.
  glob_expr = os.path.join("foo**4")
  root_path = rdf_paths.PathSpec(
      path=os.path.join(self.base_path, "test_img.dd"),
      pathtype=rdf_paths.PathSpec.PathType.OS)
  root_path.Append(path="/", pathtype=rdf_paths.PathSpec.PathType.TSK)

  # Run the flow.
  for _ in test_lib.TestFlowHelper("Glob",
                                   glob_mock,
                                   client_id=self.client_id,
                                   paths=[glob_expr],
                                   root_path=root_path,
                                   pathtype=rdf_paths.PathSpec.PathType.OS,
                                   token=self.token):
    pass

  output_path = self.client_id.Add("fs/tsk").Add(
      self.base_path.replace("\\", "/")).Add("test_img.dd/glob_test/a/b")

  fd = aff4.FACTORY.Open(output_path, token=self.token)
  children = [child.Basename() for child in fd.ListChildren()]

  # We should find some files.
  self.assertEqual(children, ["foo"])
def testMACTimes(self):
  """Test that the timelining works with files."""
  with test_lib.VFSOverrider(rdf_paths.PathSpec.PathType.OS,
                             test_lib.ClientVFSHandlerFixture):
    listdir_mock = action_mocks.ActionMock("ListDirectory")

    root_pathspec = rdf_paths.PathSpec(
        path="/", pathtype=rdf_paths.PathSpec.PathType.OS)
    for _ in test_lib.TestFlowHelper("RecursiveListDirectory",
                                     listdir_mock,
                                     client_id=self.client_id,
                                     pathspec=root_pathspec,
                                     token=self.token):
      pass

    # Now make a timeline
    session_id = None
    for session_id in test_lib.TestFlowHelper("MACTimes",
                                              listdir_mock,
                                              client_id=self.client_id,
                                              token=self.token,
                                              path="/"):
      pass

    fd = aff4.FACTORY.Open(session_id.Add(flow_runner.RESULTS_SUFFIX),
                           token=self.token)

    events = list(fd.Query("event.stat.pathspec.path contains grep"))

    previous_timestamp = 0
    for event in events:
      # Check the times are monotonously increasing.
      self.assertGreaterEqual(event.event.timestamp, previous_timestamp)
      previous_timestamp = event.event.timestamp
      self.assertIn("grep", event.event.stat.pathspec.path)

    # 9 files, each having mac times = 27 events.
    self.assertEqual(len(events), 27)
def testCollectionOverwriting(self):
  """Test we overwrite the collection every time the flow is executed."""
  find_mock = action_mocks.ActionMock("Find")
  output_path = "analysis/FindFlowTest5"

  # Prepare a findspec.
  findspec = rdfvalue.FindSpec()
  findspec.path_regex = "bin"
  findspec.pathspec.path = "/"
  findspec.pathspec.pathtype = rdfvalue.PathSpec.PathType.OS

  def _RunFindFiles(**extra_args):
    # Drives the FindFiles flow to completion with the shared findspec.
    for _ in test_lib.TestFlowHelper("FindFiles",
                                     find_mock,
                                     client_id=self.client_id,
                                     token=self.token,
                                     findspec=findspec,
                                     output=output_path,
                                     **extra_args):
      pass

  _RunFindFiles()

  # Check the output file with the right number of results.
  fd = aff4.FACTORY.Open(self.client_id.Add(output_path), token=self.token)
  self.assertEqual(len(fd), 2)

  # Now find a new result, should overwrite the collection
  findspec.path_regex = "dd"
  _RunFindFiles(max_results=1)

  fd = aff4.FACTORY.Open(self.client_id.Add(output_path), token=self.token)
  self.assertEqual(len(fd), 1)
def testVFSFileStartsNewMultiGetFileWhenLockingFlowHasFinished(self):
  """A new MultiFileGet can be started when the locking flow has finished."""
  client_id = self.SetupClients(1)[0]
  test_lib.ClientFixture(client_id, token=self.token)
  # We need to choose a file path having a pathsepc.
  path = "fs/os/c/bin/bash"

  with aff4.FACTORY.Create(
      client_id.Add(path),
      aff4_type=aff4_grr.VFSFile,
      mode="rw",
      token=self.token) as file_fd:
    # Starts a MultiGetFile flow.
    first_update_flow_urn = file_fd.Update()

    # Check that there is exactly one flow on the client.
    flows_fd = aff4.FACTORY.Open(client_id.Add("flows"), token=self.token)
    flows = list(flows_fd.ListChildren())
    self.assertEqual(len(flows), 1)

    # Finish the flow holding the lock.
    client_mock = action_mocks.ActionMock()
    for _ in test_lib.TestFlowHelper(
        flows[0], client_mock, client_id=client_id, token=self.token):
      pass

    # The flow holding the lock has finished, so Update() should start a new
    # flow.
    second_update_flow_urn = file_fd.Update()

    # There should be two flows now.
    flows_fd = aff4.FACTORY.Open(client_id.Add("flows"), token=self.token)
    flows = list(flows_fd.ListChildren())
    self.assertEqual(len(flows), 2)

    # Make sure that each Update() started a new flow and that the second
    # flow is now the one recorded in the file's CONTENT_LOCK attribute.
    self.assertNotEqual(first_update_flow_urn, second_update_flow_urn)
    self.assertEqual(second_update_flow_urn,
                     file_fd.Get(file_fd.Schema.CONTENT_LOCK))
def testExportTabIsEnabledForStatEntryResults(self):
  """The Export tab should show the CLI command for StatEntry flow results."""
  client_id = rdfvalue.ClientURN("C.0000000000000001")

  with self.ACLChecksDisabled():
    # Run a flow that produces a single StatEntry result so there is
    # something exportable to show in the UI.
    for _ in test_lib.TestFlowHelper("FlowWithOneStatEntryResult",
                                     action_mocks.ActionMock(),
                                     client_id=client_id,
                                     token=self.token):
      pass
    self.GrantClientApproval(client_id)

  self.Open("/#c=C.0000000000000001")
  self.Click("css=a:contains('Manage launched flows')")
  self.Click("css=td:contains('FlowWithOneStatEntryResult')")
  self.Click("css=#Export")

  # The export dialog should display a ready-to-copy export command line.
  self.WaitUntil(
      self.IsTextPresent,
      "--username test --reason 'Running tests' collection_files "
      "--path aff4:/C.0000000000000001/analysis/FlowWithOneStatEntryResult")
def testKnowledgeBaseMultiProvides(self):
  """Check we can handle multi-provides."""
  self.SetupWindowsMocks()
  # Replace some artifacts with test one that will run the MultiProvideParser.
  self.LoadTestArtifacts()
  config_lib.CONFIG.Set("Artifacts.knowledge_base", ["DepsProvidesMultiple"])

  kb_mock = action_mocks.ActionMock("TransferBuffer", "StatFile", "Find",
                                    "HashBuffer", "ListDirectory",
                                    "FingerprintFile")

  for _ in test_lib.TestFlowHelper("KnowledgeBaseInitializationFlow",
                                   kb_mock,
                                   client_id=self.client_id,
                                   token=self.token):
    pass

  # The client should now be populated with the data we care about.
  client = aff4.FACTORY.Open(self.client_id, token=self.token)
  kb = artifact.GetArtifactKnowledgeBase(client)
  # Both values were provided by the single multi-provide artifact.
  self.assertEqual(kb.environ_temp, "tempvalue")
  self.assertEqual(kb.environ_path, "pathvalue")
def setUp(self):
  """Starts a file-downloading GenericHunt and runs it on 10 test clients."""
  super(ApiGetHuntFilesArchiveHandlerTest, self).setUp()

  self.handler = hunt_plugin.ApiGetHuntFilesArchiveHandler()

  # The hunt runs FileFinder in DOWNLOAD mode over a known fixture file so
  # each client contributes one collected file to the archive.
  self.hunt = hunts.GRRHunt.StartHunt(
      hunt_name="GenericHunt",
      flow_runner_args=flow_runner.FlowRunnerArgs(
          flow_name=file_finder.FileFinder.__name__),
      flow_args=file_finder.FileFinderArgs(
          paths=[os.path.join(self.base_path, "test.plist")],
          action=file_finder.FileFinderAction(action_type="DOWNLOAD"),),
      client_rate=0,
      token=self.token)
  self.hunt.Run()

  client_ids = self.SetupClients(10)
  self.AssignTasksToClients(client_ids=client_ids)
  action_mock = action_mocks.ActionMock("TransferBuffer", "StatFile",
                                        "HashFile", "HashBuffer")
  # Drive every client through the hunt so the files are actually collected.
  test_lib.TestHuntHelper(action_mock, client_ids, token=self.token)
def testGlob(self):
  """Test that glob works properly."""
  # Add some usernames we can interpolate later.
  client = aff4.FACTORY.Open(self.client_id, mode="rw", token=self.token)
  users = client.Schema.USER()
  users.Append(username="******")
  users.Append(username="******")
  client.Set(users)
  client.Close()

  glob_mock = action_mocks.ActionMock("Find", "StatFile")

  # This glob selects all files which start with the username on this system.
  paths = [os.path.join(self.base_path, "%%Users.username%%*"),
           os.path.join(self.base_path, "wtmp")]

  # Set iterator really low to force iteration.
  with utils.Stubber(filesystem.Glob, "FILE_MAX_PER_DIR", 2):
    for _ in test_lib.TestFlowHelper(
        "Glob",
        glob_mock,
        client_id=self.client_id,
        paths=paths,
        pathtype=rdfvalue.PathSpec.PathType.OS,
        token=self.token,
        sync=False,
        check_flow_errors=False):
      pass

  output_path = self.client_id.Add("fs/os").Add(
      self.base_path.replace("\\", "/"))

  fd = aff4.FACTORY.Open(output_path, token=self.token)
  found = sorted(child.Basename() for child in fd.ListChildren())

  # We should find some files.
  expected = sorted(["syslog", "syslog_compress.gz", "syslog_false.gz",
                     "test_artifacts.json", "test_artifact.json",
                     "test_img.dd", "test.plist", "tests", "tests_long",
                     "wtmp"])
  self.assertEqual(found, expected)
def testRegistryMRU(self):
  """Test that the MRU discovery flow. Flow is a work in Progress."""
  # Install the mock
  vfs.VFS_HANDLERS[rdf_paths.PathSpec.PathType.
                   REGISTRY] = test_lib.FakeRegistryVFSHandler

  # Mock out the Find client action.
  find_mock = action_mocks.ActionMock("Find")

  # Add some user accounts to this client.
  client_fd = aff4.FACTORY.Open(self.client_id, mode="rw", token=self.token)
  user_list = client_fd.Schema.USER()
  user_list.Append(
      rdf_client.User(username="******",
                      domain="testing-PC",
                      homedir=r"C:\Users\testing",
                      sid="S-1-5-21-2911950750-476812067-"
                      "1487428992-1001"))
  client_fd.Set(user_list)
  client_fd.Close()

  # Run the flow in the emulated way.
  for _ in test_lib.TestFlowHelper("GetMRU",
                                   find_mock,
                                   client_id=self.client_id,
                                   token=self.token):
    pass

  # Check that the key was read.
  mru_urn = rdfvalue.RDFURN(self.client_id).Add(
      "registry/HKEY_USERS/S-1-5-21-2911950750-476812067-1487428992-1001/"
      "Software/Microsoft/Windows/CurrentVersion/Explorer/"
      "ComDlg32/OpenSavePidlMRU/dd/0")
  mru_fd = aff4.FACTORY.Open(mru_urn, token=self.token)

  self.assertEqual(mru_fd.__class__.__name__, "VFSFile")
  stat_entry = mru_fd.Get(mru_fd.Schema.STAT)
  # TODO(user): Make this test better when the MRU flow is complete.
  self.assertTrue(stat_entry.registry_data)
def setUp(self):
  """Creates a fully-populated Linux test client plus a standard client mock."""
  super(TestEndToEndTestFlow, self).setUp()
  install_time = rdfvalue.RDFDatetime().Now()
  user = "******"
  userobj = rdf_client.User(username=user)
  interface = rdf_client.Interface(ifname="eth0")
  self.client = aff4.FACTORY.Create(self.client_id,
                                    "VFSGRRClient",
                                    mode="rw",
                                    token=self.token,
                                    age=aff4.ALL_TIMES)
  # Populate every schema attribute the end-to-end tests may inspect.
  self.client.Set(self.client.Schema.HOSTNAME("hostname"))
  self.client.Set(self.client.Schema.SYSTEM("Linux"))
  self.client.Set(self.client.Schema.OS_RELEASE("debian"))
  self.client.Set(self.client.Schema.OS_VERSION("14.04"))
  self.client.Set(self.client.Schema.KERNEL("3.15-rc2"))
  self.client.Set(self.client.Schema.FQDN("hostname.example.com"))
  self.client.Set(self.client.Schema.ARCH("x86_64"))
  self.client.Set(self.client.Schema.INSTALL_DATE(install_time))
  self.client.Set(self.client.Schema.USER([userobj]))
  self.client.Set(self.client.Schema.USERNAMES([user]))
  self.client.Set(self.client.Schema.LAST_INTERFACES([interface]))
  self.client.Flush()

  self.client_mock = action_mocks.ActionMock("ListDirectory", "StatFile")
def testKnowledgeBaseRetrievalLinux(self):
  """Check we can retrieve a Linux kb."""
  test_lib.ClientFixture(self.client_id, token=self.token)
  self.SetLinuxClient()

  config_overrides = {
      "Artifacts.knowledge_base": [
          "LinuxWtmp", "NetgroupConfiguration", "LinuxPasswdHomedirs",
          "LinuxRelease"
      ],
      "Artifacts.netgroup_filter_regexes": ["^login$"],
      "Artifacts.netgroup_user_blacklist": ["isaac"]
  }
  # Scope both the config and VFS overrides to this test.
  with test_lib.ConfigOverrider(config_overrides), \
      test_lib.VFSOverrider(rdf_paths.PathSpec.PathType.OS,
                            test_lib.FakeTestDataVFSHandler):
    kb_mock = action_mocks.ActionMock(
        "TransferBuffer", "StatFile", "Find", "HashBuffer", "ListDirectory",
        "FingerprintFile", "Grep")

    for _ in test_lib.TestFlowHelper(
        "KnowledgeBaseInitializationFlow",
        kb_mock,
        client_id=self.client_id,
        token=self.token):
      pass

    client = aff4.FACTORY.Open(self.client_id, token=self.token, mode="rw")
    kb = artifact.GetArtifactKnowledgeBase(client)
    self.assertEqual(kb.os_major_version, 14)
    self.assertEqual(kb.os_minor_version, 4)
    # user 1,2,3 from wtmp. yagharek from netgroup.
    # Bert and Ernie not present (Users fixture overriden by kb).
    self.assertItemsEqual([x.username for x in kb.users],
                          ["user1", "user2", "user3", "yagharek"])
    user = kb.GetUser(username="******")
    self.assertEqual(user.last_logon.AsSecondsFromEpoch(), 1296552099)
    self.assertEqual(user.homedir, "/home/user1")
def testEndToEndTestsResultChecking(self):
  """Exercises _CheckForSuccess against several result combinations."""
  self.client_ids = ["aff4:/C.6000000000000000",
                     "aff4:/C.6000000000000001",
                     "aff4:/C.6000000000000002"]
  for clientid in self.client_ids:
    self._SetSummaries(clientid)
  self.client_mock = action_mocks.ActionMock("ListDirectory", "StatFile")

  endtoend = system.EndToEndTests(None, token=self.token)
  endtoend.state.Register("hunt_id", "aff4:/temphuntid")
  endtoend.state.Register("client_ids", set(self.client_ids))
  endtoend.state.Register("client_ids_failures", set())
  endtoend.state.Register("client_ids_result_reported", set())

  # No results at all
  self.assertRaises(flow.FlowError, endtoend._CheckForSuccess, [])

  # Not enough client results
  # (Re-registering resets the failure/reported sets between scenarios.)
  endtoend.state.Register("client_ids_failures", set())
  endtoend.state.Register("client_ids_result_reported", set())
  self.assertRaises(flow.FlowError, endtoend._CheckForSuccess,
                    [self._CreateResult(True, "aff4:/C.6000000000000001")])

  # All clients succeeded
  endtoend.state.Register("client_ids_failures", set())
  endtoend.state.Register("client_ids_result_reported", set())
  endtoend._CheckForSuccess(
      [self._CreateResult(True, "aff4:/C.6000000000000000"),
       self._CreateResult(True, "aff4:/C.6000000000000001"),
       self._CreateResult(True, "aff4:/C.6000000000000002")])

  # All clients complete, but some failures
  endtoend.state.Register("client_ids_failures", set())
  endtoend.state.Register("client_ids_result_reported", set())
  self.assertRaises(flow.FlowError, endtoend._CheckForSuccess,
                    [self._CreateResult(True, "aff4:/C.6000000000000000"),
                     self._CreateResult(False, "aff4:/C.6000000000000001"),
                     self._CreateResult(False, "aff4:/C.6000000000000002")])
def ReadFromSparseImage(self, length, offset):
  """Runs FetchBufferForSparseImage and returns the freshly reopened image."""
  fd = self.CreateNewSparseImage()
  image_urn = fd.urn

  self.client_mock = action_mocks.ActionMock("FingerprintFile", "HashBuffer",
                                             "HashFile", "StatFile", "Find",
                                             "TransferBuffer", "ReadBuffer")
  for _ in test_lib.TestFlowHelper("FetchBufferForSparseImage",
                                   self.client_mock,
                                   client_id=self.client_id,
                                   token=self.token,
                                   file_urn=image_urn,
                                   length=length,
                                   offset=offset):
    pass

  # Reopen the object so we can read the freshest version of the size
  # attribute.
  return aff4.FACTORY.Open(image_urn, token=self.token)
def testMultiGetFileSizeLimit(self):
  """MultiGetFile must stop fetching at the file_size limit."""
  client_mock = action_mocks.ActionMock("TransferBuffer", "HashFile",
                                        "StatFile", "HashBuffer")
  image_path = os.path.join(self.base_path, "test_img.dd")
  pathspec = rdf_paths.PathSpec(pathtype=rdf_paths.PathSpec.PathType.OS,
                                path=image_path)

  # Read a bit more than one chunk (600 * 1024).
  expected_size = 750 * 1024
  args = transfer.MultiGetFileArgs(pathspecs=[pathspec],
                                   file_size=expected_size)
  for _ in test_lib.TestFlowHelper("MultiGetFile",
                                   client_mock,
                                   token=self.token,
                                   client_id=self.client_id,
                                   args=args):
    pass

  urn = aff4.AFF4Object.VFSGRRClient.PathspecToURN(pathspec, self.client_id)
  blobimage = aff4.FACTORY.Open(urn, token=self.token)
  # Make sure a VFSBlobImage got written.
  self.assertTrue(isinstance(blobimage, aff4_grr.VFSBlobImage))

  # The stored image must be truncated at exactly the requested size, even
  # when we try to read far past it.
  self.assertEqual(len(blobimage), expected_size)
  data = blobimage.read(100 * expected_size)
  self.assertEqual(len(data), expected_size)

  # FIX: close the fixture file instead of leaking the handle
  # (the original used a bare open() without ever closing it).
  with open(image_path, "rb") as image_fd:
    expected_data = image_fd.read(expected_size)
  self.assertEqual(data, expected_data)

  # The recorded hash must cover only the truncated prefix.
  hash_obj = blobimage.Get(blobimage.Schema.HASH)
  d = hashlib.sha1()
  d.update(expected_data)
  expected_hash = d.hexdigest()
  self.assertEqual(hash_obj.sha1, expected_hash)
def testCallClientWellKnown(self):
  """Well known flows can also call the client."""
  cls = flow.GRRFlow.classes["GetClientStatsAuto"]
  flow_obj = cls(cls.well_known_session_id, mode="rw", token=self.token)

  flow_obj.CallClient(self.client_id, admin.GetClientStats)

  # Check that a message went out to the client
  manager = queue_manager.QueueManager(token=self.token)
  tasks = manager.Query(self.client_id, limit=100)

  self.assertEqual(len(tasks), 1)

  message = tasks[0]

  # If we don't specify where to send the replies, they go to the devnull
  # flow, so the message must be addressed to its well known session id.
  devnull = flow.GRRFlow.classes["IgnoreResponses"]
  self.assertEqual(message.session_id, devnull.well_known_session_id)
  self.assertEqual(message.request_id, 0)
  self.assertEqual(message.name, admin.GetClientStats.__name__)

  messages = []

  def StoreMessage(_, msg):
    # Captures the messages the devnull flow would otherwise discard.
    messages.append(msg)

  with utils.Stubber(devnull, "ProcessMessage", StoreMessage):
    client_mock = action_mocks.ActionMock(admin.GetClientStats)
    for _ in test_lib.TestFlowHelper(
        "ClientActionRunner",
        client_mock,
        client_id=self.client_id,
        action="GetClientStats",
        token=self.token):
      pass

  # Make sure the messages arrived.
  self.assertEqual(len(messages), 1)
def testFindWithMaxFiles(self):
  """Test that the Find flow works when specifying proto directly."""
  find_mock = action_mocks.ActionMock("Find")
  output_path = "analysis/FindFlowTest4"

  # Prepare a findspec.
  findspec = rdf_client.FindSpec(
      path_regex=".*",
      pathspec=rdf_paths.PathSpec(path="/",
                                  pathtype=rdf_paths.PathSpec.PathType.OS))

  for _ in test_lib.TestFlowHelper("FindFiles",
                                   find_mock,
                                   client_id=self.client_id,
                                   token=self.token,
                                   findspec=findspec,
                                   iteration_count=3,
                                   output=output_path,
                                   max_results=7):
    pass

  # Check the output file is created
  fd = aff4.FACTORY.Open(self.client_id.Add(output_path), token=self.token)

  # Make sure we got the right number of results.
  self.assertEqual(len(fd), 7)
def testFlowLogging(self):
  """Check that flows log correctly."""
  flow_urn = None
  for session_id in test_lib.TestFlowHelper("DummyLogFlow",
                                            action_mocks.ActionMock(),
                                            token=self.token,
                                            client_id=self.client_id):
    flow_urn = session_id

  expected_messages = ["First", "Second", "Third", "Fourth",
                       "Uno", "Dos", "Tres", "Cuatro"]
  expected_flow_names = ["DummyLogFlow", "DummyLogFlowChild"]

  with aff4.FACTORY.Open(flow_urn.Add("Logs"),
                         age=aff4.ALL_TIMES,
                         token=self.token) as log_collection:
    count = 0
    # Can't use len with PackedVersionCollection
    for log in log_collection:
      self.assertEqual(log.client_id, self.client_id)
      self.assertTrue(log.log_message in expected_messages)
      self.assertTrue(log.flow_name in expected_flow_names)
      self.assertTrue(str(flow_urn) in str(log.urn))
      count += 1

  # Both parent and child flow logs must be present: 4 + 4 entries.
  self.assertEqual(count, 8)
def testRunGrrClientActionArtifact(self):
  """Test we can get a GRR client artifact."""
  listproc_mock = action_mocks.ActionMock("ListProcesses")
  client = aff4.FACTORY.Open(self.client_id, token=self.token, mode="rw")
  client.Set(client.Schema.SYSTEM("Linux"))
  client.Flush()

  # Attach a GRR_CLIENT_ACTION source so the artifact collector invokes the
  # ListProcesses client action directly.
  source = artifact_registry.ArtifactSource(
      type=artifact_registry.ArtifactSource.SourceType.GRR_CLIENT_ACTION,
      attributes={"client_action": r"ListProcesses"})
  self.fakeartifact.sources.append(source)

  for _ in test_lib.TestFlowHelper("ArtifactCollectorFlow",
                                   listproc_mock,
                                   artifact_list=["FakeArtifact"],
                                   token=self.token,
                                   client_id=self.client_id,
                                   output="test_artifact"):
    pass

  # Test the AFF4 file that was created.
  fd = aff4.FACTORY.Open(
      rdfvalue.RDFURN(self.client_id).Add("test_artifact"),
      token=self.token)
  results = list(fd)
  self.assertTrue(isinstance(results[0], rdf_client.Process))
  self.assertTrue(len(fd) > 5)
def RunFlow(self, keys_paths=None, conditions=None):
  """Runs the RegistryFinder flow and returns its session id."""
  if keys_paths is None:
    keys_paths = ["HKEY_USERS/S-1-5-20/Software/Microsoft/"
                  "Windows/CurrentVersion/Run/*"]
  if conditions is None:
    conditions = []

  finder_mock = action_mocks.ActionMock(
      searching.Find,
      searching.Grep,)

  session_id = None
  for session_id in test_lib.TestFlowHelper("RegistryFinder",
                                            finder_mock,
                                            client_id=self.client_id,
                                            keys_paths=keys_paths,
                                            conditions=conditions,
                                            token=self.token):
    pass

  return session_id
def ReadTestImage(self, size_threshold):
  """Converts the test image to an AFF4SparseImage and returns it opened."""
  path = os.path.join(self.base_path, "test_img.dd")
  image_urn = rdfvalue.RDFURN(self.client_id.Add("fs/os").Add(path))

  pathspec = rdf_paths.PathSpec(path=path,
                                pathtype=rdf_paths.PathSpec.PathType.OS)

  sparse_mock = action_mocks.ActionMock("FingerprintFile", "HashBuffer",
                                        "HashFile", "StatFile", "Find",
                                        "TransferBuffer", "ReadBuffer")

  # Get everything as an AFF4SparseImage
  for _ in test_lib.TestFlowHelper("MakeNewAFF4SparseImage",
                                   sparse_mock,
                                   client_id=self.client_id,
                                   token=self.token,
                                   size_threshold=size_threshold,
                                   pathspec=pathspec):
    pass

  return aff4.FACTORY.Open(image_urn, token=self.token)
def testGetFile(self):
  """Test that the GetFile flow works."""
  client_mock = action_mocks.ActionMock("TransferBuffer", "StatFile")
  pathspec = rdf_paths.PathSpec(
      pathtype=rdf_paths.PathSpec.PathType.OS,
      path=os.path.join(self.base_path, "test_img.dd"))

  for _ in test_lib.TestFlowHelper("GetFile",
                                   client_mock,
                                   token=self.token,
                                   client_id=self.client_id,
                                   pathspec=pathspec):
    pass

  # Fix path for Windows testing.
  pathspec.path = pathspec.path.replace("\\", "/")
  # Test the AFF4 file that was created.
  urn = aff4.AFF4Object.VFSGRRClient.PathspecToURN(pathspec, self.client_id)
  fd1 = aff4.FACTORY.Open(urn, token=self.token)
  # FIX: the original leaked the file handle and opened a binary disk image
  # in text mode, which would corrupt the byte comparison on Windows. Open
  # in binary mode and close it deterministically.
  with open(pathspec.path, "rb") as fd2:
    # Seek to EOF to get the on-disk size for comparison with the AFF4 SIZE
    # attribute.
    fd2.seek(0, 2)
    self.assertEqual(fd2.tell(), int(fd1.Get(fd1.Schema.SIZE)))
    self.CompareFDs(fd1, fd2)
def testSearchFileContentsNoGrep(self):
  """Search files without a grep specification."""
  pattern = "searching/*.log"
  search_mock = action_mocks.ActionMock("Find", "Grep", "StatFile")
  glob_path = os.path.join(self.base_path, pattern)

  # Do not provide a Grep expression - should match all files.
  args = grep.SearchFileContentArgs(paths=[glob_path])

  # Run the flow.
  session_id = None
  for session_id in test_lib.TestFlowHelper("SearchFileContent",
                                            search_mock,
                                            client_id=self.client_id,
                                            args=args,
                                            token=self.token):
    pass

  fd = aff4.FACTORY.Open(session_id.Add(flow_runner.RESULTS_SUFFIX),
                         token=self.token)
  self.assertEqual(len(fd), 3)
def testArtifactSkipping(self):
  """Artifacts whose conditions do not match the client are skipped."""
  client_mock = action_mocks.ActionMock()
  client = aff4.FACTORY.Open(self.client_id, token=self.token, mode="rw")
  # This does not match the Artifact so it will not be collected.
  client.Set(client.Schema.SYSTEM("Windows"))
  kb = client.Get(client.Schema.KNOWLEDGE_BASE)
  kb.os = "Windows"
  client.Set(client.Schema.KNOWLEDGE_BASE, kb)
  client.Flush()

  session_id = None
  for session_id in test_lib.TestFlowHelper("ArtifactCollectorFlow",
                                            client_mock,
                                            artifact_list=["FakeArtifact"],
                                            use_tsk=False,
                                            token=self.token,
                                            client_id=self.client_id):
    pass

  flow_obj = aff4.FACTORY.Open(session_id, token=self.token)
  skipped = flow_obj.state.artifacts_skipped_due_to_condition
  self.assertEqual(len(skipped), 1)
  self.assertEqual(skipped[0], ["FakeArtifact", "os == 'Linux'"])
def testClickingOnInterrogateStartsInterrogateFlow(self): self.Open("/#c=" + self.client_id) # A click on the Interrogate button starts a flow, disables the button and # shows a loading icon within the button. self.Click("css=button:contains('Interrogate')") self.WaitUntil(self.IsElementPresent, "css=button:contains('Interrogate')[disabled]") self.WaitUntil(self.IsElementPresent, "css=button:contains('Interrogate') i") # Get the started flow and finish it, this will re-enable the button. with self.ACLChecksDisabled(): client_id = rdf_client.ClientURN(self.client_id) fd = aff4.FACTORY.Open(client_id.Add("flows"), token=self.token) flows = list(fd.ListChildren()) client_mock = action_mocks.ActionMock() for flow_urn in flows: for _ in test_lib.TestFlowHelper(flow_urn, client_mock, client_id=client_id, token=self.token, check_flow_errors=False): pass self.WaitUntilNot(self.IsElementPresent, "css=button:contains('Interrogate')[disabled]") # Check if an Interrogate flow was started. self.Click("css=a[grrtarget='client.flows']") self.Click("css=td:contains('Interrogate')") self.WaitUntilContains( "Interrogate", self.GetText, "css=table td.proto_key:contains('Flow name') " "~ td.proto_value")