def testExecuteBinariesWithArgs(self):
  """Launches an uploaded signed binary and verifies how it was executed."""
  action_mock = action_mocks.ActionMock(standard.ExecuteBinaryCommand)
  binary_contents = b"I am a binary file"
  upload_path = signed_binary_utils.GetAFF4ExecutablesRoot().Add(
      config.CONFIG["Client.platform"]).Add("test.exe")
  maintenance_utils.UploadSignedConfigBlob(
      binary_contents, aff4_path=upload_path)

  # The uploaded payload is small enough to fit into a single blob part.
  blob_iterator, _ = signed_binary_utils.FetchBlobsForSignedBinaryByURN(
      rdfvalue.RDFURN(upload_path))
  self.assertLen(list(blob_iterator), 1)

  # LaunchBinary is ACL-protected, so the test user must be an admin.
  acl_test_lib.CreateAdminUser(self.token.username)

  with utils.Stubber(subprocess, "Popen", client_test_lib.Popen):
    flow_test_lib.TestFlowHelper(
        administrative.LaunchBinary.__name__,
        action_mock,
        client_id=self.SetupClient(0),
        binary=upload_path,
        command_line="--value 356",
        token=self.token)

    # The executable dropped on disk must match the uploaded contents.
    self.assertEqual(client_test_lib.Popen.binary, binary_contents)

    # The client action removes the dropped binary after running it, so
    # opening it again must fail.
    self.assertRaises(IOError, open, client_test_lib.Popen.running_args[0])

    # The command line is split into separate argv entries.
    self.assertEqual(client_test_lib.Popen.running_args[1], "--value")
    self.assertEqual(client_test_lib.Popen.running_args[2], "356")

    # The binary is written under the client's temp directory.
    self.assertStartsWith(client_test_lib.Popen.running_args[0],
                          config.CONFIG["Client.tempdir_roots"][0])
def _RunYaraProcessScan(self,
                        procs,
                        ignore_grr_process=False,
                        include_errors_in_results=False,
                        include_misses_in_results=False,
                        max_results_per_process=0,
                        **kw):
  """Runs a YaraProcessScan flow over `procs`.

  Returns:
    A (matches, errors, misses) tuple of result payload lists.
  """
  client_mock = action_mocks.ActionMock(yara_actions.YaraProcessScan)
  # Fake out process enumeration and memory access so the scan only ever
  # sees the supplied test processes.
  stubs = (
      (psutil, "process_iter", lambda: procs),
      (psutil, "Process", functools.partial(self.process, procs)),
      (client_utils, "OpenProcessForMemoryAccess",
       lambda pid: FakeMemoryProcess(pid=pid)),
  )
  with utils.MultiStubber(*stubs):
    for session_id in flow_test_lib.TestFlowHelper(
        yara_flows.YaraProcessScan.__name__,
        client_mock,
        yara_signature=test_yara_signature,
        client_id=self.client_id,
        ignore_grr_process=ignore_grr_process,
        include_errors_in_results=include_errors_in_results,
        include_misses_in_results=include_misses_in_results,
        max_results_per_process=max_results_per_process,
        token=self.token,
        **kw):
      pass

  results = aff4.FACTORY.Open(session_id).TypedResultCollection()

  def _PayloadsOfType(type_name):
    # Each scanned entry is a (timestamp, message) pair; unwrap the payload.
    return [entry[1].payload for entry in results.ScanByType(type_name)]

  return (_PayloadsOfType(rdf_yara.YaraProcessScanMatch.__name__),
          _PayloadsOfType(rdf_yara.YaraProcessError.__name__),
          _PayloadsOfType(rdf_yara.YaraProcessScanMiss.__name__))
def testFlowLogging(self):
  """Check that flows log correctly."""
  flow_urn = flow_test_lib.TestFlowHelper(
      flow_test_lib.DummyLogFlow.__name__,
      action_mocks.ActionMock(),
      token=self.token,
      client_id=self.client_id)

  log_collection = flow.GRRFlow.LogCollectionForFID(flow_urn)
  # Four messages come from DummyLogFlow and four from its child flow.
  # assertLen/assertIn give clearer failure output than the previous
  # assertEqual(len(...))/assertTrue(x in y) forms and match the style
  # used elsewhere in this file.
  self.assertLen(log_collection, 8)
  for log in log_collection:
    self.assertEqual(log.client_id, self.client_id)
    self.assertIn(log.log_message, [
        "First", "Second", "Third", "Fourth", "Uno", "Dos", "Tres", "Cuatro"
    ])
    self.assertIn(log.flow_name, [
        flow_test_lib.DummyLogFlow.__name__,
        flow_test_lib.DummyLogFlowChild.__name__
    ])
    # Every log entry's URN is nested under the parent flow's URN.
    self.assertIn(str(flow_urn), str(log.urn))
def testExecutePythonHackWithArgs(self):
  """Verifies that a python hack receives and uses py_args."""
  action_mock = action_mocks.ActionMock(standard.ExecutePython)
  # Seed a sentinel value; the hack must overwrite it with the py_args value.
  sys.test_code_ran_here = 1234
  source = "import sys\nsys.test_code_ran_here = py_args['value']\n"
  client_id = self.SetupClient(0)
  maintenance_utils.UploadSignedConfigBlob(
      source.encode("utf-8"),
      aff4_path="aff4:/config/python_hacks/test",
      token=self.token)

  flow_test_lib.TestFlowHelper(
      administrative.ExecutePythonHack.__name__,
      action_mock,
      client_id=client_id,
      hack_name="test",
      py_args=dict(value=5678),
      token=self.token)

  # The hack ran on the mocked client and saw py_args['value'].
  self.assertEqual(sys.test_code_ran_here, 5678)
def testArtifactSkipping(self):
  """Artifacts whose OS condition does not match are recorded as skipped."""
  client_mock = action_mocks.ActionMock()
  # FakeArtifact is conditioned on Linux, so a Windows client skips it.
  client_id = self.SetupClient(0, system="Windows")

  session_id = flow_test_lib.TestFlowHelper(
      collectors.ArtifactCollectorFlow.__name__,
      client_mock,
      artifact_list=["FakeArtifact"],
      use_tsk=False,
      token=self.token,
      client_id=client_id)

  flow_obj = data_store.REL_DB.ReadFlowObject(client_id, session_id)
  skipped = flow_obj.persistent_data.artifacts_skipped_due_to_condition
  self.assertLen(skipped, 1)
  # The skip record keeps both the artifact name and the failed condition.
  self.assertEqual(skipped[0], ["FakeArtifact", "os == 'Linux'"])
def testExecutePythonHack(self):
  """Uploads and runs a python hack, verifying it executed."""
  action_mock = action_mocks.ActionMock(standard.ExecutePython)
  # The uploaded code flips this module-level sentinel; seeing it become
  # True proves the hack actually executed on the mocked client.
  sys.test_code_ran_here = False
  code = """
import sys
sys.test_code_ran_here = True
"""
  maintenance_utils.UploadSignedConfigBlob(
      code, aff4_path="aff4:/config/python_hacks/test", token=self.token)

  flow_test_lib.TestFlowHelper(
      administrative.ExecutePythonHack.__name__,
      action_mock,
      client_id=test_lib.TEST_CLIENT_ID,
      hack_name="test",
      token=self.token)

  self.assertTrue(sys.test_code_ran_here)
def testExecutePythonHackWithArgs(self):
  """Runs a python hack with py_args and verifies the value was used."""
  action_mock = action_mocks.ActionMock(standard.ExecutePython)
  # Sentinel to be overwritten by the hack with py_args['value'].
  sys.test_code_ran_here = 1234
  code = """
import sys
sys.test_code_ran_here = py_args['value']
"""
  maintenance_utils.UploadSignedConfigBlob(
      code, aff4_path="aff4:/config/python_hacks/test", token=self.token)

  # TestFlowHelper yields sessions here; the flow only runs as we iterate.
  for _ in flow_test_lib.TestFlowHelper(
      administrative.ExecutePythonHack.__name__,
      action_mock,
      client_id=test_lib.TEST_CLIENT_ID,
      hack_name="test",
      py_args=dict(value=5678),
      token=self.token):
    pass

  self.assertEqual(sys.test_code_ran_here, 5678)
def testCollectRunKeyBinaries(self):
  """Read Run key from the client_fixtures to test parsing and storage."""
  client_id = self.SetupClient(0, system="Windows", os_version="6.2")

  # Serve the fake full VFS for OS pathspecs so registry/file reads hit
  # fixture data instead of the real filesystem.
  with vfs_test_lib.VFSOverrider(rdf_paths.PathSpec.PathType.OS,
                                 vfs_test_lib.FakeFullVFSHandler):
    client_mock = action_mocks.ActionMock(
        file_fingerprint.FingerprintFile,
        searching.Find,
        standard.GetFileStat,
    )

    # Get KB initialized
    session_id = flow_test_lib.TestFlowHelper(
        artifact.KnowledgeBaseInitializationFlow.__name__,
        client_mock,
        client_id=client_id,
        creator=self.test_username)

    # Persist the collected knowledge base on the client snapshot so
    # subsequent flows can read it.
    kb = flow_test_lib.GetFlowResults(client_id, session_id)[0]
    client = data_store.REL_DB.ReadClientSnapshot(client_id)
    client.knowledge_base = kb
    data_store.REL_DB.WriteClientSnapshot(client)

    # Instrument MultiGetFile.Start to observe which downloads the
    # CollectRunKeyBinaries flow requests.
    with test_lib.Instrument(transfer.MultiGetFile,
                             "Start") as getfile_instrument:
      # Run the flow in the emulated way.
      flow_test_lib.TestFlowHelper(
          registry.CollectRunKeyBinaries.__name__,
          client_mock,
          client_id=client_id,
          creator=self.test_username)

      # Check MultiGetFile got called for our runkey file
      download_requested = False
      for pathspec in getfile_instrument.args[0][0].args.pathspecs:
        if pathspec.path == u"C:\\Windows\\TEMP\\A.exe":
          download_requested = True

      self.assertTrue(download_requested)
def testExecuteBinariesWithArgs(self):
  """Launches a signed binary and checks arguments, cleanup and location."""
  action_mock = action_mocks.ActionMock(standard.ExecuteBinaryCommand)
  payload = "I am a binary file"
  upload_path = config.CONFIG["Executables.aff4_path"].Add("test.exe")
  maintenance_utils.UploadSignedConfigBlob(
      payload, aff4_path=upload_path, token=self.token)

  # This flow has an acl, the user needs to be admin.
  user = aff4.FACTORY.Create(
      "aff4:/users/%s" % self.token.username,
      mode="rw",
      aff4_type=users.GRRUser,
      token=self.token)
  user.SetLabel("admin", owner="GRRTest")
  user.Close()

  with utils.Stubber(subprocess, "Popen", client_test_lib.Popen):
    flow_test_lib.TestFlowHelper(
        administrative.LaunchBinary.__name__,
        action_mock,
        client_id=test_lib.TEST_CLIENT_ID,
        binary=upload_path,
        command_line="--value 356",
        token=self.token)

    # The executable written to disk contains the uploaded code string.
    self.assertEqual(client_test_lib.Popen.binary, payload)

    # The client action cleans the dropped binary up after execution, so
    # it must no longer exist on disk.
    self.assertRaises(IOError, open, client_test_lib.Popen.running_args[0])

    # Command-line arguments arrive as separate argv entries.
    self.assertEqual(client_test_lib.Popen.running_args[1], "--value")
    self.assertEqual(client_test_lib.Popen.running_args[2], "356")

    # The binary was placed under the client's temp directory.
    self.assertTrue(client_test_lib.Popen.running_args[0].startswith(
        config.CONFIG["Client.tempdir_roots"][0]))
def testVFSFileStartsNewMultiGetFileWhenLockingFlowHasFinished(self):
  """A new MultiFileGet can be started when the locking flow has finished."""
  client_id = self.SetupClient(0)
  path = "fs/os/c/bin/bash"

  def _ClientFlows():
    # Lists the URNs of all flows currently present on the client.
    flows_fd = aff4.FACTORY.Open(client_id.Add("flows"), token=self.token)
    return list(flows_fd.ListChildren())

  with aff4.FACTORY.Create(
      client_id.Add(path),
      aff4_type=aff4_grr.VFSFile,
      mode="rw",
      token=self.token) as file_fd:
    file_fd.Set(
        file_fd.Schema.STAT,
        rdf_client_fs.StatEntry(
            pathspec=rdf_paths.PathSpec(path="/bin/bash", pathtype="OS")))

    # Update() starts a MultiGetFile flow which takes the content lock.
    first_update_flow_urn = file_fd.Update()

    flows = _ClientFlows()
    self.assertEqual(len(flows), 1)

    # Run the lock-holding flow to completion.
    flow_test_lib.TestFlowHelper(
        flows[0],
        action_mocks.ActionMock(),
        client_id=client_id,
        token=self.token)

    # With the lock released, Update() must be able to start a new flow.
    second_update_flow_urn = file_fd.Update()
    self.assertEqual(len(_ClientFlows()), 2)

    # Each Update() produced a distinct flow, and the second one now
    # holds the content lock.
    self.assertNotEqual(first_update_flow_urn, second_update_flow_urn)
    self.assertEqual(second_update_flow_urn,
                     file_fd.Get(file_fd.Schema.CONTENT_LOCK))
def testUpdateConfig(self): """Ensure we can retrieve and update the config.""" # Write a client without a proper system so we don't need to # provide the os specific artifacts in the interrogate flow below. client_id = self.SetupClient(0, system="") # Only mock the pieces we care about. client_mock = action_mocks.ActionMock(admin.GetConfiguration, admin.UpdateConfiguration) loc = "http://www.example.com" new_config = rdf_protodict.Dict({ "Client.server_urls": [loc], "Client.foreman_check_frequency": 3600, "Client.poll_min": 1 }) # Setting config options is disallowed in tests so we need to temporarily # revert this. with utils.Stubber(config.CONFIG, "Set", config.CONFIG.Set.old_target): # Write the config. for _ in flow_test_lib.TestFlowHelper( administrative.UpdateConfiguration.__name__, client_mock, client_id=client_id, token=self.token, config=new_config): pass # Now retrieve it again to see if it got written. for _ in flow_test_lib.TestFlowHelper(discovery.Interrogate.__name__, client_mock, token=self.token, client_id=client_id): pass fd = aff4.FACTORY.Open(client_id, token=self.token) config_dat = fd.Get(fd.Schema.GRR_CONFIGURATION) self.assertEqual(config_dat["Client.server_urls"], [loc]) self.assertEqual(config_dat["Client.poll_min"], 1)
def testRunGrrClientActionArtifact(self):
  """Test we can get a GRR client artifact."""
  client_id = self.SetupClient(0, system="Linux")
  with utils.Stubber(psutil, "process_iter", ProcessIter):
    client_mock = action_mocks.ActionMock(standard.ListProcesses)
    coll1 = rdf_artifacts.ArtifactSource(
        type=rdf_artifacts.ArtifactSource.SourceType.GRR_CLIENT_ACTION,
        attributes={"client_action": standard.ListProcesses.__name__})
    self.fakeartifact.sources.append(coll1)
    artifact_list = ["FakeArtifact"]
    session_id = flow_test_lib.TestFlowHelper(
        aff4_flows.ArtifactCollectorFlow.__name__,
        client_mock,
        artifact_list=artifact_list,
        token=self.token,
        client_id=client_id)

    results = flow_test_lib.GetFlowResults(client_id, session_id)
    # Assert the length first: if the flow produced no results, indexing
    # results[0] would raise IndexError instead of failing with a clear
    # assertion message.
    self.assertLen(results, 1)
    self.assertIsInstance(results[0], rdf_client.Process)
def _RunYaraProcessScan(self, procs, ignore_grr_process=False, **kw):
  """Runs YaraProcessScan over `procs` and returns its single result."""
  client_mock = action_mocks.ActionMock(yara_actions.YaraProcessScan)
  # Fake process enumeration and memory access so only the supplied test
  # processes are visible to the scan.
  stubs = (
      (psutil, "process_iter", lambda: procs),
      (psutil, "Process", functools.partial(self.process, procs)),
      (client_utils, "OpenProcessForMemoryAccess",
       lambda pid: FakeMemoryProcess(pid=pid)),
  )
  with utils.MultiStubber(*stubs):
    for session_id in flow_test_lib.TestFlowHelper(
        yara_flows.YaraProcessScan.__name__,
        client_mock,
        yara_signature=test_yara_signature,
        client_id=self.client_id,
        ignore_grr_process=ignore_grr_process,
        token=self.token,
        **kw):
      pass

  results = aff4.FACTORY.Open(session_id).ResultCollection()
  # Callers rely on the scan yielding exactly one result.
  self.assertEqual(len(results), 1)
  return results[0]
def testRunGrrClientActionArtifactSplit(self):
  """Test that artifacts get split into separate collections."""
  client_id = self.SetupClient(0, system="Linux")
  with utils.Stubber(psutil, "process_iter", ProcessIter):
    action_mock = action_mocks.ActionMock(standard.ListProcesses)
    # Both fake artifacts share the same source, so each one should end
    # up tagged separately in the output.
    source = rdf_artifacts.ArtifactSource(
        type=rdf_artifacts.ArtifactSource.SourceType.GRR_CLIENT_ACTION,
        attributes={"client_action": standard.ListProcesses.__name__})
    self.fakeartifact.sources.append(source)
    self.fakeartifact2.sources.append(source)

    session_id = flow_test_lib.TestFlowHelper(
        collectors.ArtifactCollectorFlow.__name__,
        action_mock,
        artifact_list=["FakeArtifact", "FakeArtifact2"],
        token=self.token,
        client_id=client_id,
        split_output_by_artifact=True)

    results_by_tag = flow_test_lib.GetFlowResultsByTag(client_id, session_id)
    self.assertCountEqual(results_by_tag.keys(),
                          ["artifact:FakeArtifact", "artifact:FakeArtifact2"])
def testExecutePythonHack(self):
  """Uploads a python hack and verifies it runs on the mocked client."""
  action_mock = action_mocks.ActionMock(standard.ExecutePython)
  # This is the code we test. If this runs on the client mock we can check
  # for this attribute.
  sys.test_code_ran_here = False
  client_id = self.SetupClient(0)
  code = """
import sys
sys.test_code_ran_here = True
"""
  maintenance_utils.UploadSignedConfigBlob(
      code.encode("utf-8"), aff4_path="aff4:/config/python_hacks/test")

  flow_test_lib.TestFlowHelper(
      administrative.ExecutePythonHack.__name__,
      action_mock,
      client_id=client_id,
      hack_name="test",
      creator=self.test_username)

  self.assertTrue(sys.test_code_ran_here)
def testYaraProcessScan_SingleSignatureShard(self):
  """Tests that a signature fitting into one shard sends a single request."""
  action_mock = action_mocks.ActionMock(memory_actions.YaraProcessScan)
  procs = [
      p for p in self.procs if p.pid in [101, 102, 103, 104, 105, 106, 107]
  ]
  scan_params = {
      "include_misses_in_results": True,
      "include_errors_in_results": "ALL_ERRORS",
      "max_results_per_process": 0,
      "ignore_grr_process": False,
  }
  # Pin the clock so the scan runs at a deterministic timestamp.
  with test_lib.FakeTime(
      rdfvalue.RDFDatetime.FromMicrosecondsSinceEpoch(123456789)):
    matches, errors, misses = self._RunYaraProcessScan(
        procs, action_mock=action_mock, **scan_params)

  # Verify scan results.
  self.assertLen(matches, 2)
  self.assertLen(errors, 2)
  self.assertLen(misses, 2)
  self.assertEqual(matches[0].match[0].rule_name, "test_rule")
  self.assertEqual(matches[0].match[0].string_matches[0].data, b"1234")

  flow = data_store.REL_DB.ReadAllFlowObjects(
      self.client_id, include_child_flows=False)[0]
  # We expect to have sent 1 YaraProcessScanRequest to the client.
  self.assertEqual(flow.next_outbound_id, 2)
  self.assertEqual(action_mock.recorded_messages[0].session_id.Basename(),
                   flow.flow_id)

  # The client must have received the full signature in a single shard.
  scan_requests = action_mock.recorded_args["YaraProcessScan"]
  expected_request = rdf_memory.YaraProcessScanRequest(
      signature_shard=rdf_memory.YaraSignatureShard(
          index=0, payload=_TEST_YARA_SIGNATURE.encode("utf-8")),
      num_signature_shards=1,
      **scan_params)
  self.assertListEqual(scan_requests, [expected_request])
def _RunYaraProcessScan(self,
                        procs,
                        ignore_grr_process=False,
                        include_errors_in_results=False,
                        include_misses_in_results=False,
                        max_results_per_process=0,
                        **kw):
  """Runs YaraProcessScan over `procs`.

  Returns:
    A (matches, errors, misses) tuple of flow results, partitioned by type.
  """
  client_mock = action_mocks.ActionMock(memory_actions.YaraProcessScan)
  # Fake out process enumeration and memory access so the scan sees only
  # the supplied test processes.
  with utils.MultiStubber(
      (psutil, "process_iter", lambda: procs),
      (psutil, "Process", functools.partial(self.process, procs)),
      (client_utils, "OpenProcessForMemoryAccess",
       lambda pid: FakeMemoryProcess(pid=pid))):
    session_id = flow_test_lib.TestFlowHelper(
        memory.YaraProcessScan.__name__,
        client_mock,
        yara_signature=test_yara_signature,
        client_id=self.client_id,
        ignore_grr_process=ignore_grr_process,
        include_errors_in_results=include_errors_in_results,
        include_misses_in_results=include_misses_in_results,
        max_results_per_process=max_results_per_process,
        token=self.token,
        **kw)

  results = flow_test_lib.GetFlowResults(self.client_id.Basename(), session_id)

  def _OfType(result_type):
    return [r for r in results if isinstance(r, result_type)]

  return (_OfType(rdf_memory.YaraProcessScanMatch),
          _OfType(rdf_memory.ProcessMemoryError),
          _OfType(rdf_memory.YaraProcessScanMiss))
def testCollectionOverwriting(self):
  """Test we overwrite the collection every time the flow is executed."""
  client_mock = action_mocks.ActionMock(searching.Find)

  # Prepare a findspec matching paths containing "bin".
  findspec = rdf_client.FindSpec()
  findspec.path_regex = "bin"
  findspec.pathspec.path = "/"
  findspec.pathspec.pathtype = rdf_paths.PathSpec.PathType.OS

  def _RunFind(**flow_kw):
    # Drives FindFiles to completion and returns its session id.
    for session_id in flow_test_lib.TestFlowHelper(
        find.FindFiles.__name__,
        client_mock,
        client_id=self.client_id,
        token=self.token,
        findspec=findspec,
        **flow_kw):
      pass
    return session_id

  session_id = _RunFind()
  self.assertEqual(len(flow.GRRFlow.ResultCollectionForFID(session_id)), 2)

  # A second run with a different spec must overwrite the previous
  # results rather than append to them.
  findspec.path_regex = "dd"
  session_id = _RunFind(max_results=1)
  self.assertEqual(len(flow.GRRFlow.ResultCollectionForFID(session_id)), 1)
def _RunUpdateFlow(self, client_id):
  """Finishes all pending flows on `client_id` after seeding fixture data."""
  # Snapshot the flows that should have been started before adding data.
  flows_fd = aff4.FACTORY.Open(client_id.Add("flows"), token=self.token)
  pending_flows = list(flows_fd.ListChildren())

  gui_test_lib.CreateFileVersion(
      client_id.Add("fs/os/c/a.txt"),
      "Hello World",
      timestamp=gui_test_lib.TIME_0,
      token=self.token)
  for folder_path in ("fs/os/c/TestFolder", "fs/os/c/bin/TestBinFolder"):
    gui_test_lib.CreateFolder(
        client_id.Add(folder_path),
        timestamp=gui_test_lib.TIME_0,
        token=self.token)

  client_mock = action_mocks.ActionMock()
  for flow_urn in pending_flows:
    # check_flow_errors=False: completing the flows is all that matters
    # here, not whether each one succeeds.
    flow_test_lib.TestFlowHelper(
        flow_urn,
        client_mock,
        client_id=client_id,
        token=self.token,
        check_flow_errors=False)
def testRunGrrClientActionArtifact(self):
  """Test we can get a GRR client artifact."""
  with utils.Stubber(psutil, "process_iter", ProcessIter):
    client_mock = action_mocks.ActionMock(standard.ListProcesses)
    client = aff4.FACTORY.Open(self.client_id, token=self.token, mode="rw")
    client.Set(client.Schema.SYSTEM("Linux"))
    client.Flush()

    coll1 = rdf_artifacts.ArtifactSource(
        type=rdf_artifacts.ArtifactSource.SourceType.GRR_CLIENT_ACTION,
        attributes={"client_action": standard.ListProcesses.__name__})
    self.fakeartifact.sources.append(coll1)
    artifact_list = ["FakeArtifact"]
    session_id = flow_test_lib.TestFlowHelper(
        aff4_flows.ArtifactCollectorFlow.__name__,
        client_mock,
        artifact_list=artifact_list,
        token=self.token,
        client_id=self.client_id)

    fd = flow.GRRFlow.ResultCollectionForFID(session_id)
    # assertEqual/assertIsInstance give clearer failure messages than the
    # previous assertTrue wrappers. Check the count first so an empty
    # collection fails cleanly instead of raising IndexError.
    self.assertEqual(len(fd), 1)
    self.assertIsInstance(list(fd)[0], rdf_client.Process)
def testArtifactSkipping(self):
  """Artifacts with a non-matching OS condition end up in the skip list."""
  client_mock = action_mocks.ActionMock()
  # FakeArtifact requires Linux; configure the client (system attribute
  # and knowledge base) as Windows so the artifact is skipped.
  client = aff4.FACTORY.Open(self.client_id, token=self.token, mode="rw")
  client.Set(client.Schema.SYSTEM("Windows"))
  kb = client.Get(client.Schema.KNOWLEDGE_BASE)
  kb.os = "Windows"
  client.Set(client.Schema.KNOWLEDGE_BASE, kb)
  client.Flush()

  session_id = flow_test_lib.TestFlowHelper(
      aff4_flows.ArtifactCollectorFlow.__name__,
      client_mock,
      artifact_list=["FakeArtifact"],
      use_tsk=False,
      token=self.token,
      client_id=self.client_id)

  flow_obj = aff4.FACTORY.Open(session_id, token=self.token)
  skipped = flow_obj.state.artifacts_skipped_due_to_condition
  self.assertEqual(len(skipped), 1)
  # The skip entry records the artifact name and the unsatisfied condition.
  self.assertEqual(skipped[0], ["FakeArtifact", "os == 'Linux'"])
def testRetrieveDependencies(self):
  """Test getting an artifact without a KB using retrieve_dependencies."""
  # Serve fake registry and OS handlers so dependency expansion works
  # without a populated knowledge base.
  with vfs_test_lib.VFSOverrider(rdf_paths.PathSpec.PathType.REGISTRY,
                                 vfs_test_lib.FakeRegistryVFSHandler), \
       vfs_test_lib.VFSOverrider(rdf_paths.PathSpec.PathType.OS,
                                 vfs_test_lib.FakeFullVFSHandler):
    client_mock = action_mocks.ActionMock(standard.StatFile)
    for session_id in flow_test_lib.TestFlowHelper(
        collectors.ArtifactCollectorFlow.__name__,
        client_mock,
        artifact_list=["WindowsEnvironmentVariableWinDir"],
        token=self.token,
        client_id=self.client_id,
        dependencies=(
            artifact_utils.ArtifactCollectorFlowArgs.Dependency.FETCH_NOW)):
      pass

    output = flow.GRRFlow.ResultCollectionForFID(session_id)
    self.assertEqual(len(output), 1)
    # %%environ_windir%% resolved from the fake registry.
    self.assertEqual(output[0], r"C:\Windows")
def RunFlow(self, client_id, keys_paths=None, conditions=None):
  """Runs RegistryFinder with the given paths and conditions.

  Returns:
    The session id of the finished flow.
  """
  if keys_paths is None:
    # Default to the well-known Run key of the NetworkService account.
    keys_paths = [
        "HKEY_USERS/S-1-5-20/Software/Microsoft/"
        "Windows/CurrentVersion/Run/*"
    ]

  client_mock = action_mocks.ActionMock(searching.Find, searching.Grep)

  return flow_test_lib.TestFlowHelper(
      registry.RegistryFinder.__name__,
      client_mock,
      client_id=client_id,
      keys_paths=keys_paths,
      conditions=conditions or [],
      token=self.token)
def _RunYaraProcessScan(self, procs, rules, ignore_grr_process=False, **kw):
  """Runs YaraProcessScan with rule compilation stubbed out.

  Returns:
    The flow's single result.
  """
  client_mock = action_mocks.ActionMock(yara_actions.YaraProcessScan)
  # Replace process enumeration with the test processes and have the
  # signature return the pre-built `rules` instead of compiling.
  with utils.MultiStubber(
      (psutil, "process_iter", lambda: procs),
      (rdf_yara.YaraSignature, "GetRules", lambda self: rules)):
    for session_id in flow_test_lib.TestFlowHelper(
        yara_flows.YaraProcessScan.__name__,
        client_mock,
        yara_signature=test_yara_signature,
        client_id=self.client_id,
        ignore_grr_process=ignore_grr_process,
        token=self.token,
        **kw):
      pass

  results = aff4.FACTORY.Open(session_id).ResultCollection()
  # Exactly one result is expected per scan.
  self.assertEqual(len(results), 1)
  return results[0]
def testRunGrrClientActionArtifactSplit(self):
  """Test that artifacts get split into separate collections."""
  with utils.Stubber(psutil, "process_iter", ProcessIter):
    client_mock = action_mocks.ActionMock(standard.ListProcesses)
    client = aff4.FACTORY.Open(self.client_id, token=self.token, mode="rw")
    client.Set(client.Schema.SYSTEM("Linux"))
    client.Flush()

    coll1 = artifact_registry.ArtifactSource(
        type=artifact_registry.ArtifactSource.SourceType.GRR_CLIENT_ACTION,
        attributes={"client_action": standard.ListProcesses.__name__})
    self.fakeartifact.sources.append(coll1)
    self.fakeartifact2.sources.append(coll1)
    artifact_list = ["FakeArtifact", "FakeArtifact2"]
    for s in flow_test_lib.TestFlowHelper(
        collectors.ArtifactCollectorFlow.__name__,
        client_mock,
        artifact_list=artifact_list,
        token=self.token,
        client_id=self.client_id,
        split_output_by_artifact=True):
      session_id = s

    # Check that we got two separate collections based on artifact name.
    # assertIsInstance gives clearer failure messages than
    # assertTrue(isinstance(...)); the count is checked first so an empty
    # collection fails cleanly instead of raising IndexError.
    fd = collectors.ArtifactCollectorFlow.ResultCollectionForArtifact(
        session_id, "FakeArtifact", token=self.token)
    self.assertEqual(len(fd), 1)
    self.assertIsInstance(list(fd)[0], rdf_client.Process)

    fd = collectors.ArtifactCollectorFlow.ResultCollectionForArtifact(
        session_id, "FakeArtifact2", token=self.token)
    self.assertEqual(len(fd), 1)
    self.assertIsInstance(list(fd)[0], rdf_client.Process)
def testNotificationIsSent(self):
  """Completing the refresh operation notifies the requesting user."""
  fixture_test_lib.ClientFixture(self.client_id, token=self.token)

  args = vfs_plugin.ApiCreateVfsRefreshOperationArgs(
      client_id=self.client_id,
      file_path=self.file_path,
      max_depth=0,
      notify_user=True)
  result = self.handler.Handle(args, token=self.token)

  # How the started operation is driven to completion depends on whether
  # the relational flows datastore is enabled.
  if data_store.RelationalDBFlowsEnabled():
    flow_test_lib.RunFlow(
        self.client_id, result.operation_id, check_flow_errors=False)
  else:
    # Finish flow and check if there are any new notifications.
    flow_urn = rdfvalue.RDFURN(result.operation_id)
    client_mock = action_mocks.ActionMock()
    flow_test_lib.TestFlowHelper(
        flow_urn,
        client_mock,
        client_id=self.client_id,
        token=self.token,
        check_flow_errors=False)

  pending_notifications = self.GetUserNotifications(self.token.username)
  self.assertIn("Recursive Directory Listing complete",
                pending_notifications[0].message)

  # The notification reference format also differs between the relational
  # and the legacy datastore backends.
  if data_store.RelationalDBReadEnabled():
    self.assertEqual(
        pending_notifications[0].reference.vfs_file.path_components,
        ["Users", "Shared"])
  else:
    self.assertEqual(pending_notifications[0].subject,
                     self.client_id.Add(self.file_path))
def testClickingOnInterrogateStartsInterrogateFlow(self): self.Open("/#/clients/%s" % self.client_id) # A click on the Interrogate button starts a flow, disables the button and # shows a loading icon within the button. self.Click("css=button:contains('Interrogate'):not([disabled])") self.WaitUntil(self.IsElementPresent, "css=button:contains('Interrogate')[disabled]") self.WaitUntil(self.IsElementPresent, "css=button:contains('Interrogate') i") # Get the started flow and finish it, this will re-enable the button. client_id = rdf_client.ClientURN(self.client_id) fd = aff4.FACTORY.Open(client_id.Add("flows"), token=self.token) flows = list(fd.ListChildren()) client_mock = action_mocks.ActionMock() for flow_urn in flows: flow_test_lib.TestFlowHelper( flow_urn, client_mock, client_id=client_id, token=self.token, check_flow_errors=False) self.WaitUntilNot(self.IsElementPresent, "css=button:contains('Interrogate')[disabled]") # Check if an Interrogate flow was started. self.Click("css=a[grrtarget='client.flows']") self.Click("css=td:contains('Interrogate')") self.WaitUntilContains( discovery.Interrogate.__name__, self.GetText, "css=table td.proto_key:contains('Flow name') " "~ td.proto_value")
def testFindWithMaxFiles(self):
  """Test that the Find flow works when specifying proto directly."""
  client_mock = action_mocks.ActionMock(searching.Find)
  # A spec matching everything under the filesystem root.
  findspec = rdf_client.FindSpec(
      path_regex=".*",
      pathspec=rdf_paths.PathSpec(
          path="/", pathtype=rdf_paths.PathSpec.PathType.OS))

  session_id = flow_test_lib.TestFlowHelper(
      find.FindFiles.__name__,
      client_mock,
      client_id=self.client_id,
      token=self.token,
      findspec=findspec,
      iteration_count=3,
      max_results=7)

  # Even though the regex matches everything, the flow must cap its
  # output at max_results entries.
  collection = flow.GRRFlow.ResultCollectionForFID(session_id)
  self.assertEqual(len(collection), 7)
def testNoLogsIfBtimeSupported(self, db: abstract_db.Database):
  """No log entries are produced when the client advertises btime support."""
  client_id = self.client_id
  db.WriteClientMetadata(client_id, fleetspeak_enabled=True)

  snapshot = rdf_objects.ClientSnapshot()
  snapshot.client_id = client_id
  snapshot.knowledge_base.os = "Linux"
  # Advertise birth-time support so the flow has nothing to log about.
  snapshot.startup_info.client_info.timeline_btime_support = True
  db.WriteClientSnapshot(snapshot)

  with temp.AutoTempDirPath() as tempdir:
    flow_id = flow_test_lib.TestFlowHelper(
        timeline_flow.TimelineFlow.__name__,
        action_mocks.ActionMock(timeline_action.Timeline),
        client_id=client_id,
        creator=self.test_username,
        args=rdf_timeline.TimelineArgs(root=tempdir.encode("utf-8")))
    flow_test_lib.FinishAllFlowsOnClient(client_id)

    log_entries = db.ReadFlowLogEntries(client_id, flow_id, offset=0, count=1)
    self.assertEmpty(log_entries)
def wait_until_done(*args, **kwargs):
  # Run every registered client action so any pending flows can complete
  # before the wrapped function is invoked.
  mock = action_mocks.ActionMock(*itervalues(client_actions.REGISTRY))
  flow_test_lib.FinishAllFlows(client_mock=mock)
  func(*args, **kwargs)