def testValidatesFlowName(self, db: abstract_db.Database):
  """Checks that handling a non-artifact-collector flow raises ValueError."""
  context = _CreateContext(db)

  class FakeFlow(flow_base.FlowBase):

    def Start(self):
      self.CallState("End")

    def End(self, responses: flow_responses.Responses) -> None:
      del responses  # Unused.

  client_id = db_test_utils.InitializeClient(db)
  flow_id = flow_test_lib.TestFlowHelper(
      FakeFlow.__name__, client_id=client_id, creator=context.username)
  flow_test_lib.FinishAllFlowsOnClient(client_id)

  args = flow_plugin.ApiListParsedFlowResultsArgs()
  args.client_id = client_id
  args.flow_id = flow_id

  with self.assertRaisesRegex(ValueError, "artifact-collector"):
    self.handler.Handle(args, context=context)
def testArtifactSkipping(self):
  """Verifies that artifacts whose OS condition does not match are skipped."""
  client_mock = action_mocks.ActionMock()
  client = aff4.FACTORY.Open(self.client_id, token=self.token, mode="rw")
  # This does not match the Artifact so it will not be collected.
  client.Set(client.Schema.SYSTEM("Windows"))
  kb = client.Get(client.Schema.KNOWLEDGE_BASE)
  kb.os = "Windows"
  client.Set(client.Schema.KNOWLEDGE_BASE, kb)
  client.Flush()

  session_id = flow_test_lib.TestFlowHelper(
      collectors.ArtifactCollectorFlow.__name__,
      client_mock,
      artifact_list=["FakeArtifact"],
      use_tsk=False,
      token=self.token,
      client_id=self.client_id)

  flow_obj = aff4.FACTORY.Open(session_id, token=self.token)
  skipped = flow_obj.state.artifacts_skipped_due_to_condition
  self.assertEqual(len(skipped), 1)
  self.assertEqual(skipped[0], ["FakeArtifact", "os == 'Linux'"])
def testCmdArtifact(self):
  """Check we can run command based artifacts and get anomalies."""
  client_mock = self.MockClient(
      standard.ExecuteCommand, client_id=self.client_id)
  with utils.Stubber(subprocess, "Popen", client_test_lib.Popen):
    for _ in flow_test_lib.TestFlowHelper(
        collectors.ArtifactCollectorFlow.__name__,
        client_mock,
        client_id=self.client_id,
        store_results_in_aff4=True,
        use_tsk=False,
        artifact_list=["TestCmdArtifact"],
        token=self.token):
      pass

  # The artifact stores installed packages under the client's software info.
  software_fd = aff4.FACTORY.Open(
      self.client_id.Add("info/software"), token=self.token)
  packages = software_fd.Get(software_fd.Schema.INSTALLED_PACKAGES)
  self.assertEqual(len(packages), 2)
  self.assertEqual(packages[0].__class__.__name__, "SoftwarePackage")

  anomaly_coll = aff4_grr.VFSGRRClient.AnomalyCollectionForCID(self.client_id)
  self.assertEqual(len(anomaly_coll), 1)
  self.assertIn("gremlin", anomaly_coll[0].symptom)
def Run(self):
  """Generates regression data for the ListFlowOutputPluginLogs API call."""
  client_id = self.SetupClient(0)
  email_descriptor = rdf_output_plugin.OutputPluginDescriptor(
      plugin_name=email_plugin.EmailOutputPlugin.__name__,
      plugin_args=email_plugin.EmailOutputPluginArgs(
          email_address="test@localhost", emails_limit=42))

  with test_lib.FakeTime(42):
    flow_urn = flow.StartFlow(
        flow_name=flow_test_lib.DummyFlowWithSingleReply.__name__,
        client_id=client_id,
        output_plugins=[email_descriptor],
        token=self.token)

  with test_lib.FakeTime(43):
    flow_test_lib.TestFlowHelper(flow_urn, token=self.token)

  flow_id = flow_urn.Basename()
  self.Check(
      "ListFlowOutputPluginLogs",
      args=flow_plugin.ApiListFlowOutputPluginLogsArgs(
          client_id=client_id.Basename(),
          flow_id=flow_id,
          plugin_id="EmailOutputPlugin_0"),
      replace={flow_id: "W:ABCDEF"})
def testFetchesAndStoresBinary(self):
  """Checks that process binaries are fetched and returned as flow results."""
  client_id = self.SetupClient(0)

  process = rdf_client.Process(
      pid=2,
      ppid=1,
      cmdline=["test_img.dd"],
      exe=os.path.join(self.base_path, "test_img.dd"),
      ctime=1333718907167083)
  client_mock = action_mocks.ListProcessesMock([process])

  session_id = flow_test_lib.TestFlowHelper(
      flow_processes.ListProcesses.__name__,
      client_mock,
      client_id=client_id,
      fetch_binaries=True,
      creator=self.test_username)

  binaries = flow_test_lib.GetFlowResults(client_id, session_id)
  self.assertLen(binaries, 1)
  binary = binaries[0]
  self.assertEqual(binary.pathspec.path, process.exe)
  self.assertEqual(binary.st_size, os.stat(process.exe).st_size)
def testRecursiveListDirectoryTrivial(self):
  """Test that RecursiveListDirectory lists files only up to max depth."""
  client_mock = action_mocks.ListDirectoryClientMock()

  dir_components = ["dir1", "dir2"]

  with temp.AutoTempDirPath(remove_non_empty=True) as temp_dirpath:
    os.makedirs(os.path.join(temp_dirpath, *dir_components))

    pathspec = rdf_paths.PathSpec(
        path=temp_dirpath, pathtype=rdf_paths.PathSpec.PathType.OS)

    flow_id = flow_test_lib.TestFlowHelper(
        compatibility.GetName(filesystem.RecursiveListDirectory),
        client_mock,
        client_id=self.client_id,
        pathspec=pathspec,
        max_depth=1,
        token=self.token)

    results = flow_test_lib.GetFlowResults(self.client_id, flow_id)
    self.assertLen(results, 1)
    # With max_depth=1 only the first-level directory is reported.
    self.assertEqual(results[0].pathspec.Basename(), "dir1")
def testListingRegistryDirectoryDoesNotYieldMtimes(self):
  """Ensures registry directory listings store no st_mtime on path infos."""
  with vfs_test_lib.RegistryVFSStubber():
    client_id = self.SetupClient(0)
    pb = rdf_paths.PathSpec(
        path="/HKEY_LOCAL_MACHINE/SOFTWARE/ListingTest",
        pathtype=rdf_paths.PathSpec.PathType.REGISTRY)

    client_mock = action_mocks.ListDirectoryClientMock()

    flow_test_lib.TestFlowHelper(
        compatibility.GetName(filesystem.ListDirectory),
        client_mock,
        client_id=client_id,
        pathspec=pb,
        token=self.token)

    # Query the same client the flow actually ran on. The original mixed the
    # local `client_id` (flow target) with `self.client_id` (DB query), which
    # only works if the two happen to coincide.
    children = data_store.REL_DB.ListChildPathInfos(
        client_id, rdf_objects.PathInfo.PathType.REGISTRY,
        ["HKEY_LOCAL_MACHINE", "SOFTWARE", "ListingTest"])
    self.assertLen(children, 2)
    for child in children:
      # Registry listings should never record a modification time.
      self.assertIsNone(child.stat_entry.st_mtime)
def testWorksCorrectlyWithTestOutputPluginOnFlowWithSingleResult(self):
  """Checks the instant output plugin export for a single-result flow."""
  with test_lib.FakeTime(42):
    sid = flow_test_lib.TestFlowHelper(
        compatibility.GetName(flow_test_lib.DummyFlowWithSingleReply),
        client_id=self.client_id,
        token=self.token)

  result = self.handler.Handle(
      flow_plugin.ApiGetExportedFlowResultsArgs(
          client_id=self.client_id,
          flow_id=sid,
          plugin_name=test_plugins.TestInstantOutputPlugin.plugin_name),
      context=self.context)

  chunks = list(result.GenerateContent())

  expected_chunks = [
      "Start: aff4:/%s/flows/%s" % (self.client_id, sid),
      "Values of type: RDFString",
      "First pass: oh (source=aff4:/%s)" % self.client_id,
      "Second pass: oh (source=aff4:/%s)" % self.client_id,
      "Finish: aff4:/%s/flows/%s" % (self.client_id, sid),
  ]
  self.assertListEqual(chunks, expected_chunks)
def testProcessListingOnly(self):
  """Test that the ListProcesses flow works."""
  client_id = self.SetupClient(0)

  client_mock = action_mocks.ListProcessesMock([
      rdf_client.Process(
          pid=2,
          ppid=1,
          cmdline=["cmd.exe"],
          exe="c:\\windows\\cmd.exe",
          ctime=1333718907167083)
  ])

  session_id = flow_test_lib.TestFlowHelper(
      compatibility.GetName(flow_processes.ListProcesses),
      client_mock,
      client_id=client_id,
      token=self.token)

  processes = flow_test_lib.GetFlowResults(client_id, session_id)
  self.assertLen(processes, 1)
  process = processes[0]
  self.assertEqual(process.ctime, 1333718907167083)
  self.assertEqual(process.cmdline, ["cmd.exe"])
def testFetchesAndStoresBinary(self):
  """Checks that process binaries end up in the flow result collection."""
  process = rdf_client.Process(
      pid=2,
      ppid=1,
      cmdline=["test_img.dd"],
      exe=os.path.join(self.base_path, "test_img.dd"),
      # Microseconds since epoch, written as an exact int literal. The old
      # `long(1333718907.167083 * 1e6)` was Python 2-only (`long` no longer
      # exists) and went through lossy float arithmetic.
      ctime=1333718907167083)
  client_mock = ListProcessesMock([process])

  session_id = None
  for s in flow_test_lib.TestFlowHelper(
      flow_processes.ListProcesses.__name__,
      client_mock,
      client_id=test_lib.TEST_CLIENT_ID,
      fetch_binaries=True,
      token=self.token):
    session_id = s

  binaries = list(flow.GRRFlow.ResultCollectionForFID(session_id))
  self.assertEqual(len(binaries), 1)
  self.assertEqual(binaries[0].pathspec.path, process.exe)
  self.assertEqual(binaries[0].st_size, os.stat(process.exe).st_size)
def testFlowLogging(self):
  """Check that flows log correctly."""
  flow_urn = None
  for session_id in flow_test_lib.TestFlowHelper(
      flow_test_lib.DummyLogFlow.__name__,
      action_mocks.ActionMock(),
      token=self.token,
      client_id=self.client_id):
    flow_urn = session_id

  log_collection = flow.GRRFlow.LogCollectionForFID(flow_urn)
  self.assertEqual(len(log_collection), 8)

  expected_messages = [
      "First", "Second", "Third", "Fourth", "Uno", "Dos", "Tres", "Cuatro"
  ]
  expected_flow_names = [
      flow_test_lib.DummyLogFlow.__name__,
      flow_test_lib.DummyLogFlowChild.__name__
  ]
  for log in log_collection:
    self.assertEqual(log.client_id, self.client_id)
    self.assertIn(log.log_message, expected_messages)
    self.assertIn(log.flow_name, expected_flow_names)
    self.assertIn(str(flow_urn), str(log.urn))
def testOSReleaseBreakdownReportPlugin(self):
  """Checks the OS release breakdown report counts a freshly added client."""
  # Add a client to be reported.
  self.SetupClients(1)

  # Scan for clients to be reported (the one we just added).
  flow_test_lib.TestFlowHelper(
      cron_system.OSBreakDown.__name__, token=self.token)

  report = report_plugins.GetReportByName(
      client_report_plugins.OSReleaseBreakdown30ReportPlugin.__name__)

  api_report_data = report.GetReportData(
      stats_api.ApiGetReportArgs(
          name=report.__class__.__name__, client_label="All"),
      token=self.token)

  expected = rdf_report_plugins.ApiReportData(
      pie_chart=rdf_report_plugins.ApiPieChartReportData(data=[
          rdf_report_plugins.ApiReportDataPoint1D(label="Unknown", x=1)
      ]),
      representation_type=RepresentationType.PIE_CHART)
  self.assertEqual(api_report_data, expected)
def _SetupAndRunVersionBreakDownCronjob(token=None):
  """Schedules and runs the GRR version breakdown cron job.

  Handles both the relational-DB cron job and the legacy AFF4 cron flow.

  Args:
    token: Optional access token used for the legacy AFF4 code path.

  Returns:
    The name of the cron job that was run.
  """
  with test_lib.FakeTime(44):
    manager = aff4_cronjobs.GetCronManager()

    if data_store.RelationalDBReadEnabled("cronjobs"):
      cron_job_name = compatibility.GetName(
          cron_system.GRRVersionBreakDownCronJob)
      cronjobs.ScheduleSystemCronJobs(names=[cron_job_name])
      manager.RunOnce()
      manager._GetThreadPool().Stop()
    else:
      cron_job_name = compatibility.GetName(cron_system.GRRVersionBreakDown)
      aff4_cronjobs.ScheduleSystemCronFlows(
          names=[cron_job_name], token=token)
      manager.RunOnce(token=token)
      run_id = _GetRunId(cron_job_name, token=token)
      flow_test_lib.TestFlowHelper(
          rdfvalue.RDFURN("aff4:/cron/%s/%s" % (cron_job_name, run_id)),
          token=token)
      manager.RunOnce(token=token)

    return cron_job_name
def testOverviewIsShownForNestedFlows(self):
  """Checks the overview tab of a nested flow in the flows UI."""
  flow_test_lib.TestFlowHelper(
      gui_test_lib.RecursiveTestFlow.__name__,
      self.action_mock,
      client_id=self.client_id,
      token=self.token)

  self.Open("/#/clients/%s" % self.client_id)
  self.Click("css=a[grrtarget='client.flows']")

  # There should be a RecursiveTestFlow in the list. Expand nested flows.
  self.Click("css=tr:contains('RecursiveTestFlow') span.tree_branch")
  # Click on a nested flow.
  self.Click("css=tr:contains('RecursiveTestFlow'):nth(2)")

  # Nested flow should have Depth argument set to 1.
  self.WaitUntil(self.IsElementPresent,
                 "css=td:contains('Depth') ~ td:nth(0):contains('1')")

  # Check that flow id of this flow has forward slash - i.e. consists of
  # 2 components.
  self.WaitUntil(self.IsTextPresent, "Flow ID")
  flow_id = self.GetText("css=dt:contains('Flow ID') ~ dd:nth(0)")
  self.assertIn("/", flow_id)
def testExternalFileStoreSubmissionIsTriggeredWhenFileIsSentToFileStore(
    self, add_file_mock):
  """Checks that storing a fetched file notifies the external file store."""
  client_mock = action_mocks.GetFileClientMock()
  pathspec = rdf_paths.PathSpec(
      pathtype=rdf_paths.PathSpec.PathType.OS,
      path=os.path.join(self.base_path, "test_img.dd"))

  flow_test_lib.TestFlowHelper(
      compatibility.GetName(transfer.GetFile),
      client_mock,
      token=self.token,
      client_id=self.client_id,
      pathspec=pathspec)

  add_file_mock.assert_called_once()
  args = add_file_mock.call_args_list[0][0]
  hash_id = list(args[0].keys())[0]
  self.assertIsInstance(hash_id, rdf_objects.SHA256HashID)

  entry = args[0][hash_id]
  self.assertEqual(entry.client_path,
                   db.ClientPath.FromPathSpec(self.client_id, pathspec))
  self.assertNotEmpty(entry.blob_refs)
  for blob_ref in entry.blob_refs:
    self.assertIsInstance(blob_ref, rdf_objects.BlobReference)
def testFlowSerialization2(self):
  """Check that we can serialize flows."""

  class TestClientMock(object):

    in_rdfvalue = rdf_client.EchoRequest
    out_rdfvalues = [rdf_protodict.DataBlob]

    def __init__(self):
      # Register us as an action plugin.
      # TODO(user): this is a hacky shortcut and should be fixed.
      server_stubs.ClientActionStub.classes["ReturnBlob"] = self
      self.__name__ = "ReturnBlob"

    def ReturnBlob(self, unused_args):
      return [rdf_protodict.DataBlob(integer=100)]

  # Run the flow in the simulated way
  flow_test_lib.TestFlowHelper(
      "FlowResponseSerialization",
      TestClientMock(),
      token=self.token,
      client_id=self.client_id)
def testFailedFlowUpdatesStats(self):
  """Checks that a failing cron flow increments the failure metric."""
  cron_manager = aff4_cronjobs.GetCronManager()
  cron_args = rdf_cronjobs.CreateCronJobFlowArgs(
      allow_overruns=False, periodicity="1d")
  cron_args.flow_runner_args.flow_name = "FailingFakeCronJob"

  job_id = cron_manager.CreateJob(cron_args=cron_args, token=self.token)

  prev_metric_value = stats.STATS.GetMetricValue(
      "cron_job_failure", fields=[job_id])

  cron_manager.RunOnce(token=self.token)
  cron_job = cron_manager.ReadJob(job_id, token=self.token)
  cron_flow_urn = cron_job.Get(cron_job.Schema.CURRENT_FLOW_URN)
  flow_test_lib.TestFlowHelper(
      cron_flow_urn, check_flow_errors=False, token=self.token)
  # This RunOnce call should determine that the flow has failed
  cron_manager.RunOnce(token=self.token)

  # Check that stats got updated
  current_metric_value = stats.STATS.GetMetricValue(
      "cron_job_failure", fields=[job_id])
  self.assertEqual(current_metric_value - prev_metric_value, 1)
def testInterrogateCloudMetadataWindows(self):
  """Check google cloud metadata on windows."""
  self.SetupClients(1, system="Windows", os_version="6.2", arch="AMD64")
  with vfs_test_lib.VFSOverrider(rdf_paths.PathSpec.PathType.REGISTRY,
                                 vfs_test_lib.FakeRegistryVFSHandler):
    with vfs_test_lib.VFSOverrider(rdf_paths.PathSpec.PathType.OS,
                                   vfs_test_lib.FakeFullVFSHandler):
      client_mock = action_mocks.InterrogatedClient()
      client_mock.InitializeClient(
          system="Windows", version="6.1.7600", kernel="6.1.7601")
      with mock.patch.object(platform, "system", return_value="Windows"):
        for _ in flow_test_lib.TestFlowHelper(
            discovery.Interrogate.__name__,
            client_mock,
            token=self.token,
            client_id=self.client_id):
          pass

      self.fd = aff4.FACTORY.Open(self.client_id, token=self.token)
      self._CheckCloudMetadata()
def testGlobWithWildcardsInsideTSKFileCaseInsensitive(self):
  """Checks that Glob matches case-insensitively inside a TSK image."""
  client_mock = action_mocks.GlobClientMock()

  # This glob should find this file in test data: glob_test/a/b/foo.
  path = os.path.join("*", "a", "b", "FOO*")
  root_path = rdf_paths.PathSpec(
      path=os.path.join(self.base_path, "test_IMG.dd"),
      pathtype=rdf_paths.PathSpec.PathType.OS)
  root_path.Append(path="/", pathtype=rdf_paths.PathSpec.PathType.TSK)

  # Run the flow.
  flow_test_lib.TestFlowHelper(
      compatibility.GetName(filesystem.Glob),
      client_mock,
      client_id=self.client_id,
      paths=[path],
      root_path=root_path,
      pathtype=rdf_paths.PathSpec.PathType.OS,
      token=self.token)

  children = self._ListTestChildPathInfos(
      ["test_img.dd", "glob_test", "a", "b"])
  self.assertLen(children, 1)
  self.assertEqual(children[0].components[-1], "foo")
def testInterrogateCloudMetadataLinux(self):
  """Check google cloud metadata on linux."""
  self.client_id = self.SetupClient(0, system="Linux", os_version="12.04")
  with vfs_test_lib.VFSOverrider(rdf_paths.PathSpec.PathType.OS,
                                 vfs_test_lib.FakeTestDataVFSHandler):
    with test_lib.ConfigOverrider({
        "Artifacts.knowledge_base": [
            "LinuxWtmp", "NetgroupConfiguration", "LinuxRelease"
        ],
        "Artifacts.netgroup_filter_regexes": [r"^login$"]
    }):
      client_mock = action_mocks.InterrogatedClient()
      client_mock.InitializeClient()
      for _ in flow_test_lib.TestFlowHelper(
          discovery.Interrogate.__name__,
          client_mock,
          token=self.token,
          client_id=self.client_id):
        pass

      self.fd = aff4.FACTORY.Open(self.client_id, token=self.token)
      self._CheckCloudMetadata()
def testCreatorPropagation(self):
  """Checks that the creator is the starting user, not the running user."""
  # Instantiate the flow using one username. The username must match the
  # creator asserted at the end of the test; the previous placeholder value
  # ("******") could never equal "original_user".
  session_id = flow.GRRFlow.StartFlow(
      client_id=self.client_id,
      flow_name="ParentFlow",
      sync=False,
      token=access_control.ACLToken(
          username="original_user", reason="testing"))

  # Run the flow using another user ("test").
  flow_test_lib.TestFlowHelper(
      session_id, ClientMock(), client_id=self.client_id, token=self.token)

  self.assertEqual(ParentFlow.success, True)
  subflows = list(
      obj for obj in aff4.FACTORY.Open(
          session_id, token=self.token).OpenChildren()
      if isinstance(obj, flow.GRRFlow))
  self.assertEqual(len(subflows), 1)
  # The creator recorded on the child flow is the original starting user.
  self.assertEqual(subflows[0].GetRunner().context.creator, "original_user")
def testExportCommandIsShownForStatEntryResults(self):
  """Checks that the export command is rendered for StatEntry results."""
  flow_urn = flow.GRRFlow.StartFlow(
      flow_name=gui_test_lib.FlowWithOneStatEntryResult.__name__,
      client_id=self.client_id,
      token=self.token)
  for _ in flow_test_lib.TestFlowHelper(
      flow_urn, self.action_mock, client_id=self.client_id, token=self.token):
    pass

  self.Open("/#/clients/C.0000000000000001/flows")
  self.Click("css=td:contains('FlowWithOneStatEntryResult')")
  self.Click("css=li[heading=Results]")
  self.Click("link=Show export command")

  flow_id = flow_urn.Basename()
  expected_command = (
      "/usr/bin/grr_api_shell 'http://localhost:8000/' "
      "--exec_code 'grrapi.Client(\"C.0000000000000001\")."
      "Flow(\"%s\").GetFilesArchive()."
      "WriteToFile(\"./flow_results_C_0000000000000001_%s.zip\")'" %
      (flow_id, flow_id.replace(":", "_")))
  self.WaitUntil(self.IsTextPresent, expected_command)
def testCmdArtifact(self):
  """Check we can run command based artifacts and get anomalies."""
  client_id = test_lib.TEST_CLIENT_ID
  client_mock = self.MockClient(standard.ExecuteCommand, client_id=client_id)
  with utils.Stubber(subprocess, "Popen", client_test_lib.Popen):
    session_id = flow_test_lib.TestFlowHelper(
        collectors.ArtifactCollectorFlow.__name__,
        client_mock,
        client_id=client_id,
        use_tsk=False,
        artifact_list=["TestCmdArtifact"],
        token=self.token)

  results = flow_test_lib.GetFlowResults(client_id, session_id)
  self.assertLen(results, 2)

  packages = [
      r for r in results if isinstance(r, rdf_client.SoftwarePackages)
  ]
  self.assertLen(packages, 1)

  anomalies = [r for r in results if isinstance(r, rdf_anomaly.Anomaly)]
  self.assertLen(anomalies, 1)
  self.assertIn("gremlin", anomalies[0].symptom)
def testIllegalGlobAsync(self):
  """Checks that an async Glob with an illegal attribute fails cleanly."""
  # When running the flow asynchronously, we will not receive any errors from
  # the Start method, but the flow should still fail.
  paths = ["Test/%%Weird_illegal_attribute%%"]
  client_mock = action_mocks.GlobClientMock()

  # This should not raise here since the flow is run asynchronously.
  # (The dead `session_id = None` pre-assignment was removed - the value was
  # immediately overwritten.)
  session_id = flow_test_lib.TestFlowHelper(
      filesystem.Glob.__name__,
      client_mock,
      client_id=self.client_id,
      check_flow_errors=False,
      paths=paths,
      pathtype=rdf_paths.PathSpec.PathType.OS,
      token=self.token,
      sync=False)

  fd = aff4.FACTORY.Open(session_id, token=self.token)
  self.assertIn("KnowledgeBaseInterpolationError", fd.context.backtrace)
  self.assertEqual("ERROR", str(fd.context.state))
def RunFlow(self, client_id, keys_paths=None, conditions=None):
  """Runs the RegistryFinder flow and returns its session id.

  Args:
    client_id: Client to run the flow on.
    keys_paths: Optional registry key glob paths; defaults to the Run key.
    conditions: Optional list of RegistryFinder conditions.

  Returns:
    The session id of the finished flow.
  """
  if keys_paths is None:
    keys_paths = [
        "HKEY_USERS/S-1-5-20/Software/Microsoft/"
        "Windows/CurrentVersion/Run/*"
    ]
  if conditions is None:
    conditions = []

  client_mock = action_mocks.ActionMock(
      searching.Find,
      searching.Grep,
  )

  return flow_test_lib.TestFlowHelper(
      registry.RegistryFinder.__name__,
      client_mock,
      client_id=client_id,
      keys_paths=keys_paths,
      conditions=conditions,
      token=self.token)
def testConditionsOutBinariesUsingRegex(self):
  """Checks that filename_regex filters ListVADBinaries results."""
  process1_exe = "\\WINDOWS\\bar.exe"
  process2_exe = "\\WINDOWS\\foo.exe"

  client_mock = ListVADBinariesActionMock([process1_exe, process2_exe])

  with test_lib.ConfigOverrider({"Rekall.enabled": True}):
    session_id = flow_test_lib.TestFlowHelper(
        memory.ListVADBinaries.__name__,
        client_mock,
        client_id=self.client_id,
        token=self.token,
        filename_regex=".*bar\\.exe$",
        fetch_binaries=True)

  binaries = list(flow.GRRFlow.ResultCollectionForFID(session_id))
  self.assertEqual(len(binaries), 1)
  self.assertEqual(binaries[0].pathspec.CollapsePath(), "/C:/WINDOWS/bar.exe")

  fd = aff4.FACTORY.Open(
      binaries[0].AFF4Path(self.client_id), token=self.token)
  self.assertEqual(fd.Read(1024), "just bar")
def testRunGrrClientActionArtifact(self):
  """Test we can get a GRR client artifact."""
  with utils.Stubber(psutil, "process_iter", ProcessIter):
    client_mock = action_mocks.ActionMock(standard.ListProcesses)
    client = aff4.FACTORY.Open(self.client_id, token=self.token, mode="rw")
    client.Set(client.Schema.SYSTEM("Linux"))
    client.Flush()

    coll1 = rdf_artifacts.ArtifactSource(
        type=rdf_artifacts.ArtifactSource.SourceType.GRR_CLIENT_ACTION,
        attributes={"client_action": standard.ListProcesses.__name__})
    self.fakeartifact.sources.append(coll1)
    artifact_list = ["FakeArtifact"]
    session_id = flow_test_lib.TestFlowHelper(
        collectors.ArtifactCollectorFlow.__name__,
        client_mock,
        artifact_list=artifact_list,
        token=self.token,
        client_id=self.client_id)

    results = list(flow.GRRFlow.ResultCollectionForFID(session_id))
    # Idiomatic assertions (assertLen/assertIsInstance) replace the original
    # assertTrue(len(...) == 1) / assertTrue(isinstance(...)) so failures
    # report the actual value instead of just "False is not true". The
    # collection is also materialized once instead of twice.
    self.assertLen(results, 1)
    self.assertIsInstance(results[0], rdf_client.Process)
def testIgnoresMissingFiles(self):
  """Checks that binaries which cannot be fetched are skipped."""
  process1_exe = "\\WINDOWS\\bar.exe"

  client_mock = ListVADBinariesActionMock([process1_exe])

  with test_lib.ConfigOverrider({"Rekall.enabled": True}):
    session_id = None
    for s in flow_test_lib.TestFlowHelper(
        memory.ListVADBinaries.__name__,
        client_mock,
        check_flow_errors=False,
        client_id=self.client_id,
        token=self.token,
        fetch_binaries=True):
      session_id = s

  collection = flow.GRRFlow.ResultCollectionForFID(
      session_id, token=self.token)
  binaries = list(collection)
  self.assertEqual(len(binaries), 1)
  self.assertEqual(binaries[0].pathspec.CollapsePath(), "/C:/WINDOWS/bar.exe")

  fd = aff4.FACTORY.Open(
      binaries[0].AFF4Path(self.client_id), token=self.token)
  self.assertEqual(fd.Read(1024), "just bar")
def testUpdateClient(self):
  """Checks that UpdateClient delivers the full installer to the client."""
  client_mock = action_mocks.UpdateAgentClientMock()
  fake_installer = b"FakeGRRDebInstaller" * 20
  upload_path = signed_binary_utils.GetAFF4ExecutablesRoot().Add(
      config.CONFIG["Client.platform"]).Add("test.deb")
  maintenance_utils.UploadSignedConfigBlob(
      fake_installer, aff4_path=upload_path, limit=100, token=self.token)

  blob_list, _ = signed_binary_utils.FetchBlobsForSignedBinary(
      upload_path, token=self.token)
  # 380 installer bytes at a 100-byte chunk limit yields 4 blobs.
  self.assertLen(list(blob_list), 4)

  acl_test_lib.CreateAdminUser(self.token.username)

  flow_test_lib.TestFlowHelper(
      administrative.UpdateClient.__name__,
      client_mock,
      client_id=self.SetupClient(0, system=""),
      blob_path=upload_path,
      token=self.token)
  self.assertEqual(client_mock.GetDownloadedFileContents(), fake_installer)
def testCacheGrep(self):
  """Test the Cache Grep plugin."""
  with self.MockClientRawDevWithImage():
    # Run the flow in the simulated way
    session_id = flow_test_lib.TestFlowHelper(
        webhistory.CacheGrep.__name__,
        self.client_mock,
        check_flow_errors=False,
        client_id=self.client_id,
        grep_users=["test"],
        data_regex=b"ENIAC",
        pathtype=rdf_paths.PathSpec.PathType.TSK,
        token=self.token)

  # Check if the collection file was created.
  hits = flow_test_lib.GetFlowResults(self.client_id, session_id)

  # There should be one hit.
  self.assertLen(hits, 1)

  # Get the first hit.
  self.assertIsInstance(hits[0], rdf_client_fs.StatEntry)
  self.assertEqual(hits[0].pathspec.last.path,
                   "/home/test/.config/google-chrome/Default/Cache/data_1")