def setUp(self):
  """Creates two fixture clients with file versions; approves the first."""
  super(TestFileView, self).setUp()
  # Prepare our fixture: two clients, only the first of which gets approval.
  client_urns = self.SetupClients(2)
  self.client_id = client_urns[0].Basename()
  self.unapproved_client_id = client_urns[1].Basename()
  fixture_test_lib.ClientFixture(self.client_id, self.token)
  versions = gui_test_lib.CreateFileVersions(
      rdf_client.ClientURN(self.client_id), self.token)
  self.content_1, self.content_2 = versions
  # Precompute the SHA-256 ids of both versions for use by the tests.
  self.content_1_hash = rdf_objects.SHA256HashID.FromData(
      self.content_1).AsBytes()
  self.content_2_hash = rdf_objects.SHA256HashID.FromData(
      self.content_2).AsBytes()
  self.RequestAndGrantClientApproval(self.client_id)
def Run(self):
  """Regression check for UpdateVfsFileContent (relational datastore)."""
  client_id = self.SetupClient(0)
  self.file_path = "fs/os/c/bin/bash"
  fixture_test_lib.ClientFixture(client_id)

  def ReplaceFlowId():
    # Flow ids are generated, so map the single created flow's id onto a
    # constant to keep the golden output stable.
    all_flows = data_store.REL_DB.ReadAllFlowObjects(client_id=client_id)
    return {all_flows[0].flow_id: "W:ABCDEF"}

  with test_lib.FakeTime(42):
    request_args = vfs_plugin.ApiUpdateVfsFileContentArgs(
        client_id=client_id, file_path=self.file_path)
    self.Check(
        "UpdateVfsFileContent", args=request_args, replace=ReplaceFlowId)
def Run(self):
  """Regression check for UpdateVfsFileContent (AFF4 datastore variant)."""
  fixture_test_lib.ClientFixture(self.client_id, token=self.token)

  def ReplaceFlowId():
    # Flow ids are nondeterministic; map the only created flow's id onto a
    # fixed value so the golden output stays stable across runs.
    flows_dir_fd = aff4.FACTORY.Open(
        self.client_id.Add("flows"), token=self.token)
    flow_urn = list(flows_dir_fd.ListChildren())[0]
    return {flow_urn.Basename(): "W:ABCDEF"}

  with test_lib.FakeTime(42):
    self.Check(
        "UpdateVfsFileContent",
        args=vfs_plugin.ApiUpdateVfsFileContentArgs(
            client_id=self.client_id.Basename(), file_path=self.file_path),
        replace=ReplaceFlowId)
def setUp(self):
  """Creates two fixture clients with file versions; approves the first."""
  super(TestFileView, self).setUp()
  # Prepare our fixture: only the first client is granted approval.
  clients = self.SetupClients(2)
  self.client_id = clients[0]
  self.unapproved_client_id = clients[1]
  with test_lib.FakeTime(test_lib.FIXED_TIME):
    fixture_test_lib.ClientFixture(self.client_id)
  versions = gui_test_lib.CreateFileVersions(self.client_id)
  self.content_1, self.content_2 = versions
  # Precompute SHA-256 ids of both file versions for later assertions.
  self.content_1_hash = rdf_objects.SHA256HashID.FromData(
      self.content_1).AsBytes()
  self.content_2_hash = rdf_objects.SHA256HashID.FromData(
      self.content_2).AsBytes()
  self.RequestAndGrantClientApproval(self.client_id)
def setUp(self):
  """Builds a fixture client with three historical snapshots."""
  super(TestHostInformation, self).setUp()
  self.client_id = u"C.0000000000000001"
  with test_lib.FakeTime(test_lib.FIXED_TIME):
    fixture_test_lib.ClientFixture(self.client_id)
  self.RequestAndGrantClientApproval(self.client_id)
  # One snapshot per point in time: (time, os version, hostname, memory).
  snapshots = [
      (gui_test_lib.TIME_0, "6.1.7000", "Hostname T0", 4294967296),
      (gui_test_lib.TIME_1, "6.1.8000", "Hostname T1", 8589934592),
      (gui_test_lib.TIME_2, "7.0.0000", "Hostname T2", 12884901888),
  ]
  for timestamp, os_version, hostname, memory_size in snapshots:
    self._WriteClientSnapshot(timestamp, os_version, hostname, memory_size)
def SetupTestTimeline(self):
  """Writes five timestamped AFF4 versions of a fixture file.

  Stores the chosen paths on self (folder_path, file_path) for the test
  body to reference later.
  """
  self.client_id = self.SetupClient(0)
  fixture_test_lib.ClientFixture(self.client_id, token=self.token)
  # Choose some directory with pathspec in the ClientFixture.
  self.folder_path = "fs/os/Users/中国新闻网新闻中/Shared"
  self.file_path = self.folder_path + "/a.txt"
  file_urn = self.client_id.Add(self.file_path)
  # Recreate the same file at five distinct fake timestamps so the AFF4
  # object accumulates five versions (one per FakeTime value).
  for i in range(0, 5):
    with test_lib.FakeTime(i):
      with aff4.FACTORY.Create(
          file_urn, aff4_grr.VFSFile, mode="w", token=self.token) as fd:
        # st_mtime picks up the faked clock, making versions distinguishable.
        stats = rdf_client.StatEntry(
            st_mtime=rdfvalue.RDFDatetimeSeconds().Now())
        fd.Set(fd.Schema.STAT, stats)
def testCollectRunKeyBinaries(self):
  """Read Run key from the client_fixtures to test parsing and storage."""
  client_id = test_lib.TEST_CLIENT_ID
  fixture_test_lib.ClientFixture(client_id, token=self.token)
  # Mark the client as Windows 6.2 so the Windows registry artifacts apply.
  client = aff4.FACTORY.Open(client_id, token=self.token, mode="rw")
  client.Set(client.Schema.SYSTEM("Windows"))
  client.Set(client.Schema.OS_VERSION("6.2"))
  client.Flush()
  with vfs_test_lib.VFSOverrider(rdf_paths.PathSpec.PathType.OS,
                                 vfs_test_lib.FakeFullVFSHandler):
    client_mock = action_mocks.ActionMock(
        file_fingerprint.FingerprintFile,
        searching.Find,
        standard.StatFile,
    )
    # Get KB initialized
    for s in flow_test_lib.TestFlowHelper(
        artifact.KnowledgeBaseInitializationFlow.__name__,
        client_mock,
        client_id=client_id,
        token=self.token):
      session_id = s
    # Store the knowledge base produced by the init flow on the client, so
    # CollectRunKeyBinaries can expand its artifact paths.
    col = flow.GRRFlow.ResultCollectionForFID(session_id)
    client.Set(client.Schema.KNOWLEDGE_BASE, list(col)[0])
    client.Flush()
    with test_lib.Instrument(transfer.MultiGetFile,
                             "Start") as getfile_instrument:
      # Run the flow in the emulated way.
      for _ in flow_test_lib.TestFlowHelper(
          registry.CollectRunKeyBinaries.__name__,
          client_mock,
          client_id=client_id,
          token=self.token):
        pass
      # Check MultiGetFile got called for our runkey file
      # NOTE(review): args[0][0] is presumably the first positional argument
      # of the first instrumented MultiGetFile.Start call — confirm against
      # test_lib.Instrument's capture format.
      download_requested = False
      for pathspec in getfile_instrument.args[0][0].args.pathspecs:
        if pathspec.path == u"C:\\Windows\\TEMP\\A.exe":
          download_requested = True
      self.assertTrue(download_requested)
def testHandlerRefreshStartsListDirectoryFlow(self):
  """The refresh handler launches a ListDirectory flow for the path."""
  fixture_test_lib.ClientFixture(self.client_id, token=self.token)
  request = vfs_plugin.ApiCreateVfsRefreshOperationArgs(
      client_id=self.client_id, file_path=self.file_path, max_depth=1)
  result = self.handler.Handle(request, token=self.token)
  if data_store.RelationalDBFlowsEnabled():
    started_flow = data_store.REL_DB.ReadFlowObject(self.client_id.Basename(),
                                                    result.operation_id)
    self.assertEqual(started_flow.flow_class_name, "ListDirectory")
  else:
    # The returned operation_id must reference a ListDirectory flow.
    started_urn = self.client_id.Add("flows").Add(result.operation_id)
    started_flow = aff4.FACTORY.Open(started_urn, token=self.token)
    self.assertEqual(
        started_flow.Get(started_flow.Schema.TYPE),
        filesystem.ListDirectory.__name__)
def testInterrogateLinuxWithWtmp(self):
  """Test the Interrogate flow."""
  fixture_test_lib.ClientFixture(self.client_id, token=self.token)
  self.SetupClients(1, system="Linux", os_version="12.04")
  with vfs_test_lib.VFSOverrider(rdf_paths.PathSpec.PathType.OS,
                                 vfs_test_lib.FakeTestDataVFSHandler):
    # Restrict KB artifacts to the ones this test provides fixtures for,
    # and only accept the "login" netgroup.
    with test_lib.ConfigOverrider({
        "Artifacts.knowledge_base": [
            "LinuxWtmp", "NetgroupConfiguration", "LinuxRelease"
        ],
        "Artifacts.interrogate_store_in_aff4": [],
        "Artifacts.netgroup_filter_regexes": [r"^login$"]
    }):
      client_mock = action_mocks.InterrogatedClient()
      client_mock.InitializeClient()
      for _ in flow_test_lib.TestFlowHelper(
          discovery.Interrogate.__name__,
          client_mock,
          token=self.token,
          client_id=self.client_id):
        pass
      self.fd = aff4.FACTORY.Open(self.client_id, token=self.token)
      self._CheckAFF4Object("test_node", "Linux", 100 * 1000000)
      self._CheckClientInfo()
      self._CheckGRRConfig()
      self._CheckNotificationsCreated()
      # NOTE(review): setup used os_version "12.04" but the checks expect
      # "14.4" — presumably Interrogate overwrites the version from the
      # faked client data; confirm against InterrogatedClient's fixtures.
      self._CheckClientSummary(
          "Linux", "14.4", release="Ubuntu", kernel="3.13.0-39-generic")
      self._CheckRelease("Ubuntu", "14.4")
      # users 1,2,3 from wtmp
      # users yagharek, isaac from netgroup
      self._CheckUsers(["yagharek", "isaac", "user1", "user2", "user3"])
      self._CheckNetworkInfo()
      self._CheckVFS()
      self._CheckLabelIndex()
      self._CheckClientKwIndex(["Linux"], 1)
      self._CheckClientKwIndex(["Label2"], 1)
      self._CheckClientLibraries()
      self._CheckMemory()
def testHandlerStartsFlow(self):
  """The handler launches a MultiGetFile flow owned by the calling user."""
  fixture_test_lib.ClientFixture(self.client_id, token=self.token)
  request = vfs_plugin.ApiUpdateVfsFileContentArgs(
      client_id=self.client_id, file_path=self.file_path)
  result = self.handler.Handle(request, token=self.token)
  expected_name = transfer.MultiGetFile.__name__
  if data_store.RelationalDBFlowsEnabled():
    started_flow = data_store.REL_DB.ReadFlowObject(self.client_id.Basename(),
                                                    result.operation_id)
    self.assertEqual(started_flow.flow_class_name, expected_name)
    self.assertEqual(started_flow.creator, self.token.username)
  else:
    # The returned operation_id must reference a MultiGetFile flow.
    started_urn = self.client_id.Add("flows").Add(result.operation_id)
    started_flow = aff4.FACTORY.Open(started_urn, token=self.token)
    self.assertEqual(
        started_flow.Get(started_flow.Schema.TYPE), expected_name)
def Run(self):
  """Regression check for CreateVfsRefreshOperation (relational DB)."""
  client_id = self.SetupClient(0)
  # Choose some directory with pathspec in the ClientFixture.
  self.file_path = "fs/os/Users/Shared"
  fixture_test_lib.ClientFixture(client_id)

  def ReplaceFlowId():
    # Map the generated flow id onto a constant for stable golden output.
    flow_objects = data_store.REL_DB.ReadAllFlowObjects(client_id=client_id)
    return {flow_objects[0].flow_id: "ABCDEF"}

  with test_lib.FakeTime(42):
    request_args = vfs_plugin.ApiCreateVfsRefreshOperationArgs(
        client_id=client_id, file_path=self.file_path, max_depth=1)
    self.Check(
        "CreateVfsRefreshOperation", args=request_args, replace=ReplaceFlowId)
def testInterrogateWindows(self):
  """Test the Interrogate flow."""
  fixture_test_lib.ClientFixture(self.client_id, token=self.token)
  self.SetupClients(1, system="Windows", os_version="6.2", arch="AMD64")
  # Fake both the registry and the filesystem so artifact collection has
  # data to read from.
  with vfs_test_lib.VFSOverrider(rdf_paths.PathSpec.PathType.REGISTRY,
                                 vfs_test_lib.FakeRegistryVFSHandler):
    with vfs_test_lib.VFSOverrider(rdf_paths.PathSpec.PathType.OS,
                                   vfs_test_lib.FakeFullVFSHandler):
      client_mock = action_mocks.InterrogatedClient()
      client_mock.InitializeClient(
          system="Windows", version="6.1.7600", kernel="6.1.7601")
      # Run the flow in the simulated way
      for _ in flow_test_lib.TestFlowHelper(
          discovery.Interrogate.__name__,
          client_mock,
          token=self.token,
          client_id=self.client_id):
        pass
      self.fd = aff4.FACTORY.Open(self.client_id, token=self.token)
      self._CheckAFF4Object("test_node", "Windows", 100 * 1000000)
      self._CheckClientInfo()
      self._CheckGRRConfig()
      self._CheckNotificationsCreated()
      self._CheckClientSummary("Windows", "6.1.7600", kernel="6.1.7601")
      # jim parsed from registry profile keys
      self._CheckUsers(["jim", "kovacs"])
      self._CheckNetworkInfo()
      self._CheckVFS()
      self._CheckLabelIndex()
      self._CheckWindowsDiskInfo()
      self._CheckRegistryPathspec()
      # Keyword index: the client must be findable as Windows, not Linux.
      self._CheckClientKwIndex(["Linux"], 0)
      self._CheckClientKwIndex(["Windows"], 1)
      self._CheckClientKwIndex(["Label2"], 1)
      self._CheckMemory()
def testAnalyzeClient(self):
  """AnalyzeClient extracts the expected keywords from a fixture client."""
  index = aff4.FACTORY.Create(
      "aff4:/client-index/",
      aff4_type=client_index.ClientIndex,
      mode="rw",
      token=self.token)
  fixture_test_lib.ClientFixture("aff4:/" + CLIENT_ID, token=self.token)
  client = aff4.FACTORY.Create(
      "aff4:/" + CLIENT_ID,
      aff4_type=aff4_grr.VFSGRRClient,
      mode="rw",
      token=self.token)
  # Attach a knowledge base with two users whose full names contain
  # punctuation that the keyword extractor must tokenize and strip.
  kb = rdf_client.KnowledgeBase()
  kb.users.Append(
      rdf_client.User(
          username="******",
          full_name="Eric (Bertrand ) 'Russell' \"Logician\" Jacobson"))
  kb.users.Append(
      rdf_client.User(username="******", full_name="Steve O'Bryan"))
  client.Set(client.Schema.KNOWLEDGE_BASE(kb))
  _, keywords = index.AnalyzeClient(client)
  # Should not contain an empty string.
  self.assertNotIn("", keywords)
  # OS of the client
  self.assertIn("windows", keywords)
  # Users of the client.
  self.assertIn("bert", keywords)
  self.assertIn("bertrand", keywords)
  # Bare punctuation must not survive tokenization.
  self.assertNotIn(")", keywords)
  self.assertIn("russell", keywords)
  self.assertIn("logician", keywords)
  self.assertIn("ernie", keywords)
  self.assertIn("eric", keywords)
  self.assertIn("jacobson", keywords)
  # Apostrophes inside names are preserved.
  self.assertIn("steve o'bryan", keywords)
  self.assertIn("o'bryan", keywords)
  # Client information.
  self.assertIn("grr monitor", keywords)
  self.assertIn("client-label-23", keywords)
def Run(self):
  """Regression check for CreateVfsRefreshOperation (AFF4 datastore)."""
  client_id = self.SetupClient(0)
  # Choose some directory with pathspec in the ClientFixture.
  self.file_path = "fs/os/Users/Shared"
  fixture_test_lib.ClientFixture(client_id, token=self.token)

  def ReplaceFlowId():
    # Flow urns are nondeterministic; map the only flow's id to a constant.
    flows_dir = aff4.FACTORY.Open(client_id.Add("flows"), token=self.token)
    first_urn = list(flows_dir.ListChildren())[0]
    return {first_urn.Basename(): "W:ABCDEF"}

  with test_lib.FakeTime(42):
    request_args = vfs_plugin.ApiCreateVfsRefreshOperationArgs(
        client_id=client_id.Basename(),
        file_path=self.file_path,
        max_depth=1)
    self.Check(
        "CreateVfsRefreshOperation", args=request_args, replace=ReplaceFlowId)
def testGrrMessageConverter(self):
  """Converted messages carry the hunt source URN and the conversion time."""
  hunt_results_urn = "aff4:/hunts/" + str(queues.HUNTS) + ":000000/Results"
  message = rdf_flows.GrrMessage(payload=DummyTestRDFValue4("some"))
  message.source = self.client_id
  fixture_test_lib.ClientFixture(self.client_id)
  metadata = base.ExportedMetadata(
      source_urn=rdfvalue.RDFURN(hunt_results_urn))
  converter = grr_message.GrrMessageConverter()
  # Freeze the clock so the converted result's timestamp is predictable.
  with test_lib.FakeTime(2):
    converted = list(converter.Convert(metadata, message))
  self.assertLen(converted, 1)
  self.assertEqual(converted[0].timestamp,
                   rdfvalue.RDFDatetime.FromSecondsSinceEpoch(2))
  self.assertEqual(converted[0].source_urn, hunt_results_urn)
def testVFSFileStartsNewMultiGetFileWhenLockingFlowHasFinished(self):
  """A new MultiFileGet can be started when the locking flow has finished."""
  client_id = self.SetupClients(1)[0]
  fixture_test_lib.ClientFixture(client_id, token=self.token)
  # We need to choose a file path having a pathsepc.
  path = "fs/os/c/bin/bash"
  with aff4.FACTORY.Create(
      client_id.Add(path), aff4_type=aff4_grr.VFSFile, mode="rw",
      token=self.token) as file_fd:
    # Starts a MultiGetFile flow.
    first_update_flow_urn = file_fd.Update()
    # Check that there is exactly one flow on the client.
    # (assertLen for consistency with the other tests in this file.)
    flows_fd = aff4.FACTORY.Open(client_id.Add("flows"), token=self.token)
    flows = list(flows_fd.ListChildren())
    self.assertLen(flows, 1)
    # Finish the flow holding the lock.
    client_mock = action_mocks.ActionMock()
    for _ in flow_test_lib.TestFlowHelper(
        flows[0], client_mock, client_id=client_id, token=self.token):
      pass
    # The flow holding the lock has finished, so Update() should start a new
    # flow.
    second_update_flow_urn = file_fd.Update()
    # There should be two flows now.
    flows_fd = aff4.FACTORY.Open(client_id.Add("flows"), token=self.token)
    flows = list(flows_fd.ListChildren())
    self.assertLen(flows, 2)
    # Make sure that each Update() started a new flow and that the second
    # flow is holding the lock.
    self.assertNotEqual(first_update_flow_urn, second_update_flow_urn)
    self.assertEqual(second_update_flow_urn,
                     file_fd.Get(file_fd.Schema.CONTENT_LOCK))
def testNotificationIsSent(self):
  """A completed recursive refresh produces a user notification."""
  fixture_test_lib.ClientFixture(self.client_id)
  request = vfs_plugin.ApiCreateVfsRefreshOperationArgs(
      client_id=self.client_id,
      file_path=self.file_path,
      max_depth=0,
      notify_user=True)
  result = self.handler.Handle(request, context=self.context)
  # Let the started flow run to completion so the notification fires.
  flow_test_lib.RunFlow(
      self.client_id, result.operation_id, check_flow_errors=False)
  notifications = self.GetUserNotifications(self.context.username)
  first_notification = notifications[0]
  self.assertIn("Recursive Directory Listing complete",
                first_notification.message)
  self.assertEqual(first_notification.reference.vfs_file.path_components,
                   ["Users", "Shared"])
def Run(self):
  """Regression check for UpdateVfsFileContent (works on either datastore)."""
  client_urn = self.SetupClient(0)
  client_id = client_urn.Basename()
  self.file_path = "fs/os/c/bin/bash"
  fixture_test_lib.ClientFixture(client_urn, token=self.token)

  def ReplaceFlowId():
    # Map the generated flow id to a constant, whichever datastore is active.
    if data_store.RelationalDBEnabled():
      rel_flows = data_store.REL_DB.ReadAllFlowObjects(client_id=client_id)
      return {rel_flows[0].flow_id: "W:ABCDEF"}
    flows_dir = aff4.FACTORY.Open(client_urn.Add("flows"), token=self.token)
    first_urn = list(flows_dir.ListChildren())[0]
    return {first_urn.Basename(): "W:ABCDEF"}

  with test_lib.FakeTime(42):
    request_args = vfs_plugin.ApiUpdateVfsFileContentArgs(
        client_id=client_id, file_path=self.file_path)
    self.Check(
        "UpdateVfsFileContent", args=request_args, replace=ReplaceFlowId)
def testInterrogateCloudMetadataLinux(self):
  """Check google cloud metadata on linux."""
  fixture_test_lib.ClientFixture(self.client_id, token=self.token)
  self.SetupClients(1, system="Linux", os_version="12.04")
  with vfs_test_lib.VFSOverrider(rdf_paths.PathSpec.PathType.OS,
                                 vfs_test_lib.FakeTestDataVFSHandler):
    # Restrict KB artifacts to those with test fixtures; only the "login"
    # netgroup is accepted.
    with test_lib.ConfigOverrider({
        "Artifacts.knowledge_base": [
            "LinuxWtmp", "NetgroupConfiguration", "LinuxRelease"
        ],
        "Artifacts.interrogate_store_in_aff4": [],
        "Artifacts.netgroup_filter_regexes": [r"^login$"]
    }):
      client_mock = action_mocks.InterrogatedClient()
      client_mock.InitializeClient()
      for _ in flow_test_lib.TestFlowHelper(
          discovery.Interrogate.__name__,
          client_mock,
          token=self.token,
          client_id=self.client_id):
        pass
      # Reopen the client and verify only the cloud metadata attributes.
      self.fd = aff4.FACTORY.Open(self.client_id, token=self.token)
      self._CheckCloudMetadata()
def setUp(self):
  """Seeds client C.0000000000000001 with three historical snapshots.

  The three versions (OS version, hostname, memory size at TIME_0..TIME_2)
  let the tests exercise the client history views.
  """
  super(TestHostInformation, self).setUp()
  self.client_id = "C.0000000000000001"
  fixture_test_lib.ClientFixture(self.client_id, token=self.token)
  self.RequestAndGrantClientApproval(self.client_id)

  def _WriteVersion(fake_time, os_version, hostname, memory_size):
    # Writes one versioned set of client attributes at the given fake time.
    with test_lib.FakeTime(fake_time):
      with aff4.FACTORY.Open(
          self.client_id, mode="rw", token=self.token) as fd:
        fd.Set(fd.Schema.OS_VERSION, rdf_client.VersionString(os_version))
        fd.Set(fd.Schema.HOSTNAME(hostname))
        fd.Set(fd.Schema.MEMORY_SIZE(memory_size))

  _WriteVersion(gui_test_lib.TIME_0, "6.1.7000", "Hostname T0", 4294967296)
  _WriteVersion(gui_test_lib.TIME_1, "6.1.8000", "Hostname T1", 8589934592)
  _WriteVersion(gui_test_lib.TIME_2, "7.0.0000", "Hostname T2", 12884901888)
def SetupTestTimeline(self):
  """Writes five timestamped versions of a fixture file with stat and hash.

  Returns:
    The client urn of the client the file was written for.
  """
  import binascii  # Local import: only needed for the fixed hash constant.
  client_id = self.SetupClient(0)
  fixture_test_lib.ClientFixture(client_id, token=self.token)
  # Choose some directory with pathspec in the ClientFixture.
  self.category_path = "fs/os"
  self.folder_path = self.category_path + "/Users/中国新闻网新闻中/Shared"
  self.file_path = self.folder_path + "/a.txt"
  file_urn = client_id.Add(self.file_path)
  # One version of the file per fake timestamp 0..4.
  for i in range(0, 5):
    with test_lib.FakeTime(i):
      stat_entry = rdf_client.StatEntry()
      stat_entry.st_mtime = rdfvalue.RDFDatetimeSeconds.Now()
      stat_entry.pathspec.path = self.file_path[len(self.category_path):]
      stat_entry.pathspec.pathtype = rdf_paths.PathSpec.PathType.OS
      # BUGFIX: str.decode("hex") is Python 2 only (str has no .decode in
      # Python 3); binascii.unhexlify works on both.
      hash_entry = rdf_crypto.Hash(
          sha256=binascii.unhexlify(
              "0e8dc93e150021bb4752029ebbff51394aa36f069cf19901578"
              "e4f06017acdb5"))
      with aff4.FACTORY.Create(
          file_urn, aff4_grr.VFSFile, mode="w", token=self.token) as fd:
        fd.Set(fd.Schema.STAT, stat_entry)
        fd.Set(fd.Schema.HASH, hash_entry)
      # Mirror the write into the relational store when dual-writing.
      if data_store.RelationalDBWriteEnabled():
        cid = client_id.Basename()
        path_info = rdf_objects.PathInfo.FromStatEntry(stat_entry)
        path_info.hash_entry = hash_entry
        data_store.REL_DB.WritePathInfos(cid, [path_info])
  return client_id
def testInterrogateCloudMetadataWindows(self):
  """Check google cloud metadata on windows."""
  fixture_test_lib.ClientFixture(self.client_id, token=self.token)
  self.SetupClients(1, system="Windows", os_version="6.2", arch="AMD64")
  # Fake both the registry and the filesystem for artifact collection.
  with vfs_test_lib.VFSOverrider(rdf_paths.PathSpec.PathType.REGISTRY,
                                 vfs_test_lib.FakeRegistryVFSHandler):
    with vfs_test_lib.VFSOverrider(rdf_paths.PathSpec.PathType.OS,
                                   vfs_test_lib.FakeFullVFSHandler):
      client_mock = action_mocks.InterrogatedClient()
      client_mock.InitializeClient(
          system="Windows", version="6.1.7600", kernel="6.1.7601")
      # Force platform.system() to "Windows" — presumably something in the
      # cloud-metadata path branches on the host platform; confirm.
      with mock.patch.object(platform, "system", return_value="Windows"):
        for _ in flow_test_lib.TestFlowHelper(
            discovery.Interrogate.__name__,
            client_mock,
            token=self.token,
            client_id=self.client_id):
          pass
      # Reopen the client and verify only the cloud metadata attributes.
      self.fd = aff4.FACTORY.Open(self.client_id, token=self.token)
      self._CheckCloudMetadata()
def testNotificationIsSent(self):
  """A completed recursive refresh produces a user notification.

  Runs against either the relational or the legacy AFF4 datastore,
  branching on the configured mode.
  """
  fixture_test_lib.ClientFixture(self.client_id, token=self.token)
  args = vfs_plugin.ApiCreateVfsRefreshOperationArgs(
      client_id=self.client_id,
      file_path=self.file_path,
      max_depth=0,
      notify_user=True)
  result = self.handler.Handle(args, token=self.token)
  if data_store.RelationalDBFlowsEnabled():
    flow_test_lib.RunFlow(
        self.client_id, result.operation_id, check_flow_errors=False)
  else:
    # Finish flow and check if there are any new notifications.
    flow_urn = rdfvalue.RDFURN(result.operation_id)
    client_mock = action_mocks.ActionMock()
    flow_test_lib.TestFlowHelper(
        flow_urn,
        client_mock,
        client_id=self.client_id,
        token=self.token,
        check_flow_errors=False)
  pending_notifications = self.GetUserNotifications(self.token.username)
  self.assertIn("Recursive Directory Listing complete",
                pending_notifications[0].message)
  # The notification's target is represented differently per datastore:
  # path components in the relational model, a full URN subject in AFF4.
  if data_store.RelationalDBReadEnabled():
    self.assertEqual(
        pending_notifications[0].reference.vfs_file.path_components,
        ["Users", "Shared"])
  else:
    self.assertEqual(pending_notifications[0].subject,
                     self.client_id.Add(self.file_path))
def setUp(self):
  """Creates one client and populates it with the standard fixture."""
  super(ApiClientLibVfsTest, self).setUp()
  clients = self.SetupClients(1)
  self.client_urn = clients[0]
  fixture_test_lib.ClientFixture(self.client_urn, self.token)
def setUp(self):
  """Creates a fixture-populated test client."""
  super().setUp()
  client_id = self.SetupClient(0)
  fixture_test_lib.ClientFixture(client_id)
  self.client_id = client_id
def setUp(self):
  """Creates a fixture-populated test client."""
  super(ApiClientLibVfsTest, self).setUp()
  client_id = self.SetupClient(0)
  fixture_test_lib.ClientFixture(client_id)
  self.client_id = client_id
def setUp(self):
  """Creates a client whose fixture is written at age 42."""
  super(ApiListFilesHandlerRegressionTest, self).setUp()
  clients = self.SetupClients(1)
  self.client_id = clients[0]
  fixture_test_lib.ClientFixture(self.client_id, token=self.token, age=42)
def setUp(self):
  """Populates the test client with the standard fixture."""
  super(GrrKbTest, self).setUp()
  # NOTE(review): self.client_id is presumably set by the base class's
  # setUp — confirm against GrrKbTest's superclass.
  fixture_test_lib.ClientFixture(self.client_id, token=self.token)