def setUp(self):
  """Starts a FileFinder hunt, runs it on one client and records results."""
  super(ApiGetHuntFileHandlerTest, self).setUp()

  self.handler = hunt_plugin.ApiGetHuntFileHandler()

  self.file_path = os.path.join(self.base_path, "test.plist")
  self.hunt = hunts.GRRHunt.StartHunt(
      hunt_name="GenericHunt",
      flow_runner_args=rdf_flows.FlowRunnerArgs(
          flow_name=file_finder.FileFinder.__name__),
      flow_args=rdf_file_finder.FileFinderArgs(
          paths=[self.file_path],
          action=rdf_file_finder.FileFinderAction(action_type="DOWNLOAD"),),
      client_rate=0,
      token=self.token)
  self.hunt.Run()

  self.results_urn = self.hunt.results_collection_urn
  self.aff4_file_path = "fs/os/%s" % self.file_path

  # One client is enough: the handler under test reads a single hunt result.
  self.client_id = self.SetupClients(1)[0]
  self.AssignTasksToClients(client_ids=[self.client_id])
  mock_client = action_mocks.FileFinderClientMock()
  test_lib.TestHuntHelper(mock_client, [self.client_id], token=self.token)
def testFileFinderWorkflowWorks(self):
  """A FileFinder flow created via the API router runs to completion."""
  self.InitRouterConfig(
      self.__class__.FILE_FINDER_ROUTER_CONFIG % self.token.username)

  client_ref = self.api.Client(client_id=self.client_id.Basename())

  flow_args = file_finder.FileFinderArgs(paths=[
      os.path.join(self.base_path, "test_data", "test.plist"),
      os.path.join(self.base_path, "test_data", "numbers.txt"),
      os.path.join(self.base_path, "test_data", "numbers.txt.ver2")
  ]).AsPrimitiveProto()
  flow_obj = client_ref.CreateFlow(
      name=file_finder.FileFinder.__name__, args=flow_args)
  self.assertEqual(flow_obj.data.state, flow_obj.data.RUNNING)

  # Now run the flow we just started.
  client_urn = rdf_client.ClientURN(flow_obj.client_id)
  flow_urn = client_urn.Add("flows").Add(flow_obj.flow_id)
  for _ in test_lib.TestFlowHelper(
      flow_urn,
      client_id=client_urn,
      client_mock=action_mocks.FileFinderClientMock(),
      token=self.token):
    pass

  # Refresh the flow object and verify that it terminated cleanly.
  flow_obj = client_ref.Flow(flow_obj.flow_id).Get()
  self.assertEqual(flow_obj.data.state, flow_obj.data.TERMINATED)
def setUp(self):
  """Prepares an approved fixture client and a FileFinder client mock."""
  super(TestFlowManagement, self).setUp()

  # Client action stubs used by the flows under test.
  self.action_mock = action_mocks.FileFinderClientMock()

  with self.ACLChecksDisabled():
    self.client_id = rdf_client.ClientURN("C.0000000000000001")
    self.RequestAndGrantClientApproval(self.client_id)
def testAttributesOfFileFoundInHashFileStoreAreSetCorrectly(self):
  """Deduplicated files share STAT/SIZE/CONTENT_LAST across clients.

  The same file is downloaded from two clients. The second copy is served
  from the hash file store (FileStoreImage) rather than a fresh blob image,
  yet both AFF4 objects must expose identical attributes.
  """
  client_ids = self.SetupClients(2)
  filename = os.path.join(self.base_path, "tcpip.sig")
  pathspec = rdf_paths.PathSpec(
      pathtype=rdf_paths.PathSpec.PathType.OS, path=filename)
  urn1 = aff4_grr.VFSGRRClient.PathspecToURN(pathspec, client_ids[0])
  urn2 = aff4_grr.VFSGRRClient.PathspecToURN(pathspec, client_ids[1])

  for client_id in client_ids:
    client_mock = action_mocks.FileFinderClientMock()
    for _ in test_lib.TestFlowHelper(
        file_finder.FileFinder.__name__,
        client_mock,
        token=self.token,
        client_id=client_id,
        paths=[filename],
        action=rdf_file_finder.FileFinderAction(
            action_type=rdf_file_finder.FileFinderAction.Action.DOWNLOAD)):
      pass

    # Running worker to make sure FileStore.AddFileToStore event is processed
    # by the worker.
    worker = test_lib.MockWorker(token=self.token)
    worker.Simulate()

  fd1 = aff4.FACTORY.Open(urn1, token=self.token)
  # assertIsInstance gives an informative failure message (actual type is
  # shown) unlike assertTrue(isinstance(...)).
  self.assertIsInstance(fd1, aff4_grr.VFSBlobImage)

  fd2 = aff4.FACTORY.Open(urn2, token=self.token)
  self.assertIsInstance(fd2, filestore.FileStoreImage)

  self.assertEqual(fd1.Get(fd1.Schema.STAT), fd2.Get(fd2.Schema.STAT))
  self.assertEqual(fd1.Get(fd1.Schema.SIZE), fd2.Get(fd2.Schema.SIZE))
  self.assertEqual(
      fd1.Get(fd1.Schema.CONTENT_LAST), fd2.Get(fd2.Schema.CONTENT_LAST))
def testGetArtifact1(self):
  """Test we can get a basic artifact."""
  client_mock = action_mocks.FileFinderClientMock()
  client = aff4.FACTORY.Open(self.client_id, token=self.token, mode="rw")
  client.Set(client.Schema.SYSTEM("Linux"))
  client.Flush()

  # Dynamically add an ArtifactSource specifying the base path.
  file_path = os.path.join(self.base_path, "test_img.dd")
  coll1 = artifact_registry.ArtifactSource(
      type=artifact_registry.ArtifactSource.SourceType.FILE,
      attributes={"paths": [file_path]})
  self.fakeartifact.sources.append(coll1)

  artifact_list = ["FakeArtifact"]
  for _ in test_lib.TestFlowHelper(
      "ArtifactCollectorFlow",
      client_mock,
      artifact_list=artifact_list,
      use_tsk=False,
      token=self.token,
      client_id=self.client_id):
    pass

  # Test the AFF4 file that was created.
  fd1 = aff4.FACTORY.Open(
      "%s/fs/os/%s" % (self.client_id, file_path), token=self.token)
  # Fix: the original left the local file handle open (leak); close it
  # deterministically with a context manager. Seeking to the end and
  # calling tell() yields the on-disk size to compare against SIZE.
  with open(file_path, "rb") as fd2:
    fd2.seek(0, 2)
    local_size = fd2.tell()
  self.assertEqual(local_size, int(fd1.Get(fd1.Schema.SIZE)))
def setUp(self):
  """Prepares a Linux client with a test user and image-backed mountpoints."""
  super(TestWebHistory, self).setUp()

  # Set up client info.
  self.client = aff4.FACTORY.Open(self.client_id, mode="rw", token=self.token)
  self.client.Set(self.client.Schema.SYSTEM("Linux"))

  knowledge_base = self.client.Get(self.client.Schema.KNOWLEDGE_BASE)
  knowledge_base.MergeOrAddUser(
      rdf_client.User(
          username="******",
          full_name="test user",
          homedir="/home/test/",
          last_logon=250))
  self.client.Set(knowledge_base)
  self.client.Close()

  self.client_mock = action_mocks.FileFinderClientMock()

  # Mock the client to make it look like the root partition is mounted off
  # the test image. This will force all flow access to come off the image.
  def FakeGetMountpoints():
    return {"/": (os.path.join(self.base_path, "test_img.dd"), "ext2")}

  # Keep the originals so tearDown (elsewhere) can restore them.
  self.orig_linux_mp = client_utils_linux.GetMountpoints
  self.orig_osx_mp = client_utils_osx.GetMountpoints
  client_utils_linux.GetMountpoints = FakeGetMountpoints
  client_utils_osx.GetMountpoints = FakeGetMountpoints
def setUp(self):
  """Creates an approved test client with a known hostname."""
  super(TestFlowNotifications, self).setUp()

  # Client action stubs used by the flows under test.
  self.action_mock = action_mocks.FileFinderClientMock()

  self.client_id = rdf_client.ClientURN("C.0000000000000001")
  with aff4.FACTORY.Open(
      self.client_id, mode="rw", token=self.token) as client_fd:
    client_fd.Set(client_fd.Schema.HOSTNAME("HostC.0000000000000001"))
  self.RequestAndGrantClientApproval(self.client_id)
def _CreateHuntWithDownloadedFile(self):
  """Runs a sample hunt on the test clients so it collects test.plist.

  Returns:
    The hunt object that was created and run.
  """
  with self.ACLChecksDisabled():
    hunt = self.CreateSampleHunt(
        path=os.path.join(self.base_path, "test.plist"), client_count=1)

    mock_client = action_mocks.FileFinderClientMock()
    test_lib.TestHuntHelper(mock_client, self.client_ids, False, self.token)

    return hunt
def setUp(self):
  """Loads checks (once per class) and fakes the client's OS-level VFS."""
  super(TestCheckFlows, self).setUp()

  # Only load the checks once. The explicit `is False` comparison keeps the
  # original semantics: an empty-but-loaded result is not retried.
  if self.checks_loaded is False:
    self.checks_loaded = self.LoadChecks()
  if not self.checks_loaded:
    raise RuntimeError("No checks to test.")

  self.client_mock = action_mocks.FileFinderClientMock()

  test_lib.ClientFixture(self.client_id, token=self.token)

  self.vfs_overrider = test_lib.VFSOverrider(
      rdf_paths.PathSpec.PathType.OS, test_lib.FakeTestDataVFSHandler)
  self.vfs_overrider.Start()
def testDownloadDirectorySub(self):
  """Test a FileFinder flow with depth=5."""
  with test_lib.VFSOverrider(rdf_paths.PathSpec.PathType.OS,
                             test_lib.ClientVFSHandlerFixture):
    # Mock the client actions FileFinder uses.
    client_mock = action_mocks.FileFinderClientMock()

    for _ in test_lib.TestFlowHelper(
        "FileFinder",
        client_mock,
        client_id=self.client_id,
        paths=["/c/Downloads/**5"],
        action=file_finder.FileFinderAction(
            action_type=file_finder.FileFinderAction.Action.DOWNLOAD),
        token=self.token):
      pass

    # Check if the base path was created
    output_path = self.client_id.Add("fs/os/c/Downloads")
    output_fd = aff4.FACTORY.Open(output_path, token=self.token)
    children = list(output_fd.OpenChildren())

    # There should be 6 children:
    expected_children = u"a.txt b.txt c.txt d.txt sub1 中国新闻网新闻中.txt"
    self.assertEqual(len(children), 6)
    self.assertEqual(expected_children.split(),
                     sorted([child.urn.Basename() for child in children]))

    # Fix: the original for/break search silently fell through to the last
    # child when "sub1" was absent; fail loudly instead.
    sub1_fd = next(
        (child for child in children if child.urn.Basename() == "sub1"), None)
    self.assertIsNotNone(sub1_fd)
    sub_children = list(sub1_fd.OpenChildren())

    # There should be 4 children: a.txt, b.txt, c.txt, d.txt
    expected_children = "a.txt b.txt c.txt d.txt"
    self.assertEqual(len(sub_children), 4)
    self.assertEqual(
        expected_children.split(),
        sorted([child.urn.Basename() for child in sub_children]))
def setUp(self):
  """Creates a Linux client whose knowledge base contains a test user."""
  super(TestWebHistoryWithArtifacts, self).setUp()
  self.SetupClients(1, system="Linux", os_version="12.04")

  client_fd = aff4.FACTORY.Open(self.client_id, token=self.token, mode="rw")
  self.kb = client_fd.Get(client_fd.Schema.KNOWLEDGE_BASE)
  self.kb.users.Append(
      rdf_client.User(
          username="******",
          full_name="test user",
          homedir="/home/test/",
          last_logon=250))
  client_fd.AddAttribute(client_fd.Schema.KNOWLEDGE_BASE, self.kb)
  client_fd.Flush()

  self.client_mock = action_mocks.FileFinderClientMock()
def setUp(self):
  """Starts and runs a FileFinder flow that downloads test.plist."""
  super(ApiGetFlowFilesArchiveHandlerTest, self).setUp()

  self.handler = flow_plugin.ApiGetFlowFilesArchiveHandler()

  self.client_id = self.SetupClients(1)[0]
  self.flow_urn = flow.GRRFlow.StartFlow(
      flow_name=file_finder.FileFinder.__name__,
      client_id=self.client_id,
      paths=[os.path.join(self.base_path, "test.plist")],
      action=file_finder.FileFinderAction(action_type="DOWNLOAD"),
      token=self.token)

  mock_client = action_mocks.FileFinderClientMock()
  for _ in test_lib.TestFlowHelper(
      self.flow_urn, mock_client, client_id=self.client_id, token=self.token):
    pass
def _testProcessCollectedArtifacts(self): client_mock = action_mocks.FileFinderClientMock() # Get KB initialized for _ in flow_test_lib.TestFlowHelper( artifact.KnowledgeBaseInitializationFlow.__name__, client_mock, client_id=self.client_id, token=self.token): pass artifact_list = ["WindowsPersistenceMechanismFiles"] with test_lib.Instrument(transfer.MultiGetFile, "Start") as getfile_instrument: for _ in flow_test_lib.TestFlowHelper( collectors.ArtifactCollectorFlow.__name__, client_mock, artifact_list=artifact_list, token=self.token, client_id=self.client_id, split_output_by_artifact=True): pass # Check MultiGetFile got called for our runkey files # TODO(user): RunKeys for S-1-5-20 are not found because users.sid only # expands to users with profiles. pathspecs = getfile_instrument.args[0][0].args.pathspecs self.assertItemsEqual([x.path for x in pathspecs], [u"C:\\Windows\\TEMP\\A.exe"]) artifact_list = ["BadPathspecArtifact"] with test_lib.Instrument(transfer.MultiGetFile, "Start") as getfile_instrument: for _ in flow_test_lib.TestFlowHelper( collectors.ArtifactCollectorFlow.__name__, client_mock, artifact_list=artifact_list, token=self.token, client_id=self.client_id, split_output_by_artifact=True): pass self.assertFalse(getfile_instrument.args)
def testDownloadDirectory(self):
  """Test a FileFinder flow with depth=1."""
  with vfs_test_lib.VFSOverrider(rdf_paths.PathSpec.PathType.OS,
                                 vfs_test_lib.ClientVFSHandlerFixture):
    # Mock the client actions FileFinder uses.
    client_mock = action_mocks.FileFinderClientMock()

    for _ in flow_test_lib.TestFlowHelper(
        file_finder.FileFinder.__name__,
        client_mock,
        client_id=self.client_id,
        paths=["/c/Downloads/*"],
        action=rdf_file_finder.FileFinderAction(
            action_type=rdf_file_finder.FileFinderAction.Action.DOWNLOAD),
        token=self.token):
      pass

    # Check if the base path was created
    output_path = self.client_id.Add("fs/os/c/Downloads")
    output_fd = aff4.FACTORY.Open(output_path, token=self.token)
    children = list(output_fd.OpenChildren())

    # There should be 6 children:
    expected_children = u"a.txt b.txt c.txt d.txt sub1 中国新闻网新闻中.txt"
    self.assertEqual(len(children), 6)
    self.assertEqual(expected_children.split(),
                     sorted([child.urn.Basename() for child in children]))

    # Fix: the original for/break search silently fell through to the last
    # child when "a.txt" was absent; fail loudly instead.
    child = next(
        (c for c in children if c.urn.Basename() == "a.txt"), None)
    self.assertIsNotNone(child)

    # Check the AFF4 type of the child, it should have changed
    # from VFSFile to VFSBlobImage
    self.assertEqual(child.__class__.__name__, "VFSBlobImage")
def setUp(self):
  """Starts a GenericHunt downloading test.plist and runs it on ten clients."""
  super(ApiGetHuntFilesArchiveHandlerTest, self).setUp()

  self.handler = hunt_plugin.ApiGetHuntFilesArchiveHandler()

  self.hunt = hunts.GRRHunt.StartHunt(
      hunt_name="GenericHunt",
      flow_runner_args=rdf_flows.FlowRunnerArgs(
          flow_name=file_finder.FileFinder.__name__),
      flow_args=file_finder.FileFinderArgs(
          paths=[os.path.join(self.base_path, "test.plist")],
          action=file_finder.FileFinderAction(action_type="DOWNLOAD"),),
      client_rate=0,
      token=self.token)
  self.hunt.Run()

  hunt_client_ids = self.SetupClients(10)
  self.AssignTasksToClients(client_ids=hunt_client_ids)
  mock_client = action_mocks.FileFinderClientMock()
  test_lib.TestHuntHelper(mock_client, hunt_client_ids, token=self.token)
def ReadFromSparseImage(self, length, offset):
  """Runs FetchBufferForSparseImage against a fresh sparse image.

  Args:
    length: Number of bytes the flow should fetch.
    offset: Offset at which fetching starts.

  Returns:
    The sparse image AFF4 object, reopened after the flow ran.
  """
  fd = self.CreateNewSparseImage()
  urn = fd.urn

  self.client_mock = action_mocks.FileFinderClientMock()
  for _ in test_lib.TestFlowHelper(
      "FetchBufferForSparseImage",
      self.client_mock,
      client_id=self.client_id,
      token=self.token,
      file_urn=urn,
      length=length,
      offset=offset):
    pass

  # Reopen the object so we can read the freshest version of the size
  # attribute.
  return aff4.FACTORY.Open(urn, token=self.token)
def ReadTestImage(self, size_threshold):
  """Converts the test image into an AFF4SparseImage and reopens it.

  Args:
    size_threshold: Threshold passed to the MakeNewAFF4SparseImage flow.

  Returns:
    The freshly opened AFF4 object backing the sparse image.
  """
  path = os.path.join(self.base_path, "test_img.dd")
  urn = rdfvalue.RDFURN(self.client_id.Add("fs/os").Add(path))

  pathspec = rdf_paths.PathSpec(
      path=path, pathtype=rdf_paths.PathSpec.PathType.OS)

  mock_client = action_mocks.FileFinderClientMock()

  # Get everything as an AFF4SparseImage
  for _ in test_lib.TestFlowHelper(
      "MakeNewAFF4SparseImage",
      mock_client,
      client_id=self.client_id,
      token=self.token,
      size_threshold=size_threshold,
      pathspec=pathspec):
    pass

  return aff4.FACTORY.Open(urn, token=self.token)
def testFileFinderWorkflowWorks(self):
  """End-to-end FileFinder run through the API, including archive export."""
  self.InitRouterConfig(
      self.__class__.FILE_FINDER_ROUTER_CONFIG % self.token.username)

  client_ref = self.api.Client(client_id=self.client_id.Basename())

  flow_args = rdf_file_finder.FileFinderArgs(
      paths=[
          os.path.join(self.base_path, "test.plist"),
          os.path.join(self.base_path, "numbers.txt"),
          os.path.join(self.base_path, "numbers.txt.ver2")
      ],
      action=rdf_file_finder.FileFinderAction(
          action_type=rdf_file_finder.FileFinderAction.Action.DOWNLOAD)
  ).AsPrimitiveProto()
  flow_obj = client_ref.CreateFlow(
      name=file_finder.FileFinder.__name__, args=flow_args)
  self.assertEqual(flow_obj.data.state, flow_obj.data.RUNNING)

  # Now run the flow we just started.
  client_urn = rdf_client.ClientURN(flow_obj.client_id)
  flow_urn = client_urn.Add("flows").Add(flow_obj.flow_id)
  for _ in flow_test_lib.TestFlowHelper(
      flow_urn,
      client_id=client_urn,
      client_mock=action_mocks.FileFinderClientMock(),
      token=self.token):
    pass

  # Refresh flow.
  flow_obj = client_ref.Flow(flow_obj.flow_id).Get()
  self.assertEqual(flow_obj.data.state, flow_obj.data.TERMINATED)

  # Check that we got 3 results (we downloaded 3 files).
  results = list(flow_obj.ListResults())
  self.assertEqual(len(results), 3)
  # We expect results to be FileFinderResult.
  self.assertItemsEqual(
      [os.path.basename(r.payload.stat_entry.pathspec.path)
       for r in results],
      ["test.plist", "numbers.txt", "numbers.txt.ver2"])

  # Now downloads the files archive.
  archive_stream = StringIO.StringIO()
  flow_obj.GetFilesArchive().WriteToStream(archive_stream)
  archive = zipfile.ZipFile(archive_stream)

  # Now check that the archive has only "test.plist" file, as it's the
  # only file that matches the whitelist (see FILE_FINDER_ROUTER_CONFIG).
  # There should be 3 items in the archive: the hash of the "test.plist"
  # file, the symlink to this hash and the MANIFEST file.
  archive_names = archive.namelist()
  self.assertEqual(len(archive_names), 3)

  # First component of every path in the archive is the containing folder,
  # we should strip it.
  archive_names = [
      os.path.join(*name.split(os.sep)[1:]) for name in archive_names
  ]

  with open(os.path.join(self.base_path, "test.plist")) as test_plist_fd:
    test_plist_hash = hashlib.sha256(test_plist_fd.read()).hexdigest()
  self.assertEqual(
      sorted([
          # pyformat: disable
          os.path.join(self.client_id.Basename(), "fs", "os",
                       self.base_path.strip("/"), "test.plist"),
          os.path.join("hashes", test_plist_hash),
          "MANIFEST"
          # pyformat: enable
      ]),
      sorted(archive_names))