def testDownloadCollectionWithFoldersEntries(self):
  """Check we can download a collection that also references folders."""
  fd = sequential_collection.GeneralIndexedCollection(self.collection_urn)
  with data_store.DB.GetMutationPool() as pool:
    # A plain file entry.
    fd.Add(
        rdf_file_finder.FileFinderResult(
            stat_entry=rdf_client_fs.StatEntry(
                pathspec=rdf_paths.PathSpec(path="testfile5", pathtype="OS"))),
        mutation_pool=pool)
    # A directory entry (st_mode marks it as a folder).
    fd.Add(
        rdf_file_finder.FileFinderResult(
            stat_entry=rdf_client_fs.StatEntry(
                pathspec=rdf_paths.PathSpec(path="testdir1", pathtype="OS"),
                st_mode=stat.S_IFDIR)),
        mutation_pool=pool)

  with utils.TempDirectory() as tmpdir:
    export_utils.DownloadCollection(
        self.collection_urn,
        tmpdir,
        overwrite=True,
        dump_client_info=True,
        token=self.token,
        max_threads=2)
    expected_outdir = os.path.join(tmpdir, self.out.Path()[1:])

    # List the output directory once instead of once per assertion.
    downloaded = os.listdir(expected_outdir)
    # Check we found both the file and the folder.
    self.assertIn("testfile5", downloaded)
    self.assertIn("testdir1", downloaded)
def testDownloadHuntResultCollection(self):
  """Check we can download files references in HuntResultCollection."""
  # Create a collection with URNs to some files.
  collection = results.HuntResultCollection(self.collection_urn)
  payloads = [
      rdfvalue.RDFURN(self.out.Add("testfile1")),
      rdf_client_fs.StatEntry(
          pathspec=rdf_paths.PathSpec(path="testfile2", pathtype="OS")),
      rdf_file_finder.FileFinderResult(
          stat_entry=rdf_client_fs.StatEntry(
              pathspec=rdf_paths.PathSpec(path="testfile5", pathtype="OS"))),
      collectors.ArtifactFilesDownloaderResult(
          downloaded_file=rdf_client_fs.StatEntry(
              pathspec=rdf_paths.PathSpec(path="testfile6", pathtype="OS"))),
  ]
  with data_store.DB.GetMutationPool() as pool:
    for payload in payloads:
      collection.AddAsMessage(payload, self.client_id, mutation_pool=pool)
  self._VerifyDownload()
def GlobReportMatch(self, response):
  """This method is called by the glob mixin when there is a match."""
  super(FileFinder, self).GlobReportMatch(response)
  # Wrap the matching stat entry and feed it into the condition chain.
  match_result = rdf_file_finder.FileFinderResult(stat_entry=response)
  self.ApplyCondition(match_result, condition_index=0)
def testNetgroupBufferParser(self):
  """Ensure we can extract users from a netgroup file."""
  parser = linux_file_parser.NetgroupBufferParser()
  buffers = [
      rdf_client.BufferReference(
          data=b"group1 (-,user1,) (-,user2,) (-,user3,)\n"),
      rdf_client.BufferReference(
          data=b"super_group3 (-,user5,) (-,user6,) group1 group2\n"),
  ]
  ff_result = rdf_file_finder.FileFinderResult(matches=buffers)
  # user2/user3 are blacklisted, so only user1/user5/user6 should survive.
  with test_lib.ConfigOverrider(
      {"Artifacts.netgroup_user_blacklist": ["user2", "user3"]}):
    parsed = list(parser.Parse(ff_result, None))
    self.assertCountEqual([user.username for user in parsed],
                          [u"user1", u"user5", u"user6"])
def testHuntAuthorizationIsRequiredToGenerateResultsArchive(self):
  """Generating a results archive without approval prompts for one."""
  pathspec = rdf_paths.PathSpec(
      path="/foo/bar", pathtype=rdf_paths.PathSpec.PathType.OS)
  values = [
      rdf_file_finder.FileFinderResult(
          stat_entry=rdf_client_fs.StatEntry(pathspec=pathspec))
  ]
  hunt_id, _ = self.CreateGenericHuntWithCollection(values=values)

  self.Open("/")
  self.Click("css=a[grrtarget=hunts]")
  self.Click("css=td:contains('%s')" % hunt_id)
  self.Click("css=li[heading=Results]")
  self.Click("css=button.DownloadButton")

  # No approval was granted, so the approval dialog must appear.
  self.WaitUntil(self.IsTextPresent, "Create a new approval request")
def Run(self, args):
  """Runs the configured action over every path expanded from args."""
  self.stat_cache = utils.StatCache()

  action = self._ParseAction(args)
  for target_path in GetExpandedPaths(args):
    self.Progress()
    try:
      # _Validate raises _SkipFileException for paths failing the conditions.
      found_matches = self._Validate(args, target_path)
      reply = rdf_file_finder.FileFinderResult()
      reply.matches = found_matches
      action.Execute(target_path, reply)
      self.SendReply(reply)
    except _SkipFileException:
      continue
def testPasswdBufferParser(self):
  """Ensure we can extract users from a passwd file."""
  parser = linux_file_parser.PasswdBufferParser()
  buf1 = rdf_client.BufferReference(
      data=b"user1:x:1000:1000:User1 Name,,,:/home/user1:/bin/bash\n")
  buf2 = rdf_client.BufferReference(
      data=b"user2:x:1000:1000:User2 Name,,,:/home/user2:/bin/bash\n")
  ff_result = rdf_file_finder.FileFinderResult(matches=[buf1, buf2])
  out = list(parser.Parse(ff_result, None))
  self.assertLen(out, 2)
  # Fixed: the original asserted isinstance on out[1] twice, leaving out[0]
  # unchecked. Verify both parsed entries are User objects.
  self.assertIsInstance(out[0], rdf_client.User)
  self.assertIsInstance(out[1], rdf_client.User)
  self.assertEqual(out[0].username, "user1")
  self.assertEqual(out[0].full_name, "User1 Name,,,")
def testShowsPerFileDownloadButtonForFileFinderHunt(self):
  """A per-file download button is rendered for FileFinder hunt results."""
  pathspec = rdf_paths.PathSpec(
      path="/foo/bar", pathtype=rdf_paths.PathSpec.PathType.OS)
  hunt_id, _ = self.CreateGenericHuntWithCollection(values=[
      rdf_file_finder.FileFinderResult(
          stat_entry=rdf_client_fs.StatEntry(pathspec=pathspec))
  ])

  self.Open("/")
  self.Click("css=a[grrtarget=hunts]")
  self.Click("css=td:contains('%s')" % hunt_id)
  self.Click("css=li[heading=Results]")

  self.WaitUntil(
      self.IsElementPresent,
      "css=grr-results-collection button:has(span.glyphicon-download)")
def testShowsGenerateArchiveButtonForFileFinderHunt(self):
  """The archive-download hint is shown for FileFinder hunt results."""
  stat_entry = rdf_client_fs.StatEntry(
      pathspec=rdf_paths.PathSpec(
          path="/foo/bar", pathtype=rdf_paths.PathSpec.PathType.OS))
  self.CreateGenericHuntWithCollection(
      values=[rdf_file_finder.FileFinderResult(stat_entry=stat_entry)])

  self.Open("/")
  # Navigate: hunts list -> this hunt -> its Results tab.
  for selector in [
      "css=a[grrtarget=hunts]",
      "css=td:contains('GenericHunt')",
      "css=li[heading=Results]",
  ]:
    self.Click(selector)

  self.WaitUntil(self.IsTextPresent,
                 "Files referenced in this collection can be downloaded")
def _AddTestData(self, fd):
  """Populates fd with one entry of each supported result type."""
  entries = [
      rdfvalue.RDFURN(self.out.Add("testfile1")),
      rdf_client_fs.StatEntry(
          pathspec=rdf_paths.PathSpec(path="testfile2", pathtype="OS")),
      rdf_file_finder.FileFinderResult(
          stat_entry=rdf_client_fs.StatEntry(
              pathspec=rdf_paths.PathSpec(path="testfile5", pathtype="OS"))),
      collectors.ArtifactFilesDownloaderResult(
          downloaded_file=rdf_client_fs.StatEntry(
              pathspec=rdf_paths.PathSpec(path="testfile6", pathtype="OS"))),
  ]
  with data_store.DB.GetMutationPool() as pool:
    for entry in entries:
      fd.Add(entry, mutation_pool=pool)
def testExportCommandIsShownForStatEntryResults(self):
  """The grr_api_shell export snippet is rendered for stat-entry results."""
  stat_entry = rdf_client_fs.StatEntry(
      pathspec=rdf_paths.PathSpec(
          path="/foo/bar", pathtype=rdf_paths.PathSpec.PathType.OS))
  hunt_urn = self.CreateGenericHuntWithCollection(
      values=[rdf_file_finder.FileFinderResult(stat_entry=stat_entry)])

  hunt_id = hunt_urn.Basename()
  self.Open("/#/hunts/%s/results" % hunt_id)
  self.Click("link=Show export command")

  # The snippet embeds the hunt id twice; colons are sanitized to
  # underscores in the output file name.
  self.WaitUntil(
      self.IsTextPresent,
      "/usr/bin/grr_api_shell 'http://localhost:8000/' "
      "--exec_code 'grrapi.Hunt(\"%s\").GetFilesArchive()."
      "WriteToFile(\"./hunt_results_%s.zip\")'" %
      (hunt_id, hunt_id.replace(":", "_")))
def testGenerateZipButtonGetsDisabledAfterClick(self):
  """Clicking the download button disables it and starts generation."""
  stat_entry = rdf_client_fs.StatEntry(
      pathspec=rdf_paths.PathSpec(
          path="/foo/bar", pathtype=rdf_paths.PathSpec.PathType.OS))
  hunt_urn = self.CreateGenericHuntWithCollection(
      values=[rdf_file_finder.FileFinderResult(stat_entry=stat_entry)])
  # Approval is granted up front so the click actually starts generation.
  self.RequestAndGrantHuntApproval(hunt_urn.Basename())

  self.Open("/")
  self.Click("css=a[grrtarget=hunts]")
  self.Click("css=td:contains('GenericHunt')")
  self.Click("css=li[heading=Results]")
  self.Click("css=button.DownloadButton")

  self.WaitUntil(self.IsElementPresent,
                 "css=button.DownloadButton[disabled]")
  self.WaitUntil(self.IsTextPresent, "Generation has started")
def testFileFinderResultExportConverterConvertsBufferRefsWithoutPathspecs(
    self):
  """Matches with no pathspec of their own still become ExportedMatch."""
  pathspec = rdf_paths.PathSpec(
      path="/some/path", pathtype=rdf_paths.PathSpec.PathType.OS)
  matches = [
      rdf_client.BufferReference(offset=42, length=43, data=b"somedata1"),
      rdf_client.BufferReference(offset=44, length=45, data=b"somedata2"),
  ]
  stat_entry = rdf_client_fs.StatEntry(
      pathspec=pathspec,
      st_mode=33184,
      st_ino=1063090,
      st_atime=1336469177,
      st_mtime=1336129892,
      st_ctime=1336129892,
      st_btime=1313131313)
  ff_result = rdf_file_finder.FileFinderResult(
      stat_entry=stat_entry, matches=matches)

  converter = file.FileFinderResultConverter()
  converted = list(converter.Convert(self.metadata, ff_result))

  # We expect 2 ExportedMatch instances in the results.
  exported_matches = sorted(
      (r for r in converted
       if isinstance(r, buffer_reference.ExportedMatch)),
      key=lambda x: x.offset)
  self.assertLen(exported_matches, 2)

  # Both matches inherit the URN from the result's stat entry pathspec.
  expected_urn = "aff4:/%s/fs/os/some/path" % self.client_id
  expected = [(42, 43, b"somedata1"), (44, 45, b"somedata2")]
  for match, (offset, length, data) in zip(exported_matches, expected):
    self.assertEqual(match.offset, offset)
    self.assertEqual(match.length, length)
    self.assertEqual(match.data, data)
    self.assertEqual(match.urn, expected_urn)
def Run(self, args):
  """Runs the OS file finder; only OS pathtypes are accepted."""
  if args.pathtype != rdf_paths.PathSpec.PathType.OS:
    raise ValueError(
        "FileFinderOS can only be used with OS paths, got {}".format(
            args.pathspec))

  self.stat_cache = filesystem.StatCache()

  action = self._ParseAction(args)
  for target_path in GetExpandedPaths(args):
    self.Progress()
    try:
      # _Validate raises _SkipFileException for paths failing the conditions.
      found_matches = self._Validate(args, target_path)
      reply = rdf_file_finder.FileFinderResult()
      reply.matches = found_matches
      action.Execute(target_path, reply)
      self.SendReply(reply)
    except _SkipFileException:
      continue
def testDownloadCollectionWithFlattenOption(self):
  """Check we can download files references in a collection."""
  # Create a collection with URNs to some files.
  fd = sequential_collection.GeneralIndexedCollection(self.collection_urn)
  with data_store.DB.GetMutationPool() as pool:
    for item in [
        rdfvalue.RDFURN(self.out.Add("testfile1")),
        rdf_client_fs.StatEntry(
            pathspec=rdf_paths.PathSpec(path="testfile2", pathtype="OS")),
        rdf_file_finder.FileFinderResult(
            stat_entry=rdf_client_fs.StatEntry(
                pathspec=rdf_paths.PathSpec(path="testfile5",
                                            pathtype="OS"))),
    ]:
      fd.Add(item, mutation_pool=pool)

  with utils.TempDirectory() as tmpdir:
    export_utils.DownloadCollection(
        self.collection_urn,
        tmpdir,
        overwrite=True,
        dump_client_info=True,
        flatten=True,
        token=self.token,
        max_threads=2)

    # Check that "files" folder is filled with symlinks to downloaded files.
    symlinks = os.listdir(os.path.join(tmpdir, "files"))
    self.assertLen(symlinks, 3)
    self.assertListEqual(
        sorted(symlinks), [
            "C.1000000000000000_fs_os_testfile1",
            "C.1000000000000000_fs_os_testfile2",
            "C.1000000000000000_fs_os_testfile5",
        ])

    # The symlink must point back into the per-client directory tree.
    link_path = os.path.join(tmpdir, "files",
                             "C.1000000000000000_fs_os_testfile1")
    self.assertEqual(
        os.readlink(link_path),
        os.path.join(tmpdir, "C.1000000000000000", "fs", "os", "testfile1"))
def __call__(self, stat_entry: rdf_client_fs.StatEntry,
             fd: vfs.VFSHandler) -> rdf_file_finder.FileFinderResult:
  """Builds a result carrying only the stat entry; the handler is unused."""
  del fd  # Unused.
  result = rdf_file_finder.FileFinderResult()
  result.stat_entry = stat_entry
  return result
def testFileFinderResultExportConverterConvertsHashes(self):
  """Hash entries on FileFinderResults are exported as hex digest strings."""
  pathspec = rdf_paths.PathSpec(path="/some/path",
                                pathtype=rdf_paths.PathSpec.PathType.OS)
  pathspec2 = rdf_paths.PathSpec(path="/some/path2",
                                 pathtype=rdf_paths.PathSpec.PathType.OS)

  # Raw hash bytes for the first file; the assertions below expect the
  # hex-encoded counterparts of these exact values.
  sha256 = binascii.unhexlify(
      "0e8dc93e150021bb4752029ebbff51394aa36f069cf19901578e4f06017acdb5")
  sha1 = binascii.unhexlify("7dd6bee591dfcb6d75eb705405302c3eab65e21a")
  md5 = binascii.unhexlify("bb0a15eefe63fd41f8dc9dee01c5cf9a")
  pecoff_md5 = binascii.unhexlify(
      "7dd6bee591dfcb6d75eb705405302c3eab65e21a")
  pecoff_sha1 = binascii.unhexlify(
      "7dd6bee591dfcb6d75eb705405302c3eab65e21a")
  stat_entry = rdf_client_fs.StatEntry(pathspec=pathspec,
                                       st_mode=33184,
                                       st_ino=1063090,
                                       st_atime=1336469177,
                                       st_mtime=1336129892,
                                       st_ctime=1336129892,
                                       st_btime=1331133113)
  hash_entry = rdf_crypto.Hash(sha256=sha256,
                               sha1=sha1,
                               md5=md5,
                               pecoff_md5=pecoff_md5,
                               pecoff_sha1=pecoff_sha1)

  # Rebind the hash names for the second file; hash_entry above has already
  # captured the first set of values.
  sha256 = binascii.unhexlify(
      "9e8dc93e150021bb4752029ebbff51394aa36f069cf19901578e4f06017acdb5")
  sha1 = binascii.unhexlify("6dd6bee591dfcb6d75eb705405302c3eab65e21a")
  md5 = binascii.unhexlify("8b0a15eefe63fd41f8dc9dee01c5cf9a")
  pecoff_md5 = binascii.unhexlify(
      "1dd6bee591dfcb6d75eb705405302c3eab65e21a")
  pecoff_sha1 = binascii.unhexlify(
      "1dd6bee591dfcb6d75eb705405302c3eab65e21a")
  stat_entry2 = rdf_client_fs.StatEntry(pathspec=pathspec2,
                                        st_mode=33184,
                                        st_ino=1063090,
                                        st_atime=1336469177,
                                        st_mtime=1336129892,
                                        st_ctime=1336129892,
                                        st_btime=1331331331)
  hash_entry2 = rdf_crypto.Hash(sha256=sha256,
                                sha1=sha1,
                                md5=md5,
                                pecoff_md5=pecoff_md5,
                                pecoff_sha1=pecoff_sha1)

  file_finder_result = rdf_file_finder.FileFinderResult(
      stat_entry=stat_entry, hash_entry=hash_entry)
  file_finder_result2 = rdf_file_finder.FileFinderResult(
      stat_entry=stat_entry2, hash_entry=hash_entry2)

  # Convert both results in one batch so BatchConvert is exercised.
  converter = file.FileFinderResultConverter()
  results = list(
      converter.BatchConvert([(self.metadata, file_finder_result),
                              (self.metadata, file_finder_result2)]))

  exported_files = [
      result for result in results if isinstance(result, file.ExportedFile)
  ]
  self.assertLen(exported_files, 2)
  self.assertCountEqual([x.basename for x in exported_files],
                        ["path", "path2"])

  # Order of exported files is not guaranteed, so dispatch on basename.
  for export_result in exported_files:
    if export_result.basename == "path":
      self.assertEqual(
          export_result.hash_sha256,
          "0e8dc93e150021bb4752029ebbff51394aa36f069cf19901578e4"
          "f06017acdb5")
      self.assertEqual(export_result.hash_sha1,
                       "7dd6bee591dfcb6d75eb705405302c3eab65e21a")
      self.assertEqual(export_result.hash_md5,
                       "bb0a15eefe63fd41f8dc9dee01c5cf9a")
      self.assertEqual(export_result.pecoff_hash_md5,
                       "7dd6bee591dfcb6d75eb705405302c3eab65e21a")
      self.assertEqual(export_result.pecoff_hash_sha1,
                       "7dd6bee591dfcb6d75eb705405302c3eab65e21a")
    elif export_result.basename == "path2":
      # NOTE(review): redundant with the elif guard above.
      self.assertEqual(export_result.basename, "path2")
      self.assertEqual(
          export_result.hash_sha256,
          "9e8dc93e150021bb4752029ebbff51394aa36f069cf19901578e4"
          "f06017acdb5")
      self.assertEqual(export_result.hash_sha1,
                       "6dd6bee591dfcb6d75eb705405302c3eab65e21a")
      self.assertEqual(export_result.hash_md5,
                       "8b0a15eefe63fd41f8dc9dee01c5cf9a")
      self.assertEqual(export_result.pecoff_hash_md5,
                       "1dd6bee591dfcb6d75eb705405302c3eab65e21a")
      self.assertEqual(export_result.pecoff_hash_sha1,
                       "1dd6bee591dfcb6d75eb705405302c3eab65e21a")
def testFileFinderResultExportConverter(self):
  """A FileFinderResult converts to one ExportedFile plus its matches."""
  pathspec = rdf_paths.PathSpec(path="/some/path",
                                pathtype=rdf_paths.PathSpec.PathType.OS)
  match1 = rdf_client.BufferReference(offset=42,
                                      length=43,
                                      data=b"somedata1",
                                      pathspec=pathspec)
  match2 = rdf_client.BufferReference(offset=44,
                                      length=45,
                                      data=b"somedata2",
                                      pathspec=pathspec)
  stat_entry = rdf_client_fs.StatEntry(pathspec=pathspec,
                                       st_mode=33184,
                                       st_ino=1063090,
                                       st_atime=1336469177,
                                       st_mtime=1336129892,
                                       st_ctime=1336129892,
                                       st_btime=1313131313)
  file_finder_result = rdf_file_finder.FileFinderResult(
      stat_entry=stat_entry, matches=[match1, match2])

  converter = file.FileFinderResultConverter()
  results = list(converter.Convert(self.metadata, file_finder_result))

  # We expect 1 ExportedFile instance in the results
  exported_files = [
      result for result in results if isinstance(result, file.ExportedFile)
  ]
  self.assertLen(exported_files, 1)
  self.assertEqual(exported_files[0].basename, "path")
  self.assertEqual(exported_files[0].urn,
                   "aff4:/%s/fs/os/some/path" % self.client_id)
  # Stat fields are copied through verbatim from the stat entry above.
  self.assertEqual(exported_files[0].st_mode, 33184)
  self.assertEqual(exported_files[0].st_ino, 1063090)
  self.assertEqual(exported_files[0].st_atime, 1336469177)
  self.assertEqual(exported_files[0].st_mtime, 1336129892)
  self.assertEqual(exported_files[0].st_ctime, 1336129892)
  self.assertEqual(exported_files[0].st_btime, 1313131313)
  # No content or hashes were supplied, so none should be set.
  self.assertFalse(exported_files[0].HasField("content"))
  self.assertFalse(exported_files[0].HasField("content_sha256"))
  self.assertFalse(exported_files[0].HasField("hash_md5"))
  self.assertFalse(exported_files[0].HasField("hash_sha1"))
  self.assertFalse(exported_files[0].HasField("hash_sha256"))

  # We expect 2 ExportedMatch instances in the results
  exported_matches = [
      result for result in results
      if isinstance(result, buffer_reference.ExportedMatch)
  ]
  exported_matches = sorted(exported_matches, key=lambda x: x.offset)
  self.assertLen(exported_matches, 2)

  self.assertEqual(exported_matches[0].offset, 42)
  self.assertEqual(exported_matches[0].length, 43)
  self.assertEqual(exported_matches[0].data, b"somedata1")
  self.assertEqual(exported_matches[0].urn,
                   "aff4:/%s/fs/os/some/path" % self.client_id)

  self.assertEqual(exported_matches[1].offset, 44)
  self.assertEqual(exported_matches[1].length, 45)
  self.assertEqual(exported_matches[1].data, b"somedata2")
  self.assertEqual(exported_matches[1].urn,
                   "aff4:/%s/fs/os/some/path" % self.client_id)

  # Also test registry entries.
  data = rdf_protodict.DataBlob()
  data.SetValue(b"testdata")
  stat_entry = rdf_client_fs.StatEntry(
      registry_type="REG_SZ",
      registry_data=data,
      pathspec=rdf_paths.PathSpec(path="HKEY_USERS/S-1-1-1-1/Software",
                                  pathtype="REGISTRY"))
  file_finder_result = rdf_file_finder.FileFinderResult(
      stat_entry=stat_entry)
  converter = file.FileFinderResultConverter()
  results = list(converter.Convert(self.metadata, file_finder_result))

  # Registry stat entries convert to ExportedRegistryKey, not ExportedFile.
  self.assertLen(results, 1)
  self.assertIsInstance(results[0], file.ExportedRegistryKey)
  result = results[0]

  self.assertEqual(result.data, b"testdata")
  self.assertEqual(
      result.urn,
      "aff4:/%s/registry/HKEY_USERS/S-1-1-1-1/Software" % self.client_id)
def __call__(self, stat_entry, fd):
  """Builds a result carrying only the stat entry; the handler is unused."""
  del fd  # Unused.
  result = rdf_file_finder.FileFinderResult()
  result.stat_entry = stat_entry
  return result