def testDownloadCollectionWithFoldersEntries(self):
  """Check we can download RDFValueCollection that also references folders."""
  fd = aff4.FACTORY.Create("aff4:/testcoll", "RDFValueCollection",
                           token=self.token)
  fd.Add(file_finder.FileFinderResult(stat_entry=rdfvalue.StatEntry(
      aff4path=self.out.Add("testfile5"))))
  fd.Add(file_finder.FileFinderResult(stat_entry=rdfvalue.StatEntry(
      aff4path=self.out.Add("testdir1"), st_mode=stat.S_IFDIR)))
  fd.Close()

  with utils.TempDirectory() as tmpdir:
    export_utils.DownloadCollection("aff4:/testcoll", tmpdir, overwrite=True,
                                    dump_client_info=True, token=self.token,
                                    max_threads=2)
    expected_outdir = os.path.join(tmpdir, self.out.Path()[1:])

    # Check we found both entries.
    self.assertTrue("testfile5" in os.listdir(expected_outdir))
    self.assertTrue("testdir1" in os.listdir(expected_outdir))

def testDownloadCollectionWithFlattenOption(self):
  """Check we can download files referenced in RDFValueCollection."""
  # Create a collection with URNs to some files.
  fd = aff4.FACTORY.Create("aff4:/testcoll", "RDFValueCollection",
                           token=self.token)
  fd.Add(rdfvalue.RDFURN(self.out.Add("testfile1")))
  fd.Add(rdfvalue.StatEntry(aff4path=self.out.Add("testfile2")))
  fd.Add(rdfvalue.FileFinderResult(
      stat_entry=rdfvalue.StatEntry(aff4path=self.out.Add("testfile5"))))
  fd.Close()

  with utils.TempDirectory() as tmpdir:
    export_utils.DownloadCollection("aff4:/testcoll", tmpdir, overwrite=True,
                                    dump_client_info=True, flatten=True,
                                    token=self.token, max_threads=2)

    # Check that the "files" folder is filled with symlinks to downloaded
    # files.
    symlinks = os.listdir(os.path.join(tmpdir, "files"))
    self.assertEqual(len(symlinks), 3)
    self.assertListEqual(sorted(symlinks),
                         ["C.1000000000000000_fs_os_testfile1",
                          "C.1000000000000000_fs_os_testfile2",
                          "C.1000000000000000_fs_os_testfile5"])
    self.assertEqual(
        os.readlink(os.path.join(tmpdir, "files",
                                 "C.1000000000000000_fs_os_testfile1")),
        os.path.join(tmpdir, "C.1000000000000000", "fs", "os", "testfile1"))

def testOSXUsersParser(self):
  """Ensure we can extract users from a /Users directory listing."""
  paths = ["/Users/user1", "/Users/user2", "/Users/Shared"]
  statentries = []
  client = "C.1000000000000000"
  for path in paths:
    statentries.append(rdfvalue.StatEntry(
        aff4path=rdfvalue.ClientURN(client).Add("fs/os").Add(path),
        pathspec=rdfvalue.PathSpec(
            path=path, pathtype=rdfvalue.PathSpec.PathType.OS),
        st_mode=16877))

  statentries.append(rdfvalue.StatEntry(
      aff4path=rdfvalue.ClientURN(client).Add("fs/os").Add(
          "/Users/.localized"),
      pathspec=rdfvalue.PathSpec(
          path="/Users/.localized",
          pathtype=rdfvalue.PathSpec.PathType.OS),
      st_mode=33261))

  parser = osx_file_parser.OSXUsersParser()
  out = list(parser.Parse(statentries, None, None))
  self.assertItemsEqual([x.username for x in out], ["user1", "user2"])
  self.assertItemsEqual([x.homedir for x in out],
                        ["/Users/user1", "/Users/user2"])

def testDownloadCollection(self):
  """Check we can download files referenced in RDFValueCollection."""
  # Create a collection with URNs to some files.
  fd = aff4.FACTORY.Create("aff4:/testcoll", "RDFValueCollection",
                           token=self.token)
  fd.Add(rdfvalue.RDFURN(self.out.Add("testfile1")))
  fd.Add(rdfvalue.StatEntry(aff4path=self.out.Add("testfile2")))
  fd.Add(file_finder.FileFinderResult(stat_entry=rdfvalue.StatEntry(
      aff4path=self.out.Add("testfile5"))))
  fd.Close()

  with utils.TempDirectory() as tmpdir:
    export_utils.DownloadCollection("aff4:/testcoll", tmpdir, overwrite=True,
                                    dump_client_info=True, token=self.token,
                                    max_threads=2)
    expected_outdir = os.path.join(tmpdir, self.out.Path()[1:])

    # Check we found all three files.
    self.assertTrue("testfile1" in os.listdir(expected_outdir))
    self.assertTrue("testfile2" in os.listdir(expected_outdir))
    self.assertTrue("testfile5" in os.listdir(expected_outdir))

    # Check we dumped a YAML file to the root of the client.
    expected_rootdir = os.path.join(tmpdir, self.client_id.Basename())
    self.assertTrue("client_info.yaml" in os.listdir(expected_rootdir))

def ListFiles(self):
  """A generator of all keys and values."""
  if not self.IsDirectory():
    return

  if self.hive is None:
    for name in dir(_winreg):
      if name.startswith("HKEY_"):
        response = rdfvalue.StatEntry(st_mode=stat.S_IFDIR)
        response_pathspec = self.pathspec.Copy()
        response_pathspec.last.path = utils.JoinPath(
            response_pathspec.last.path, name)
        response.pathspec = response_pathspec

        yield response
    return

  try:
    with OpenKey(self.hive, CanonicalPathToLocalPath(self.key_name)) as key:
      (self.number_of_keys, self.number_of_values,
       self.last_modified) = QueryInfoKey(key)
      self.last_modified = self.last_modified / 10000000 - WIN_UNIX_DIFF_MSECS

      # First keys - these will look like directories.
      for i in range(self.number_of_keys):
        try:
          name = EnumKey(key, i)
          response = rdfvalue.StatEntry(
              # Keys look like directories in the VFS.
              st_mode=stat.S_IFDIR,
              st_mtime=self.last_modified)
          response_pathspec = self.pathspec.Copy()
          response_pathspec.last.path = utils.JoinPath(
              response_pathspec.last.path, name)
          response.pathspec = response_pathspec

          yield response
        except exceptions.WindowsError:
          pass

      # Now values - these will look like files.
      for i in range(self.number_of_values):
        try:
          name, value, value_type = EnumValue(key, i)
          response = self._Stat(name, value, value_type)
          # Values look like files in the VFS.
          response.st_mode = stat.S_IFREG

          yield response
        except exceptions.WindowsError:
          pass

  except exceptions.WindowsError as e:
    raise IOError("Unable to list key %s: %s" % (self.key_name, e))

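# A minimal usage sketch for ListFiles() (hypothetical: "fd" stands in for a
# registry VFS handler instance opened on some key pathspec; handler
# construction is not shown in this excerpt):
#
#   for entry in fd.ListFiles():
#     # Subkeys are reported with st_mode == stat.S_IFDIR, values with
#     # stat.S_IFREG.
#     print entry.pathspec.last.path, entry.st_mode
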
def Start(self):
  """Issue the find request."""
  self.state.Register("files_to_fetch", [])
  self.state.Register("files_found", 0)
  self.state.Register("sorted_filters",
                      sorted(self.args.filters, key=self._FilterWeight))

  if self.args.pathtype == rdfvalue.PathSpec.PathType.MEMORY:
    # We construct StatEntries ourselves and there's no way they can
    # pass the file type check.
    self.args.no_file_type_check = True

    # If pathtype is MEMORY, we're treating provided paths not as globs,
    # but as paths to memory devices.
    memory_devices = []
    for path in self.args.paths:
      pathspec = rdfvalue.PathSpec(
          path=utils.SmartUnicode(path),
          pathtype=rdfvalue.PathSpec.PathType.MEMORY)
      aff4path = aff4.AFF4Object.VFSGRRClient.PathspecToURN(
          pathspec, self.client_id)
      stat_entry = rdfvalue.StatEntry(aff4path=aff4path, pathspec=pathspec)
      memory_devices.append(stat_entry)

    self.CallStateInline(messages=memory_devices,
                         next_state="ProcessFilters")
  else:
    self.CallFlow("Glob", next_state="ProcessFilters",
                  paths=self.args.paths, pathtype=self.args.pathtype)

def ListDirectory(self, list_directory_request):
  """A mock list directory."""
  pathspec = list_directory_request.pathspec
  if not pathspec:
    raise RuntimeError("Missing pathspec.")

  if (pathspec.path != r"\\.\HarddiskVolumeShadowCopy3" or
      pathspec.pathtype != rdfvalue.PathSpec.PathType.OS):
    raise RuntimeError("Invalid pathspec.")

  if not pathspec.nested_path:
    raise RuntimeError("Missing nested pathspec.")

  if (pathspec.nested_path.path != "/" or
      pathspec.nested_path.pathtype != rdfvalue.PathSpec.PathType.TSK):
    raise RuntimeError("Invalid nested pathspec.")

  result = []
  for i in range(10):
    mock_pathspec = pathspec.Copy()
    mock_pathspec.last.path = "/file %s" % i
    result.append(rdfvalue.StatEntry(pathspec=mock_pathspec,
                                     st_mode=stat.S_IFDIR))

  return result

def testStartsZipGenerationWhenGenerateZipButtonIsClicked(self):
  stat_entry = rdfvalue.StatEntry(aff4path="aff4:/foo/bar")
  values = [rdfvalue.FileFinderResult(stat_entry=stat_entry)]

  with self.ACLChecksDisabled():
    hunt_urn = self.CreateGenericHuntWithCollection(values=values)
    self.GrantHuntApproval(hunt_urn)

  self.Open("/")
  self.Click("css=a[grrtarget=ManageHunts]")
  self.Click("css=td:contains('GenericHunt')")
  self.Click("css=li[heading=Results]")

  # Using the :visible selector as we don't know which button (ZIP or
  # TAR.GZ) will be shown - it depends on the platform.
  self.Click("css=button.DownloadButton:visible")
  self.WaitUntil(self.IsTextPresent, "Generation has started")

  with self.ACLChecksDisabled():
    flows_dir = aff4.FACTORY.Open("aff4:/flows")
    flows = list(flows_dir.OpenChildren())
    export_flows = [
        f for f in flows
        if f.__class__.__name__ == "ExportHuntResultFilesAsArchive"]

    self.assertEqual(len(export_flows), 1)
    self.assertEqual(export_flows[0].args.hunt_urn, hunt_urn)

def CreateTimeline(self):
  """Populate the timeline with the MAC data."""
  child_urns = self._ListVFSChildren([self.state.urn])
  attribute = aff4.Attribute.GetAttributeByName("stat")

  for subject, values in data_store.DB.MultiResolveRegex(
      child_urns, attribute.predicate, token=self.token, limit=10000000):
    for _, serialized, _ in values:
      stat = rdfvalue.StatEntry(serialized)
      event = rdfvalue.Event(source=utils.SmartUnicode(subject), stat=stat)

      # Add a new event for each MAC time if it exists.
      for c in "mac":
        timestamp = getattr(stat, "st_%stime" % c)
        if timestamp is not None:
          event.timestamp = timestamp * 1000000
          event.type = "file.%stime" % c

          # We are talking about the file which is a direct child of the
          # source.
          event.subject = utils.SmartUnicode(subject)

          if self.runner.output is not None:
            self.runner.output.AddEvent(event)

def Stat(self, responses):
  """Fix up the pathspec of the file."""
  response = responses.First()
  if responses.success and response:
    self.state.stat = response
    self.args.pathspec = self.state.stat.pathspec
  else:
    if not self.args.ignore_stat_failure:
      raise IOError("Error: %s" % responses.status)

    # Just fill up a bogus stat entry.
    self.state.stat = rdfvalue.StatEntry(pathspec=self.args.pathspec)

  # Adjust the size from st_size if read length is not specified.
  if self.args.read_length == 0:
    self.state.file_size = self.state.stat.st_size
  else:
    self.state.file_size = self.args.read_length

  self.state.max_chunk_number = (self.state.file_size / self.CHUNK_SIZE) + 1

  self.CreateBlobImage()
  self.FetchWindow(min(
      self.WINDOW_SIZE,
      self.state.max_chunk_number - self.state.current_chunk_number))

def testRDFValueParsing(self):
  stat = rdfvalue.StatEntry.protobuf(st_mode=16877)
  data = stat.SerializeToString()

  result = rdfvalue.StatEntry(data)
  self.assertTrue(isinstance(result.st_mode, rdfvalue.StatMode))

def testFileViewHasResultsTabForRDFValueCollection(self):
  collection_urn = "aff4:/C.0000000000000001/analysis/SomeFlow/results"
  with self.ACLChecksDisabled():
    with aff4.FACTORY.Create(collection_urn, "RDFValueCollection",
                             token=self.token) as fd:
      fd.Add(rdfvalue.StatEntry(aff4path="aff4:/some/unique/path"))

    self.GrantClientApproval("C.0000000000000001")

  self.Open("/#c=C.0000000000000001")
  self.Click("css=a:contains('Browse Virtual Filesystem')")
  self.Click("css=li[path='/analysis'] > a")
  self.Click("css=li[path='/analysis/SomeFlow'] > a")
  self.Click("css=tr:contains('results')")

  # The Results tab should appear, and there should be no HexView, TextView
  # or Download tabs.
  self.WaitUntil(self.IsElementPresent, "css=#Results")
  self.WaitUntilNot(self.IsElementPresent, "css=#DownloadView")
  self.WaitUntilNot(self.IsElementPresent, "css=#FileTextViewer")
  self.WaitUntilNot(self.IsElementPresent, "css=#FileHexViewer")

  # Click on the Results tab and check that the StatEntry we added before is
  # there.
  self.Click("css=#Results")
  self.WaitUntil(self.IsTextPresent, "aff4:/some/unique/path")

def MakeStatResponse(st, pathspec):
  """Creates a StatEntry response from a stat result."""
  response = rdfvalue.StatEntry(pathspec=pathspec)

  if st is None:
    # Special case empty stat if we don't have a real value, e.g. we get
    # Access denied when stating a file. We still want to give back a value
    # so we let the defaults from the proto pass through.
    pass
  else:
    # Now fill in the stat value.
    for attr in ["st_mode", "st_ino", "st_dev", "st_nlink", "st_uid",
                 "st_gid", "st_size", "st_atime", "st_mtime", "st_ctime",
                 "st_blocks", "st_blksize", "st_rdev"]:
      try:
        value = long(getattr(st, attr))
        if value < 0:
          value &= 0xFFFFFFFF

        setattr(response, attr, value)
      except AttributeError:
        pass

  return response

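# A minimal usage sketch for MakeStatResponse() (the stat result and pathspec
# below are made-up illustrations, not values from this module):
#
#   st = os.stat("/etc/passwd")
#   pathspec = rdfvalue.PathSpec(path="/etc/passwd",
#                                pathtype=rdfvalue.PathSpec.PathType.OS)
#   response = MakeStatResponse(st, pathspec)
#   # Negative attribute values are masked into unsigned 32 bits, and a None
#   # stat result yields a StatEntry carrying only the pathspec.
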
def testStatEntryToExportedRegistryKeyConverter(self):
  stat = rdfvalue.StatEntry(
      aff4path=rdfvalue.RDFURN(
          "aff4:/C.0000000000000000/registry/HKEY_USERS/S-1-5-20/Software/"
          "Microsoft/Windows/CurrentVersion/Run/Sidebar"),
      st_mode=32768,
      st_size=51,
      st_mtime=1247546054,
      registry_type=rdfvalue.StatEntry.RegistryType.REG_EXPAND_SZ,
      pathspec=rdfvalue.PathSpec(
          path="/HKEY_USERS/S-1-5-20/Software/Microsoft/Windows/"
          "CurrentVersion/Run/Sidebar",
          pathtype=rdfvalue.PathSpec.PathType.REGISTRY),
      registry_data=rdfvalue.DataBlob(string="Sidebar.exe"))

  converter = export.StatEntryToExportedRegistryKeyConverter()
  results = list(converter.Convert(rdfvalue.ExportedMetadata(), stat,
                                   token=self.token))

  self.assertEqual(len(results), 1)
  self.assertEqual(results[0].urn, rdfvalue.RDFURN(
      "aff4:/C.0000000000000000/registry/HKEY_USERS/S-1-5-20/Software/"
      "Microsoft/Windows/CurrentVersion/Run/Sidebar"))
  self.assertEqual(results[0].last_modified,
                   rdfvalue.RDFDatetimeSeconds(1247546054))
  self.assertEqual(results[0].type,
                   rdfvalue.StatEntry.RegistryType.REG_EXPAND_SZ)
  self.assertEqual(results[0].data, "Sidebar.exe")

def testRDFURNConverterWithURNPointingToCollection(self):
  urn = rdfvalue.RDFURN("aff4:/C.00000000000000/some/collection")

  fd = aff4.FACTORY.Create(urn, "RDFValueCollection", token=self.token)
  fd.Add(rdfvalue.StatEntry(
      aff4path=rdfvalue.RDFURN("aff4:/C.00000000000000/some/path"),
      pathspec=rdfvalue.PathSpec(path="/some/path",
                                 pathtype=rdfvalue.PathSpec.PathType.OS),
      st_mode=33184,
      st_ino=1063090,
      st_atime=1336469177,
      st_mtime=1336129892,
      st_ctime=1336129892))
  fd.Close()

  converter = export.RDFURNConverter()
  results = list(converter.Convert(rdfvalue.ExportedMetadata(), urn,
                                   token=self.token))

  self.assertTrue(len(results))

  exported_files = [r for r in results
                    if r.__class__.__name__ == "ExportedFile"]
  self.assertEqual(len(exported_files), 1)

  exported_file = exported_files[0]
  self.assertTrue(exported_file)
  self.assertEqual(exported_file.urn,
                   rdfvalue.RDFURN("aff4:/C.00000000000000/some/path"))

def Start(self):
  """Issue the find request."""
  super(FileFinder, self).Start()

  if not self.args.paths:
    # Nothing to do.
    return

  self.state.Register("files_found", 0)
  self.state.Register(
      "sorted_conditions",
      sorted(self.args.conditions, key=self._ConditionWeight))

  if self.args.pathtype in (rdfvalue.PathSpec.PathType.MEMORY,
                            rdfvalue.PathSpec.PathType.REGISTRY):
    # Memory and Registry StatEntries won't pass the file type check.
    self.args.no_file_type_check = True

  if self.args.pathtype == rdfvalue.PathSpec.PathType.MEMORY:
    # If pathtype is MEMORY, we're treating provided paths not as globs,
    # but as paths to memory devices.
    for path in self.args.paths:
      pathspec = rdfvalue.PathSpec(
          path=utils.SmartUnicode(path),
          pathtype=rdfvalue.PathSpec.PathType.MEMORY)

      aff4path = aff4.AFF4Object.VFSGRRClient.PathspecToURN(
          pathspec, self.client_id)

      stat_entry = rdfvalue.StatEntry(aff4path=aff4path, pathspec=pathspec)
      self.ApplyCondition(rdfvalue.FileFinderResult(stat_entry=stat_entry),
                          condition_index=0)
  else:
    self.GlobForPaths(self.args.paths, pathtype=self.args.pathtype,
                      no_file_type_check=self.args.no_file_type_check)

def testStatEntryToExportedFileConverterWithMissingAFF4File(self):
  stat = rdfvalue.StatEntry(
      aff4path=rdfvalue.RDFURN("aff4:/C.00000000000000/fs/os/some/path"),
      pathspec=rdfvalue.PathSpec(path="/some/path",
                                 pathtype=rdfvalue.PathSpec.PathType.OS),
      st_mode=33184,
      st_ino=1063090,
      st_atime=1336469177,
      st_mtime=1336129892,
      st_ctime=1336129892)

  converter = export.StatEntryToExportedFileConverter()
  results = list(converter.Convert(rdfvalue.ExportedMetadata(), stat,
                                   token=self.token))

  self.assertEqual(len(results), 1)
  self.assertEqual(results[0].basename, "path")
  self.assertEqual(results[0].urn,
                   rdfvalue.RDFURN("aff4:/C.00000000000000/fs/os/some/path"))
  self.assertEqual(results[0].st_mode, 33184)
  self.assertEqual(results[0].st_ino, 1063090)
  self.assertEqual(results[0].st_atime, 1336469177)
  self.assertEqual(results[0].st_mtime, 1336129892)
  self.assertEqual(results[0].st_ctime, 1336129892)

  self.assertFalse(results[0].HasField("content"))
  self.assertFalse(results[0].HasField("content_sha256"))
  self.assertFalse(results[0].HasField("hash_md5"))
  self.assertFalse(results[0].HasField("hash_sha1"))
  self.assertFalse(results[0].HasField("hash_sha256"))

def Stat(self, responses):
  """Fix up the pathspec of the file."""
  response = responses.First()
  if responses.success and response:
    self.state.stat = response

    # TODO(user): This is a workaround for broken clients sending back
    # empty pathspecs for pathtype MEMORY. Not needed for clients > 3.0.0.5.
    if self.state.stat.pathspec.path:
      self.args.pathspec = self.state.stat.pathspec
  else:
    if not self.args.ignore_stat_failure:
      raise IOError("Error: %s" % responses.status)

    # Just fill up a bogus stat entry.
    self.state.stat = rdfvalue.StatEntry(pathspec=self.args.pathspec)

  # Adjust the size from st_size if read length is not specified.
  if self.args.read_length == 0:
    self.state.file_size = self.state.stat.st_size
  else:
    self.state.file_size = self.args.read_length

  self.state.max_chunk_number = (self.state.file_size / self.CHUNK_SIZE) + 1

  self.CreateBlobImage()
  self.FetchWindow(min(
      self.WINDOW_SIZE,
      self.state.max_chunk_number - self.state.current_chunk_number))

def StatFile(self, list_dir_req):
  if list_dir_req.pathspec.path == "/proc/kcore":
    result = rdfvalue.StatEntry(pathspec=list_dir_req.pathspec,
                                st_mode=400)
    status = rdfvalue.GrrStatus(status=rdfvalue.GrrStatus.ReturnedStatus.OK)
    return [result, status]

  raise IOError("Not found.")

def GenTestData(paths, data):
  """Generate stats and file objects for the given paths and contents."""
  stats = []
  files = []
  for path in paths:
    p = rdfvalue.PathSpec(path=path)
    stats.append(rdfvalue.StatEntry(pathspec=p))

  for val in data:
    files.append(StringIO.StringIO(val))

  return stats, files

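# Usage sketch for GenTestData() (values are made up for illustration):
#
#   stats, files = GenTestData(["/etc/foo"], ["line1\nline2"])
#   # stats[0].pathspec.path == "/etc/foo"
#   # files[0].read() == "line1\nline2"
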
def testComplexConstruction(self):
  """Test that we can construct RDFProtos with nested fields."""
  pathspec = rdfvalue.PathSpec(path="/foobar",
                               pathtype=rdfvalue.PathSpec.PathType.TSK)
  sample = rdfvalue.StatEntry(pathspec=pathspec, st_size=5)

  self.assertEqual(sample.pathspec.path, "/foobar")
  self.assertEqual(sample.st_size, 5)

  self.assertRaises(AttributeError, rdfvalue.StatEntry, foobar=1)

def testCSVPluginWithValuesOfSameType(self):
  responses = []
  for i in range(10):
    responses.append(rdfvalue.StatEntry(
        aff4path=self.client_id.Add("/fs/os/foo/bar").Add(str(i)),
        pathspec=rdfvalue.PathSpec(path="/foo/bar"),
        st_mode=33184,
        st_ino=1063090,
        st_dev=64512L,
        st_nlink=1 + i,
        st_uid=139592,
        st_gid=5000,
        st_size=0,
        st_atime=1336469177,
        st_mtime=1336129892,
        st_ctime=1336129892))

  streams = self.ProcessResponses(
      plugin_args=rdfvalue.CSVOutputPluginArgs(),
      responses=responses)

  self.assertEqual(streams.keys(), ["ExportedFile.csv"])
  self.assertEqual(streams["ExportedFile.csv"].urn,
                   rdfvalue.RDFURN("aff4:/foo/bar/ExportedFile.csv"))

  contents = StringIO.StringIO(streams["ExportedFile.csv"].Read(16384))
  parsed_output = list(csv.DictReader(contents))

  self.assertEqual(len(parsed_output), 10)
  for i in range(10):
    self.assertEqual(parsed_output[i]["metadata.client_urn"], self.client_id)
    self.assertEqual(parsed_output[i]["metadata.hostname"], "Host-0")
    self.assertEqual(parsed_output[i]["metadata.mac_address"],
                     "aabbccddee00\nbbccddeeff00")
    self.assertEqual(parsed_output[i]["metadata.source_urn"],
                     self.results_urn)
    self.assertEqual(parsed_output[i]["urn"],
                     self.client_id.Add("/fs/os/foo/bar").Add(str(i)))
    self.assertEqual(parsed_output[i]["st_mode"], "33184")
    self.assertEqual(parsed_output[i]["st_ino"], "1063090")
    self.assertEqual(parsed_output[i]["st_dev"], "64512")
    self.assertEqual(parsed_output[i]["st_nlink"], str(1 + i))
    self.assertEqual(parsed_output[i]["st_uid"], "139592")
    self.assertEqual(parsed_output[i]["st_gid"], "5000")
    self.assertEqual(parsed_output[i]["st_size"], "0")
    self.assertEqual(parsed_output[i]["st_atime"], "2012-05-08 09:26:17")
    self.assertEqual(parsed_output[i]["st_mtime"], "2012-05-04 11:11:32")
    self.assertEqual(parsed_output[i]["st_ctime"], "2012-05-04 11:11:32")
    self.assertEqual(parsed_output[i]["st_blksize"], "0")
    self.assertEqual(parsed_output[i]["st_rdev"], "0")
    self.assertEqual(parsed_output[i]["symlink"], "")

def testFileViewHasExportTabWhenCollectionHasStatEntries(self):
  collection_urn = "aff4:/C.0000000000000001/analysis/SomeFlow/results"
  with self.ACLChecksDisabled():
    with aff4.FACTORY.Create(collection_urn, "RDFValueCollection",
                             token=self.token) as fd:
      fd.Add(rdfvalue.StatEntry(aff4path="aff4:/some/unique/path"))

    self.GrantClientApproval("C.0000000000000001")

  self.CheckExportTabIsPresent()

def StatFile(self, args):
  # Make sure the flow wants to download the same file mentioned in the
  # WMI response.
  test_obj.assertEqual(args.pathspec.path,
                       test_obj.SAMPLE_WMI_RESPONSE["PathName"])

  # Return a pathspec for a file in our test_data which we can verify.
  return [rdfvalue.StatEntry(pathspec=pathspec,
                             st_mode=33261,
                             st_size=20746)]

def GetParsedMultiFile(self, artifact, data, parser):
  stats = []
  files = []
  host_data = self.SetKnowledgeBase()
  kb = host_data["KnowledgeBase"]

  for path, lines in data.items():
    p = rdfvalue.PathSpec(path=path)
    stats.append(rdfvalue.StatEntry(pathspec=p))
    files.append(StringIO.StringIO(lines))

  rdfs = list(parser.ParseMultiple(stats, files, kb))
  host_data[artifact] = rdfs
  return host_data

def _CreateTestData(self, testdata):
  """Create 'stats' and 'file_objects' lists for passing to ParseMultiple."""
  stats = []
  files = []
  for filepath, localfile in testdata:
    files.append(open(localfile))

    p = rdfvalue.PathSpec(path=filepath)
    s = rdfvalue.StatEntry(pathspec=p)
    stats.append(s)

  return stats, files

def GetParsedFile(self, artifact, data, parser):
  host_data = self.SetKnowledgeBase()
  kb = host_data["KnowledgeBase"]
  for path, lines in data.items():
    p = rdfvalue.PathSpec(path=path)
    stat = rdfvalue.StatEntry(pathspec=p)
    file_obj = StringIO.StringIO(lines)
    rdfs = list(parser.Parse(stat, file_obj, kb))
    host_data[artifact] = rdfs
    # Return on the first item.
    break

  return host_data

def testCSVPluginWithValuesOfMultipleTypes(self):
  hunt_urn, _ = self.RunHunt(
      plugin_args=rdfvalue.CSVOutputPluginArgs(
          output_dir=rdfvalue.RDFURN("aff4:/tmp/csv")),
      responses=[
          rdfvalue.StatEntry(
              aff4path=self.client_id.Add("/fs/os/foo/bar"),
              pathspec=rdfvalue.PathSpec(path="/foo/bar")),
          rdfvalue.Process(pid=42)],
      process_responses_separately=True)

  plugin_output_files = sorted(list(
      aff4.FACTORY.Open("aff4:/tmp/csv", token=self.token).ListChildren()))
  self.assertListEqual(plugin_output_files,
                       [rdfvalue.RDFURN("aff4:/tmp/csv/ExportedFile.csv"),
                        rdfvalue.RDFURN("aff4:/tmp/csv/ExportedProcess.csv")])

  output_file = aff4.FACTORY.Open(plugin_output_files[0],
                                  aff4_type="AFF4Image", token=self.token)
  parsed_output = list(csv.DictReader(
      StringIO.StringIO(output_file.Read(sys.maxint))))
  self.assertEqual(len(parsed_output), 1)

  self.assertEqual(parsed_output[0]["metadata.client_urn"], self.client_id)
  self.assertEqual(parsed_output[0]["metadata.hostname"], "Host-0")
  self.assertEqual(parsed_output[0]["metadata.mac_address"], "aabbccddee00")
  self.assertEqual(parsed_output[0]["metadata.source_urn"],
                   hunt_urn.Add("Results"))
  self.assertEqual(parsed_output[0]["urn"],
                   self.client_id.Add("/fs/os/foo/bar"))

  output_file = aff4.FACTORY.Open(plugin_output_files[1],
                                  aff4_type="AFF4Image", token=self.token)
  parsed_output = list(csv.DictReader(
      StringIO.StringIO(output_file.Read(sys.maxint))))
  self.assertEqual(len(parsed_output), 1)

  self.assertEqual(parsed_output[0]["metadata.client_urn"], self.client_id)
  self.assertEqual(parsed_output[0]["metadata.hostname"], "Host-0")
  self.assertEqual(parsed_output[0]["metadata.mac_address"], "aabbccddee00")
  self.assertEqual(parsed_output[0]["metadata.source_urn"],
                   hunt_urn.Add("Results"))
  self.assertEqual(parsed_output[0]["pid"], "42")

def _GenFiles(self, passwd, shadow, group, gshadow):
  stats = []
  files = []
  for path in ["/etc/passwd", "/etc/shadow", "/etc/group", "/etc/gshadow"]:
    p = rdfvalue.PathSpec(path=path)
    stats.append(rdfvalue.StatEntry(pathspec=p))

  for data in passwd, shadow, group, gshadow:
    if data is None:
      data = []
    lines = "\n".join(data).format(**self.crypt)
    files.append(StringIO.StringIO(lines))

  return stats, files

def Find(self, args):
  # Make sure the flow wants to download the same file mentioned in the
  # WMI response.
  driver_name = test_obj.SAMPLE_WMI_RESPONSE["PathName"]
  driver_basename = driver_name.split("\\")[-1]
  test_obj.assertTrue(args.path_regex.Search(driver_basename))

  # Return a pathspec for a file in our test_data which we can verify.
  return [rdfvalue.StatEntry(pathspec=pathspec,
                             st_mode=33261,
                             st_size=20746)]