def testClientSummaryToExportedNetworkInterfaceConverter(self):
    """Interface data from a ClientSummary is flattened into export values."""
    inet = rdfvalue.NetworkAddress.Family.INET
    inet6 = rdfvalue.NetworkAddress.Family.INET6
    addresses = [
        rdfvalue.NetworkAddress(
            address_type=inet,
            packed_bytes=socket.inet_aton("127.0.0.1")),
        rdfvalue.NetworkAddress(
            address_type=inet,
            packed_bytes=socket.inet_aton("10.0.0.1")),
        rdfvalue.NetworkAddress(
            address_type=inet6,
            packed_bytes=socket.inet_pton(socket.AF_INET6,
                                          "2001:720:1500:1::a100")),
    ]
    summary = rdfvalue.ClientSummary(interfaces=[
        rdfvalue.Interface(mac_address="123456", ifname="eth0",
                           addresses=addresses)])

    converter = export.ClientSummaryToExportedNetworkInterfaceConverter()
    results = list(converter.Convert(rdfvalue.ExportedMetadata(), summary,
                                     token=self.token))

    self.assertEqual(len(results), 1)
    result = results[0]
    self.assertEqual(result.mac_address, "123456".encode("hex"))
    self.assertEqual(result.ifname, "eth0")
    # IPv4 addresses are space-joined; the IPv6 one goes to its own field.
    self.assertEqual(result.ip4_addresses, "127.0.0.1 10.0.0.1")
    self.assertEqual(result.ip6_addresses, "2001:720:1500:1::a100")
def testStatEntryToExportedRegistryKeyConverter(self):
    """A registry StatEntry maps onto an exported registry key value."""
    key_urn = rdfvalue.RDFURN(
        "aff4:/C.0000000000000000/registry/HKEY_USERS/S-1-5-20/Software/"
        "Microsoft/Windows/CurrentVersion/Run/Sidebar")
    stat_entry = rdfvalue.StatEntry(
        aff4path=key_urn,
        st_mode=32768,
        st_size=51,
        st_mtime=1247546054,
        registry_type=rdfvalue.StatEntry.RegistryType.REG_EXPAND_SZ,
        pathspec=rdfvalue.PathSpec(
            path="/HKEY_USERS/S-1-5-20/Software/Microsoft/Windows/"
            "CurrentVersion/Run/Sidebar",
            pathtype=rdfvalue.PathSpec.PathType.REGISTRY),
        registry_data=rdfvalue.DataBlob(string="Sidebar.exe"))

    results = list(
        export.StatEntryToExportedRegistryKeyConverter().Convert(
            rdfvalue.ExportedMetadata(), stat_entry, token=self.token))

    self.assertEqual(len(results), 1)
    result = results[0]
    self.assertEqual(result.urn, key_urn)
    self.assertEqual(result.last_modified,
                     rdfvalue.RDFDatetimeSeconds(1247546054))
    self.assertEqual(result.type,
                     rdfvalue.StatEntry.RegistryType.REG_EXPAND_SZ)
    self.assertEqual(result.data, "Sidebar.exe")
def testStatEntryToExportedFileConverterWithMissingAFF4File(self):
    """Conversion works even when no AFF4 object backs the StatEntry."""
    stat_entry = rdfvalue.StatEntry(
        aff4path=rdfvalue.RDFURN("aff4:/C.00000000000000/fs/os/some/path"),
        pathspec=rdfvalue.PathSpec(path="/some/path",
                                   pathtype=rdfvalue.PathSpec.PathType.OS),
        st_mode=33184,
        st_ino=1063090,
        st_atime=1336469177,
        st_mtime=1336129892,
        st_ctime=1336129892)

    converter = export.StatEntryToExportedFileConverter()
    results = list(converter.Convert(rdfvalue.ExportedMetadata(), stat_entry,
                                     token=self.token))

    self.assertEqual(len(results), 1)
    result = results[0]
    self.assertEqual(result.basename, "path")
    self.assertEqual(
        result.urn,
        rdfvalue.RDFURN("aff4:/C.00000000000000/fs/os/some/path"))
    # Stat fields are copied verbatim from the source entry.
    for field, expected in [("st_mode", 33184),
                            ("st_ino", 1063090),
                            ("st_atime", 1336469177),
                            ("st_mtime", 1336129892),
                            ("st_ctime", 1336129892)]:
        self.assertEqual(getattr(result, field), expected)
    # Without a backing file there's no content and no hashes.
    for field in ["content", "content_sha256", "hash_md5", "hash_sha1",
                  "hash_sha256"]:
        self.assertFalse(result.HasField(field))
def testRDFURNConverterWithURNPointingToCollection(self):
    """A URN naming an RDFValueCollection converts the collection contents."""
    collection_urn = rdfvalue.RDFURN("aff4:/C.00000000000000/some/collection")
    collection = aff4.FACTORY.Create(collection_urn, "RDFValueCollection",
                                     token=self.token)
    collection.Add(rdfvalue.StatEntry(
        aff4path=rdfvalue.RDFURN("aff4:/C.00000000000000/some/path"),
        pathspec=rdfvalue.PathSpec(path="/some/path",
                                   pathtype=rdfvalue.PathSpec.PathType.OS),
        st_mode=33184,
        st_ino=1063090,
        st_atime=1336469177,
        st_mtime=1336129892,
        st_ctime=1336129892))
    collection.Close()

    results = list(export.RDFURNConverter().Convert(
        rdfvalue.ExportedMetadata(), collection_urn, token=self.token))

    self.assertTrue(len(results))

    # The stored StatEntry should have been converted to an ExportedFile.
    exported_files = [r for r in results
                      if r.__class__.__name__ == "ExportedFile"]
    self.assertEqual(len(exported_files), 1)
    exported_file = exported_files[0]
    self.assertTrue(exported_file)
    self.assertEqual(exported_file.urn,
                     rdfvalue.RDFURN("aff4:/C.00000000000000/some/path"))
def testConvertsHuntCollectionWithValuesWithMultipleConverters(self):
    """Each collection value with two converters yields two export results."""
    collection = aff4.FACTORY.Create("aff4:/testcoll", "RDFValueCollection",
                                     token=self.token)
    for client_id, payload in [("C.0000000000000000", "some1"),
                               ("C.0000000000000001", "some2")]:
        message = rdfvalue.GrrMessage(payload=DummyRDFValue3(payload))
        message.source = rdfvalue.ClientURN(client_id)
        collection.Add(message)
        # Metadata lookup during conversion needs the client object to exist.
        test_lib.ClientFixture(message.source, token=self.token)
    collection.Close()

    collection = aff4.FACTORY.Open("aff4:/testcoll",
                                   aff4_type="RDFValueCollection",
                                   token=self.token)
    results = export.ConvertValues(rdfvalue.ExportedMetadata(), [collection],
                                   token=self.token)
    results = sorted(results, key=str)

    self.assertEqual(len(results), 4)
    self.assertEqual(
        [str(v) for v in results if isinstance(v, rdfvalue.DummyRDFValue)],
        ["some1A", "some2A"])
    self.assertEqual(
        [str(v) for v in results if isinstance(v, rdfvalue.DummyRDFValue2)],
        ["some1B", "some2B"])
def testConvertsHuntCollectionWithValuesWithSingleConverter(self):
    """Values with exactly one converter convert one-to-one."""
    collection = aff4.FACTORY.Create("aff4:/testcoll", "RDFValueCollection",
                                     token=self.token)
    for client_id, payload in [("C.0000000000000000", "some"),
                               ("C.0000000000000001", "some2")]:
        message = rdfvalue.GrrMessage(payload=DummyRDFValue(payload))
        message.source = rdfvalue.ClientURN(client_id)
        collection.Add(message)
        # Metadata lookup during conversion needs the client object to exist.
        test_lib.ClientFixture(message.source, token=self.token)
    collection.Close()

    collection = aff4.FACTORY.Open("aff4:/testcoll",
                                   aff4_type="RDFValueCollection",
                                   token=self.token)
    converted = export.ConvertValues(rdfvalue.ExportedMetadata(),
                                     [collection], token=self.token)
    converted = sorted(str(v) for v in converted)

    self.assertEqual(len(converted), 2)
    self.assertEqual(converted[0], "some")
    self.assertEqual(converted[1], "some2")
def testConverterIsCorrectlyFound(self):
    """ConvertValues locates the converter registered for DummyRDFValue."""
    converted = list(export.ConvertValues(rdfvalue.ExportedMetadata(),
                                          [DummyRDFValue("result")]))
    self.assertEqual(len(converted), 1)
    self.assertTrue(isinstance(converted[0], rdfvalue.RDFString))
    self.assertEqual(converted[0], "result")
def testStatEntryToExportedFileConverterWithFetchedAFF4File(self):
    """File contents are only exported when export_files_contents is set."""
    client_id = self.SetupClients(1)[0]

    pathspec = rdfvalue.PathSpec(
        pathtype=rdfvalue.PathSpec.PathType.OS,
        path=os.path.join(self.base_path, "winexec_img.dd"))
    pathspec.Append(path="/Ext2IFS_1_10b.exe",
                    pathtype=rdfvalue.PathSpec.PathType.TSK)

    # Fetch the file into the AFF4 space first.
    client_mock = test_lib.ActionMock("TransferBuffer", "StatFile",
                                      "HashBuffer")
    for _ in test_lib.TestFlowHelper(
        "GetFile", client_mock, token=self.token, client_id=client_id,
        pathspec=pathspec):
        pass

    urn = aff4.AFF4Object.VFSGRRClient.PathspecToURN(pathspec, client_id)
    fd = aff4.FACTORY.Open(urn, token=self.token)
    stat_entry = fd.Get(fd.Schema.STAT)
    self.assertTrue(stat_entry)

    converter = export.StatEntryToExportedFileConverter()
    results = list(converter.Convert(rdfvalue.ExportedMetadata(), stat_entry,
                                     token=self.token))

    self.assertEqual(len(results), 1)
    self.assertEqual(results[0].basename, "Ext2IFS_1_10b.exe")
    self.assertEqual(results[0].urn, urn)

    # Check that by default file contents are not exported.
    self.assertFalse(results[0].content)
    self.assertFalse(results[0].content_sha256)

    # Convert again, now specifying export_files_contents=True in options.
    converter = export.StatEntryToExportedFileConverter(
        options=rdfvalue.ExportOptions(export_files_contents=True))
    results = list(converter.Convert(rdfvalue.ExportedMetadata(), stat_entry,
                                     token=self.token))
    self.assertTrue(results[0].content)
    self.assertEqual(
        results[0].content_sha256,
        "69264282ca1a3d4e7f9b1f43720f719a4ea47964f0bfd1b2ba88424f1c61395d")
def testProcessToExportedNetworkConnection(self):
    """Each connection of a process becomes one exported connection."""
    listen_state = rdfvalue.NetworkConnection.State.LISTEN
    stream_type = rdfvalue.NetworkConnection.Type.SOCK_STREAM
    conn1 = rdfvalue.NetworkConnection(
        state=listen_state,
        type=stream_type,
        local_address=rdfvalue.NetworkEndpoint(ip="0.0.0.0", port=22),
        remote_address=rdfvalue.NetworkEndpoint(ip="0.0.0.0", port=0),
        pid=2136,
        ctime=0)
    conn2 = rdfvalue.NetworkConnection(
        state=listen_state,
        type=stream_type,
        local_address=rdfvalue.NetworkEndpoint(ip="192.168.1.1", port=31337),
        remote_address=rdfvalue.NetworkEndpoint(ip="1.2.3.4", port=6667),
        pid=1,
        ctime=0)

    process = rdfvalue.Process(
        pid=2,
        ppid=1,
        cmdline=["cmd.exe"],
        exe="c:\\windows\\cmd.exe",
        ctime=long(1333718907.167083 * 1e6),
        connections=[conn1, conn2])

    converter = export.ProcessToExportedNetworkConnectionConverter()
    results = list(converter.Convert(rdfvalue.ExportedMetadata(), process,
                                     token=self.token))

    self.assertEqual(len(results), 2)
    # (local_ip, local_port, remote_ip, remote_port, pid) per connection.
    expected = [("0.0.0.0", 22, "0.0.0.0", 0, 2136),
                ("192.168.1.1", 31337, "1.2.3.4", 6667, 1)]
    for result, (local_ip, local_port, remote_ip, remote_port,
                 pid) in zip(results, expected):
        self.assertEqual(result.state, listen_state)
        self.assertEqual(result.type, stream_type)
        self.assertEqual(result.local_address.ip, local_ip)
        self.assertEqual(result.local_address.port, local_port)
        self.assertEqual(result.remote_address.ip, remote_ip)
        self.assertEqual(result.remote_address.port, remote_port)
        self.assertEqual(result.pid, pid)
        self.assertEqual(result.ctime, 0)
def testClientSummaryToExportedClientConverter(self):
    """Metadata passed in is attached to the exported client value."""
    metadata = rdfvalue.ExportedMetadata(hostname="ahostname")
    results = list(
        export.ClientSummaryToExportedClientConverter().Convert(
            metadata, rdfvalue.ClientSummary(), token=self.token))

    self.assertEqual(len(results), 1)
    self.assertEqual(results[0].metadata.hostname, "ahostname")
def testVolatilityResultToExportedVolatilityMutantConverter(self):
    """Mutantscan table rows map onto exported volatility mutant values."""

    def MakeRow(*cells):
        # ints become numeric values, strings become svalues, None becomes
        # an empty VolatilityValue.
        values = []
        for cell in cells:
            if cell is None:
                values.append(rdfvalue.VolatilityValue())
            elif isinstance(cell, str):
                values.append(rdfvalue.VolatilityValue(svalue=cell))
            else:
                values.append(rdfvalue.VolatilityValue(value=cell))
        return rdfvalue.VolatilityValues(values=values)

    row1 = MakeRow(50211728, 1, 1, 1, 0, "", None)
    row2 = MakeRow(50740512, 2, 2, 0, 275427826012256, "163255304:2168",
                   "XYZLock")

    header_names = ["offset_p", "ptr_count", "hnd_count", "mutant_signal",
                    "mutant_thread", "cid", "mutant_name"]
    table = rdfvalue.VolatilityTable(
        headers=[rdfvalue.VolatilityHeader(name=name)
                 for name in header_names],
        rows=[row1, row2])
    volatility_result = rdfvalue.VolatilityResult(
        plugin="mutantscan",
        sections=[rdfvalue.VolatilitySection(table=table)])

    converter = export.VolatilityResultToExportedVolatilityMutantConverter()
    results = list(converter.Convert(rdfvalue.ExportedMetadata(),
                                     volatility_result, token=self.token))

    self.assertEqual(len(results), 2)
    expected = [(50211728, 1, 1, 1, 0, "", ""),
                (50740512, 2, 2, 0, 275427826012256, "163255304:2168",
                 "XYZLock")]
    for result, (offset, ptr_count, handle_count, signal, thread, cid,
                 name) in zip(results, expected):
        self.assertEqual(result.offset, offset)
        self.assertEqual(result.ptr_count, ptr_count)
        self.assertEqual(result.handle_count, handle_count)
        self.assertEqual(result.signal, signal)
        self.assertEqual(result.thread, thread)
        self.assertEqual(result.cid, cid)
        self.assertEqual(result.name, name)
def testConvertsSingleValueWithMultipleAssociatedConverters(self):
    """A value with two registered converters produces both results."""
    results = list(export.ConvertValues(rdfvalue.ExportedMetadata(),
                                        [DummyRDFValue3("some")]))
    self.assertEqual(len(results), 2)

    # Converters may run in either order; normalize by class name
    # ("DummyRDFValue" sorts before "DummyRDFValue2").
    ordered = sorted(results, key=lambda value: value.__class__.__name__)
    self.assertTrue(isinstance(ordered[0], rdfvalue.DummyRDFValue))
    self.assertTrue(isinstance(ordered[1], rdfvalue.DummyRDFValue2))
    self.assertEqual(ordered[0], rdfvalue.DummyRDFValue("someA"))
    self.assertEqual(ordered[1], rdfvalue.DummyRDFValue2("someB"))
def testVolatilityResultToExportedVolatilityHandleConverter(self):
    """Handle-table rows map onto exported volatility handle values."""

    def MakeRow(*cells):
        # ints become numeric values, strings become svalues, None becomes
        # an empty VolatilityValue.
        values = []
        for cell in cells:
            if cell is None:
                values.append(rdfvalue.VolatilityValue())
            elif isinstance(cell, str):
                values.append(rdfvalue.VolatilityValue(svalue=cell))
            else:
                values.append(rdfvalue.VolatilityValue(value=cell))
        return rdfvalue.VolatilityValues(values=values)

    # Note: the first row carries a trailing empty value beyond the six
    # declared headers, exactly as in the original data.
    row1 = MakeRow(275427776305632, 4, 4, 2097151, "Process", "System(4)",
                   None)
    row2 = MakeRow(273366078738336, 4, 8, 131103, "Key",
                   "MACHINE\\SYSTEM\\CONTROLSET001\\CONTROL\\HIVELIST")

    header_names = ["offset_v", "pid", "handle", "access", "obj_type",
                    "details"]
    table = rdfvalue.VolatilityTable(
        headers=[rdfvalue.VolatilityHeader(name=name)
                 for name in header_names],
        rows=[row1, row2])
    volatility_result = rdfvalue.VolatilityResult(
        plugin="mutantscan",
        sections=[rdfvalue.VolatilitySection(table=table)])

    converter = export.VolatilityResultToExportedVolatilityHandleConverter()
    results = list(converter.Convert(rdfvalue.ExportedMetadata(),
                                     volatility_result, token=self.token))

    self.assertEqual(len(results), 2)
    expected = [
        (275427776305632, 4, 4, 2097151, "Process", "System(4)"),
        (273366078738336, 4, 8, 131103, "Key",
         "MACHINE\\SYSTEM\\CONTROLSET001\\CONTROL\\HIVELIST"),
    ]
    for result, (offset, pid, handle, access, obj_type,
                 path) in zip(results, expected):
        self.assertEqual(result.offset, offset)
        self.assertEqual(result.pid, pid)
        self.assertEqual(result.handle, handle)
        self.assertEqual(result.access, access)
        self.assertEqual(result.type, obj_type)
        self.assertEqual(result.path, path)
def testGrrMessageConverterWithOneMissingClient(self):
    """Messages from unknown clients are dropped during batch conversion."""
    msg1 = rdfvalue.GrrMessage(payload=DummyRDFValue4("some"))
    msg1.source = rdfvalue.ClientURN("C.0000000000000000")
    # Only the first client actually exists in the datastore.
    test_lib.ClientFixture(msg1.source, token=self.token)

    msg2 = rdfvalue.GrrMessage(payload=DummyRDFValue4("some2"))
    msg2.source = rdfvalue.ClientURN("C.0000000000000001")

    metadata1 = rdfvalue.ExportedMetadata(
        timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(1),
        source_urn=rdfvalue.RDFURN("aff4:/hunts/W:000000/Results"))
    metadata2 = rdfvalue.ExportedMetadata(
        timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(2),
        source_urn=rdfvalue.RDFURN("aff4:/hunts/W:000001/Results"))

    results = list(export.GrrMessageConverter().BatchConvert(
        [(metadata1, msg1), (metadata2, msg2)], token=self.token))

    # Only the message from the existing client survives.
    self.assertEqual(len(results), 1)
    self.assertEqual(results[0].timestamp,
                     rdfvalue.RDFDatetime().FromSecondsFromEpoch(1))
    self.assertEqual(results[0].source_urn, "aff4:/hunts/W:000000/Results")
def testGrrMessageConverter(self):
    """Timestamp and source urn come from the provided metadata."""
    message = rdfvalue.GrrMessage(payload=DummyRDFValue4("some"))
    message.source = rdfvalue.ClientURN("C.0000000000000000")
    test_lib.ClientFixture(message.source, token=self.token)

    metadata = rdfvalue.ExportedMetadata(
        timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(1),
        source_urn=rdfvalue.RDFURN("aff4:/hunts/W:000000/Results"))

    results = list(export.GrrMessageConverter().Convert(metadata, message,
                                                        token=self.token))

    self.assertEqual(len(results), 1)
    self.assertEqual(results[0].timestamp,
                     rdfvalue.RDFDatetime().FromSecondsFromEpoch(1))
    self.assertEqual(results[0].source_urn, "aff4:/hunts/W:000000/Results")
def ProcessResponses(self, responses):
    """Optionally converts responses and writes them to the CSV output.

    Args:
      responses: values to process; converted with export.ConvertValues when
        the convert_values flow argument is set, passed through otherwise.
    """
    # Metadata applied to every value converted in this call.
    default_metadata = rdfvalue.ExportedMetadata(
        source_urn=self.state.collection_urn)

    if self.state.args.convert_values:
        # This is thread-safe - we just convert the values.
        converted_responses = export.ConvertValues(
            default_metadata, responses, token=self.state.token,
            options=self.state.args.export_options)
    else:
        converted_responses = responses

    # This is not thread-safe, therefore WriteValuesToCSVFile is
    # synchronized.
    self.WriteValuesToCSVFile(converted_responses)
def testBufferReferenceToExportedMatchConverter(self):
    """BufferReference fields and a client-relative urn are exported."""
    buffer_reference = rdfvalue.BufferReference(
        offset=42,
        length=43,
        data="somedata",
        pathspec=rdfvalue.PathSpec(path="/some/path",
                                   pathtype=rdfvalue.PathSpec.PathType.OS))
    metadata = rdfvalue.ExportedMetadata(client_urn="C.0000000000000001")

    converter = export.BufferReferenceToExportedMatchConverter()
    results = list(converter.Convert(metadata, buffer_reference,
                                     token=self.token))

    self.assertEqual(len(results), 1)
    match = results[0]
    self.assertEqual(match.offset, 42)
    self.assertEqual(match.length, 43)
    self.assertEqual(match.data, "somedata")
    # The urn combines the client urn from metadata with the pathspec.
    self.assertEqual(
        match.urn,
        rdfvalue.RDFURN("aff4:/C.0000000000000001/fs/os/some/path"))
def testProcessToExportedProcessConverter(self):
    """Process fields are copied into the exported process value."""
    ctime = long(1333718907.167083 * 1e6)
    process = rdfvalue.Process(
        pid=2,
        ppid=1,
        cmdline=["cmd.exe"],
        exe="c:\\windows\\cmd.exe",
        ctime=ctime)

    results = list(export.ProcessToExportedProcessConverter().Convert(
        rdfvalue.ExportedMetadata(), process, token=self.token))

    self.assertEqual(len(results), 1)
    result = results[0]
    self.assertEqual(result.pid, 2)
    self.assertEqual(result.ppid, 1)
    self.assertEqual(result.cmdline, "cmd.exe")
    self.assertEqual(result.exe, "c:\\windows\\cmd.exe")
    self.assertEqual(result.ctime, ctime)
def testProcessToExportedOpenFileConverter(self):
    """Each open file of a process yields one exported entry with its pid."""
    process = rdfvalue.Process(
        pid=2,
        ppid=1,
        cmdline=["cmd.exe"],
        exe="c:\\windows\\cmd.exe",
        ctime=long(1333718907.167083 * 1e6),
        open_files=["/some/a", "/some/b"])

    results = list(export.ProcessToExportedOpenFileConverter().Convert(
        rdfvalue.ExportedMetadata(), process, token=self.token))

    self.assertEqual(len(results), 2)
    for result, path in zip(results, ["/some/a", "/some/b"]):
        self.assertEqual(result.pid, 2)
        self.assertEqual(result.path, path)
def testStatEntryToExportedFileConverterWithHashedAFF4File(self):
    """Hashes are exported when the export_files_hashes option is set."""
    client_id = self.SetupClients(1)[0]

    pathspec = rdfvalue.PathSpec(
        pathtype=rdfvalue.PathSpec.PathType.OS,
        path=os.path.join(self.base_path, "winexec_img.dd"))
    pathspec.Append(path="/Ext2IFS_1_10b.exe",
                    pathtype=rdfvalue.PathSpec.PathType.TSK)
    urn = aff4.AFF4Object.VFSGRRClient.PathspecToURN(pathspec, client_id)

    # Fetch the file first.
    client_mock = test_lib.ActionMock("TransferBuffer", "StatFile",
                                      "HashBuffer")
    for _ in test_lib.TestFlowHelper(
        "GetFile", client_mock, token=self.token, client_id=client_id,
        pathspec=pathspec):
        pass

    # Publish the file to the file store and let the worker hash it.
    auth_state = rdfvalue.GrrMessage.AuthorizationState.AUTHENTICATED
    flow.Events.PublishEvent(
        "FileStore.AddFileToStore",
        rdfvalue.GrrMessage(payload=urn, auth_state=auth_state),
        token=self.token)
    worker = test_lib.MockWorker(token=self.token)
    worker.Simulate()

    fd = aff4.FACTORY.Open(urn, token=self.token)
    hash_value = fd.Get(fd.Schema.HASH)
    self.assertTrue(hash_value)

    converter = export.StatEntryToExportedFileConverter(
        options=rdfvalue.ExportOptions(export_files_hashes=True))
    results = list(converter.Convert(
        rdfvalue.ExportedMetadata(),
        rdfvalue.StatEntry(aff4path=urn, pathspec=pathspec),
        token=self.token))

    self.assertEqual(results[0].hash_md5,
                     "bb0a15eefe63fd41f8dc9dee01c5cf9a")
    self.assertEqual(results[0].hash_sha1,
                     "7dd6bee591dfcb6d75eb705405302c3eab65e21a")
    self.assertEqual(
        results[0].hash_sha256,
        "0e8dc93e150021bb4752029ebbff51394aa36f069cf19901578e4f06017acdb5")
def BatchConvert(self, metadata_value_pairs, token=None):
    """Converts a batch of FileStoreHashs.

    Args:
      metadata_value_pairs: a list or a generator of (metadata, urn) tuples,
        where metadata is ExportedMetadata to be used for conversion and urn
        is a FileStoreHash urn to look up client files for.
      token: Security token.

    Returns:
      List of ExportedFileStoreHash values, one per client file hit for the
      given hash urns.
    """
    # Materialize once: the original code iterated metadata_value_pairs in
    # two separate comprehensions, which silently produced an empty
    # urns_dict (and a later KeyError) when a generator was passed in.
    pairs = list(metadata_value_pairs)
    urns = [urn for _, urn in pairs]
    # On duplicate urns the last metadata wins, as before.
    urns_dict = {urn: metadata for metadata, urn in pairs}

    results = []
    for hash_urn, client_files in filestore.HashFileStore.GetClientsForHashes(
        urns, token=token):
        for hit in client_files:
            # Copy the metadata so the shared object isn't mutated across
            # hits.
            metadata = rdfvalue.ExportedMetadata(urns_dict[hash_urn])
            metadata.client_urn = rdfvalue.RDFURN(hit).Split(2)[0]

            results.append(rdfvalue.ExportedFileStoreHash(
                metadata=metadata,
                hash=hash_urn.hash_value,
                fingerprint_type=hash_urn.fingerprint_type,
                hash_type=hash_urn.hash_type,
                target_urn=hit))

    return results
def BatchConvert(self, metadata_value_pairs, token=None):
    """Converts a batch of messages into RDFValues at once.

    The values are read via .source and .payload, i.e. they are
    GrrMessage-style objects (the previous docstring's mention of StatEntry
    did not match the code).

    Args:
      metadata_value_pairs: a list or a generator of tuples (metadata,
        value), where metadata is ExportedMetadata to be used for conversion
        and value is a message to be converted.
      token: Security token.

    Returns:
      Resulting RDFValues. Empty list is a valid result and means that
      conversion wasn't possible.
    """
    # Find set of converters for the first message payload.
    # We assume that payload is of the same type for all the messages in the
    # batch.
    converters_classes = ExportConverter.GetConvertersByValue(
        metadata_value_pairs[0][1].payload)
    converters = [cls(self.options) for cls in converters_classes]

    # Group messages by source (i.e. by client urn).
    msg_dict = {}
    for metadata, msg in metadata_value_pairs:
        if msg.source not in msg_dict:
            msg_dict[msg.source] = []
        msg_dict[msg.source].append((metadata, msg))

    metadata_objects = []
    metadata_to_fetch = []

    # Open the clients we don't have metadata for and fetch metadata.
    for client_urn in msg_dict.iterkeys():
        try:
            metadata_objects.append(self.cached_metadata[client_urn])
        except KeyError:
            metadata_to_fetch.append(client_urn)

    if metadata_to_fetch:
        client_fds = aff4.FACTORY.MultiOpen(metadata_to_fetch, mode="r",
                                            token=token)
        fetched_metadata = [
            GetMetadata(client_fd, token=token) for client_fd in client_fds]

        # Populate the cache so later batches skip the datastore reads.
        for metadata in fetched_metadata:
            self.cached_metadata[metadata.client_urn] = metadata
        metadata_objects.extend(fetched_metadata)

    # Get session id and timestamp from the original metadata provided.
    batch_data = []
    for metadata in metadata_objects:
        try:
            for original_metadata, message in msg_dict[metadata.client_urn]:
                # Copy the per-client metadata, then override the
                # per-message fields from the caller-supplied metadata.
                new_metadata = rdfvalue.ExportedMetadata(metadata)
                new_metadata.source_urn = original_metadata.source_urn
                new_metadata.timestamp = original_metadata.timestamp
                batch_data.append((new_metadata, message.payload))
        except KeyError:
            # No metadata object for this client; its messages are skipped.
            pass

    converted_batch = []
    for converter in converters:
        converted_batch.extend(converter.BatchConvert(batch_data,
                                                      token=token))

    return converted_batch
def testFileFinderResultExportConverter(self):
    """FileFinderResult yields one ExportedFile plus one match per hit."""
    pathspec = rdfvalue.PathSpec(path="/some/path",
                                 pathtype=rdfvalue.PathSpec.PathType.OS)
    matches = [
        rdfvalue.BufferReference(offset=42, length=43, data="somedata1",
                                 pathspec=pathspec),
        rdfvalue.BufferReference(offset=44, length=45, data="somedata2",
                                 pathspec=pathspec),
    ]
    stat_entry = rdfvalue.StatEntry(
        aff4path=rdfvalue.RDFURN("aff4:/C.00000000000001/fs/os/some/path"),
        pathspec=pathspec,
        st_mode=33184,
        st_ino=1063090,
        st_atime=1336469177,
        st_mtime=1336129892,
        st_ctime=1336129892)
    file_finder_result = rdfvalue.FileFinderResult(stat_entry=stat_entry,
                                                   matches=matches)
    metadata = rdfvalue.ExportedMetadata(client_urn="C.0000000000000001")

    converter = export.FileFinderResultConverter()
    results = list(converter.Convert(metadata, file_finder_result,
                                     token=self.token))

    # We expect 1 ExportedFile instance in the results.
    exported_files = [r for r in results
                      if isinstance(r, rdfvalue.ExportedFile)]
    self.assertEqual(len(exported_files), 1)
    exported_file = exported_files[0]

    self.assertEqual(exported_file.basename, "path")
    self.assertEqual(
        exported_file.urn,
        rdfvalue.RDFURN("aff4:/C.00000000000001/fs/os/some/path"))
    # Stat fields are copied verbatim.
    for field, expected in [("st_mode", 33184),
                            ("st_ino", 1063090),
                            ("st_atime", 1336469177),
                            ("st_mtime", 1336129892),
                            ("st_ctime", 1336129892)]:
        self.assertEqual(getattr(exported_file, field), expected)
    # No content or hashes without a backing file.
    for field in ["content", "content_sha256", "hash_md5", "hash_sha1",
                  "hash_sha256"]:
        self.assertFalse(exported_file.HasField(field))

    # We expect 2 ExportedMatch instances in the results.
    exported_matches = sorted(
        (r for r in results if isinstance(r, rdfvalue.ExportedMatch)),
        key=lambda match: match.offset)
    self.assertEqual(len(exported_matches), 2)

    expected_matches = [(42, 43, "somedata1"), (44, 45, "somedata2")]
    for match, (offset, length, data) in zip(exported_matches,
                                             expected_matches):
        self.assertEqual(match.offset, offset)
        self.assertEqual(match.length, length)
        self.assertEqual(match.data, data)
        self.assertEqual(
            match.urn,
            rdfvalue.RDFURN("aff4:/C.0000000000000001/fs/os/some/path"))
def BatchConvert(self, metadata_value_pairs, token=None):
    """Converts a batch of GrrMessages into a set of RDFValues at once.

    Args:
      metadata_value_pairs: a list or a generator of tuples (metadata,
        value), where metadata is ExportedMetadata to be used for conversion
        and value is a GrrMessage to be converted.
      token: Security token.

    Returns:
      Resulting RDFValues. Empty list is a valid result and means that
      conversion wasn't possible.
    """
    # Group messages by source (i.e. by client urn).
    msg_dict = {}
    for metadata, msg in metadata_value_pairs:
        msg_dict.setdefault(msg.source, []).append((metadata, msg))

    metadata_objects = []
    metadata_to_fetch = []

    # Open the clients we don't have metadata for and fetch metadata.
    for client_urn in msg_dict.iterkeys():
        try:
            metadata_objects.append(self.cached_metadata[client_urn])
        except KeyError:
            metadata_to_fetch.append(client_urn)

    if metadata_to_fetch:
        client_fds = aff4.FACTORY.MultiOpen(metadata_to_fetch, mode="r",
                                            token=token)
        fetched_metadata = [
            GetMetadata(client_fd, token=token) for client_fd in client_fds]

        # Populate the cache so later batches skip the datastore reads.
        for metadata in fetched_metadata:
            self.cached_metadata[metadata.client_urn] = metadata
        metadata_objects.extend(fetched_metadata)

    data_by_type = {}
    for metadata in metadata_objects:
        try:
            for original_metadata, message in msg_dict[metadata.client_urn]:
                # Get source_urn and annotations from the original metadata
                # provided and original_timestamp from the payload age.
                new_metadata = rdfvalue.ExportedMetadata(metadata)
                new_metadata.source_urn = original_metadata.source_urn
                new_metadata.annotations = original_metadata.annotations
                new_metadata.original_timestamp = message.payload.age
                cls_name = message.payload.__class__.__name__

                # Create a dict of values for conversion keyed by type, so
                # we can apply the right converters to the right object
                # types.
                if cls_name not in data_by_type:
                    converters_classes = ExportConverter.GetConvertersByValue(
                        message.payload)
                    data_by_type[cls_name] = {
                        "converters": [cls(self.options)
                                       for cls in converters_classes],
                        "batch_data": [(new_metadata, message.payload)]
                    }
                else:
                    data_by_type[cls_name]["batch_data"].append(
                        (new_metadata, message.payload))

        except KeyError:
            # No metadata object for this client; its messages are skipped.
            pass

    # Run all converters against all objects of the relevant type.
    converted_batch = []
    for dataset in data_by_type.values():
        for converter in dataset["converters"]:
            converted_batch.extend(
                converter.BatchConvert(dataset["batch_data"], token=token))

    return converted_batch
def testRaisesWhenNoConverterFound(self):
    """ConvertValues raises NoConverterFound for unconvertible values."""
    result_gen = export.ConvertValues(rdfvalue.ExportedMetadata(),
                                      [DummyRDFValue2("some")])
    # Conversion is lazy: the error only surfaces when the generator is
    # consumed.
    self.assertRaises(export.NoConverterFound, list, result_gen)