def _GetMetadataForClients(self, client_urns):
  """Fetches metadata for a given list of clients."""
  resolved = {}
  missing = set()

  # Serve from the per-instance cache first; collect the cache misses.
  for client_urn in client_urns:
    if client_urn in self._cached_metadata:
      resolved[client_urn] = self._cached_metadata[client_urn]
    else:
      missing.add(client_urn)

  if missing:
    # Open all uncached clients in one round trip and build their metadata.
    client_fds = aff4.FACTORY.MultiOpen(missing, mode="r", token=self.token)
    fetched_metadata = [
        export.GetMetadata(client_fd, token=self.token)
        for client_fd in client_fds
    ]

    for metadata in fetched_metadata:
      metadata.source_urn = self.source_urn
      self._cached_metadata[metadata.client_urn] = metadata
      resolved[metadata.client_urn] = metadata
      missing.remove(metadata.client_urn)

    # Clients that could not be opened still get a (mostly empty) metadata
    # entry so the result aligns with client_urns.
    for client_urn in missing:
      placeholder = export.ExportedMetadata(source_urn=self.source_urn)
      resolved[client_urn] = placeholder
      self._cached_metadata[client_urn] = placeholder

  # Preserve the caller's ordering.
  return [resolved[client_urn] for client_urn in client_urns]
def _GetMetadataForClients(self, client_urns):
  """Fetches metadata for a given list of clients."""
  resolved = {}
  missing = set()

  # Serve from the per-instance cache first; collect the cache misses.
  for client_urn in client_urns:
    if client_urn in self._cached_metadata:
      resolved[client_urn] = self._cached_metadata[client_urn]
    else:
      missing.add(client_urn)

  if missing:
    # Read all uncached clients from the relational datastore in one call.
    client_ids = set(urn.Basename() for urn in missing)
    infos = data_store.REL_DB.MultiReadClientFullInfo(client_ids)
    fetched_metadata = [
        export.GetMetadata(client_id, info)
        for client_id, info in infos.items()
    ]

    for metadata in fetched_metadata:
      metadata.source_urn = self.source_urn
      self._cached_metadata[metadata.client_urn] = metadata
      resolved[metadata.client_urn] = metadata
      missing.remove(metadata.client_urn)

    # Clients the datastore did not return still get a (mostly empty)
    # metadata entry so the result aligns with client_urns.
    for client_urn in missing:
      placeholder = export.ExportedMetadata(source_urn=self.source_urn)
      resolved[client_urn] = placeholder
      self._cached_metadata[client_urn] = placeholder

  # Preserve the caller's ordering.
  return [resolved[client_urn] for client_urn in client_urns]
def testGetMetadataWithoutCloudInstanceSet(self):
  """Cloud fields stay unset for a client without cloud-instance data."""
  fixture_test_lib.ClientFixture(self.client_id)

  full_info = data_store.REL_DB.ReadClientFullInfo(self.client_id)
  metadata = export.GetMetadata(self.client_id, full_info)

  self.assertFalse(metadata.HasField("cloud_instance_type"))
  self.assertFalse(metadata.HasField("cloud_instance_id"))
def testGetMetadataMissingKB(self):
  """No usernames are exported when the client has no knowledge base."""
  # Deliberately avoid `self.client_id`: the test requires a client that was
  # never initialized with a knowledge base.
  uninitialized_client_id = "C.4815162342108108"
  data_store.REL_DB.WriteClientMetadata(
      uninitialized_client_id, first_seen=rdfvalue.RDFDatetime(42))

  full_info = data_store.REL_DB.ReadClientFullInfo(uninitialized_client_id)
  metadata = export.GetMetadata(uninitialized_client_id, full_info)

  # Expect empty usernames field due to no knowledge base.
  self.assertFalse(metadata.usernames)
def testGetMetadataWithSystemLabels(self):
  """User- and system-owned labels are aggregated into separate fields."""
  fixture_test_lib.ClientFixture(self.client_id)

  # Two labels owned by the test user, one owned by the "GRR" system user.
  for owner, label in [(self.test_username, "a"),
                       (self.test_username, "b"),
                       ("GRR", "c")]:
    self.AddClientLabel(self.client_id, owner, label)

  full_info = data_store.REL_DB.ReadClientFullInfo(self.client_id)
  metadata = export.GetMetadata(self.client_id, full_info)

  self.assertEqual(metadata.labels, "a,b,c")
  self.assertEqual(metadata.user_labels, "a,b")
  self.assertEqual(metadata.system_labels, "c")
def testGetMetadataWithSingleUserLabel(self):
  """A single user-owned label shows up in labels and user_labels only."""
  fixture_test_lib.ClientFixture(self.client_id)
  self.AddClientLabel(self.client_id, self.test_username, "client-label-24")

  full_info = data_store.REL_DB.ReadClientFullInfo(self.client_id)
  metadata = export.GetMetadata(self.client_id, full_info)

  self.assertEqual(metadata.os, "Windows")
  self.assertEqual(metadata.labels, "client-label-24")
  self.assertEqual(metadata.user_labels, "client-label-24")
  self.assertEqual(metadata.system_labels, "")
  self.assertEqual(metadata.hardware_info.bios_version, "Version 1.23v")
def testGetMetadataWithAmazonCloudInstanceID(self):
  """Amazon cloud-instance data is reflected in the exported metadata."""
  fixture_test_lib.ClientFixture(self.client_id)

  # Attach an Amazon cloud-instance record to the latest snapshot.
  snapshot = data_store.REL_DB.ReadClientSnapshot(self.client_id)
  snapshot.cloud_instance = rdf_cloud.CloudInstance(
      cloud_type=rdf_cloud.CloudInstance.InstanceType.AMAZON,
      amazon=rdf_cloud.AmazonCloudInstance(instance_id="foo/bar"))
  data_store.REL_DB.WriteClientSnapshot(snapshot)

  full_info = data_store.REL_DB.ReadClientFullInfo(self.client_id)
  metadata = export.GetMetadata(self.client_id, full_info)

  self.assertEqual(metadata.cloud_instance_type,
                   metadata.CloudInstanceType.AMAZON)
  self.assertEqual(metadata.cloud_instance_id, "foo/bar")
def testGetMetadataWithGoogleCloudInstanceID(self):
  """Google cloud-instance data is reflected in the exported metadata."""
  fixture_test_lib.ClientFixture(self.client_id)

  # Attach a Google cloud-instance record to the latest snapshot.
  snapshot = data_store.REL_DB.ReadClientSnapshot(self.client_id)
  snapshot.cloud_instance = rdf_cloud.CloudInstance(
      cloud_type=rdf_cloud.CloudInstance.InstanceType.GOOGLE,
      google=rdf_cloud.GoogleCloudInstance(unique_id="foo/bar"))
  data_store.REL_DB.WriteClientSnapshot(snapshot)

  full_info = data_store.REL_DB.ReadClientFullInfo(self.client_id)
  metadata = export.GetMetadata(self.client_id, full_info)

  self.assertEqual(metadata.cloud_instance_type,
                   metadata.CloudInstanceType.GOOGLE)
  self.assertEqual(metadata.cloud_instance_id, "foo/bar")
def _GetMetadataForClients(self, client_urns):
  """Fetches metadata for a given list of clients."""
  resolved = {}
  missing = set()

  # Serve from the per-instance cache first; collect the cache misses.
  for client_urn in client_urns:
    if client_urn in self._cached_metadata:
      resolved[client_urn] = self._cached_metadata[client_urn]
    else:
      missing.add(client_urn)

  if missing:
    # Read the remaining clients from whichever datastore is active.
    if data_store.RelationalDBEnabled():
      client_ids = set(urn.Basename() for urn in missing)
      infos = data_store.REL_DB.MultiReadClientFullInfo(client_ids)
      fetched_metadata = [
          export.GetMetadata(client_id, info)
          for client_id, info in infos.items()
      ]
    else:
      client_fds = aff4.FACTORY.MultiOpen(
          missing, mode="r", token=self.token)
      fetched_metadata = [
          export.GetMetadataLegacy(client_fd, token=self.token)
          for client_fd in client_fds
      ]

    for metadata in fetched_metadata:
      metadata.source_urn = self.source_urn
      self._cached_metadata[metadata.client_urn] = metadata
      resolved[metadata.client_urn] = metadata
      missing.remove(metadata.client_urn)

    # Clients the datastore did not return still get a (mostly empty)
    # metadata entry so the result aligns with client_urns.
    for client_urn in missing:
      placeholder = export.ExportedMetadata(source_urn=self.source_urn)
      resolved[client_urn] = placeholder
      self._cached_metadata[client_urn] = placeholder

  # Preserve the caller's ordering.
  return [resolved[client_urn] for client_urn in client_urns]
def _GetClientMetadata(self, client_id: Text) -> export.ExportedMetadata:
  """Builds export metadata for a single client."""
  full_info = data_store.REL_DB.ReadClientFullInfo(client_id)
  metadata = export.GetMetadata(client_id, full_info)
  # The timestamp is sent outside of the metadata, so clear it here.
  metadata.timestamp = None
  return metadata
def BatchConvert(self, metadata_value_pairs):
  """Converts a batch of GrrMessages into a set of RDFValues at once.

  Args:
    metadata_value_pairs: a list or a generator of tuples (metadata, value),
      where metadata is ExportedMetadata to be used for conversion and value
      is a GrrMessage to be converted.

  Returns:
    Resulting RDFValues. Empty list is a valid result and means that
    conversion wasn't possible.
  """
  # Group messages by source (i.e. by client urn).
  msg_dict = {}
  for metadata, msg in metadata_value_pairs:
    msg_dict.setdefault(msg.source, []).append((metadata, msg))

  metadata_objects = []
  metadata_to_fetch = []

  # Open the clients we don't have metadata for and fetch metadata.
  # self.cached_metadata persists across BatchConvert calls on this instance.
  for client_urn in msg_dict:
    try:
      metadata_objects.append(self.cached_metadata[client_urn])
    except KeyError:
      metadata_to_fetch.append(client_urn)

  if metadata_to_fetch:
    # One bulk datastore read for all uncached clients.
    client_ids = set(urn.Basename() for urn in metadata_to_fetch)
    infos = data_store.REL_DB.MultiReadClientFullInfo(client_ids)
    fetched_metadata = [
        export.GetMetadata(client_id, info)
        for client_id, info in infos.items()
    ]
    for metadata in fetched_metadata:
      self.cached_metadata[metadata.client_urn] = metadata
    metadata_objects.extend(fetched_metadata)

  # Bucket (metadata, payload) pairs by payload class name so that each
  # bucket can be fed to the converters registered for that value type.
  data_by_type = {}
  for metadata in metadata_objects:
    try:
      for original_metadata, message in msg_dict[metadata.client_urn]:
        # Get source_urn and annotations from the original metadata
        # provided.
        new_metadata = base.ExportedMetadata(metadata)
        new_metadata.source_urn = original_metadata.source_urn
        new_metadata.annotations = original_metadata.annotations
        cls_name = message.payload.__class__.__name__

        # Create a dict of values for conversion keyed by type, so we can
        # apply the right converters to the right object types
        if cls_name not in data_by_type:
          # Converter classes are looked up and instantiated only once per
          # payload type, on first encounter.
          converters_classes = export_converters_registry.GetConvertersByValue(
              message.payload)
          data_by_type[cls_name] = {
              "converters": [cls(self.options) for cls in converters_classes],
              "batch_data": [(new_metadata, message.payload)]
          }
        else:
          data_by_type[cls_name]["batch_data"].append(
              (new_metadata, message.payload))
    except KeyError:
      # NOTE(review): this silently skips clients whose urn is absent from
      # msg_dict — presumably cached metadata for clients not in this batch.
      # Beware: a KeyError raised anywhere inside the inner loop is swallowed
      # here as well.
      pass

  # Run all converters against all objects of the relevant type
  converted_batch = []
  for dataset in data_by_type.values():
    for converter in dataset["converters"]:
      converted_batch.extend(converter.BatchConvert(dataset["batch_data"]))

  return converted_batch