Example #1
0
    def testGrrMessageConverterMultipleTypes(self):
        """Batch-converts two payload types and checks the resulting classes."""
        client_id = "C.0000000000000000"
        fixture_test_lib.ClientFixture(client_id)

        msg1 = rdf_flows.GrrMessage(payload=DummyTestRDFValue3("some"))
        msg1.source = client_id

        msg2 = rdf_flows.GrrMessage(payload=DummyTestRDFValue5("some2"))
        msg2.source = client_id

        hunt_prefix = "aff4:/hunts/" + str(queues.HUNTS)
        metadata1 = base.ExportedMetadata(
            source_urn=rdfvalue.RDFURN(hunt_prefix + ":000000/Results"))
        metadata2 = base.ExportedMetadata(
            source_urn=rdfvalue.RDFURN(hunt_prefix + ":000001/Results"))

        converter = grr_message.GrrMessageConverter()
        with test_lib.FakeTime(3):
            results = list(
                converter.BatchConvert([(metadata1, msg1), (metadata2, msg2)]))

        self.assertLen(results, 3)
        # RDFValue3 gets converted to RDFValue2 and RDFValue, RDFValue5 stays at 5.
        self.assertCountEqual(
            ["DummyTestRDFValue2", "DummyTestRDFValue1", "DummyTestRDFValue5"],
            [type(result).__name__ for result in results])
Example #2
0
    def testGrrMessageConverterWithOneMissingClient(self):
        """Messages from a client unknown to the datastore are dropped."""
        known_client = "C.0000000000000000"
        missing_client = "C.0000000000000001"
        fixture_test_lib.ClientFixture(known_client)

        msg1 = rdf_flows.GrrMessage(payload=DummyTestRDFValue4("some"))
        msg1.source = known_client

        msg2 = rdf_flows.GrrMessage(payload=DummyTestRDFValue4("some2"))
        msg2.source = missing_client

        hunt_prefix = "aff4:/hunts/" + str(queues.HUNTS)
        metadata1 = base.ExportedMetadata(
            source_urn=rdfvalue.RDFURN(hunt_prefix + ":000000/Results"))
        metadata2 = base.ExportedMetadata(
            source_urn=rdfvalue.RDFURN(hunt_prefix + ":000001/Results"))

        converter = grr_message.GrrMessageConverter()
        with test_lib.FakeTime(3):
            results = list(
                converter.BatchConvert([(metadata1, msg1), (metadata2, msg2)]))

        # Only the known client's message survives the conversion.
        self.assertLen(results, 1)
        self.assertEqual(results[0].timestamp,
                         rdfvalue.RDFDatetime.FromSecondsSinceEpoch(3))
        self.assertEqual(results[0].source_urn,
                         hunt_prefix + ":000000/Results")
Example #3
0
 def ConvertOriginalValue(self, original_value):
     """Runs the data-agnostic converter and returns its single result."""
     converter = data_agnostic.DataAgnosticExportConverter()
     metadata = base.ExportedMetadata(source_urn=rdfvalue.RDFURN("aff4:/foo"))
     results = list(converter.Convert(metadata, original_value))
     self.assertLen(results, 1)
     return results[0]
Example #4
0
    def _GetMetadataForClients(self, client_urns):
        """Fetches metadata for a given list of clients."""

        result = {}
        missing = set()

        # Serve from the cache where possible; remember the cache misses.
        for urn in client_urns:
            if urn in self._cached_metadata:
                result[urn] = self._cached_metadata[urn]
            else:
                missing.add(urn)

        if missing:
            infos = data_store.REL_DB.MultiReadClientFullInfo(
                set(urn.Basename() for urn in missing))

            for client_id, info in infos.items():
                metadata = export.GetMetadata(client_id, info)
                metadata.source_urn = self.source_urn

                self._cached_metadata[metadata.client_urn] = metadata
                result[metadata.client_urn] = metadata
                missing.remove(metadata.client_urn)

            # Clients unknown to the datastore get (cached) default metadata.
            for urn in missing:
                placeholder = base.ExportedMetadata(source_urn=self.source_urn)
                self._cached_metadata[urn] = placeholder
                result[urn] = placeholder

        # Preserve the caller's ordering.
        return [result[urn] for urn in client_urns]
Example #5
0
  def testClientSummaryToExportedClientConverter(self):
    """The metadata hostname is propagated onto the converted client."""
    converter = client_summary.ClientSummaryToExportedClientConverter()
    metadata = base.ExportedMetadata(hostname="ahostname")

    results = list(converter.Convert(metadata, rdf_client.ClientSummary()))

    self.assertLen(results, 1)
    self.assertEqual(results[0].metadata.hostname, "ahostname")
Example #6
0
    def ProcessResponses(self, state, responses):
        """Optionally converts responses, then appends them to the JSON output."""
        annotations = u",".join(self.args.export_options.annotations)
        default_metadata = base.ExportedMetadata(
            annotations=annotations, source_urn=self.source_urn)

        if self.args.convert_values:
            # Conversion itself is thread-safe - we just convert the values.
            converted_responses = export.ConvertValues(
                default_metadata, responses, options=self.args.export_options)
        else:
            converted_responses = responses

        # Writing is not thread-safe; WriteValuesToJSONFile is synchronized.
        self.WriteValuesToJSONFile(state, converted_responses)
Example #7
0
File: export.py  Project: avmi/grr
def GetMetadata(client_id, client_full_info):
    """Builds ExportedMetadata object for a given client id and ClientFullInfo.

    Args:
      client_id: Client id string used as the exported client urn.
      client_full_info: A ClientFullInfo-like object; its last_snapshot
        (if present) and labels are folded into the metadata.

    Returns:
      A populated base.ExportedMetadata instance.
    """

    metadata = base.ExportedMetadata()

    last_snapshot = None
    if client_full_info.HasField("last_snapshot"):
        last_snapshot = client_full_info.last_snapshot

    metadata.client_urn = client_id
    metadata.client_age = client_full_info.metadata.first_seen

    if last_snapshot is not None:
        # Read the knowledge base through the local binding for consistency
        # with the rest of this branch.
        kb = last_snapshot.knowledge_base

        metadata.hostname = kb.fqdn
        metadata.os = kb.os
        metadata.uname = last_snapshot.Uname()
        metadata.os_release = last_snapshot.os_release
        metadata.os_version = last_snapshot.os_version
        metadata.usernames = ",".join(user.username for user in kb.users)

        # Reuse the already-fetched addresses instead of calling
        # GetMacAddresses() a second time.
        addresses = last_snapshot.GetMacAddresses()
        if addresses:
            metadata.mac_address = "\n".join(addresses)
        metadata.hardware_info = last_snapshot.hardware_info
        metadata.kernel_version = last_snapshot.kernel

        ci = last_snapshot.cloud_instance
        if ci is not None:
            if ci.cloud_type == ci.InstanceType.AMAZON:
                metadata.cloud_instance_type = metadata.CloudInstanceType.AMAZON
                metadata.cloud_instance_id = ci.amazon.instance_id
            elif ci.cloud_type == ci.InstanceType.GOOGLE:
                metadata.cloud_instance_type = metadata.CloudInstanceType.GOOGLE
                metadata.cloud_instance_id = ci.google.unique_id

    # Labels owned by "GRR" are system labels; everything else is user-set.
    system_labels = set()
    user_labels = set()
    for l in client_full_info.labels:
        if l.owner == "GRR":
            system_labels.add(l.name)
        else:
            user_labels.add(l.name)

    metadata.labels = ",".join(sorted(system_labels | user_labels))
    metadata.system_labels = ",".join(sorted(system_labels))
    metadata.user_labels = ",".join(sorted(user_labels))

    return metadata
Example #8
0
File: file.py  Project: avmi/grr
    def GetExportedResult(self, original_result, converter, metadata=None):
        """Converts original result via given converter..

        Raises export.ExportError unless the conversion yields exactly one
        value, and returns that value.
        """
        effective_metadata = metadata or base.ExportedMetadata()
        exported = list(converter.Convert(effective_metadata, original_result))

        if not exported:
            raise export.ExportError("Got 0 exported result when a single one "
                                     "was expected.")

        if len(exported) > 1:
            raise export.ExportError(
                "Got > 1 exported results when a single "
                "one was expected, seems like a logical bug.")

        return exported[0]
Example #9
0
    def testExportsFileWithTwoEntries(self):
        """Each cron entry in a file yields its own exported value."""
        entry_specs = [
            dict(minute="1", hour="2", dayofmonth="3", month="4",
                 dayofweek="1", command="bash", comment="foo"),
            dict(minute="aa", hour="bb", dayofmonth="cc", month="dd",
                 dayofweek="ee", command="ps", comment="some"),
        ]
        sample = rdf_cronjobs.CronTabFile(
            path="/etc/foo.cron",
            jobs=[rdf_cronjobs.CronTabEntry(**spec) for spec in entry_specs])

        converter = cron_tab_file.CronTabFileConverter()
        converted = list(
            converter.Convert(base.ExportedMetadata(self.metadata), sample))

        self.assertLen(converted, 2)
        self.assertIsInstance(converted[0], cron_tab_file.ExportedCronTabEntry)

        # Every exported entry carries the metadata, the file path, and the
        # corresponding source entry's fields.
        for actual, expected in zip(converted, entry_specs):
            self.assertEqual(actual.metadata, self.metadata)
            self.assertEqual(actual.cron_file_path, "/etc/foo.cron")
            for field, value in expected.items():
                self.assertEqual(getattr(actual, field), value)
Example #10
0
    def testGrrMessageConverter(self):
        """A wrapped value converts with timestamp and source urn filled in."""
        msg = rdf_flows.GrrMessage(payload=DummyTestRDFValue4("some"))
        msg.source = self.client_id
        fixture_test_lib.ClientFixture(self.client_id)

        hunt_results_urn = ("aff4:/hunts/" + str(queues.HUNTS) +
                            ":000000/Results")
        metadata = base.ExportedMetadata(
            source_urn=rdfvalue.RDFURN(hunt_results_urn))

        with test_lib.FakeTime(2):
            results = list(
                grr_message.GrrMessageConverter().Convert(metadata, msg))

        self.assertLen(results, 1)
        self.assertEqual(results[0].timestamp,
                         rdfvalue.RDFDatetime.FromSecondsSinceEpoch(2))
        self.assertEqual(results[0].source_urn, hunt_results_urn)
Example #11
0
 def setUp(self):
     """Creates a test client and default metadata pointing at it."""
     super().setUp()
     self.client_id = self.SetupClient(0)
     self.metadata = base.ExportedMetadata(client_urn=self.client_id)
Example #12
0
File: osquery_test.py  Project: avmi/grr
 def setUp(self):
     """Creates the osquery converter and fixed client metadata for the tests."""
     super().setUp()
     self.converter = osquery.OsqueryExportConverter()
     self.metadata = base.ExportedMetadata(client_urn="C.48515162342ABCDE")
Example #13
0
文件: grr_message.py 项目: avmi/grr
    def BatchConvert(self, metadata_value_pairs):
        """Converts a batch of GrrMessages into a set of RDFValues at once.

        Args:
          metadata_value_pairs: a list or a generator of tuples (metadata,
            value), where metadata is ExportedMetadata to be used for
            conversion and value is a GrrMessage to be converted.

        Returns:
          Resulting RDFValues. Empty list is a valid result and means that
          conversion wasn't possible.
        """

        # Group messages by source (i.e. by client urn).
        msg_dict = {}
        for metadata, msg in metadata_value_pairs:
            msg_dict.setdefault(msg.source, []).append((metadata, msg))

        metadata_objects = []
        metadata_to_fetch = []

        # Open the clients we don't have metadata for and fetch metadata.
        for client_urn in msg_dict:
            try:
                metadata_objects.append(self.cached_metadata[client_urn])
            except KeyError:
                metadata_to_fetch.append(client_urn)

        if metadata_to_fetch:
            client_ids = set(urn.Basename() for urn in metadata_to_fetch)
            infos = data_store.REL_DB.MultiReadClientFullInfo(client_ids)

            fetched_metadata = [
                export.GetMetadata(client_id, info)
                for client_id, info in infos.items()
            ]

            for metadata in fetched_metadata:
                self.cached_metadata[metadata.client_urn] = metadata
            metadata_objects.extend(fetched_metadata)

        data_by_type = {}
        for metadata in metadata_objects:
            # Metadata whose client urn has no grouped messages is skipped.
            # Using .get() instead of the previous broad `try/except
            # KeyError: pass` keeps that behavior while no longer silently
            # swallowing unrelated KeyErrors raised inside the loop below.
            pairs = msg_dict.get(metadata.client_urn)
            if pairs is None:
                continue

            for original_metadata, message in pairs:
                # Get source_urn and annotations from the original metadata
                # provided.
                new_metadata = base.ExportedMetadata(metadata)
                new_metadata.source_urn = original_metadata.source_urn
                new_metadata.annotations = original_metadata.annotations
                cls_name = message.payload.__class__.__name__

                # Create a dict of values for conversion keyed by type, so we
                # can apply the right converters to the right object types.
                if cls_name not in data_by_type:
                    converters_classes = (
                        export_converters_registry.GetConvertersByValue(
                            message.payload))
                    data_by_type[cls_name] = {
                        "converters":
                        [cls(self.options) for cls in converters_classes],
                        "batch_data": [(new_metadata, message.payload)]
                    }
                else:
                    data_by_type[cls_name]["batch_data"].append(
                        (new_metadata, message.payload))

        # Run all converters against all objects of the relevant type.
        converted_batch = []
        for dataset in data_by_type.values():
            for converter in dataset["converters"]:
                converted_batch.extend(
                    converter.BatchConvert(dataset["batch_data"]))

        return converted_batch