Example #1
    def ProcessResponses(self,
                         plugin_args=None,
                         responses=None,
                         process_responses_separately=False):
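        # Build the plugin under test, pointing it at this test's result
        # collection and output location.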
        plugin = bigquery_plugin.BigQueryOutputPlugin(
            source_urn=self.results_urn,
            output_base_urn=self.base_urn,
            args=plugin_args,
            token=self.token)

        plugin.Initialize()

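        # Wrap each response in a GrrMessage, as the flow framework would
        # when delivering client results to an output plugin.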
        messages = []
        for response in responses:
            messages.append(
                rdf_flows.GrrMessage(source=self.client_id, payload=response))

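        # Pin the clock (the timestamp ends up in the output filenames) and
        # stub out the BigQuery client so no real uploads happen.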
        with test_lib.FakeTime(1445995873):
            with mock.patch.object(bigquery,
                                   "GetBigQueryClient") as mock_bigquery:
                if process_responses_separately:
                    for message in messages:
                        plugin.ProcessResponses([message])
                else:
                    plugin.ProcessResponses(messages)

                plugin.Flush()

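        # Return the positional arguments of every InsertData call made on
        # the mocked client.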
        return [
            x[0] for x in mock_bigquery.return_value.InsertData.call_args_list
        ]
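
A typical caller of this helper looks roughly like the following (a minimal
sketch; the test name, the response values and the expected call count are
illustrative assumptions, not part of the listing):

    def testBigQueryPluginWithValues(self):
        # Hypothetical test built on the helper above: export two processes
        # and inspect what reached the mocked BigQuery client.
        responses = [rdf_client.Process(pid=42), rdf_client.Process(pid=43)]
        output = self.ProcessResponses(
            plugin_args=bigquery_plugin.BigQueryOutputPluginArgs(),
            responses=responses)

        # A single output type (ExportedProcess) should mean a single
        # InsertData call; output[0] holds that call's positional arguments.
        self.assertEqual(len(output), 1)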
Example #2
    def testBigQueryPluginFallbackToAFF4(self):
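        # Make every BigQuery upload fail and verify that the plugin falls
        # back to writing gzipped JSON data and schema files into AFF4.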
        plugin_args = bigquery_plugin.BigQueryOutputPluginArgs()
        responses = [
            rdf_client.StatEntry(
                aff4path=self.client_id.Add("/fs/os/中国新闻网新闻中"),
                pathspec=rdf_paths.PathSpec(path="/中国新闻网新闻中")),
            rdf_client.Process(pid=42),
            rdf_client.Process(pid=43),
            rdf_client.SoftwarePackage(name="test.deb")
        ]
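        # The responses span three export types (file, process and software
        # package), which matters for the max-failures check below.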

        plugin = bigquery_plugin.BigQueryOutputPlugin(
            source_urn=self.results_urn,
            output_base_urn=self.base_urn,
            args=plugin_args,
            token=self.token)

        plugin.Initialize()

        messages = []
        for response in responses:
            messages.append(
                rdf_flows.GrrMessage(source=self.client_id, payload=response))

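        # Stub out the BigQuery client so that every InsertData call raises,
        # simulating a persistently failing upload.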
        with test_lib.FakeTime(1445995873):
            with mock.patch.object(bigquery,
                                   "GetBigQueryClient") as mock_bigquery:
                mock_bigquery.return_value.configure_mock(**{
                    "InsertData.side_effect":
                    bigquery.BigQueryJobUploadError()
                })
                with test_lib.ConfigOverrider(
                    {"BigQuery.max_upload_failures": 2}):
                    for message in messages:
                        plugin.ProcessResponses([message])
                    plugin.Flush()

                    # We have 3 output types but a limit of 2 upload failures, so we
                    # shouldn't try the third one.
                    self.assertEqual(
                        mock_bigquery.return_value.InsertData.call_count, 2)

        # We should have written a data file and a schema file for each type.
        for output_name in [
                "ExportedFile", "ExportedProcess",
                "AutoExportedSoftwarePackage"
        ]:
            schema_fd = aff4.FACTORY.Open(self.base_urn.Add(
                "C-1000000000000000_Results_%s_1445995873.schema" %
                output_name),
                                          token=self.token)
            data_fd = aff4.FACTORY.Open(self.base_urn.Add(
                "C-1000000000000000_Results_%s_1445995873.data" % output_name),
                                        token=self.token)
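            # The plugin writes gzipped JSON lines; wrap the AFF4 stream so
            # its contents can be read back.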
            actual_fd = gzip.GzipFile(None, "r", 9, data_fd)

            if output_name == "ExportedFile":
                self.CompareSchemaToKnownGood(json.load(schema_fd))
                self.assertEqual(
                    json.load(actual_fd)["urn"],
                    self.client_id.Add("/fs/os/中国新闻网新闻中"))
            elif output_name == "ExportedProcess":
                self.assertEqual(json.load(schema_fd)[1]["name"], "pid")
                expected_pids = ["42", "43"]
                for i, line in enumerate(actual_fd):
                    self.assertEqual(json.loads(line)["pid"], expected_pids[i])
            else:
                self.assertEqual(json.load(schema_fd)[1]["name"], "name")
                self.assertEqual(json.load(actual_fd)["name"], "test.deb")

        # Process the same messages to make sure we're re-using the filehandles.
        with test_lib.FakeTime(1445995878):
            with mock.patch.object(bigquery,
                                   "GetBigQueryClient") as mock_bigquery:
                mock_bigquery.return_value.configure_mock(**{
                    "InsertData.side_effect":
                    bigquery.BigQueryJobUploadError()
                })
                with test_lib.ConfigOverrider(
                    {"BigQuery.max_upload_failures": 2}):
                    for message in messages:
                        plugin.ProcessResponses([message])
                    plugin.Flush()

                    # We shouldn't call InsertData at all because we have
                    # already reached the maximum number of upload failures.
                    self.assertEqual(
                        mock_bigquery.return_value.InsertData.call_count, 0)

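        # Re-processing the same messages appended duplicate rows, so each
        # data file now holds twice the original line count.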
        expected_line_counts = {
            "ExportedFile": 2,
            "ExportedProcess": 4,
            "AutoExportedSoftwarePackage": 2
        }
        for output_name in [
                "ExportedFile", "ExportedProcess",
                "AutoExportedSoftwarePackage"
        ]:
            data_fd = aff4.FACTORY.Open(self.base_urn.Add(
                "C-1000000000000000_Results_%s_1445995873.data" % output_name),
                                        token=self.token)
            actual_fd = gzip.GzipFile(None, "r", 9, data_fd)
            self.assertEqual(sum(1 for line in actual_fd),
                             expected_line_counts[output_name])