Example #1
    def testRaisesOnImproperFieldsUsage2(self):
        with self.SetUpStatsCollector(self._CreateStatsCollector):
            counter = metrics.Counter(
                "testRaisesOnImproperFieldsUsage2_counter",
                fields=[("dimension", str)])
            int_gauge = metrics.Gauge(
                "testRaisesOnImproperFieldsUsage2_int_gauge",
                int,
                fields=[("dimension", str)])
            event_metric = metrics.Event(
                "testRaisesOnImproperFieldsUsage2_event_metric",
                fields=[("dimension", str)])

        # Check for counters
        with self.assertRaises(ValueError):
            counter.GetValue()
        with self.assertRaises(ValueError):
            counter.GetValue(fields=["a", "b"])

        # Check for gauges
        with self.assertRaises(ValueError):
            int_gauge.GetValue()
        with self.assertRaises(ValueError):
            int_gauge.GetValue(fields=["a", "b"])

        # Check for event metrics
        with self.assertRaises(ValueError):
            event_metric.GetValue()
        with self.assertRaises(ValueError):
            event_metric.GetValue(fields=["a", "b"])
Example #2
    def testSimpleEventMetric(self):
        with self.SetUpStatsCollector(self._CreateStatsCollector):
            event_metric = metrics.Event("testSimpleEventMetric_event_metric",
                                         bins=[0.0, 0.1, 0.2])

        data = event_metric.GetValue()
        self.assertAlmostEqual(0, data.sum)
        self.assertEqual(0, data.count)
        self.assertEqual([-_INF, 0.0, 0.1, 0.2], list(data.bins))
        self.assertEqual({-_INF: 0, 0.0: 0, 0.1: 0, 0.2: 0}, data.bins_heights)

        event_metric.RecordEvent(0.15)
        data = event_metric.GetValue()
        self.assertAlmostEqual(0.15, data.sum)
        self.assertEqual(1, data.count)
        self.assertEqual([-_INF, 0.0, 0.1, 0.2], list(data.bins))
        self.assertEqual({-_INF: 0, 0.0: 0, 0.1: 1, 0.2: 0}, data.bins_heights)

        event_metric.RecordEvent(0.5)
        data = event_metric.GetValue()
        self.assertAlmostEqual(0.65, data.sum)
        self.assertEqual(2, data.count)
        self.assertEqual([-_INF, 0.0, 0.1, 0.2], list(data.bins))
        self.assertEqual({-_INF: 0, 0.0: 0, 0.1: 1, 0.2: 1}, data.bins_heights)

        event_metric.RecordEvent(-0.1)
        data = event_metric.GetValue()
        self.assertAlmostEqual(0.55, data.sum)
        self.assertEqual(3, data.count)
        self.assertEqual([-_INF, 0.0, 0.1, 0.2], list(data.bins))
        self.assertEqual({-_INF: 1, 0.0: 0, 0.1: 1, 0.2: 1}, data.bins_heights)
Example #3
    def testEventMetricWithFields(self):
        with self.SetUpStatsCollector(self._CreateStatsCollector):
            event_metric = metrics.Event(
                "testEventMetricWithFields_event_metric",
                bins=[0.0, 0.1, 0.2],
                fields=[("dimension", str)])

        data = event_metric.GetValue(fields=["dimension_value_1"])
        self.assertAlmostEqual(0, data.sum)
        self.assertEqual(0, data.count)
        self.assertEqual([-_INF, 0.0, 0.1, 0.2], list(data.bins))
        self.assertEqual({-_INF: 0, 0.0: 0, 0.1: 0, 0.2: 0}, data.bins_heights)

        event_metric.RecordEvent(0.15, fields=["dimension_value_1"])
        event_metric.RecordEvent(0.25, fields=["dimension_value_2"])

        data = event_metric.GetValue(fields=["dimension_value_1"])
        self.assertAlmostEqual(0.15, data.sum)
        self.assertEqual(1, data.count)
        self.assertEqual([-_INF, 0.0, 0.1, 0.2], list(data.bins))
        self.assertEqual({-_INF: 0, 0.0: 0, 0.1: 1, 0.2: 0}, data.bins_heights)

        data = event_metric.GetValue(fields=["dimension_value_2"])
        self.assertAlmostEqual(0.25, data.sum)
        self.assertEqual(1, data.count)
        self.assertEqual([-_INF, 0.0, 0.1, 0.2], list(data.bins))
        self.assertEqual({-_INF: 0, 0.0: 0, 0.1: 0, 0.2: 1}, data.bins_heights)
Example #4
    def testMultipleFuncs(self):
        """Tests if multiple decorators produce aggregate stats."""
        with self.SetUpStatsCollector(self._CreateStatsCollector):
            counter = metrics.Counter("testMultipleFuncs_counter")
            event_metric = metrics.Event("testMultipleFuncs_event_metric",
                                         bins=[0, 1, 2])

        @counter.Counted()
        def Func1(n):
            self._Sleep(n)

        @counter.Counted()
        def Func2(n):
            self._Sleep(n)

        @event_metric.Timed()
        def Func3(n):
            self._Sleep(n)

        @event_metric.Timed()
        def Func4(n):
            self._Sleep(n)

        Func1(0.1)
        Func2(0.1)
        self.assertEqual(counter.GetValue(), 2)

        Func3(0.1)
        Func4(1.1)
        m = event_metric.GetValue()
        self.assertEqual(m.bins_heights, {-_INF: 0, 0: 1, 1: 1, 2: 0})
Example #5
    def testGetAllMetricsMetadataWorksCorrectlyOnSimpleMetrics(self):
        counter_name = "testGAMM_SimpleMetrics_counter"
        int_gauge_name = "testGAMM_SimpleMetrics_int_gauge"
        event_metric_name = "testGAMM_SimpleMetrics_event_metric"

        with self.SetUpStatsCollector(self._CreateStatsCollector):
            metrics.Counter(counter_name)
            metrics.Gauge(int_gauge_name, int, fields=[("dimension", str)])
            metrics.Event(event_metric_name)

        metadatas = self.collector.GetAllMetricsMetadata()
        self.assertEqual(metadatas[counter_name].metric_type,
                         rdf_stats.MetricMetadata.MetricType.COUNTER)
        self.assertFalse(metadatas[counter_name].fields_defs)

        self.assertEqual(metadatas[int_gauge_name].metric_type,
                         rdf_stats.MetricMetadata.MetricType.GAUGE)
        self.assertEqual(metadatas[int_gauge_name].fields_defs, [
            rdf_stats.MetricFieldDefinition(
                field_name="dimension",
                field_type=rdf_stats.MetricFieldDefinition.FieldType.STR)
        ])

        self.assertEqual(metadatas[event_metric_name].metric_type,
                         rdf_stats.MetricMetadata.MetricType.EVENT)
        self.assertFalse(metadatas[event_metric_name].fields_defs)
Example #6
  def testTimedDecorator(self):
    with self.SetUpStatsCollector(
        default_stats_collector.DefaultStatsCollector()):
      event = metrics.Event("efoo", fields=[("bar", str)])

    @event.Timed(fields=["baz"])
    def Foo():
      pass

    with self.assertStatsCounterDelta(1, event, fields=["baz"]):
      Foo()
Example #7
    def testCombiningDecorators(self):
        with self.SetUpStatsCollector(self._CreateStatsCollector):
            counter = metrics.Counter("testCombiningDecorators_counter")
            event_metric = metrics.Event(
                "testCombiningDecorators_event_metric", bins=[0.0, 0.1, 0.2])

        @event_metric.Timed()
        @counter.Counted()
        def OverdecoratedFunc(n):
            self._Sleep(n)

        OverdecoratedFunc(0.02)

        # Check if all vars get updated
        m = event_metric.GetValue()
        self.assertEqual(m.bins_heights, {-_INF: 0, 0: 1, 0.1: 0, 0.2: 0})

        self.assertEqual(counter.GetValue(), 1)
Example #8
  def testRaisesOnImproperFieldsUsage1(self):
    with self.SetUpStatsCollector(self._CreateStatsCollector()):
      counter = metrics.Counter("testRaisesOnImproperFieldsUsage1_counter")
      int_gauge = metrics.Gauge("testRaisesOnImproperFieldsUsage1_int_gauge",
                                int)
      event_metric = metrics.Event(
          "testRaisesOnImproperFieldsUsage1_event_metric")

    # Check for counters
    with self.assertRaises(ValueError):
      counter.GetValue(fields=["a"])

    # Check for gauges
    with self.assertRaises(ValueError):
      int_gauge.GetValue(fields=["a"])

    # Check for event metrics
    with self.assertRaises(ValueError):
      event_metric.GetValue(fields=["a", "b"])
Example #9
    def testExceptionHandling(self):
        with self.SetUpStatsCollector(self._CreateStatsCollector):
            counter = metrics.Counter("testExceptionHandling_counter")
            event_metric = metrics.Event("testExceptionHandling_event_metric",
                                         bins=[0, 0.1, 0.2])

        @event_metric.Timed()
        @counter.Counted()
        def RaiseFunc(n):
            self._Sleep(n)
            raise Exception()

        with self.assertRaises(Exception):
            RaiseFunc(0.11)

        # Check if all vars get updated
        m = event_metric.GetValue()
        self.assertEqual(m.bins_heights, {-_INF: 0, 0: 0, 0.1: 1, 0.2: 0})

        self.assertEqual(counter.GetValue(), 1)
Example #10
    def testBinnedTimings(self):
        event_metric_name = "testMaps_event_metric"

        with self.SetUpStatsCollector(self._CreateStatsCollector):
            event_metric = metrics.Event(event_metric_name, bins=[0, 0.1, 0.2])

        @event_metric.Timed()
        def TimedFunc(n):
            self._Sleep(n)

        m = event_metric.GetValue()
        self.assertEqual(m.bins_heights, {-_INF: 0, 0: 0, 0.1: 0, 0.2: 0})

        for _ in range(3):
            TimedFunc(0.01)

        m = event_metric.GetValue()
        self.assertEqual(m.bins_heights, {-_INF: 0, 0: 3, 0.1: 0, 0.2: 0})

        TimedFunc(0.11)
        m = event_metric.GetValue()
        self.assertEqual(m.bins_heights, {-_INF: 0, 0: 3, 0.1: 1, 0.2: 0})
Example #11
    def testGetMetricFieldsWorksCorrectly(self):
        with self.SetUpStatsCollector(self._CreateStatsCollector):
            counter = metrics.Counter(
                "testGetMetricFieldsWorksCorrectly_counter",
                fields=[("dimension1", str), ("dimension2", str)])
            int_gauge = metrics.Gauge(
                "testGetMetricFieldsWorksCorrectly_int_gauge",
                int,
                fields=[("dimension", str)])
            event_metric = metrics.Event(
                "testGetMetricFieldsWorksCorrectly_event_metric",
                fields=[("dimension", str)])

        counter.Increment(fields=["b", "b"])
        counter.Increment(fields=["a", "c"])
        self.assertCountEqual([("a", "c"), ("b", "b")], counter.GetFields())

        int_gauge.SetValue(20, fields=["a"])
        int_gauge.SetValue(30, fields=["b"])
        self.assertCountEqual([("a", ), ("b", )], int_gauge.GetFields())

        event_metric.RecordEvent(0.1, fields=["a"])
        event_metric.RecordEvent(0.1, fields=["b"])
        self.assertCountEqual([("a", ), ("b", )], event_metric.GetFields())
Example #12
 def testEventRegistration(self):
   with self.SetUpStatsCollector(
       default_stats_collector.DefaultStatsCollector()):
     metrics.Event("efoo")
   self.assertIsNotNone(self.collector.GetMetricMetadata("efoo"))
Example #13
import datetime

from google.protobuf import timestamp_pb2

from grr_response_core import config
from grr_response_core.lib import rdfvalue
from grr_response_core.lib.rdfvalues import flows as rdf_flows
from grr_response_core.lib.util import text
from grr_response_core.stats import metrics
from grr_response_server import data_store
from grr_response_server import fleetspeak_connector
from fleetspeak.src.common.proto.fleetspeak import common_pb2 as fs_common_pb2
from fleetspeak.src.common.proto.fleetspeak import system_pb2 as fs_system_pb2
from fleetspeak.src.server.proto.fleetspeak_server import admin_pb2
from fleetspeak.src.server.proto.fleetspeak_server import resource_pb2

FLEETSPEAK_CALL_LATENCY = metrics.Event("fleetspeak_call_latency",
                                        fields=[("call", str)])

WRITE_SINGLE_TRY_TIMEOUT = datetime.timedelta(seconds=30)
WRITE_TOTAL_TIMEOUT = datetime.timedelta(seconds=300)

READ_SINGLE_TRY_TIMEOUT = datetime.timedelta(seconds=60)
READ_TOTAL_TIMEOUT = datetime.timedelta(seconds=120)


def IsFleetspeakEnabledClient(grr_id):
    """Returns whether the provided GRR id is a Fleetspeak client."""
    if grr_id is None:
        return False

    md = data_store.REL_DB.ReadClientMetadata(grr_id)
    if not md:
Example #14
 def testRecordEvent(self):
   with self.SetUpStatsCollector(
       default_stats_collector.DefaultStatsCollector()):
     event = metrics.Event("efoo", fields=[("bar", str)])
   with self.assertStatsCounterDelta(1, event, fields=["baz"]):
     event.RecordEvent(42, fields=["baz"])
Example #15
import abc
import time
from typing import Dict, Iterable, List, Optional

from grr_response_core.lib import rdfvalue
from grr_response_core.lib.util import precondition
from grr_response_core.stats import metrics
from grr_response_server.rdfvalues import objects as rdf_objects

# Global blob stores registry.
#
# NOTE: this is a rudimentary registry that will be migrated to the uniform
# registry approach by hanuszczak@ (currently in the works).
REGISTRY = {}

BLOB_STORE_POLL_HIT_LATENCY = metrics.Event(
    "blob_store_poll_hit_latency",
    bins=[0.05, 0.1, 0.2, 0.5, 1, 2, 5, 10, 20, 50])
BLOB_STORE_POLL_HIT_ITERATION = metrics.Event("blob_store_poll_hit_iteration",
                                              bins=[1, 2, 5, 10, 20, 50])


class BlobStoreTimeoutError(Exception):
    """An exception class raised when certain blob store operation times out."""


class BlobStore(metaclass=abc.ABCMeta):
    """The blob store base class."""
    def WriteBlobsWithUnknownHashes(self, blobs_data):
        """Writes the contents of the given blobs, using their hash as BlobID.

    Args:
Example #16
from __future__ import division
from __future__ import unicode_literals

import functools
import logging
import time

from typing import Text

from grr_response_core.lib import utils
from grr_response_core.lib.util import precondition
from grr_response_core.stats import metrics
from grr_response_server.databases import db

DB_REQUEST_LATENCY = metrics.Event(
    "db_request_latency",
    fields=[("call", str)],
    bins=[0.05 * 1.2**x for x in range(30)])  # 50ms to ~10 secs
DB_REQUEST_ERRORS = metrics.Counter("db_request_errors",
                                    fields=[("call", str), ("type", str)])


class Error(Exception):
    pass


class FlowIDIsNotAnIntegerError(Error):
    pass


class OutputPluginIDIsNotAnIntegerError(Error):
    pass
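
The excerpt above only defines the two database metrics. As a rough usage sketch (not part of the original module), the hypothetical wrapper below shows how a single database call could be instrumented with them; the wrapper and the ReadClientMetadata call it delegates to are assumptions, and only the Timed decorator and Counter.Increment calls demonstrated in the test examples are used.

def _TimedReadClientMetadata(delegate, client_id):
    """Hypothetical wrapper: times the call and counts errors by call and type."""

    @DB_REQUEST_LATENCY.Timed(fields=["ReadClientMetadata"])
    def _Call():
        return delegate.ReadClientMetadata(client_id)

    try:
        return _Call()
    except Exception as e:  # Count the error per call name and exception type.
        DB_REQUEST_ERRORS.Increment(fields=["ReadClientMetadata", type(e).__name__])
        raise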
Example #17
from grr_response_core import config
from grr_response_core.lib.util import compatibility
from grr_response_core.lib.util import precondition
from grr_response_core.stats import metrics
from grr_response_server import blob_store
from grr_response_server.rdfvalues import objects as rdf_objects

# Maximum queue length, where each queue entry can consist of multiple blobs.
# Thus the number of enqueued blobs can be considerably bigger. This only
# serves as a basic measure to prevent unbounded memory growth.
_SECONDARY_WRITE_QUEUE_MAX_LENGTH = 30


DUAL_BLOB_STORE_LATENCY = metrics.Event(
    "dual_blob_store_latency",
    fields=[("backend_class", str), ("method", str)],
    bins=[0.05, 0.1, 0.2, 0.5, 1, 2, 5, 10, 20, 50])
DUAL_BLOB_STORE_OP_SIZE = metrics.Event(
    "dual_blob_store_op_size",
    fields=[("backend_class", str), ("method", str)],
    bins=[0, 1, 2, 5, 10, 20, 50, 100, 200, 500])
DUAL_BLOB_STORE_SUCCESS_COUNT = metrics.Counter(
    "dual_blob_store_success_count",
    fields=[("backend_class", str), ("method", str)])
DUAL_BLOB_STORE_ERROR_COUNT = metrics.Counter(
    "dual_blob_store_error_count",
    fields=[("backend_class", str), ("method", str)])
DUAL_BLOB_STORE_DISCARD_COUNT = metrics.Counter(
    "dual_blob_store_discard_count",
    fields=[("backend_class", str), ("method", str)])
Example #18
from grr_response_core.lib.util import compatibility
from grr_response_core.lib.util import precondition
from grr_response_core.lib.util.compat import json
from grr_response_core.stats import metrics
from grr_response_server import access_control
from grr_response_server import data_store
from grr_response_server.gui import api_auth_manager
from grr_response_server.gui import api_call_context
from grr_response_server.gui import api_call_handler_base
from grr_response_server.gui import api_call_router
from grr_response_server.gui import api_value_renderers
from grr_response_server.gui import http_response


API_METHOD_LATENCY = metrics.Event(
    "api_method_latency",
    fields=[("method_name", str), ("protocol", str), ("status", str)])
API_ACCESS_PROBE_LATENCY = metrics.Event(
    "api_access_probe_latency",
    fields=[("method_name", str), ("protocol", str), ("status", str)])


class Error(Exception):
  pass


class PostRequestParsingError(Error):
  pass


class UnsupportedHttpMethod(Error):
  pass
Example #19
from grr_response_core.stats import metrics
from grr_response_server import data_store
from grr_response_server import hunt
from grr_response_server import threadpool
from grr_response_server.rdfvalues import cronjobs as rdf_cronjobs

# The maximum number of log-messages to store in the DB for a given cron-job
# run.
_MAX_LOG_MESSAGES = 20


CRON_JOB_FAILURE = metrics.Counter(
    "cron_job_failure", fields=[("cron_job_id", str)])
CRON_JOB_TIMEOUT = metrics.Counter(
    "cron_job_timeout", fields=[("cron_job_id", str)])
CRON_JOB_LATENCY = metrics.Event(
    "cron_job_latency", fields=[("cron_job_id", str)])

CRON_JOB_USERNAME = "******"


class Error(Exception):
  pass


class OneOrMoreCronJobsFailedError(Error):

  def __init__(self, failure_map):
    message = "One or more cron jobs failed unexpectedly: " + ", ".join(
        "%s=%s" % (k, v) for k, v in failure_map.items())
    super().__init__(message)
    self.failure_map = failure_map
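
As a usage sketch (not present in the original module), the hypothetical helper below records the outcome of a single cron-job run with the metrics defined above, using only the RecordEvent and Increment calls shown in the earlier examples.

def _RecordCronJobRun(cron_job_id, elapsed_secs, failed=False, timed_out=False):
  """Hypothetical helper recording one cron-job run with the metrics above."""
  CRON_JOB_LATENCY.RecordEvent(elapsed_secs, fields=[cron_job_id])
  if timed_out:
    CRON_JOB_TIMEOUT.Increment(fields=[cron_job_id])
  if failed:
    CRON_JOB_FAILURE.Increment(fields=[cron_job_id])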
Example #20
from grr_response_core.stats import metrics
from grr_response_server import access_control
from grr_response_server import data_store
from grr_response_server.gui import api_auth_manager
from grr_response_server.gui import api_call_context
from grr_response_server.gui import api_call_handler_base
from grr_response_server.gui import api_call_router
from grr_response_server.gui import api_value_renderers
from grr_response_server.gui import http_response

_FIELDS = (
    ("method_name", str),
    ("protocol", str),
    ("status", str),
    ("origin", str),
)
API_METHOD_LATENCY = metrics.Event("api_method_latency", fields=_FIELDS)
API_ACCESS_PROBE_LATENCY = metrics.Event("api_access_probe_latency",
                                         fields=_FIELDS)


class Error(Exception):
    pass


class PostRequestParsingError(Error):
    pass


class UnsupportedHttpMethod(Error):
    pass
Example #21
FRONTEND_ACTIVE_COUNT = metrics.Gauge("frontend_active_count",
                                      int,
                                      fields=[("source", str)])
FRONTEND_MAX_ACTIVE_COUNT = metrics.Gauge("frontend_max_active_count", int)
FRONTEND_HTTP_REQUESTS = metrics.Counter("frontend_http_requests",
                                         fields=[("action", str),
                                                 ("protocol", str)])
FRONTEND_IN_BYTES = metrics.Counter("frontend_in_bytes",
                                    fields=[("source", str)])
FRONTEND_OUT_BYTES = metrics.Counter("frontend_out_bytes",
                                     fields=[("source", str)])
FRONTEND_REQUEST_COUNT = metrics.Counter("frontend_request_count",
                                         fields=[("source", str)])
FRONTEND_INACTIVE_REQUEST_COUNT = metrics.Counter(
    "frontend_inactive_request_count", fields=[("source", str)])
FRONTEND_REQUEST_LATENCY = metrics.Event("frontend_request_latency",
                                         fields=[("source", str)])
GRR_FRONTENDSERVER_HANDLE_TIME = metrics.Event(
    "grr_frontendserver_handle_time")
GRR_FRONTENDSERVER_HANDLE_NUM = metrics.Counter(
    "grr_frontendserver_handle_num")
GRR_MESSAGES_SENT = metrics.Counter("grr_messages_sent")
GRR_UNIQUE_CLIENTS = metrics.Counter("grr_unique_clients")


class ServerCommunicator(communicator.Communicator):
    """A communicator which stores certificates using the relational db."""
    def __init__(self, certificate, private_key):
        super().__init__(certificate=certificate, private_key=private_key)
        self.common_name = self.certificate.GetCN()

    def _GetRemotePublicKey(self, common_name):
Example #22
from grr_response_core.stats import metrics

STOP_MESSAGE = "Stop message"

THREADPOOL_OUTSTANDING_TASKS = metrics.Gauge("threadpool_outstanding_tasks",
                                             int,
                                             fields=[("pool_name", str)])
THREADPOOL_THREADS = metrics.Gauge("threadpool_threads",
                                   int,
                                   fields=[("pool_name", str)])
THREADPOOL_CPU_USE = metrics.Gauge("threadpool_cpu_use",
                                   float,
                                   fields=[("pool_name", str)])
THREADPOOL_TASK_EXCEPTIONS = metrics.Counter("threadpool_task_exceptions",
                                             fields=[("pool_name", str)])
THREADPOOL_WORKING_TIME = metrics.Event("threadpool_working_time",
                                        fields=[("pool_name", str)])
THREADPOOL_QUEUEING_TIME = metrics.Event("threadpool_queueing_time",
                                         fields=[("pool_name", str)])


class Error(Exception):
    pass


class ThreadPoolNotStartedError(Error):
    """Raised when a task is added to a not-yet-started pool."""


class DuplicateThreadpoolError(Error):
    """Raised when a thread pool with the same name already exists."""