Example #1
    def test_event_title(self):
        stats = MetricsAggregator('myhost', utf8_decoding=True)
        stats.submit_packets('_e{0,4}:|text')
        stats.submit_packets(u'_e{9,4}:2intitulé|text'.encode('utf-8')) # comes from socket
        stats.submit_packets('_e{14,4}:3title content|text')
        stats.submit_packets('_e{14,4}:4title|content|text')
        stats.submit_packets('_e{13,4}:5title\\ntitle|text') # \n stays escaped

        events = self.sort_events(stats.flush_events())

        assert len(events) == 5

        nt.assert_equal(events[0]['msg_title'], '')
        nt.assert_equal(events[1]['msg_title'], u'2intitulé')
        nt.assert_equal(events[2]['msg_title'], '3title content')
        nt.assert_equal(events[3]['msg_title'], '4title|content')
        nt.assert_equal(events[4]['msg_title'], '5title\\ntitle')
Example #2
    def test_sampled_histogram(self):
        # Submit a sampled histogram.
        # The min is not enabled by default
        stats = MetricsAggregator(
            'myhost',
            histogram_aggregates=DEFAULT_HISTOGRAM_AGGREGATES+['min']
        )
        stats.submit_packets('sampled.hist:5|h|@0.5')


        # Assert we scale up properly.
        metrics = self.sort_metrics(stats.flush())
        p95, pavg, pcount, pmax, pmed, pmin = metrics

        nt.assert_equal(pcount['points'][0][1], 2)
        for p in [p95, pavg, pmed, pmax, pmin]:
            nt.assert_equal(p['points'][0][1], 5)
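
The count of 2 asserted above comes from StatsD sample-rate scaling: a packet tagged @0.5 represents only half of the real events, so the aggregator multiplies the count by 1/0.5, while value-based aggregates (avg, median, max, min, 95percentile) keep the raw sample value. A minimal sketch of that arithmetic, as an illustration only (not part of the test suite above):

# Hypothetical illustration of sample-rate scaling; mirrors the assertions above.
sample_rate = 0.5
submitted_samples = 1                          # one 'sampled.hist:5|h|@0.5' packet
scaled_count = submitted_samples / sample_rate
assert scaled_count == 2                       # matches pcount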
Example #3
    def test_histogram_normalization(self):
        # The min is not enabled by default
        stats = MetricsAggregator(
            'myhost',
            interval=10,
            histogram_aggregates=DEFAULT_HISTOGRAM_AGGREGATES+['min']
        )
        for i in range(5):
            stats.submit_packets('h1:1|h')
        for i in range(20):
            stats.submit_packets('h2:1|h')

        metrics = self.sort_metrics(stats.flush())
        _, _, h1count, _, _, _, _, _, h2count, _, _, _ = metrics

        nt.assert_equal(h1count['points'][0][1], 0.5)
        nt.assert_equal(h2count['points'][0][1], 2)
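
The 0.5 and 2 asserted above come from interval normalization: with interval=10, the histogram .count aggregate is reported as a per-second rate, i.e. the number of submitted samples divided by the flush interval. A minimal sketch, as an illustration only (not part of the test suite above):

# Hypothetical illustration of per-interval normalization; mirrors the assertions above.
interval = 10
h1_samples, h2_samples = 5, 20
assert h1_samples / float(interval) == 0.5     # h1count
assert h2_samples / float(interval) == 2       # h2count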
Example #4
    def test_batch_submission(self):
        # Submit several metrics in a single packet.
        stats = MetricsAggregator('myhost')
        metrics = [
            'counter:1|c',
            'counter:1|c',
            'gauge:1|g'
        ]
        packet = '\n'.join(metrics)
        stats.submit_packets(packet)

        metrics = self.sort_metrics(stats.flush()[:-1])
        assert len(metrics) == 2

        counter, gauge = metrics
        assert counter['points'][0][1] == 2
        assert gauge['points'][0][1] == 1
Example #5
    def test_rate_errors(self):
        stats = MetricsAggregator('myhost')
        stats.submit_packets('my.rate:10|_dd-r')
        # Sleep 1 second so the time interval > 0 (timestamp is converted to an int)
        time.sleep(1)
        stats.submit_packets('my.rate:9|_dd-r')

        # Since the difference < 0 we shouldn't get a value
        metrics = stats.flush()
        nt.assert_equal(len(metrics), 0)

        stats.submit_packets('my.rate:10|_dd-r')
        # Submit both packets within the same second so the time interval is 0
        stats.submit_packets('my.rate:40|_dd-r')

        metrics = stats.flush()
        nt.assert_equal(len(metrics), 0)
Example #6
    def test_service_check_basic(self):
        stats = MetricsAggregator('myhost')
        stats.submit_packets('_sc|check.1|0')
        stats.submit_packets('_sc|check.2|1')
        stats.submit_packets('_sc|check.3|2')

        service_checks = self.sort_service_checks(stats.flush_service_checks())

        assert len(service_checks) == 3
        first, second, third = service_checks

        assert first['check'] == 'check.1'
        assert first['status'] == 0
        assert second['check'] == 'check.2'
        assert second['status'] == 1
        assert third['check'] == 'check.3'
        assert third['status'] == 2
Example #7
    def test_service_check_basic(self):
        stats = MetricsAggregator('myhost')
        stats.submit_packets('_sc|check.1|0')
        stats.submit_packets('_sc|check.2|1')
        stats.submit_packets('_sc|check.3|2')

        service_checks = self.sort_service_checks(stats.flush_service_checks())

        assert len(service_checks) == 3
        first, second, third = service_checks

        nt.assert_equal(first['check'], 'check.1')
        nt.assert_equal(first['status'], 0)
        nt.assert_equal(second['check'], 'check.2')
        nt.assert_equal(second['status'], 1)
        nt.assert_equal(third['check'], 'check.3')
        nt.assert_equal(third['status'], 2)
Example #8
    def __init__(self, name, init_config, agentConfig, instances=None):
        """
        Initialize a new check.

        :param name: The name of the check
        :param init_config: The config for initializing the check
        :param agentConfig: The global configuration for the agent
        :param instances: A list of configuration objects for each instance.
        """
        from aggregator import MetricsAggregator

        self._enabled_checks.append(name)
        self._enabled_checks = list(set(self._enabled_checks))

        self.name = name
        self.init_config = init_config or {}
        self.agentConfig = agentConfig
        self.in_developer_mode = agentConfig.get('developer_mode') and psutil
        self._internal_profiling_stats = None

        self.hostname = agentConfig.get('checksd_hostname') or get_hostname(
            agentConfig)
        self.log = logging.getLogger('%s.%s' % (__name__, name))
        self.aggregator = MetricsAggregator(
            self.hostname,
            formatter=agent_formatter,
            recent_point_threshold=agentConfig.get('recent_point_threshold',
                                                   None),
            histogram_aggregates=agentConfig.get('histogram_aggregates'),
            histogram_percentiles=agentConfig.get('histogram_percentiles'))

        if Platform.is_linux() and psutil is not None:
            procfs_path = self.agentConfig.get('procfs_path',
                                               '/proc').rstrip('/')
            psutil.PROCFS_PATH = procfs_path

        self.events = []
        self.service_checks = []
        self.instances = instances or []
        self.warnings = []
        self.library_versions = None
        self.last_collection_time = defaultdict(int)
        self._instance_metadata = []
        self.svc_metadata = []
        self.historate_dict = {}
Example #9
    def __init__(self, name, init_config, agentConfig):
        """
        Initialize a new check.

        :param name: The name of the check
        :param init_config: The config for initializing the check
        :param agentConfig: The global configuration for the agent
        """
        from aggregator import MetricsAggregator

        self.name = name
        self.init_config = init_config
        self.agentConfig = agentConfig
        self.hostname = gethostname(agentConfig)
        self.log = logging.getLogger('checks.%s' % name)
        self.aggregator = MetricsAggregator(self.hostname,
                                            formatter=agent_formatter)
        self.events = []
Example #10
    def test_string_sets(self):
        stats = MetricsAggregator('myhost')
        stats.submit_packets('my.set:string|s')
        stats.submit_packets('my.set:sets|s')
        stats.submit_packets('my.set:sets|s')
        stats.submit_packets('my.set:test|s')
        stats.submit_packets('my.set:test|s')
        stats.submit_packets('my.set:test|s')

        # Assert that it's treated normally.
        metrics = stats.flush()
        assert len(metrics) == 1
        m = metrics[0]
        assert m['metric'] == 'my.set'
        assert m['points'][0][1] == 3

        # Assert there are no more sets
        assert not stats.flush()
Example #11
    def test_event_text(self):
        stats = MetricsAggregator('myhost')
        stats.submit_packets('_e{2,0}:t1|')
        stats.submit_packets('_e{2,12}:t2|text|content')
        stats.submit_packets(
            '_e{2,23}:t3|First line\\nSecond line')  # \n is a newline
        stats.submit_packets(
            u'_e{2,19}:t4|♬ †øU †øU ¥ºu T0µ ♪')  # utf-8 compliant

        events = self.sort_events(stats.flush_events())

        assert len(events) == 4
        first, second, third, fourth = events

        nt.assert_equal(first['msg_text'], '')
        nt.assert_equal(second['msg_text'], 'text|content')
        nt.assert_equal(third['msg_text'], 'First line\nSecond line')
        nt.assert_equal(fourth['msg_text'], u'♬ †øU †øU ¥ºu T0µ ♪')
Example #12
    def test_string_sets(self):
        stats = MetricsAggregator('myhost')
        stats.submit_packets('my.set:string|s')
        stats.submit_packets('my.set:sets|s')
        stats.submit_packets('my.set:sets|s')
        stats.submit_packets('my.set:test|s')
        stats.submit_packets('my.set:test|s')
        stats.submit_packets('my.set:test|s')

        # Assert that it's treated normally.
        metrics = stats.flush()
        nt.assert_equal(len(metrics), 1)
        m = metrics[0]
        nt.assert_equal(m['metric'], 'my.set')
        nt.assert_equal(m['points'][0][1], 3)

        # Assert there are no more sets
        assert not stats.flush()
Example #13
    def test_custom_single_percentile(self):
        stats = MetricsAggregator('myhost', histogram_percentiles=[0.4])

        assert stats.metric_config[Histogram]['percentiles'] == [0.40]

        for i in xrange(20):
            stats.submit_packets('myhistogram:{0}|h'.format(i))

        metrics = stats.flush()

        assert len(metrics) == 5

        value_by_type = {}
        for k in metrics:
            suffix = k['metric'][len('myhistogram') + 1:]
            value_by_type[suffix] = k['points'][0][1]

        assert value_by_type['40percentile'] == 7
Example #14
    def test_sets(self):
        stats = MetricsAggregator('myhost')
        stats.submit_packets('my.set:10|s')
        stats.submit_packets('my.set:20|s')
        stats.submit_packets('my.set:20|s')
        stats.submit_packets('my.set:30|s')
        stats.submit_packets('my.set:30|s')
        stats.submit_packets('my.set:30|s')

        # Assert that it's treated normally.
        metrics = stats.flush()
        nt.assert_equal(len(metrics), 1)
        m = metrics[0]
        nt.assert_equal(m[0], 'my.set')
        nt.assert_equal(m[2], 3)

        # Assert there are no more sets
        assert not stats.flush()
Example #15
    def test_histogram_counter(self):
        # Test whether histogram.count == increment
        # same deal with a sample rate
        cnt = 100000
        for run in [1, 2]:
            stats = MetricsAggregator('myhost')
            for i in xrange(cnt):
                if run == 2:
                    stats.submit_packets('test.counter:1|c|@0.5')
                    stats.submit_packets('test.hist:1|ms|@0.5')
                else:
                    stats.submit_packets('test.counter:1|c')
                    stats.submit_packets('test.hist:1|ms')
            metrics = self.sort_metrics(stats.flush())
            assert len(metrics) > 0

            nt.assert_equal([m['points'][0][1] for m in metrics if m['metric'] == 'test.counter'], [cnt * run])
            nt.assert_equal([m['points'][0][1] for m in metrics if m['metric'] == 'test.hist.count'], [cnt * run])
Example #16
    def test_event_text_utf8(self):
        stats = MetricsAggregator('myhost', utf8_decoding=True)
        # Should raise because content is not encoded

        self.assertRaises(Exception, stats.submit_packets,
                          u'_e{2,19}:t4|♬ †øU †øU ¥ºu T0µ ♪')
        stats.submit_packets(u'_e{2,19}:t4|♬ †øU †øU ¥ºu T0µ ♪'.encode(
            'utf-8'))  # utf-8 compliant
        # Normal packet
        stats.submit_packets(
            '_e{2,23}:t3|First line\\nSecond line')  # \n is a newline

        events = self.sort_events(stats.flush_events())

        assert len(events) == 2

        nt.assert_equal(events[0]['msg_text'], 'First line\nSecond line')
        nt.assert_equal(events[1]['msg_text'], u'♬ †øU †øU ¥ºu T0µ ♪')
Example #17
    def test_rate(self):
        stats = MetricsAggregator('myhost')
        stats.submit_packets('my.rate:10|_dd-r')
        # Sleep 1 second so the time interval > 0
        time.sleep(1)
        stats.submit_packets('my.rate:40|_dd-r')

        # Check that the rate is calculated correctly
        metrics = stats.flush()
        assert len(metrics) == 2
        m = metrics[0]
        assert m['metric'] == 'my.rate'
        assert m['points'][0][1] == 30

        # Assert that no more rates are given
        metrics = stats.flush()
        assert len(metrics) == 1
        assert metrics[0]['metric'] == 'datadog.agent.running'
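
The value of 30 asserted above is the computed rate: the difference between the two submitted values divided by the elapsed time between them, here roughly (40 - 10) / 1s. A minimal sketch, as an illustration only (not part of the test suite above):

# Hypothetical illustration of the _dd-r rate computation; mirrors the assertion above.
first_value, second_value = 10, 40
elapsed_seconds = 1
assert (second_value - first_value) / elapsed_seconds == 30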
Example #18
    def test_event_title(self):
        stats = MetricsAggregator('myhost')
        stats.submit_packets('_e{0,4}:|text')
        stats.submit_packets(u'_e{9,4}:2intitulé|text')
        stats.submit_packets('_e{14,4}:3title content|text')
        stats.submit_packets('_e{14,4}:4title|content|text')
        stats.submit_packets(
            '_e{13,4}:5title\\ntitle|text')  # \n stays escaped

        events = self.sort_events(stats.flush_events())

        assert len(events) == 5
        first, second, third, fourth, fifth = events

        nt.assert_equal(first['msg_title'], '')
        nt.assert_equal(second['msg_title'], u'2intitulé')
        nt.assert_equal(third['msg_title'], '3title content')
        nt.assert_equal(fourth['msg_title'], '4title|content')
        nt.assert_equal(fifth['msg_title'], '5title\\ntitle')
Example #19
    def test_packet_string_endings(self):
        stats = MetricsAggregator('myhost')

        stats.submit_packets('line_ending.generic:500|c')
        stats.submit_packets('line_ending.unix:400|c\n')
        stats.submit_packets('line_ending.windows:300|c\r\n')

        metrics = self.sort_metrics(stats.flush()[:-1])
        assert len(metrics) == 3

        first, second, third = metrics
        assert first['metric'] == 'line_ending.generic'
        assert first['points'][0][1] == 500

        assert second['metric'] == 'line_ending.unix'
        assert second['points'][0][1] == 400

        assert third['metric'] == 'line_ending.windows'
        assert third['points'][0][1] == 300
Example #20
    def test_service_check_tag_key_ends_with_m(self):
        stats = MetricsAggregator('myhost')
        stats.submit_packets('_sc|check.1|0|#keym:value')
        stats.submit_packets('_sc|check.2|0|#key2m:value|m:fakeout')
        stats.submit_packets(r'_sc|check.3|0|#key:valuem:value2,key2:value2|m:fakeoutm\:|h:#5')

        service_checks = self.sort_service_checks(stats.flush_service_checks())
        assert len(service_checks) == 3
        first, second, third = service_checks

        assert first['check'] == 'check.1'
        assert first['tags'] == ['keym:value']
        assert 'message' not in first
        assert second['check'] == 'check.2'
        assert second['tags'] == ['key2m:value']
        assert second['message'] == 'fakeout'
        assert third['check'] == 'check.3'
        assert third['tags'] == sorted(['key:valuem:value2', 'key2:value2'])
        assert third['message'] == 'fakeoutm:|h:#5'
Example #21
    def test_service_check_tag_key_ends_with_m(self):
        stats = MetricsAggregator('myhost')
        stats.submit_packets('_sc|check.1|0|#keym:value')
        stats.submit_packets('_sc|check.2|0|#key2m:value|m:fakeout')
        stats.submit_packets(r'_sc|check.3|0|#key:valuem:value2,key2:value2|m:fakeoutm\:|h:#5')

        service_checks = self.sort_service_checks(stats.flush_service_checks())

        assert len(service_checks) == 3
        first, second, third = service_checks

        nt.assert_equal(first['check'], 'check.1')
        nt.assert_equal(first['tags'], ['keym:value'])
        #nt.assert_not_in('message', first)
        nt.assert_equal(second['check'], 'check.2')
        nt.assert_equal(second['tags'], ['key2m:value'])
        nt.assert_equal(second['message'], 'fakeout')
        nt.assert_equal(third['check'], 'check.3')
        nt.assert_equal(third['tags'], sorted(['key:valuem:value2', 'key2:value2']))
        nt.assert_equal(third['message'], 'fakeoutm:|h:#5')
Example #22
    def test_service_check_message(self):
        stats = MetricsAggregator('myhost')
        stats.submit_packets('_sc|check.1|0|m:testing')
        stats.submit_packets('_sc|check.2|0|m:First line\\nSecond line')
        stats.submit_packets(u'_sc|check.3|0|m:♬ †øU †øU ¥ºu T0µ ♪')
        stats.submit_packets(r'_sc|check.4|0|m:|t:|m\:|d:')

        service_checks = self.sort_service_checks(stats.flush_service_checks())

        assert len(service_checks) == 4
        first, second, third, fourth = service_checks

        nt.assert_equal(first['check'], 'check.1')
        nt.assert_equal(first['message'], 'testing')
        nt.assert_equal(second['check'], 'check.2')
        nt.assert_equal(second['message'], 'First line\nSecond line')
        nt.assert_equal(third['check'], 'check.3')
        nt.assert_equal(third['message'], u'♬ †øU †øU ¥ºu T0µ ♪')
        nt.assert_equal(fourth['check'], 'check.4')
        nt.assert_equal(fourth['message'], '|t:|m:|d:')
Example #23
    def test_service_check_message(self):
        stats = MetricsAggregator('myhost')
        stats.submit_packets('_sc|check.1|0|m:testing')
        stats.submit_packets('_sc|check.2|0|m:First line\\nSecond line')
        stats.submit_packets('_sc|check.3|0|m:♬ †øU †øU ¥ºu T0µ ♪')
        stats.submit_packets(r'_sc|check.4|0|m:|t:|m\:|d:')

        service_checks = self.sort_service_checks(stats.flush_service_checks())

        assert len(service_checks) == 4
        first, second, third, fourth = service_checks

        assert first['check'] == 'check.1'
        assert first['message'] == 'testing'
        assert second['check'] == 'check.2'
        assert second['message'] == 'First line\nSecond line'
        assert third['check'] == 'check.3'
        assert third['message'] == '♬ †øU †øU ¥ºu T0µ ♪'
        assert fourth['check'] == 'check.4'
        assert fourth['message'] == '|t:|m:|d:'
Example #24
    def test_packet_string_endings(self):
        stats = MetricsAggregator('myhost')

        stats.submit_packets('line_ending.generic:500|c')
        stats.submit_packets('line_ending.unix:400|c\n')
        stats.submit_packets('line_ending.windows:300|c\r\n')

        metrics = self.sort_metrics(stats.flush())

        assert len(metrics) == 3

        first, second, third = metrics
        nt.assert_equal(first['metric'], 'line_ending.generic')
        nt.assert_equal(first['points'][0][1], 500)

        nt.assert_equal(second['metric'], 'line_ending.unix')
        nt.assert_equal(second['points'][0][1], 400)

        nt.assert_equal(third['metric'], 'line_ending.windows')
        nt.assert_equal(third['points'][0][1], 300)
Example #25
    def test_sets(self):
        stats = MetricsAggregator('myhost')
        stats.submit_packets('my.set:10|s')
        stats.submit_packets('my.set:20|s')
        stats.submit_packets('my.set:20|s')
        stats.submit_packets('my.set:30|s')
        stats.submit_packets('my.set:30|s')
        stats.submit_packets('my.set:30|s')

        # Assert that it's treated normally.
        metrics = stats.flush()
        assert len(metrics) == 2
        m = metrics[0]
        assert m['metric'] == 'my.set'
        assert m['points'][0][1] == 3

        # Assert there are no more sets
        metrics = stats.flush()
        assert len(metrics) == 1
        assert metrics[0]['metric'] == 'datadog.agent.running'
Example #26
    def test_custom_single_percentile(self):
        configstr = '0.40'
        stats = MetricsAggregator(
            'myhost',
            histogram_percentiles=get_histogram_percentiles(configstr))

        self.assertEquals(stats.metric_config[Histogram]['percentiles'],
                          [0.40], stats.metric_config[Histogram])

        for i in xrange(20):
            stats.submit_packets('myhistogram:{0}|h'.format(i))

        metrics = stats.flush()

        self.assertEquals(len(metrics), 5, metrics)

        value_by_type = {}
        for k in metrics:
            value_by_type[k[0][len('myhistogram') + 1:]] = k[2]

        self.assertEquals(value_by_type['40percentile'], 7, value_by_type)
Example #27
def test_spurr(subprocess_patch):
    # defer import to test to avoid breaking get_subprocess_output
    # patching.
    from datadog_checks.lparstats import LPARStats

    hostname = 'foo'
    aggregator = MetricsAggregator(
        hostname,
        interval=1.0,
        histogram_aggregates=None,
        histogram_percentiles=None,
    )

    c = LPARStats("lparstats", {}, {}, aggregator)
    c.collect_spurr()
    metrics = c.aggregator.flush()[:-1]  # drop the trailing datadog.agent.running metric

    expected_metrics = [
        'system.lpar.spurr.user',
        'system.lpar.spurr.sys',
        'system.lpar.spurr.wait',
        'system.lpar.spurr.idle',
        'system.lpar.spurr.user.norm',
        'system.lpar.spurr.sys.norm',
        'system.lpar.spurr.wait.norm',
        'system.lpar.spurr.idle.norm',
        'system.lpar.spurr.user.pct',
        'system.lpar.spurr.sys.pct',
        'system.lpar.spurr.wait.pct',
        'system.lpar.spurr.idle.pct',
        'system.lpar.spurr.user.norm.pct',
        'system.lpar.spurr.sys.norm.pct',
        'system.lpar.spurr.wait.norm.pct',
        'system.lpar.spurr.idle.norm.pct',
    ]

    assert len(metrics) == len(expected_metrics)
    for metric in metrics:
        assert metric['metric'] in expected_metrics
Example #28
def init(config_path=None, use_watchdog=False, use_forwarder=False):
    """Configure the server and the reporting thread.
    """
    c = get_config(parse_args=False, cfg_path=config_path)
    log.debug("Configuration dogstatsd")

    port = c['dogstatsd_port']
    interval = int(c['dogstatsd_interval'])
    api_key = c['api_key']
    non_local_traffic = c['non_local_traffic']

    target = c['dd_url']
    if use_forwarder:
        target = c['dogstatsd_target']

    hostname = get_hostname(c)

    # Create the aggregator (the point of communication between the server
    # and reporting threads).
    assert 0 < interval

    aggregator = MetricsAggregator(hostname,
                                   interval,
                                   recent_point_threshold=c.get(
                                       'recent_point_threshold', None))

    # Start the reporting thread.
    reporter = Reporter(interval, aggregator, target, api_key, use_watchdog)

    # Start the server on an IPv4 stack
    # Default to loopback
    server_host = '127.0.0.1'
    # If specified, bind to all addresses
    if non_local_traffic:
        server_host = ''

    server = Server(aggregator, server_host, port)

    return reporter, server, c
Example #29
    def test_counter_normalization(self):
        stats = MetricsAggregator('myhost', interval=10)

        # Assert counters are normalized.
        stats.submit_packets('int:1|c')
        stats.submit_packets('int:4|c')
        stats.submit_packets('int:15|c')

        stats.submit_packets('float:5|c')

        metrics = self.sort_metrics(stats.flush()[:-1])
        assert len(metrics) == 2

        floatc, intc = metrics

        assert floatc['metric'] == 'float'
        assert floatc['points'][0][1] == 0.5
        assert floatc['host'] == 'myhost'

        assert intc['metric'] == 'int'
        assert intc['points'][0][1] == 2
        assert intc['host'] == 'myhost'
Example #30
    def test_counter_normalization(self):
        stats = MetricsAggregator('myhost', interval=10)

        # Assert counters are normalized.
        stats.submit_packets('int:1|c')
        stats.submit_packets('int:4|c')
        stats.submit_packets('int:15|c')

        stats.submit_packets('float:5|c')

        metrics = self.sort_metrics(stats.flush())
        assert len(metrics) == 2

        floatc, intc = metrics

        nt.assert_equal(floatc['metric'], 'float')
        nt.assert_equal(floatc['points'][0][1], 0.5)
        nt.assert_equal(floatc['host'], 'myhost')

        nt.assert_equal(intc['metric'], 'int')
        nt.assert_equal(intc['points'][0][1], 2)
        nt.assert_equal(intc['host'], 'myhost')