def test_parse_count_and_rate():
    """statsd count ("stats_counts.*") and rate ("stats.*") keys yield proto2
    metrics with the matching target_type and unit."""
    s_metrics = structured_metrics.StructuredMetrics()
    s_metrics.load_plugins()
    tags_base = {
        'n1': 'foo',
        'n2': 'req',
        'plugin': 'catchall_statsd',
        'source': 'statsd',
    }

    # updates=None instead of updates={}: avoids the shared-mutable-default
    # pitfall; an empty dict is substituted fresh on every call.
    def get_proto2(key, target_type, unit, updates=None):
        return testhelpers.get_proto2(key, tags_base, target_type, unit,
                                      updates if updates is not None else {})

    key = "stats.foo.req"
    expected = get_proto2(key, 'rate', 'unknown/s')
    real = s_metrics.list_metrics([key])
    assert len(real) == 1
    assert expected == real.values()[0]

    key = "stats_counts.foo.req"
    expected = get_proto2(key, 'count', 'unknown')
    real = s_metrics.list_metrics([key])
    assert len(real) == 1
    assert expected == real.values()[0]
def test_native_proto2_disabled():
    """Without explicit configuration, native proto2 keys are ignored."""
    s_metrics = structured_metrics.StructuredMetrics()
    s_metrics.load_plugins()
    # by default, the plugin ignores them
    metrics = s_metrics.list_metrics(["foo.bar=blah.baz"])
    assert len(metrics) == 0
def test_native_proto2_enabled():
    """With process_native_proto2 set on the config, a native proto2 key is
    parsed into its embedded tags."""
    # a class statement instead of the three-argument type() call
    class DummyCfg(object):
        pass
    DummyCfg.process_native_proto2 = True

    s_metrics = structured_metrics.StructuredMetrics(DummyCfg)
    s_metrics.load_plugins()
    key = "foo.bar=blah.baz.target_type=rate.unit=MiB/d"
    real = s_metrics.list_metrics([key])
    assert len(real) == 1
    expected = {
        'id': key,
        'tags': {
            'n1': 'foo',
            'bar': 'blah',
            'n3': 'baz',
            'target_type': 'rate',
            'unit': 'MiB/d'
        }
    }
    assert real.values()[0] == expected
def test_simple():
    """A plain dotted key with no recognizable structure falls through to the
    catchall plugin, which tags the nodes as n1/n2."""
    s_metrics = structured_metrics.StructuredMetrics()
    s_metrics.load_plugins()
    tags_base = {
        'plugin': 'catchall',
        'source': 'unknown',
    }

    # updates=None instead of updates={}: avoids sharing one mutable default
    # dict across calls.
    def get_proto2(key, target_type, unit, updates=None):
        return testhelpers.get_proto2(key, tags_base, target_type, unit,
                                      updates if updates is not None else {})

    key = "foo.bar"
    expected = get_proto2(key, 'unknown', 'unknown', {
        'n1': 'foo',
        'n2': 'bar'
    })
    real = s_metrics.list_metrics([key])
    assert len(real) == 1
    assert expected == real.values()[0]
os.chdir(os.path.dirname(__file__))

# Log to console always, and additionally to a file when configured.
logger = logging.getLogger('update_metrics')
logger.setLevel(logging.DEBUG)
chandler = logging.StreamHandler()
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
chandler.setFormatter(formatter)
logger.addHandler(chandler)
if config.log_file:
    fhandler = logging.FileHandler(config.log_file)
    fhandler.setFormatter(formatter)
    logger.addHandler(fhandler)

try:
    backend = Backend(config, logger)
    s_metrics = structured_metrics.StructuredMetrics(config, logger)
    errors = s_metrics.load_plugins()
    if len(errors) > 0:
        # logger.warning, not the deprecated logger.warn alias; and route the
        # individual errors through the logger too (instead of bare print) so
        # they also reach the configured log file.
        logger.warning('errors encountered while loading plugins:')
        for e in errors:
            logger.warning('\t%s', e)
    logger.info("fetching/saving metrics from graphite...")
    backend.download_metrics_json()
    logger.info("generating structured metrics data...")
    backend.update_data(s_metrics)
    logger.info("success!")
except Exception as e:  # "except X as e" works on py2.6+ and py3, unlike "except X, e"
    logger.error("sorry, something went wrong: %s", e)
    from traceback import print_exc
    print_exc()
    sys.exit(2)
def test_load():
    """Plugin loading must complete without reporting any errors."""
    errors = structured_metrics.StructuredMetrics().load_plugins()
    assert len(errors) == 0
import os import structured_metrics app_dir = os.path.dirname(__file__) if app_dir: os.chdir(app_dir) import config config = make_config(config) import preferences if not config.alerting: print "alerting disabled in config" os.exit(0) s_metrics = structured_metrics.StructuredMetrics(config) db = Db(config.alerting_db) rules = db.get_rules() output = EmailOutput(config) def submit_maybe(result): if result.to_report(): output.submit(result) db.save_notification(result) print "sent notification!" else: print "no notification"
import config
from backend import Backend, MetricsError
import structured_metrics

os.chdir(os.path.dirname(__file__))

# Log to console always, and additionally to a file when configured.
logger = logging.getLogger('update_metrics')
logger.setLevel(logging.DEBUG)
chandler = logging.StreamHandler()
formatter = logging.Formatter(
    '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
chandler.setFormatter(formatter)
logger.addHandler(chandler)
if config.log_file:
    fhandler = logging.FileHandler(config.log_file)
    fhandler.setFormatter(formatter)
    logger.addHandler(fhandler)

try:
    backend = Backend(config)
    s_metrics = structured_metrics.StructuredMetrics()
    # report plugin-load failures instead of silently discarding the returned
    # error list (consistent with the other update_metrics entry point)
    errors = s_metrics.load_plugins()
    if errors:
        logger.warning('errors encountered while loading plugins:')
        for e in errors:
            logger.warning('\t%s', e)
    logger.info("fetching/saving metrics from graphite...")
    backend.download_metrics_json()
    logger.info("generating structured metrics data...")
    backend.update_data(s_metrics)
    logger.info("success!")
except Exception as e:  # "except X as e" works on py2.6+ and py3, unlike "except X, e"
    logger.error("sorry, something went wrong: %s", e)
    sys.exit(2)
def test_parse_timers():
    """Every statsd timer sub-key (count, rate, the gauge aggregates, and the
    histogram bins) maps to the expected proto2 target_type, unit and tags."""
    s_metrics = structured_metrics.StructuredMetrics()
    s_metrics.load_plugins()
    tags_base = {
        'n1': 'memcached_default_get',
        'plugin': 'catchall_statsd',
        'source': 'statsd',
    }

    # One assertion helper instead of the same four lines repeated for each of
    # the 14 keys; updates=None avoids a shared mutable default dict.
    def check(key, target_type, unit, updates=None):
        expected = testhelpers.get_proto2(key, tags_base, target_type, unit,
                                          updates if updates is not None else {})
        real = s_metrics.list_metrics([key])
        assert len(real) == 1
        assert expected == real.values()[0]

    check("stats.timers.memcached_default_get.count", 'count', 'Pckt')
    check("stats.timers.memcached_default_get.count_ps", 'rate', 'Pckt/s')

    # all aggregate suffixes become gauges in ms, tagged with their type
    for gauge_type in ('lower', 'mean', 'mean_90', 'median', 'std',
                       'sum', 'sum_90', 'upper', 'upper_90'):
        check("stats.timers.memcached_default_get.%s" % gauge_type,
              'gauge', 'ms', {'type': gauge_type})

    # histogram bins: the key encodes the upper bound with '.' mangled to '_'
    for bin_upper in ('0.01', '5', 'inf'):
        check("stats.timers.memcached_default_get.histogram.bin_%s"
              % bin_upper.replace('.', '_'),
              'gauge', 'freq_abs', {'bin_upper': bin_upper, 'orig_unit': 'ms'})