def timestat(stat, millis):
    if not _is_setup():
        log.info('STATSD_HOST or STATSD_PORT not set up')
        return
    client = Client(settings.STATSD_HOST, settings.STATSD_PORT)
    client.timing(_statname(stat), millis)
    log.info('timestat: %s %sm' % (_statname(stat), millis))
def decrstat(stat, count=1):
    if not _is_setup():
        log.info('STATSD_HOST or STATSD_PORT not set up')
        return
    client = Client(settings.STATSD_HOST, settings.STATSD_PORT)
    client.decrement(_statname(stat), count)
    log.info('decrstat: %s %s' % (_statname(stat), count))
def __init__(self, min_level=NOTSET, include_loggers=NOTSET, exclude_loggers=NOTSET,
             host=NOTSET, port=NOTSET, prefix=NOTSET, *args, **kwargs):
    super(GraphiteProcessor, self).__init__(*args, **kwargs)
    if min_level is NOTSET:
        min_level = settings.GRAPHITE_LEVEL
    if include_loggers is NOTSET:
        include_loggers = settings.GRAPHITE_INCLUDE_LOGGERS
    if exclude_loggers is NOTSET:
        exclude_loggers = settings.GRAPHITE_EXCLUDE_LOGGERS
    if host is NOTSET:
        host = settings.GRAPHITE_HOST
    if port is NOTSET:
        port = settings.GRAPHITE_PORT
    if prefix is NOTSET:
        prefix = settings.GRAPHITE_PREFIX
    self.min_level = min_level
    self.include_loggers = include_loggers
    self.exclude_loggers = exclude_loggers
    self.host = host
    self.port = port
    self.prefix = prefix
    self.client = Client(host=self.host, port=self.port)
def addstat(stat, n):
    if not _is_setup():
        log.info('STATSD_HOST or STATSD_PORT not set up')
        return
    client = Client(settings.STATSD_HOST, settings.STATSD_PORT)
    client.update_stats(_statname(stat), n)
    log.info('addstat: %s %s' % (_statname(stat), n))
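The three wrappers above lean on `_is_setup()` and `_statname()` helpers that are not shown. A minimal sketch of what they might look like, assuming an optional `STATSD_PREFIX` setting; both helper bodies and that setting name are assumptions rather than the original module:

def _is_setup():
    # Hypothetical: consider statsd configured only when both settings are present.
    return bool(getattr(settings, 'STATSD_HOST', None) and
                getattr(settings, 'STATSD_PORT', None))


def _statname(stat):
    # Hypothetical: namespace every stat under an optional STATSD_PREFIX setting.
    prefix = getattr(settings, 'STATSD_PREFIX', None)
    return '%s.%s' % (prefix, stat) if prefix else stat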
def setUp(self):
    self.patchers = []

    socket_patcher = mock.patch('pystatsd.statsd.socket.socket')
    self.mock_socket = socket_patcher.start()
    self.patchers.append(socket_patcher)

    self.client = Client()
    self.addr = (socket.gethostbyname(self.client.host), self.client.port)
def expand_url(orig_url):
    statz = Client(*settings.STATSD_SERVER) if settings.STATSD_SERVER else None
    try:
        url_obj = Url.objects.get(original=orig_url)
    except Url.DoesNotExist:
        pass
    else:
        if statz:
            statz.increment('fstv.checked_url.database')
        return url_obj.target

    if urlparse(orig_url).scheme not in ('http', 'https'):
        return None

    log.debug('Scanning url %s', orig_url)
    h = httplib2.Http(timeout=10)
    # We don't get a content-location header when httplib2 follows redirects
    # for a HEAD, so let's track it ourselves I guess.
    h.follow_redirects = False

    redirects = 5
    url = orig_url
    while True:
        try:
            resp, cont = h.request(url, method='HEAD',
                                   headers={'User-Agent': 'friendstream/1.0'})
        except httplib.InvalidURL:
            log.debug("Oops, %s isn't really an URL at all, skipping", url)
            return None
        # TODO: what does httplib2 raise when there's no Location header?
        # (does it raise anything when follow_redirects=False?)
        except (httplib2.ServerNotFoundError, httplib.BadStatusLine, socket.timeout, socket.error), exc:
            log.debug("Oops, %s for URL %s (use it for now): %s", type(exc).__name__, url, str(exc))
            return url

        if resp.status in (301, 302, 303, 307):
            location = resp['location']
            try:
                # Either the location is 7 bits, in which case this works fine...
                location.decode('ascii')
            except UnicodeDecodeError:
                # ...or it's erroneously UTF-8 and we need to %-escape the high bytes.
                location = quote(location, safe=';/?:@&=+$,%')
            url = urljoin(url, location)

            redirects -= 1
            if redirects <= 0:
                log.debug("Oops, hit redirect limit for %s (use it for now)", url)
                return url

            continue

        break
def test_client_create(self):
    host, port = ('example.com', 8888)
    client = Client(host=host, port=port, prefix='pystatsd.tests')
    self.assertEqual(client.host, host)
    self.assertEqual(client.port, port)
    self.assertEqual(client.prefix, 'pystatsd.tests')
    self.assertEqual(client.addr, (socket.gethostbyname(host), port))
def observe_gearman_queue(cfg, stop_event):
    acs = {}
    for s in cfg['gearman']:
        acs[s.split(':')[0]] = GearmanAdminClient([s])
    cs = Client(host=cfg['carbon']['CARBON_SERVER'],
                port=cfg['carbon']['CARBON_PORT'],
                prefix=cfg['carbon']['USER_SCOPE'])
    count = 0
    pid = os.getpid()
    while stop_event.is_set() is False:
        queue = 0
        try:
            for s in acs:
                try:
                    stat = acs[s].get_status()
                    for task in stat:
                        if task['task'] == 'execute_uc':
                            queue += task['queued']
                            cs.gauge('emulator.counts.gearman.%s' % s, task['queued'])
                except Exception:
                    print '= ERROR: Failed to get stats from %s' % s
                    cs.gauge('emulator.counts.gearman.%s' % s, 0)
            cs.gauge('emulator.counts.files.emulator', get_open_fds())
            if not count % 10:
                print '= (PID: %s [%s]) Gearman-Queue size: %s' % (pid, time.strftime('%H:%M:%S', time.localtime()), queue)
            count += 1
        except Exception:
            print traceback.format_exc()
        try:
            stop_event.wait(10.0)
        except KeyboardInterrupt:
            pass
    print '= Stopping queue observer ... OK'
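`observe_gearman_queue` reports `get_open_fds()` as a gauge, but that helper is not shown. One plausible Linux-only sketch, counting the process's open file descriptors through /proc (this is an assumption, not the original implementation):

import os


def get_open_fds():
    # Hypothetical helper: count the file descriptors currently open by this
    # process by listing /proc/self/fd (works on Linux only).
    return len(os.listdir('/proc/self/fd'))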
def backend_statsd(url, stats, prefix):
    from pystatsd import Client

    process_num = stats['Process_num']
    del stats['Process_num']
    server_name = socket.getfqdn().split('.')[0]
    prefix = '%s.%s.%s' % (prefix, server_name, int(process_num))
    logger.debug('Reporting to prefix: %s' % prefix)
    server, port = url.split(':')
    try:
        pystatsd_client = Client(host=server, port=port, prefix=prefix)
    except Exception, e:
        logger.error('Unable to connect to statsd backend %s: %s' % (url, e))
        raise
def create_stats_client():
    "Create the client which can be used for logging statistics"
    logger = logging.getLogger("pystatsd.client")
    logger.addHandler(logging.StreamHandler())
    try:
        stats_server = config.get("admin", {}).get("statsd_server", None)
        if stats_server:
            host, port = stats_server.rsplit(":", 1)
            return Client(host, port)
        else:
            return False
    except Exception:
        # TBD: Log stats error here
        return False
def poll_account(account_pk, limited=False):
    try:
        account = Account.objects.get(pk=account_pk)
    except Account.DoesNotExist:
        return
    if not account.authinfo:
        return
    if account.error:
        # Don't try to scan accounts that have errors until they're saved again.
        return
    if account.user is None:
        log.debug("Oops, account %r has no user?", account)
        return

    if limited:
        now = datetime.now()
        if account.user.last_login < now - timedelta(days=7):
            log.debug("Marking %s's %s stream stale (hasn't logged in for a week)",
                      account.user.username, account.service)
            account.stale = True
            account.save()
            return
        if account.last_updated > now - timedelta(minutes=14):
            log.debug("Skipping %s's %s stream (updated too soon)",
                      account.user.username, account.service)
            return

    if account.service == 'twitter.com':
        poll_twitter(account)
    elif account.service == 'facebook.com':
        poll_facebook(account)
    # TODO: vimeo, youtube accounts

    account.last_updated = datetime.now()
    account.save()

    if settings.STATSD_SERVER:
        statz = Client(*settings.STATSD_SERVER)
        statz.increment('fstv.account_updated')
def create_stats_client(cfg=config):
    "Create the client which can be used for logging statistics"
    logger = logging.getLogger("pystatsd.client")
    logger.addHandler(logging.StreamHandler())
    try:
        stats_server = cfg.get("admin", {}).get("statsd_server", None)
        if stats_server:
            host, port = stats_server.rsplit(":", 1)
            return Client(host, port)
        else:
            logger.critical("Couldn't find statsd_server section in config")
            return False
    except Exception as e:
        logger.critical("Couldn't create stats client - %s", e, exc_info=True)
        return False
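Either variant returns a pystatsd `Client` on success and `False` otherwise, so callers have to guard before emitting stats. A minimal usage sketch; the `requests.processed` key is an illustrative assumption:

stats = create_stats_client()
if stats:
    # Only emit metrics when a statsd_server was actually configured.
    stats.increment("requests.processed")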
def main():
    '''Main entry point for the meter_read CLI.'''
    args = rcfile('meter_read', docopt(__doc__, version=__version__))

    logging.basicConfig()
    log = logging.getLogger(__name__)
    if args['verbose']:
        log.setLevel(logging.DEBUG)

    sc = Client(args['addr'], args['port'])
    re_valid = re.compile(args['valid'])
    re_timer = re.compile(args['timer'])
    aliasses = json.loads(args.get('aliasses', '{}'))
    log.debug('found aliasses: %s' % aliasses)

    log.debug('start reading %s' % args['dev'])
    s = serial.Serial(port=args['dev'], baudrate=args['baud'])
    while True:
        line = s.readline()
        match = re_valid.match(line)
        if match:
            check, name, value = match.groups()
            name = aliasses.get(name, name)
            log.debug("{0} {1}".format(name, value))
            # send to statsd
            if re_timer.match(line):
                sc.timing(name, float(value))
            else:
                sc.gauge(name, float(value))
        else:
            log.debug('error %s' % unicode(line, errors='ignore'))
            sc.increment('error')
#!/usr/bin/env python

from pystatsd import Client, Server

sc = Client('rayners.org', 8125)

sc.timing('python_test.time', 500)
sc.increment('python_test.inc_int')
sc.decrement('python_test.decr_int')

srvr = Server(debug=True)
srvr.serve()
def connect_node_js():
    server = __config.get('carbon', 'carbon_server')
    port = __config.get('carbon', 'carbon_port')
    scope = __config.get('carbon', 'user_scope')
    return Client(host=server, port=port, prefix=scope)
        account.error = True
        account.save()
        return
    raise

for status in tl:
    for url_obj in status.urls:
        url = url_obj.expanded_url or url_obj.url
        url = expand_url(url)
        if not url:
            continue
        video = video_for_url(url)
        if not video:
            continue

        statz = Client(*settings.STATSD_SERVER) if settings.STATSD_SERVER else None
        try:
            us = UserStream.objects.get(user=account.user, video=video)
        except UserStream.DoesNotExist:
            log.debug("Adding video %s to %s's stream", url, account.user.username)

            poster, created = Account.objects.get_or_create(
                service='twitter.com',
                ident=status.user.id,
                defaults={
                    'display_name': status.user.name,
                    'avatar_url': status.user.profile_image_url,
                    'permalink_url': 'http://twitter.com/%s' % status.user.screen_name,
                })
            created_at = datetime.utcfromtimestamp(status.created_at_in_seconds)
            log.debug("Saving UserStream for user %r video %r poster %r posted %r",
                      account.user, video, poster, created_at)
            us = UserStream(
                user=account.user,
                video=video,
#!/usr/bin/env python

import time
from multiprocessing import Process

from pystatsd import Client, Server


def worker():
    srvr = Server(debug=True, flush_interval=500)
    srvr.serve()


p = Process(target=worker, daemon=False)
p.start()
time.sleep(1)

sc = Client('localhost', 8125)
sc.timing('python_test.time', 500)
sc.increment('python_test.inc_int')
sc.decrement('python_test.decr_int')
sc.gauge('python_test.gauge', 42)

time.sleep(2)
p.terminate()
class GraphiteProcessor(Plugin):
    title = 'Graphite'
    conf_key = 'graphite'
    project_conf_form = GraphiteConfigurationForm

    def __init__(self, min_level=NOTSET, include_loggers=NOTSET, exclude_loggers=NOTSET,
                 host=NOTSET, port=NOTSET, prefix=NOTSET, *args, **kwargs):
        super(GraphiteProcessor, self).__init__(*args, **kwargs)
        if min_level is NOTSET:
            min_level = settings.GRAPHITE_LEVEL
        if include_loggers is NOTSET:
            include_loggers = settings.GRAPHITE_INCLUDE_LOGGERS
        if exclude_loggers is NOTSET:
            exclude_loggers = settings.GRAPHITE_EXCLUDE_LOGGERS
        if host is NOTSET:
            host = settings.GRAPHITE_HOST
        if port is NOTSET:
            port = settings.GRAPHITE_PORT
        if prefix is NOTSET:
            prefix = settings.GRAPHITE_PREFIX
        self.min_level = min_level
        self.include_loggers = include_loggers
        self.exclude_loggers = exclude_loggers
        self.host = host
        self.port = port
        self.prefix = prefix
        self.client = Client(host=self.host, port=self.port)

    def record_event(self, group, event, fail_silently=True):
        project = group.project
        host = self.get_option('host', project) or self.host
        port = self.get_option('port', project) or self.port
        prefix = self.get_option('prefix', project) or self.prefix
        key = '.'.join([prefix, event.message_top])
        self.client.increment(key)

    def should_record(self, group, event):
        project = group.project
        host = self.get_option('host', project) or self.host
        if not host:
            return False
        port = self.get_option('port', project) or self.port
        if not port:
            return False
        prefix = self.get_option('prefix', project) or self.prefix
        if not prefix:
            return False
        min_level = self.get_option('min_level', project) or self.min_level
        if min_level is not None and int(group.level) < min_level:
            return False
        include_loggers = self.get_option('include_loggers', project) or self.include_loggers
        if include_loggers is not None and group.logger not in include_loggers:
            return False
        exclude_loggers = self.get_option('exclude_loggers', project) or self.exclude_loggers
        if exclude_loggers and group.logger in exclude_loggers:
            return False
        return True

    def post_process(self, group, event, is_new, is_sample, **kwargs):
        if not self.should_record(group, event):
            return
        self.record_event(group, event)
# coding=utf-8
from pystatsd import Client

sc = Client('example.org', 8125)

sc.timing('python_test.time', 500)
sc.increment('python_test.inc_int')
sc.decrement('python_test.decr_int')
sc.gauge('python_test.gauge', 42)
def main():
    s = Client('localhost', 8125)
    n = LinuxNetStats()
    for k, v in n.forStatsd(prefix='host.com.eeqj.net.').items():
        s.update_stats(k, v)
#!/usr/bin/env python

from pystatsd import Client, Server

sc = Client('localhost', 8125)

sc.timing('python_test.time', 500)
sc.increment('python_test.inc_int')
sc.decrement('python_test.decr_int')

srvr = Server(debug=True)
srvr.serve()
SERVER = config_get('monitor', 'carbon_server', raise_exception=False,
                    default='statsd-exporter-rucio-statsd-exporter')
PORT = config_get('monitor', 'carbon_port', raise_exception=False, default=8125)
SCOPE = config_get('monitor', 'user_scope', raise_exception=False, default='rucio')

print('Instantiating default client with %s %s %s' % (SERVER, PORT, SCOPE))
CLIENT = Client(host=SERVER, port=PORT, prefix=SCOPE)


def record_counter(counters, delta=1):
    """
    Log one or more counters by arbitrary amounts

    :param counters: The counter or a list of counters to be updated.
    :param delta: The increment for the counter, by default increment by 1.
    """
    CLIENT.update_stats(counters, delta)


def record_gauge(stat, value):
    """
    Log gauge information for a single stat
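`Client.update_stats` accepts either a single key or a list of keys, which is what lets `record_counter` log several counters in one call. A brief usage sketch; the counter names are illustrative, not taken from the original module:

# Increment a single counter by 1, then bump a pair of counters by 5 each.
record_counter('transfers.submitted')
record_counter(['transfers.submitted', 'transfers.queued'], delta=5)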
def record(millis):
    c = Client('hammer.wallrazer.com', 8125)
    print "recording...\n"
    c.increment('wallrazer.deploys')
    c.timing('wallrazer.deploys.time', millis)
class Metric(object):
    """
    The metrics client that communicates with graphite via local pystatsd.

    >>> from metrics import Metric
    >>> Metric.add("foo.bar.baz")
    >>> Metric.timing("foo.bar.millis", 123)
    """

    _client = Client()

    @classmethod
    def _add_namespace(cls, name):
        namespace = _config.get("namespace")
        return "%s.%s" % (namespace, name) if namespace else name

    @classmethod
    def add(cls, name, delta=1):
        """
        Updates a stats counter by arbitrary value (increments by one by default).

        >>> Metric.add("foo.bar.baz")      # increments by one
        >>> Metric.add("baz.bar.foo", 10)  # adds 10 to the stats counter

        @param name: the stats key
        @type name: str
        @param delta: the value to update
        @type delta: int
        """
        if not cls._client:
            return
        if not name:
            return
        if not delta:
            return
        cls._client.update_stats(cls._add_namespace(name), delta)

    @classmethod
    def timing(cls, name, time):
        """
        Submits time value for a given stats key.

        >>> Metric.timing("execution.time.baz", 123)

        @param name: the stats key
        @type name: str
        @param time: the time value to submit (in seconds)
        @type time: int or float
        """
        if not cls._client:
            return
        if not name:
            return
        if time:
            time = float(time)
        if not time:
            return
        millis = int(time * 1000 + .5)
        cls._client.timing(cls._add_namespace(name), millis)

    @classmethod
    def start_timing(cls, name):
        """
        Starts and returns a timing instance that tracks time for a given stats key.
        The stats will be updated once done() is invoked on the returned timing instance.

        >>> timer = Metric.start_timing("execution.time.baz")
        >>> # do stuff here...
        >>> timer.done()  # submits stats

        @param name: the stats key
        @type name: str
        @rtype: _Timing
        """
        return _Timing(name)
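`Metric.start_timing` returns a `_Timing` instance that is not shown here. A minimal sketch of how such a helper could work, assuming it measures wall-clock seconds and reports them back through `Metric.timing`; this is an assumption, not the original class:

import time


class _Timing(object):
    # Hypothetical implementation: record the start time on creation and
    # submit the elapsed seconds via Metric.timing() when done() is called.
    def __init__(self, name):
        self.name = name
        self.start = time.time()

    def done(self):
        Metric.timing(self.name, time.time() - self.start)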
class ClientBasicsTestCase(unittest.TestCase):
    """
    Tests the basic operations of the client
    """

    def setUp(self):
        self.patchers = []

        socket_patcher = mock.patch('pystatsd.statsd.socket.socket')
        self.mock_socket = socket_patcher.start()
        self.patchers.append(socket_patcher)

        self.client = Client()
        self.addr = (socket.gethostbyname(self.client.host), self.client.port)

    def test_client_create(self):
        host, port = ('example.com', 8888)
        client = Client(host=host, port=port, prefix='pystatsd.tests')
        self.assertEqual(client.host, host)
        self.assertEqual(client.port, port)
        self.assertEqual(client.prefix, 'pystatsd.tests')
        self.assertEqual(client.addr, (socket.gethostbyname(host), port))

    def test_basic_client_incr(self):
        stat = 'pystatsd.unittests.test_basic_client_incr'
        stat_str = stat + ':1|c'
        self.client.increment(stat)
        # thanks tos9 in #python for 'splaining the return_value bit.
        self.mock_socket.return_value.sendto.assert_called_with(
            bytes(stat_str, 'utf-8'), self.addr)

    def test_basic_client_decr(self):
        stat = 'pystatsd.unittests.test_basic_client_decr'
        stat_str = stat + ':-1|c'
        self.client.decrement(stat)
        # thanks tos9 in #python for 'splaining the return_value bit.
        self.mock_socket.return_value.sendto.assert_called_with(
            bytes(stat_str, 'utf-8'), self.addr)

    def test_basic_client_update_stats(self):
        stat = 'pystatsd.unittests.test_basic_client_update_stats'
        stat_str = stat + ':5|c'
        self.client.update_stats(stat, 5)
        # thanks tos9 in #python for 'splaining the return_value bit.
        self.mock_socket.return_value.sendto.assert_called_with(
            bytes(stat_str, 'utf-8'), self.addr)

    def test_basic_client_update_stats_multi(self):
        stats = [
            'pystatsd.unittests.test_basic_client_update_stats',
            'pystatsd.unittests.test_basic_client_update_stats_multi'
        ]
        data = dict((stat, "%s|c" % '5') for stat in stats)
        self.client.update_stats(stats, 5)
        for stat, value in data.items():
            stat_str = stat + value
            # thanks tos9 in #python for 'splaining the return_value bit.
            self.mock_socket.return_value.sendto.assert_call_any(
                bytes(stat_str, 'utf-8'), self.addr)

    def test_basic_client_timing(self):
        stat = 'pystatsd.unittests.test_basic_client_timing.time'
        stat_str = stat + ':5.000000|ms'
        self.client.timing(stat, 5)
        # thanks tos9 in #python for 'splaining the return_value bit.
        self.mock_socket.return_value.sendto.assert_called_with(
            bytes(stat_str, 'utf-8'), self.addr)

    def test_basic_client_timing_since(self):
        ts = (1971, 6, 29, 4, 13, 0, 0, 0, -1)
        now = time.mktime(ts)

        # add 5 seconds
        ts = (1971, 6, 29, 4, 13, 5, 0, 0, -1)
        then = time.mktime(ts)

        mock_time_patcher = mock.patch('time.time', return_value=now)
        mock_time_patcher.start()

        stat = 'pystatsd.unittests.test_basic_client_timing_since.time'
        stat_str = stat + ':-5000000.000000|ms'
        self.client.timing_since(stat, then)
        # thanks tos9 in #python for 'splaining the return_value bit.
        self.mock_socket.return_value.sendto.assert_called_with(
            bytes(stat_str, 'utf-8'), self.addr)

        mock_time_patcher.stop()

    def tearDown(self):
        for patcher in self.patchers:
            patcher.stop()
#!/usr/bin/env python from pystatsd import Client, Server sc = Client("localhost", 8125) sc.timing("python_test.time", 500) sc.increment("python_test.inc_int") sc.decrement("python_test.decr_int") sc.gauge("python_test.gauge", 42) srvr = Server(debug=True) srvr.serve()
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Authors:
# - Luis Rodrigues, <*****@*****.**>, 2013

from pystatsd import Client

from rucio.common.config import config_get

import time

server = config_get('monitor', 'carbon_server')
port = config_get('monitor', 'carbon_port')
scope = config_get('monitor', 'user_scope')

pystatsd_client = Client(host=server, port=port, prefix=scope)


def record_counter(counters, delta=1):
    """
    Log one or more counters by arbitrary amounts

    :param counters: The counter or a list of counters to be updated.
    :param delta: The increment for the counter, by default increment by 1.
    """
    pystatsd_client.update_stats(counters, delta)


def record_gauge(stat, value):
    """
    Log gauge information for a single stat
class ClientBasicsTestCase(unittest.TestCase):
    """
    Tests the basic operations of the client
    """

    def setUp(self):
        self.patchers = []

        socket_patcher = mock.patch('pystatsd.statsd.socket.socket')
        self.mock_socket = socket_patcher.start()
        self.patchers.append(socket_patcher)

        self.client = Client()
        self.addr = (socket.gethostbyname(self.client.host), self.client.port)

    def test_client_create(self):
        host, port = ('example.com', 8888)
        client = Client(
            host=host,
            port=port,
            prefix='pystatsd.tests')
        self.assertEqual(client.host, host)
        self.assertEqual(client.port, port)
        self.assertEqual(client.prefix, 'pystatsd.tests')
        self.assertEqual(client.addr, (socket.gethostbyname(host), port))

    def test_basic_client_incr(self):
        stat = 'pystatsd.unittests.test_basic_client_incr'
        stat_str = stat + ':1|c'
        self.client.increment(stat)
        # thanks tos9 in #python for 'splaining the return_value bit.
        self.mock_socket.return_value.sendto.assert_called_with(
            bytes(stat_str, 'utf-8'), self.addr)

    def test_basic_client_decr(self):
        stat = 'pystatsd.unittests.test_basic_client_decr'
        stat_str = stat + ':-1|c'
        self.client.decrement(stat)
        # thanks tos9 in #python for 'splaining the return_value bit.
        self.mock_socket.return_value.sendto.assert_called_with(
            bytes(stat_str, 'utf-8'), self.addr)

    def test_basic_client_update_stats(self):
        stat = 'pystatsd.unittests.test_basic_client_update_stats'
        stat_str = stat + ':5|c'
        self.client.update_stats(stat, 5)
        # thanks tos9 in #python for 'splaining the return_value bit.
        self.mock_socket.return_value.sendto.assert_called_with(
            bytes(stat_str, 'utf-8'), self.addr)

    def test_basic_client_update_stats_multi(self):
        stats = [
            'pystatsd.unittests.test_basic_client_update_stats',
            'pystatsd.unittests.test_basic_client_update_stats_multi'
        ]
        data = dict((stat, "%s|c" % '5') for stat in stats)
        self.client.update_stats(stats, 5)
        for stat, value in data.items():
            stat_str = stat + value
            # thanks tos9 in #python for 'splaining the return_value bit.
            self.mock_socket.return_value.sendto.assert_call_any(
                bytes(stat_str, 'utf-8'), self.addr)

    def test_basic_client_timing(self):
        stat = 'pystatsd.unittests.test_basic_client_timing.time'
        stat_str = stat + ':5.000000|ms'
        self.client.timing(stat, 5)
        # thanks tos9 in #python for 'splaining the return_value bit.
        self.mock_socket.return_value.sendto.assert_called_with(
            bytes(stat_str, 'utf-8'), self.addr)

    def test_basic_client_timing_since(self):
        ts = (1971, 6, 29, 4, 13, 0, 0, 0, -1)
        now = time.mktime(ts)

        # add 5 seconds
        ts = (1971, 6, 29, 4, 13, 5, 0, 0, -1)
        then = time.mktime(ts)

        mock_time_patcher = mock.patch('time.time', return_value=now)
        mock_time_patcher.start()

        stat = 'pystatsd.unittests.test_basic_client_timing_since.time'
        stat_str = stat + ':-5000000.000000|ms'
        self.client.timing_since(stat, then)
        # thanks tos9 in #python for 'splaining the return_value bit.
        self.mock_socket.return_value.sendto.assert_called_with(
            bytes(stat_str, 'utf-8'), self.addr)

        mock_time_patcher.stop()

    def tearDown(self):
        for patcher in self.patchers:
            patcher.stop()