def __init__(self, conn, expires=86400, capacity=5000, prefix="lru",
             tag=None, arg_index=None, kwarg_name=None, slice_obj=slice(None)):
    """
    Initialize the Redis-backed LRU cache wrapper.

    conn: Redis connection object.
    expires: default key expiration time, in seconds.
    capacity: approximate maximum size of the caching set.
    prefix: prefix applied to every key in the cache.
    tag: optional (formattable) string used to tag keys for purging.
    arg_index / kwarg_name: choose one; the tag string will be formatted
        with that positional or keyword argument of the cached callable.
    slice_obj: slice applied to the arguments to cut out unpicklable values.
    """
    self.conn = conn
    self.expires = expires
    self.capacity = capacity
    self.prefix = prefix
    self.tag = tag
    self.arg_index = arg_index
    self.kwarg_name = kwarg_name
    self.slice = slice_obj
    # Resolve the process-wide statsd client once, at construction time.
    self.statsd = statsd_client()
def setUp(self):
    """Install a fake statsd client for the duration of the test."""
    from perfmetrics import set_statsd_client
    from perfmetrics import statsd_client
    from perfmetrics.testing import FakeStatsDClient

    # Remember whatever client was installed so tearDown can restore it,
    # then swap in a fake that records statements instead of sending UDP.
    self.__orig_client = statsd_client()
    self.stat_client = FakeStatsDClient()
    set_statsd_client(self.stat_client)
def setUp(self):
    """Install a fake statsd client and force full metric sampling."""
    from perfmetrics import set_statsd_client
    from perfmetrics import statsd_client
    from perfmetrics.testing import FakeStatsDClient
    from .. import vote

    # Preserve the current global client and sample rate for tearDown.
    self.__orig_client = statsd_client()
    self.__orig_sample_rate = vote.METRIC_SAMPLE_RATE
    # Sample every metric so assertions are deterministic.
    vote.METRIC_SAMPLE_RATE = 1
    self.stat_client = FakeStatsDClient()
    set_statsd_client(self.stat_client)
def setUp(self):
    """Wire up a coordinator with a mock viewer and a fake statsd client."""
    from perfmetrics import set_statsd_client
    from perfmetrics import statsd_client
    from perfmetrics.testing import FakeStatsDClient

    self.coord = self._makeOne()
    self.viewer = MockViewer()
    self.coord.register(self.viewer)

    # Poll bookkeeping consumed by the stubbed poller callbacks below.
    self.polled_tid = 0
    self.polled_changes = None
    poller = self.viewer.adapter.poller
    poller.poll_invalidations = self.poll_invalidations
    poller.get_current_tid = self.get_current_tid

    # Install a fake statsd client, keeping the original for restoration.
    self.stat_client = FakeStatsDClient()
    self.__orig_client = statsd_client()
    set_statsd_client(self.stat_client)
def stat_timing(self, stat, value, rate=1):
    """
    Record a timing value.

    For compatibility with the default settings of ``perfmetrics``,
    the stat name should end in ``.t``.

    The *value* should be a floating point difference of seconds
    (e.g. ``time.time() - time.time()``); it is converted to an
    integer number of milliseconds, again for consistency with
    ``perfmetrics``.
    """
    client = statsd_client()
    if client is None:
        return
    # Scale from float seconds to integer milliseconds.
    millis = int(value * 1000.0)
    client.timing(stat, millis, rate, self._statsd_buf)
def __call__(self, environ, start_response):
    """Dispatcher to processBefore, processAfter, no-op, etc."""
    req = Request(environ)
    # Mark the start time
    start = time.time()
    # Generate the response. If this is a GET request and text/html,
    # print elapsed
    resp = req.get_response(self.app)
    if req.method == "GET" and resp.content_type == "text/html":
        # elapsed = str(1 / (time.time() - start))[0:5]
        elapsed_t = time.time() - start
        client = statsd_client()
        if client is not None:
            # perfmetrics timing values are expressed in milliseconds.
            client.timing("karl.html_request.duration", elapsed_t * 1000.0)
        # Truncate to at most 5 characters for display (e.g. "0.123").
        elapsed = str(elapsed_t)[0:5]
        first_result = resp.body
        before = 'id="portal-copyright">'
        scoreboard = """ <style> .timeit {font-size: 0.7em; color:white} .timeit:hover {color: gray} </style> <div class="timeit">%s - Elapsed: %s sec</div> """
        # Inject the timing scoreboard right after the copyright marker;
        # only the first occurrence is replaced.
        after = before + scoreboard % (self.hostname, elapsed)
        body = first_result.replace(before, after, 1)
        # Python 2: a unicode body must go through resp.unicode_body with
        # an explicit charset; bytes go through resp.body.
        if isinstance(body, unicode):
            resp.charset = "UTF-8"
            resp.unicode_body = body
        else:
            resp.body = body
    return resp(environ, start_response)
def _find_changes_for_viewer(viewer, object_index):
    """
    Given a freshly polled *object_index*, and the *viewer* that polled
    for it, build a changes iterator.

    Call this **before** updating the viewer's MVCC state, so that we
    know how far back we need to build the changes.

    Does not need to hold the lock, except that the index cannot be
    vacuumed until this process is complete (since we may need that for
    building changes).

    Returns ``None`` when the viewer's cache must be fully invalidated,
    otherwise an iterator of (oid, tid) changes.
    """
    if viewer.highest_visible_tid is None or viewer.detached:
        # Snarf. Old state, and we probably lost track of changes.
        # Whelp, it needs to invalidate all its cached objects (so
        # we must return None), but it can still use our index and
        # poll state going forward; we don't need to go backwards.
        logger.debug(
            "Invalidating all persistent objects for viewer %r (detached? %s)",
            viewer, viewer.detached)
        if viewer.detached:
            client = statsd_client()
            if client is not None:
                client.incr(
                    'relstorage.cache.mvcc.invalidate_all_detached',
                    1, 1)  # Always send, not a sample. Should be rare.
        return None

    # Somewhere in the index is a map with the highest visible tid
    # matching the last time this viewer polled. Everything from there
    # forward is a change that this viewer needs to see.
    # Note there could be no changes.
    changes = object_index.collect_changes_after(
        viewer.highest_visible_tid)
    return iteroiditems(changes)
def __call__(self, environ, start_response):
    """Dispatcher to processBefore, processAfter, no-op, etc."""
    req = Request(environ)
    # Mark the start time
    start = time.time()
    # Generate the response. If this is a GET request and text/html,
    # print elapsed
    resp = req.get_response(self.app)
    if req.method == "GET" and resp.content_type == "text/html":
        # elapsed = str(1 / (time.time() - start))[0:5]
        elapsed_t = time.time() - start
        client = statsd_client()
        if client is not None:
            # perfmetrics timing values are expressed in milliseconds.
            client.timing('karl.html_request.duration', elapsed_t * 1000.0)
        # Truncate to at most 5 characters for display (e.g. "0.123").
        elapsed = str(elapsed_t)[0:5]
        first_result = resp.body
        before = 'id="portal-copyright">'
        scoreboard = """ <style> .timeit {font-size: 0.7em; color:white} .timeit:hover {color: gray} </style> <div class="timeit">%s - Elapsed: %s sec</div> """
        # Inject the timing scoreboard right after the copyright marker;
        # only the first occurrence is replaced.
        after = before + scoreboard % (self.hostname, elapsed)
        body = first_result.replace(before, after, 1)
        # Python 2: a unicode body must go through resp.unicode_body with
        # an explicit charset; bytes go through resp.body.
        if isinstance(body, unicode):
            resp.charset = 'UTF-8'
            resp.unicode_body = body
        else:
            resp.body = body
    return resp(environ, start_response)
def dummy_app(environ, start_response):
    """Minimal WSGI stub: record the active statsd client, answer a fixed body."""
    from perfmetrics import statsd_client

    current = statsd_client()
    clients.append(current)
    return ['ok.']
def dummy_handler(request):
    """Minimal view stub: record the active statsd client, answer a fixed body."""
    from perfmetrics import statsd_client

    current = statsd_client()
    clients.append(current)
    return 'ok!'
def test_configured_with_other_client(self):
    """An object installed via set_statsd_client is returned verbatim."""
    from perfmetrics import set_statsd_client
    from perfmetrics import statsd_client

    sentinel = object()
    set_statsd_client(sentinel)
    self.assertIs(statsd_client(), sentinel)
def test_configured_with_uri(self):
    """A statsd:// URI is resolved into a real StatsdClient instance."""
    from perfmetrics import StatsdClient
    from perfmetrics import set_statsd_client
    from perfmetrics import statsd_client

    set_statsd_client('statsd://localhost:8125')
    self.assertIsInstance(statsd_client(), StatsdClient)
def test_unconfigured(self):
    """With no configuration there is no global statsd client."""
    from perfmetrics import statsd_client

    self.assertIsNone(statsd_client())
cache_redis = None else: cache_redis = redis.StrictRedis.from_url(conf.cache_redis_url, **redis_kwargs) # Note: slice object is to cut off the instance of Store that would be passed along package_tag_lru = RedisLru(cache_redis, expires=86400, tag="pkg~%s", arg_index=1, slice_obj=slice(1, None)) cache_by_pkg = package_tag_lru.decorator if conf.xmlrpc_redis_url is None: xmlrpc_redis = None else: xmlrpc_redis = redis.StrictRedis.from_url(conf.xmlrpc_redis_url, **redis_kwargs) STATSD_URI = "statsd://127.0.0.1:8125?prefix=%s" % (conf.database_name) set_statsd_client(STATSD_URI) statsd_reporter = statsd_client() def log_xmlrpc_request(remote_addr, user_agent, data): if conf.xmlrpc_request_log_file: try: with open(conf.xmlrpc_request_log_file, 'a') as f: params, method = xmlrpclib.loads(data) dogstatsd.increment('xmlrpc.request', tags=['method:{}'.format(method)]) record = json.dumps({ 'timestamp': datetime.datetime.utcnow().isoformat(), 'remote_addr': remote_addr, 'user_agent': user_agent, 'method': method, 'params': params, 'type': 'request', })
cache_redis = None else: cache_redis = redis.StrictRedis.from_url(conf.cache_redis_url, **redis_kwargs) # Note: slice object is to cut off the instance of Store that would be passed along package_tag_lru = RedisLru(cache_redis, expires=86400, tag="pkg~%s", arg_index=1, slice_obj=slice(1, None)) cache_by_pkg = package_tag_lru.decorator if conf.xmlrpc_redis_url is None: xmlrpc_redis = None else: xmlrpc_redis = redis.StrictRedis.from_url(conf.xmlrpc_redis_url, **redis_kwargs) STATSD_URI = "statsd://127.0.0.1:8125?prefix=%s" % (conf.database_name) set_statsd_client(STATSD_URI) statsd_reporter = statsd_client() def log_xmlrpc_request(remote_addr, user_agent, data): if conf.xmlrpc_request_log_file: try: with open(conf.xmlrpc_request_log_file, 'a') as f: params, method = xmlrpclib.loads(data) record = json.dumps({ 'timestamp': datetime.datetime.utcnow().isoformat(), 'remote_addr': remote_addr, 'user_agent': user_agent, 'method': method, 'params': params, }) f.write(record + '\n') except Exception:
def stat_count(self, stat, value, rate=1):
    """Increment *stat* by *value* on the configured statsd client, if any."""
    client = statsd_client()
    if client is None:
        return
    client.incr(stat, value, rate, self._statsd_buf)
def _statds_buf(self, _storage, buf, _force=None):
    """Flush the accumulated statsd buffer *buf*, if there is a client and data.

    NOTE(review): the name looks like a typo for ``_statsd_buf`` (cf. the
    ``self._statsd_buf`` attribute used by the stat_* helpers) — confirm
    against registration/caller sites before renaming, since callers may
    reference this method by name.
    """
    client = statsd_client()
    if client is None or not buf:
        return
    client.sendbuf(buf)