def testSaveCountersAggregation(self, internal_window):
  """Same-named counters of the same type are merged into a single tally."""
  internal_window.return_value = 0
  render_a = stats.Counter('widget/render')
  render_b = stats.Counter('widget/render')
  latency_a = stats.AverageCounter('widget/render/latency')
  latency_b = stats.AverageCounter('widget/render/latency')
  all_counters = [render_a, render_b, latency_a, latency_b]

  def make_counters():
    return [
        stats.Counter('widget/render'),
        stats.AverageCounter('widget/render/latency')
    ]

  # log_counters should aggregate counters of the same type.
  # This is to make sure that different code paths in a request can
  # independently instantiate counter objects of the same name, and then the
  # intra-request counts will be aggregated together for the task data.
  for _ in range(2):
    render_a.increment()
  render_b.increment()
  latency_a.offset(50)
  latency_b.offset(100)
  _, processor = _log_counters(all_counters, make_counters)

  expected = {
      0: {
          'window': 0,
          'counters': {
              'widget/render': 3,
              'widget/render/latency': (75.0, 2),
          }
      }
  }
  self.assertEqual(expected, processor.serialize())
def testCounterBuffer(self, internal_window):
  """Counter values buffer locally and accumulate across repeated saves."""
  internal_window.return_value = 0
  page_counter = stats.Counter('page/view')
  widget_counter = stats.Counter('widget/render')
  # Buffered until save.
  page_counter.increment()
  page_counter.increment()

  def make_counters():
    return [stats.Counter('page/view'), stats.Counter('widget/render')]

  # Push local stats to a task.
  processors, processor = _log_counters([page_counter], make_counters)
  expected = {
      0: {
          'counters': {
              'page/view': 2,
          },
          'window': 0,
      }
  }
  self.assertEqual(expected, processor.serialize())

  widget_counter.offset(15)
  widget_counter.offset(-5)
  _log_counters([page_counter, widget_counter], make_counters,
                processors=processors)
  expected = {
      0: {
          'counters': {
              'page/view': 4,  # Increased since it is saved twice.
              'widget/render': 10,
          },
          'window': 0,
      }
  }
  self.assertEqual(expected, processor.serialize())

  # finalize() should be idempotent.
  for _ in range(2):
    self.assertEqual(10, widget_counter.finalize())
def testMakeLogPath(self):
  """_make_log_path honors defaults, custom date_format, and data_filename."""
  test_date = datetime.date(2013, 5, 1)

  # (Counter kwargs, expected log path).
  cases = [
      # Default date format and filename.
      ({},
       '/_titan/stats/counters/2013/05/01/test/counter/data-60s.json'),
      # Custom formatted date.
      ({'date_format': '%Y/%m'},
       '/_titan/stats/counters/2013/05/test/counter/data-60s.json'),
      # Custom filename.
      ({'data_filename': 'my/data.json'},
       '/_titan/stats/counters/2013/05/01/test/counter/my/data.json'),
  ]
  for kwargs, expected_path in cases:
    counter = stats.Counter('test/counter', **kwargs)
    self.assertEqual(expected_path, stats._make_log_path(test_date, counter))
def testManualCounterTimestamp(self, internal_window):
  """Manually-assigned timestamps place counters into their own windows."""
  def make_counters():
    return [stats.Counter('widget/render')]

  normal_counter = stats.Counter('widget/render')
  normal_counter.offset(20)

  internal_window.return_value = 10000
  old_counter = stats.Counter('widget/render')
  old_counter.offset(10)
  old_counter.timestamp = 3600.0
  oldest_counter = stats.Counter('widget/render')
  oldest_counter.offset(5)
  oldest_counter.timestamp = 0

  _, processor = _log_counters(
      [normal_counter, old_counter, oldest_counter], make_counters)
  expected = {
      0: {
          'window': 0,
          'counters': {
              'widget/render': 5,
          }
      },
      3600: {
          'window': 3600,
          'counters': {
              'widget/render': 10,
          }
      },
      10000: {
          'window': 10000,
          'counters': {
              'widget/render': 20,
          }
      },
  }
  self.assertEqual(expected, processor.serialize())
def counters_func():
  """Return fresh counter objects for the two tracked stats."""
  names = ('page/view', 'widget/render')
  return [stats.Counter(name) for name in names]
def counters_func():
  """Return a fresh counter object for the single tracked stat."""
  widget_counter = stats.Counter('widget/render')
  return [widget_counter]
def counters_func():
  """Return fresh counter objects: a plain count plus a latency average."""
  counters = [stats.Counter('widget/render')]
  counters.append(stats.AverageCounter('widget/render/latency'))
  return counters
def testAggregatorCounterUnusedCounterInWindow(self, internal_window):
  """Counters absent from a window are omitted, not stored as empty.

  Consistency fix: uses the _finalize_processor helper (as the sibling
  testAggregatorAndCountersService does) instead of hand-rolling the
  batch-processor process/finalize steps.
  """
  # Setup some data.
  def counters_func():
    return [stats.Counter('page/view'), stats.Counter('widget/render')]
  page_counter = stats.Counter('page/view')
  widget_counter = stats.Counter('widget/render')
  internal_window.return_value = 0

  # Log an initial set into the 3600 window.
  page_counter.offset(10)
  processors, processor = _log_counters([page_counter], counters_func)

  # Save different counter in a different window:
  widget_counter.increment()
  _log_counters([widget_counter], counters_func,
                processors=processors, timestamp=3600)

  # Save both counters in a later window:
  page_counter.increment()
  widget_counter.increment()
  _log_counters([page_counter, widget_counter], counters_func,
                processors=processors, timestamp=7200)

  expected = {
      0: {
          'window': 0,
          'counters': {
              'page/view': 10,
          }
      },
      3600: {
          'window': 3600,
          'counters': {
              'widget/render': 1,
          }
      },
      7200: {
          'window': 7200,
          'counters': {
              'page/view': 11,
              'widget/render': 2,
          }
      },
  }
  self.assertEqual(expected, processor.serialize())

  # Save the data as if from a batch processor.
  _finalize_processor(processor)

  # Get the data from the CountersService.
  counters_service = stats.CountersService()
  # The counters should not exist in an empty state between windows.
  expected = {
      'page/view': [(0, 10), (7200, 11)],
      'widget/render': [(3600, 1), (7200, 2)],
  }
  start_date = datetime.datetime.utcfromtimestamp(0)
  counter_data = counters_service.get_counter_data(
      ['page/view', 'widget/render'], start_date=start_date)
  self.assertEqual(expected, counter_data)
def testAggregatorAndCountersService(self, internal_window):
  """Aggregated windows round-trip through finalize and CountersService."""
  # Setup some data.
  def make_counters():
    return [stats.Counter('page/view'), stats.Counter('widget/render')]
  page_counter = stats.Counter('page/view')
  widget_counter = stats.Counter('widget/render')
  internal_window.return_value = 0

  # Log an initial set into the 3600 window.
  page_counter.offset(10)
  widget_counter.offset(20)
  processors, processor = _log_counters(
      [page_counter, widget_counter], make_counters, timestamp=3600)

  # Save another set of data, an hour later:
  page_counter.increment()
  widget_counter.increment()
  _log_counters([page_counter, widget_counter], make_counters,
                processors=processors, timestamp=7200)

  # Save another set of data, a day later:
  page_counter.increment()
  widget_counter.increment()
  _log_counters([page_counter, widget_counter], make_counters,
                processors=processors, timestamp=93600)

  expected = {
      3600: {
          'window': 3600,
          'counters': {
              'page/view': 10,
              'widget/render': 20,
          }
      },
      7200: {
          'window': 7200,
          'counters': {
              'page/view': 11,
              'widget/render': 21,
          }
      },
      93600: {
          'window': 93600,
          'counters': {
              'page/view': 12,
              'widget/render': 22,
          }
      }
  }
  self.assertEqual(expected, processor.serialize())

  # Save again, to make sure that duplicate data is collapsed when saved.
  _log_counters([page_counter, widget_counter], make_counters,
                processors=processors, timestamp=93600)
  self.assertEqual(expected, processor.serialize())

  # Save the data as if from a batch processor.
  _finalize_processor(processor)

  # Get the data from the CountersService.
  counters_service = stats.CountersService()
  expected = {
      'page/view': [(3600, 10), (7200, 11), (93600, 12)],
      'widget/render': [(3600, 20), (7200, 21), (93600, 22)],
  }
  start_date = datetime.datetime.utcfromtimestamp(0)
  counter_data = counters_service.get_counter_data(
      ['page/view', 'widget/render'], start_date=start_date)
  self.assertEqual(expected, counter_data)