def test_process_prevents_infinite_loops(self):
    """Cyclic aggregate definitions must not cause process() to recurse forever.

    Two aggregates are stubbed to point at each other (stat_1 -> stat_2 ->
    stat_1); each match is expected exactly once and the schema store exactly
    twice, so an infinite loop would blow the expectation counts.
    """
    config = Configuration()
    config._aggregates = mock()
    config._schemas = [mock()]

    # Each side of the cycle may be matched only once.
    expect(config._aggregates.match).args('stat_1').returns(['stat_2']).times(1)
    expect(config._aggregates.match).args('stat_2').returns(['stat_1']).times(1)
    # One store per distinct stat — not one per traversal of the cycle.
    expect(config._schemas[0].store).times(2)

    config.process('stat_1', 'value', 'now')
}, 'hour' : { 'step' : '1h', 'steps' : 24, }, } } } AGGREGATES = ( ('foo', 'foo.*'), ('foo', 'foo.*.*'), ('foo.<bar>', 'foo.<bar>.*') ) config = Configuration() for name,spec in SCHEMAS.iteritems(): config.load_schema( name, spec ) config.load_aggregate( AGGREGATES ) config._transforms['unique'] = lambda dct, duration: len(dct) config._macros['redis_keylen'] = { 'fetch' : lambda handle, key: handle.hlen(key), 'condense' : lambda data: sum(data.values()), 'process_row' : lambda data: data, 'join_rows' : lambda rows: sum(rows), 'collapse' : True, } # insert some test data, 2 hours in 30 second intervals for schema in config._schemas:
'minute': { 'step': 60, 'steps': 5, }, 'hour': { 'step': '1h', 'steps': 24, }, } } } AGGREGATES = (('foo', 'foo.*'), ('foo', 'foo.*.*'), ('foo.<bar>', 'foo.<bar>.*')) config = Configuration() for name, spec in SCHEMAS.iteritems(): config.load_schema(name, spec) config.load_aggregate(AGGREGATES) config._transforms['unique'] = lambda dct, duration: len(dct) config._macros['redis_keylen'] = { 'fetch': lambda handle, key: handle.hlen(key), 'condense': lambda data: sum(data.values()), 'process_row': lambda data: data, 'join_rows': lambda rows: sum(rows), 'collapse': True, } # insert some test data, 2 hours in 30 second intervals for schema in config._schemas: