def test_store(self):
    # Save settings
    old_cluster_servers = settings.CLUSTER_SERVERS
    old_remote_exclude_local = settings.REMOTE_EXCLUDE_LOCAL

    # Set test cluster servers
    settings.CLUSTER_SERVERS = ['127.0.0.1', '8.8.8.8']

    # Test REMOTE_EXCLUDE_LOCAL = False
    settings.REMOTE_EXCLUDE_LOCAL = False
    test_store = Store()
    remote_hosts = [remote_store.host for remote_store in test_store.remote_stores]
    self.assertTrue('127.0.0.1' in remote_hosts)
    self.assertTrue('8.8.8.8' in remote_hosts)

    # Test REMOTE_EXCLUDE_LOCAL = True
    settings.REMOTE_EXCLUDE_LOCAL = True
    test_store = Store()
    remote_hosts = [remote_store.host for remote_store in test_store.remote_stores]
    self.assertTrue('127.0.0.1' not in remote_hosts)
    self.assertTrue('8.8.8.8' in remote_hosts)

    # Restore original settings
    settings.CLUSTER_SERVERS = old_cluster_servers
    settings.REMOTE_EXCLUDE_LOCAL = old_remote_exclude_local
def test_find_all_failed(self):
    # all finds failed
    store = Store(finders=[TestFinder()])

    message = r'All requests failed for find <FindQuery: a from \* until \*>'
    with patch('graphite.storage.log.info') as log_info:
        with self.assertRaisesRegexp(Exception, message):
            list(store.find('a'))
        self.assertEqual(log_info.call_count, 1)
        self.assertRegexpMatches(
            log_info.call_args[0][0],
            r'Exception during find <FindQuery: a from \* until \*> after [-.e0-9]+s: TestFinder.find_nodes'
        )

    store = Store(finders=[TestFinder(), TestFinder()])

    with patch('graphite.storage.log.info') as log_info:
        with self.assertRaisesRegexp(Exception, message):
            list(store.find('a'))
        self.assertEqual(log_info.call_count, 2)
        self.assertRegexpMatches(
            log_info.call_args[0][0],
            r'Exception during find <FindQuery: a from \* until \*> after [-.e0-9]+s: TestFinder.find_nodes'
        )
def __init__(self, hostname=False, directory=DEFAULT_COAL_HOLE):
    if hostname:
        self.store = Store(remote_hosts=[hostname])
    elif directory:
        self.store = Store(directories=[directory])
    else:
        raise ValueError("hostname or directory required")
def test_fetch(self):
    disabled_finder = DisabledFinder()
    legacy_finder = LegacyFinder()
    test_finder = TestFinder()
    remote_finder = RemoteFinder()

    store = Store(
        finders=[disabled_finder, legacy_finder, test_finder, remote_finder],
        tagdb='graphite.tags.localdatabase.LocalDatabaseTagDB'
    )

    # tagdb is properly initialized
    self.assertIsInstance(store.tagdb, LocalDatabaseTagDB)

    # get all enabled finders
    finders = store.get_finders()
    self.assertEqual(list(finders), [legacy_finder, test_finder, remote_finder])

    # get only local finders
    finders = store.get_finders(local=True)
    self.assertEqual(list(finders), [legacy_finder, test_finder])

    # fetch with empty patterns
    result = store.fetch([], 1, 2, 3, {})
    self.assertEqual(result, [])
def test_fetch_all_failed(self):
    # all finds failed
    store = Store(finders=[TestFinder()])

    with patch('graphite.storage.log.debug') as log_debug:
        with self.assertRaisesRegexp(Exception, r'All fetches failed for \[\'a\'\]'):
            list(store.fetch(['a'], 1, 2, 3, {}))
        self.assertEqual(log_debug.call_count, 1)
        self.assertRegexpMatches(
            log_debug.call_args[0][0],
            r'Fetch for \[\'a\'\] failed after [-.e0-9]+s: TestFinder.find_nodes'
        )
def test_find_all_failed(self):
    # all finds failed
    store = Store(finders=[TestFinder()])

    with patch('graphite.storage.log.debug') as log_debug:
        with self.assertRaisesRegexp(Exception, r'All finds failed for <FindQuery: a from \* until \*>'):
            list(store.find('a'))
        self.assertEqual(log_debug.call_count, 1)
        self.assertRegexpMatches(
            log_debug.call_args[0][0],
            r'Find for <FindQuery: a from \* until \*> failed after [-.e0-9]+s: TestFinder.find_nodes'
        )
def test_custom_finder(self):
    store = Store(finders=[get_finder("tests.test_finders.DummyFinder")])
    nodes = list(store.find("foo"))
    self.assertEqual(len(nodes), 1)
    self.assertEqual(nodes[0].path, "foo")

    nodes = list(store.find("bar.*"))
    self.assertEqual(len(nodes), 10)
    node = nodes[0]
    self.assertEqual(node.path.split(".")[0], "bar")

    time_info, series = node.fetch(100, 200)
    self.assertEqual(time_info, (100, 200, 10))
    self.assertEqual(len(series), 10)
def test_find_pool_timeout(self):
    # pool timeout
    store = Store(finders=[RemoteFinder()])

    def mock_pool_exec(pool, jobs, timeout):
        raise PoolTimeoutError()

    message = r'Timed out after [-.e0-9]+s for find <FindQuery: a from \* until \*>'
    with patch('graphite.storage.pool_exec', mock_pool_exec):
        with patch('graphite.storage.log.info') as log_info:
            with self.assertRaisesRegexp(Exception, message):
                list(store.find('a'))
            self.assertEqual(log_info.call_count, 1)
            self.assertRegexpMatches(log_info.call_args[0][0], message)
def test_get_index_pool_timeout(self):
    # pool timeout
    store = Store(finders=[RemoteFinder()])

    def mock_pool_exec(pool, jobs, timeout):
        raise PoolTimeoutError()

    with patch('graphite.storage.pool_exec', mock_pool_exec):
        with patch('graphite.storage.log.info') as log_info:
            with self.assertRaisesRegexp(Exception, 'Timed out after .*'):
                store.get_index()
            self.assertEqual(log_info.call_count, 1)
            self.assertRegexpMatches(log_info.call_args[0][0], 'Timed out after [-.e0-9]+s')
def test_get_index_all_failed(self):
    # all finders failed
    store = Store(finders=[TestFinder()])

    with patch('graphite.storage.log.info') as log_info:
        with self.assertRaisesRegexp(Exception, 'All requests failed for get_index'):
            store.get_index()
        self.assertEqual(log_info.call_count, 1)
        self.assertRegexpMatches(
            log_info.call_args[0][0],
            'Exception during get_index after [-.e0-9]+s: TestFinder.find_nodes'
        )

    store = Store(finders=[TestFinder(), TestFinder()])

    with patch('graphite.storage.log.info') as log_info:
        with self.assertRaisesRegexp(
                Exception, r'All requests failed for get_index \(2\)'):
            store.get_index()
        self.assertEqual(log_info.call_count, 2)
        self.assertRegexpMatches(
            log_info.call_args[0][0],
            'Exception during get_index after [-.e0-9]+s: TestFinder.find_nodes'
        )
def test_fetch_pool_timeout(self):
    # pool timeout
    store = Store(finders=[RemoteFinder()])

    def mock_pool_exec(pool, jobs, timeout):
        raise PoolTimeoutError()

    message = r'Timed out after [-.e0-9]+s for fetch for \[\'a\'\]'
    with patch('graphite.storage.pool_exec', mock_pool_exec):
        with patch('graphite.storage.log.info') as log_info:
            with self.assertRaisesRegexp(Exception, message):
                list(store.fetch(['a'], 1, 2, 3, {}))
            self.assertEqual(log_info.call_count, 1)
            self.assertRegexpMatches(log_info.call_args[0][0], message)
def test_custom_finder(self):
    store = Store(finders=[get_finder('tests.test_finders.DummyFinder')])
    nodes = list(store.find('foo'))
    self.assertEqual(len(nodes), 1)
    self.assertEqual(nodes[0].path, 'foo')

    nodes = list(store.find('bar.*'))
    self.assertEqual(len(nodes), 10)
    node = nodes[0]
    self.assertEqual(node.path.split('.')[0], 'bar')

    time_info, series = node.fetch(100, 200)
    self.assertEqual(time_info, (100, 200, 10))
    self.assertEqual(len(series), 10)
def test_get_index_pool_timeout(self):
    # pool timeout
    store = Store(finders=[RemoteFinder()])

    def mock_pool_exec(pool, jobs, timeout):
        raise PoolTimeoutError()

    with patch('graphite.storage.pool_exec', mock_pool_exec):
        with patch('graphite.storage.log.info') as log_info:
            with self.assertRaisesRegexp(Exception, 'Timed out after .*'):
                store.get_index()
            self.assertEqual(log_info.call_count, 1)
            self.assertRegexpMatches(log_info.call_args[0][0], 'Timed out after [-.e0-9]+s')
def test_fetch_pool_timeout(self):
    # pool timeout
    store = Store(finders=[RemoteFinder()])

    def mock_pool_exec(pool, jobs, timeout):
        raise PoolTimeoutError()

    with patch('graphite.storage.pool_exec', mock_pool_exec):
        with patch('graphite.storage.log.debug') as log_debug:
            with self.assertRaisesRegexp(Exception, r'All fetches failed for \[\'a\'\]'):
                list(store.fetch(['a'], 1, 2, 3, {}))
            self.assertEqual(log_debug.call_count, 1)
            self.assertRegexpMatches(log_debug.call_args[0][0], 'Timed out in fetch after [-.e0-9]+s')
def test_find_pool_timeout(self):
    # pool timeout
    store = Store(finders=[RemoteFinder()])

    def mock_pool_exec(pool, jobs, timeout):
        raise PoolTimeoutError()

    with patch('graphite.storage.pool_exec', mock_pool_exec):
        with patch('graphite.storage.log.debug') as log_debug:
            with self.assertRaisesRegexp(Exception, r'All finds failed for <FindQuery: a from \* until \*>'):
                list(store.find('a'))
            self.assertEqual(log_debug.call_count, 1)
            self.assertRegexpMatches(log_debug.call_args[0][0], 'Timed out in find after [-.e0-9]+s')
def test_find_pool_timeout(self):
    # pool timeout
    store = Store(finders=[RemoteFinder()])

    def mock_pool_exec(pool, jobs, timeout):
        raise PoolTimeoutError()

    message = r'Timed out after [-.e0-9]+s for find <FindQuery: a from \* until \*>'
    with patch('graphite.storage.pool_exec', mock_pool_exec):
        with patch('graphite.storage.log.info') as log_info:
            with self.assertRaisesRegexp(Exception, message):
                list(store.find('a'))
            self.assertEqual(log_info.call_count, 1)
            self.assertRegexpMatches(log_info.call_args[0][0], message)
def test_fetch_pool_timeout(self):
    # pool timeout
    store = Store(finders=[RemoteFinder()])

    def mock_pool_exec(pool, jobs, timeout):
        raise PoolTimeoutError()

    message = r'Timed out after [-.e0-9]+s for fetch for \[\'a\'\]'
    with patch('graphite.storage.pool_exec', mock_pool_exec):
        with patch('graphite.storage.log.info') as log_info:
            with self.assertRaisesRegexp(Exception, message):
                list(store.fetch(['a'], 1, 2, 3, {}))
            self.assertEqual(log_info.call_count, 1)
            self.assertRegexpMatches(log_info.call_args[0][0], message)
def mockStore(finders, request_limit=100, request_context=None):
    tagdb = Mock()

    def mockAutoCompleteTags(exprs, tagPrefix=None, limit=None, requestContext=None):
        self.assertEqual(exprs, ['tag1=value1'])
        self.assertEqual(tagPrefix, 'test')
        self.assertEqual(limit, request_limit)
        self.assertEqual(requestContext, request_context or {})
        return ['testnotags']

    tagdb.auto_complete_tags.side_effect = mockAutoCompleteTags

    def mockAutoCompleteValues(exprs, tag, valuePrefix=None, limit=None, requestContext=None):
        self.assertEqual(exprs, ['tag1=value1'])
        self.assertEqual(tag, 'tag2')
        self.assertEqual(valuePrefix, 'test')
        self.assertEqual(limit, request_limit)
        self.assertEqual(requestContext, request_context or {})
        return ['testnotags']

    tagdb.auto_complete_values.side_effect = mockAutoCompleteValues

    return Store(
        finders=finders,
        tagdb=tagdb,
    )
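# Note (illustrative, not part of the original tests): mockStore closes over
# `self`, `request_limit`, and `request_context`, so it is meant to be defined
# inside a test method. A hedged usage sketch of the mock tagdb's contract:
#
#     store = mockStore([TestFinder()], request_limit=50)
#     self.assertEqual(
#         store.tagdb.auto_complete_tags(
#             ['tag1=value1'], tagPrefix='test', limit=50, requestContext={}),
#         ['testnotags'])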
def test_multiple_globstars(self):
    self.addCleanup(self.wipe_whisper)
    store = Store(finders=get_finders('graphite.finders.standard.StandardFinder'))

    query = "x.**.x.**.x"
    hits = ["x.x.x", "x._.x.x", "x.x._.x", "x._.x._.x", "x._._.x.x", "x.x._._.x"]
    misses = ["x.o.x", "o.x.x", "x.x.o", "o.x.x.x", "x.x.x.o", "o._.x._.x", "x._.o._.x", "x._.x._.o"]

    for path in hits + misses:
        file = join(path.replace(".", os.sep)) + ".wsp"
        self.create_whisper(file)

    paths = [node.path for node in store.find(query, local=True)]
    for hit in hits:
        self.assertIn(hit, paths)
    for miss in misses:
        self.assertNotIn(miss, paths)
def test_terminal_globstar(self):
    self.addCleanup(self.wipe_whisper)
    finder = get_finder("graphite.finders.standard.StandardFinder")
    store = Store(finders=[finder])

    query = "x.**"
    hits = ["x._", "x._._", "x._._._"]
    misses = ["x", "o._", "o.x._", "o._.x"]

    for path in hits + misses:
        file = join(path.replace(".", os.sep)) + ".wsp"
        self.create_whisper(file)

    paths = [node.path for node in store.find(query, local=True)]
    for hit in hits:
        self.assertIn(hit, paths)
    for miss in misses:
        self.assertNotIn(miss, paths)

    self.wipe_whisper()
def test_find(self):
    disabled_finder = DisabledFinder()
    legacy_finder = LegacyFinder()
    test_finder = TestFinder()
    remote_finder = RemoteFinder()

    store = Store(
        finders=[disabled_finder, legacy_finder, test_finder, remote_finder],
        tagdb=get_tagdb('graphite.tags.localdatabase.LocalDatabaseTagDB')
    )

    # find nodes
    result = list(store.find('a'))
    self.assertEqual(len(result), 5)
    for node in result:
        if node.path in ['a.b.c.d', 'a.b.c.e']:
            self.assertIsInstance(node, LeafNode)
        else:
            self.assertIsInstance(node, BranchNode)
            self.assertTrue(node.path in ['a', 'a.b', 'a.b.c'])

    # find leaves only
    result = list(store.find('a', leaves_only=True))
    self.assertEqual(len(result), 2)
    for node in result:
        self.assertIsInstance(node, LeafNode)
        self.assertTrue(node.path in ['a.b.c.d', 'a.b.c.e'])

    # failure threshold
    with self.settings(METRICS_FIND_FAILURE_THRESHOLD=1):
        with self.assertRaisesRegexp(Exception, r'Query a yields too many results and failed \(failure threshold is 1\)'):
            list(store.find('a'))

    # warning threshold
    with self.settings(METRICS_FIND_WARNING_THRESHOLD=1):
        with patch('graphite.storage.log.warning') as log_warning:
            list(store.find('a'))
            self.assertEqual(log_warning.call_count, 1)
            self.assertEqual(
                log_warning.call_args[0][0],
                'Query a yields large number of results up to 2 (warning threshold is 1)'
            )
def test_get_index(self):
    disabled_finder = DisabledFinder()
    # use get_finders so legacy_finder is patched with get_index
    legacy_finder = get_finders('tests.test_storage.LegacyFinder')[0]
    test_finder = TestFinder()
    remote_finder = RemoteFinder()

    store = Store(
        finders=[disabled_finder, legacy_finder, test_finder, remote_finder],
        tagdb='graphite.tags.localdatabase.LocalDatabaseTagDB'
    )

    # get index
    result = store.get_index()
    self.assertEqual(result, ['a.b.c.d', 'a.b.c.e'])

    # get local index
    result = store.get_index({'localOnly': True})
    self.assertEqual(result, ['a.b.c.d'])
def test_fetch_some_failed(self):
    # some finders failed
    store = Store(finders=[TestFinder(), RemoteFinder()])

    with patch('graphite.storage.log.info') as log_info:
        list(store.fetch(['a'], 1, 2, 3, {}))
        self.assertEqual(log_info.call_count, 1)

    store = Store(finders=[TestFinder(), TestFinder()])

    with patch('graphite.storage.log.info') as log_info:
        with self.assertRaisesRegexp(Exception, r'All requests failed for fetch for \[\'a\'\] \(2\)'):
            list(store.fetch(['a'], 1, 2, 3, {}))
        self.assertEqual(log_info.call_count, 2)
        self.assertRegexpMatches(
            log_info.call_args[0][0],
            r'Exception during fetch for \[\'a\'\] after [-.e0-9]+s: TestFinder.find_nodes'
        )
def test_get_index(self):
    disabled_finder = DisabledFinder()
    # use get_finders so legacy_finder is patched with get_index
    legacy_finder = get_finders('tests.test_storage.LegacyFinder')[0]
    test_finder = TestFinder()
    remote_finder = RemoteFinder()

    store = Store(
        finders=[disabled_finder, legacy_finder, test_finder, remote_finder],
        tagdb=get_tagdb('graphite.tags.localdatabase.LocalDatabaseTagDB')
    )

    # get index
    result = store.get_index()
    self.assertEqual(result, ['a.b.c.d', 'a.b.c.e'])

    # get local index
    result = store.get_index({'localOnly': True})
    self.assertEqual(result, ['a.b.c.d'])
def test_fetch_all_failed(self):
    # all finds failed
    store = Store(finders=[TestFinder()])

    message = r'All requests failed for fetch for \[\'a\'\] \(1\)'
    with patch('graphite.storage.log.info') as log_info:
        with self.assertRaisesRegexp(Exception, message):
            list(store.fetch(['a'], 1, 2, 3, {}))
        self.assertEqual(log_info.call_count, 1)
        self.assertRegexpMatches(
            log_info.call_args[0][0],
            r'Exception during fetch for \[\'a\'\] after [-.e0-9]+s: TestFinder.find_nodes'
        )

    store = Store(finders=[TestFinder(), TestFinder()])

    message = r'All requests failed for fetch for \[\'a\'\] \(2\)'
    with patch('graphite.storage.log.info') as log_info:
        with self.assertRaisesRegexp(Exception, message):
            list(store.fetch(['a'], 1, 2, 3, {}))
        self.assertEqual(log_info.call_count, 2)
        self.assertRegexpMatches(
            log_info.call_args[0][0],
            r'Exception during fetch for \[\'a\'\] after [-.e0-9]+s: TestFinder.find_nodes'
        )
def test_fetch_some_failed_hard_fail_enabled(self):
    # all finds failed
    store = Store(finders=[TestFinder(), RemoteFinder()])

    with patch('graphite.storage.log.info') as log_info:
        with self.assertRaisesRegexp(
                Exception, r'1 request\(s\) failed for fetch for \[\'a\'\] \(2\)'):
            list(store.fetch(['a'], 1, 2, 3, {}))
        self.assertEqual(log_info.call_count, 1)
        self.assertRegexpMatches(
            log_info.call_args[0][0],
            r'Exception during fetch for \[\'a\'\] after [-.e0-9]+s: TestFinder.find_nodes'
        )

    store = Store(finders=[TestFinder(), TestFinder()])

    with patch('graphite.storage.log.info') as log_info:
        with self.assertRaisesRegexp(
                Exception, r'All requests failed for fetch for \[\'a\'\] \(2\)'):
            list(store.fetch(['a'], 1, 2, 3, {}))
        self.assertEqual(log_info.call_count, 2)
        self.assertRegexpMatches(
            log_info.call_args[0][0],
            r'Exception during fetch for \[\'a\'\] after [-.e0-9]+s: TestFinder.find_nodes'
        )
class Source(object):
    """The Source wraps up a graphite.Store and gives you Coal"""

    def __init__(self, hostname=False, directory=DEFAULT_COAL_HOLE):
        if hostname:
            self.store = Store(remote_hosts=[hostname])
        elif directory:
            self.store = Store(directories=[directory])
        else:
            raise ValueError("hostname or directory required")

    def get_nodes(self, bucket='*', max_depth=9, depth=0):
        for bucket in self.store.find_all(bucket):
            if bucket.isLeaf():
                yield Node(bucket)
            else:
                if depth < max_depth:
                    for node in self.get_nodes(bucket=bucket.metric_path + '.*', depth=depth + 1):
                        yield node

    def get_hosts(self, glob=DEFAULT_HOST_GLOB):
        return [host for host in self.store.find_all(glob)]
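# Hedged usage sketch (illustrative only; the directory path is a hypothetical
# example and all names come from the class above): a Source built from a local
# directory exposes the same traversal helpers as one built from a hostname.
#
#     source = Source(directory='/opt/graphite/storage/whisper')
#     hosts = source.get_hosts()                   # buckets matching DEFAULT_HOST_GLOB
#     leaves = list(source.get_nodes('carbon.*'))  # leaf Node wrappers, recursion capped at max_depth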
def test_fetch(self):
    disabled_finder = get_finders('tests.test_storage.DisabledFinder')[0]
    legacy_finder = get_finders('tests.test_storage.LegacyFinder')[0]
    test_finder = get_finders('tests.test_storage.TestFinder')[0]
    remote_finder = get_finders('tests.test_storage.RemoteFinder')[0]

    store = Store(
        finders=[disabled_finder, legacy_finder, test_finder, remote_finder],
        tagdb=get_tagdb('graphite.tags.localdatabase.LocalDatabaseTagDB')
    )

    # tagdb is properly initialized
    self.assertIsInstance(store.tagdb, LocalDatabaseTagDB)

    # get all enabled finders
    finders = store.get_finders()
    self.assertEqual(list(finders), [legacy_finder, test_finder, remote_finder])

    # get only local finders
    finders = store.get_finders(local=True)
    self.assertEqual(list(finders), [legacy_finder, test_finder])

    # fetch with empty patterns
    result = store.fetch([], 1, 2, 3, {})
    self.assertEqual(result, [])

    # fetch
    result = store.fetch(['a.**'], 1, 2, 3, {})
    self.assertEqual(len(result), 3)
    result.sort(key=lambda node: node['name'])
    self.assertEqual(result[0]['name'], 'a.b.c.d')
    self.assertEqual(result[0]['pathExpression'], 'a.**')
    self.assertEqual(result[1]['name'], 'a.b.c.d')
    self.assertEqual(result[1]['pathExpression'], 'a.**')
    self.assertEqual(result[2]['name'], 'a.b.c.e')
    self.assertEqual(result[2]['pathExpression'], 'a.**')
def test_find(self):
    disabled_finder = DisabledFinder()
    legacy_finder = LegacyFinder()
    test_finder = TestFinder()
    remote_finder = RemoteFinder()

    store = Store(
        finders=[disabled_finder, legacy_finder, test_finder, remote_finder],
        tagdb=get_tagdb('graphite.tags.localdatabase.LocalDatabaseTagDB'))

    # find nodes
    result = list(store.find('a'))
    self.assertEqual(len(result), 5)
    for node in result:
        if node.path in ['a.b.c.d', 'a.b.c.e']:
            self.assertIsInstance(node, LeafNode)
        else:
            self.assertIsInstance(node, BranchNode)
            self.assertTrue(node.path in ['a', 'a.b', 'a.b.c'])

    # find leaves only
    result = list(store.find('a', leaves_only=True))
    self.assertEqual(len(result), 2)
    for node in result:
        self.assertIsInstance(node, LeafNode)
        self.assertTrue(node.path in ['a.b.c.d', 'a.b.c.e'])

    # failure threshold
    with self.settings(METRICS_FIND_FAILURE_THRESHOLD=1):
        with self.assertRaisesRegexp(
                Exception,
                r'Query a yields too many results and failed \(failure threshold is 1\)'):
            list(store.find('a'))

    # warning threshold
    with self.settings(METRICS_FIND_WARNING_THRESHOLD=1):
        with patch('graphite.storage.log.warning') as log_warning:
            list(store.find('a'))
            self.assertEqual(log_warning.call_count, 1)
            self.assertEqual(
                log_warning.call_args[0][0],
                'Query a yields large number of results up to 2 (warning threshold is 1)'
            )
def test_get_index_all_failed(self):
    # all finders failed
    store = Store(finders=[TestFinder()])

    with patch('graphite.storage.log.info') as log_info:
        with self.assertRaisesRegexp(Exception, 'All requests failed for get_index'):
            store.get_index()
        self.assertEqual(log_info.call_count, 1)
        self.assertRegexpMatches(
            log_info.call_args[0][0],
            'Exception during get_index after [-.e0-9]+s: TestFinder.find_nodes'
        )

    store = Store(finders=[TestFinder(), TestFinder()])

    with patch('graphite.storage.log.info') as log_info:
        with self.assertRaisesRegexp(Exception, r'All requests failed for get_index \(2\)'):
            store.get_index()
        self.assertEqual(log_info.call_count, 2)
        self.assertRegexpMatches(
            log_info.call_args[0][0],
            'Exception during get_index after [-.e0-9]+s: TestFinder.find_nodes'
        )
def test_fetch(self):
    disabled_finder = get_finders('tests.test_storage.DisabledFinder')[0]
    legacy_finder = get_finders('tests.test_storage.LegacyFinder')[0]
    test_finder = get_finders('tests.test_storage.TestFinder')[0]
    remote_finder = get_finders('tests.test_storage.RemoteFinder')[0]

    store = Store(
        finders=[disabled_finder, legacy_finder, test_finder, remote_finder],
        tagdb=get_tagdb('graphite.tags.localdatabase.LocalDatabaseTagDB'))

    # tagdb is properly initialized
    self.assertIsInstance(store.tagdb, LocalDatabaseTagDB)

    # get all enabled finders
    finders = store.get_finders()
    self.assertEqual(list(finders), [legacy_finder, test_finder, remote_finder])

    # get only local finders
    finders = store.get_finders(local=True)
    self.assertEqual(list(finders), [legacy_finder, test_finder])

    # fetch with empty patterns
    result = store.fetch([], 1, 2, 3, {})
    self.assertEqual(result, [])

    # fetch
    result = store.fetch(['a.**'], 1, 2, 3, {})
    self.assertEqual(len(result), 3)
    result.sort(key=lambda node: node['name'])
    self.assertEqual(result[0]['name'], 'a.b.c.d')
    self.assertEqual(result[0]['pathExpression'], 'a.**')
    self.assertEqual(result[1]['name'], 'a.b.c.d')
    self.assertEqual(result[1]['pathExpression'], 'a.**')
    self.assertEqual(result[2]['name'], 'a.b.c.e')
    self.assertEqual(result[2]['pathExpression'], 'a.**')
def test_find_all_failed(self):
    # all finds failed
    store = Store(finders=[TestFinder()])

    message = r'All requests failed for find <FindQuery: a from \* until \*>'
    with patch('graphite.storage.log.info') as log_info:
        with self.assertRaisesRegexp(Exception, message):
            list(store.find('a'))
        self.assertEqual(log_info.call_count, 1)
        self.assertRegexpMatches(
            log_info.call_args[0][0],
            r'Exception during find <FindQuery: a from \* until \*> after [-.e0-9]+s: TestFinder.find_nodes'
        )

    store = Store(finders=[TestFinder(), TestFinder()])

    with patch('graphite.storage.log.info') as log_info:
        with self.assertRaisesRegexp(Exception, message):
            list(store.find('a'))
        self.assertEqual(log_info.call_count, 2)
        self.assertRegexpMatches(
            log_info.call_args[0][0],
            r'Exception during find <FindQuery: a from \* until \*> after [-.e0-9]+s: TestFinder.find_nodes'
        )
def test_fetch_no_tag_support(self):
    class TestFinderNoTags(BaseFinder):
        tags = False

        def find_nodes(self, query):
            pass

        def fetch(self, patterns, start_time, end_time, now=None, requestContext=None):
            if patterns != ['notags;hello=tiger']:
                raise Exception('Unexpected patterns %s' % str(patterns))

            return [{
                'pathExpression': 'notags;hello=tiger',
                'name': 'notags;hello=tiger',
                'time_info': (0, 60, 1),
                'values': [],
            }]

    tagdb = Mock()

    def mockFindSeries(exprs, requestContext=None):
        self.assertEqual(requestContext, request_context)
        if exprs == ('hello=tiger', ) or exprs == ('name=notags', ):
            return ['notags;hello=tiger']
        if exprs == ('name=testtags', ):
            return []
        raise Exception('Unexpected exprs %s' % str(exprs))

    tagdb.find_series.side_effect = mockFindSeries

    store = Store(finders=[TestFinderNoTags()], tagdb=tagdb)

    with patch('graphite.render.datalib.STORE', store):
        request_context = {
            'startTime': epoch_to_dt(0),
            'endTime': epoch_to_dt(60),
            'now': epoch_to_dt(60),
        }

        results = evaluateTarget(request_context, [
            'notags;hello=tiger',
            'seriesByTag("hello=tiger")',
            'seriesByTag("name=testtags")',
            'seriesByTag("name=notags")',
        ])

        self.assertEqual(tagdb.find_series.call_count, 3)

        self.assertEqual(results, [
            TimeSeries('notags;hello=tiger', 0, 60, 1, []),
            TimeSeries('notags;hello=tiger', 0, 60, 1, [], pathExpression='seriesByTag("hello=tiger")'),
            TimeSeries('notags;hello=tiger', 0, 60, 1, [], pathExpression='seriesByTag("name=notags")'),
        ])
def test_fetch_tag_support(self):
    class TestFinderTags(BaseFinder):
        tags = True

        def find_nodes(self, query):
            pass

        def fetch(self, patterns, start_time, end_time, now=None, requestContext=None):
            if patterns != [
                'seriesByTag("hello=tiger")',
                'seriesByTag("name=notags")',
                'seriesByTag("name=testtags")',
                'testtags;hello=tiger',
            ]:
                raise Exception('Unexpected patterns %s' % str(patterns))

            return [
                {
                    'pathExpression': 'testtags;hello=tiger',
                    'name': 'testtags;hello=tiger',
                    'time_info': (0, 60, 1),
                    'values': [],
                },
                {
                    'pathExpression': 'seriesByTag("hello=tiger")',
                    'name': 'testtags;hello=tiger',
                    'time_info': (0, 60, 1),
                    'values': [],
                },
                {
                    'pathExpression': 'seriesByTag("name=testtags")',
                    'name': 'testtags;hello=tiger',
                    'time_info': (0, 60, 1),
                    'values': [],
                },
            ]

    tagdb = Mock()

    store = Store(finders=[TestFinderTags()], tagdb=tagdb)

    request_context = {
        'startTime': epoch_to_dt(0),
        'endTime': epoch_to_dt(60),
        'now': epoch_to_dt(60),
    }

    with patch('graphite.render.datalib.STORE', store):
        results = evaluateTarget(request_context, [
            'testtags;hello=tiger',
            'seriesByTag("hello=tiger")',
            'seriesByTag("name=testtags")',
            'seriesByTag("name=notags")',
        ])

        self.assertEqual(results, [
            TimeSeries('testtags;hello=tiger', 0, 60, 1, []),
            TimeSeries('testtags;hello=tiger', 0, 60, 1, [], pathExpression='seriesByTag("hello=tiger")'),
            TimeSeries('testtags;hello=tiger', 0, 60, 1, [], pathExpression='seriesByTag("name=testtags")'),
        ])