Example #1
0
 def test_template_measurement_first(self):
     """Template with measurement as the first path component: measurement
     names form the root of the metric tree, followed by tag values."""
     del self.finder
     template = "measurement.host.resource"
     self.config['influxdb']['templates'] = [template]
     measurements = ['load', 'idle',
                     'usage', 'user']
     tags = {'host': 'my_host',
             'resource': 'cpu',
             }
     fields = {'value': 1}
     self.write_data(measurements, tags, fields)
     self.finder = influxgraph.InfluxDBFinder(self.config)
     # Root level: measurement names.
     query = Query('*')
     nodes = sorted([n.name for n in self.finder.find_nodes(query)])
     expected = sorted(measurements)
     self.assertEqual(nodes, expected,
                      msg="Got root branch query result %s - wanted %s" % (
                          nodes, expected,))
     # Second level: the host tag value.
     query = Query('%s.*' % (measurements[0]))
     nodes = sorted([n.name for n in self.finder.find_nodes(query)])
     expected = [tags['host']]
     self.assertEqual(nodes, expected,
                      msg="Got query %s result %s - wanted %s" % (
                          query.pattern, nodes, expected,))
     # Third level: the resource tag value.
     query = Query('%s.%s.*' % (measurements[0], tags['host'],))
     nodes = sorted([n.name for n in self.finder.find_nodes(query)])
     expected = sorted([tags['resource']])
     self.assertEqual(nodes, expected,
                      msg="Got query %s result %s - wanted %s" % (
                          query.pattern, nodes, expected,))
Example #2
0
 def test_template_drop_path_part(self):
     """A template that filters out leading path parts indexes the
     remainder of the metric path as the measurement name, alongside the
     default template's metric prefix."""
     del self.finder
     # Filter out first part of metric, keep the remainder as
     # measurement name
     template = "..measurement*"
     self.config['influxdb']['templates'] = [self.template,
                                             template]
     measurements = ['my_host.cpu.load', 'my_host.cpu.idle',
                     'my_host.cpu.usage', 'my_host.cpu.user']
     fields = {"value": 1}
     self.write_data(measurements, {}, fields)
     self.finder = influxgraph.InfluxDBFinder(self.config)
     # Root contains both the host from the dotted measurement names and
     # the default template's metric prefix.
     query = Query('*')
     nodes = sorted([n.name for n in self.finder.find_nodes(query)])
     expected = sorted(['my_host', self.metric_prefix])
     self.assertEqual(nodes, expected,
                      msg="Got root branch query result %s - wanted %s" % (
                          nodes, expected,))
     split_measurement = measurements[0].split('.')
     query = Query('%s.*' % (split_measurement[0]))
     nodes = [n.name for n in self.finder.find_nodes(query)]
     expected = [split_measurement[1]]
     self.assertEqual(nodes, expected,
                      msg="Got sub branch query result %s - wanted %s" % (
                          nodes, expected,))
     # Leaf level: last component of each written measurement name.
     query = Query('%s.%s.*' % (split_measurement[0], split_measurement[1],))
     nodes = sorted([n.name for n in self.finder.find_nodes(query)])
     expected = sorted([m.split('.')[2] for m in measurements])
     self.assertEqual(nodes, expected,
                      msg="Got sub branch query result %s - wanted %s" % (
                          nodes, expected,))
Example #3
0
 def test_field_template_with_value_field(self):
     """With a 'field*' template and a single 'value' field, the field
     adds no extra path level - the measurement itself is the leaf."""
     template = "env.host.measurement.field*"
     del self.finder
     measurements = ['cpuusage']
     fields = {'value': 1}
     tags = {'host': 'my_host1',
             'env': 'my_env1',
             }
     metrics = ['.'.join([tags['env'], tags['host'], m])
                for m in measurements]
     self.client.drop_database(self.db_name)
     self.client.create_database(self.db_name)
     self.write_data(measurements, tags, fields)
     self.config['influxdb']['templates'] = [template]
     self.finder = influxgraph.InfluxDBFinder(self.config)
     cpu_nodes = list(self.finder.find_nodes(Query('my_env1.my_host1.*')))
     expected = measurements
     self.assertEqual([n.name for n in cpu_nodes], expected)
     # No fourth path level should exist for the 'value' field.
     nodes = list(self.finder.find_nodes(Query('my_env1.my_host1.*.*')))
     self.assertEqual(nodes, [])
     _, data = self.finder.fetch_multi(cpu_nodes, int(self.start_time.strftime("%s")),
                                       int(self.end_time.strftime("%s")))
     for metric in metrics:
         datapoints = [v for v in data[metric] if v]
         self.assertTrue(len(datapoints) == self.num_datapoints)
Example #4
0
 def test_find_branch_nodes(self):
     """Test finding branch nodes by wildcard"""
     prefix = 'branch_test_prefix'
     written_branches = [
         'branch_node1.sub_branch1.sub_branch2.sub_branch3',
         'branch_node2.sub_branch11.sub_branch22.sub_branch33'
     ]
     leaf_nodes = ['leaf_node1', 'leaf_node2']
     # Full series names: prefix.<branch path>.<leaf>
     written_series = [
         ".".join([
             prefix,
             branch,
             leaf_node,
         ]) for branch in written_branches for leaf_node in leaf_nodes
     ]
     # Two datapoints per series, 30 and 2 minutes before end time.
     data = [{
         "measurement": series,
         "tags": {},
         "time": _time,
         "fields": {
             "value": 1,
         }
     } for series in written_series for _time in [
         (self.end_time -
          datetime.timedelta(minutes=30)).strftime("%Y-%m-%dT%H:%M:%SZ"),
         (self.end_time -
          datetime.timedelta(minutes=2)).strftime("%Y-%m-%dT%H:%M:%SZ"),
     ]]
     self.assertTrue(self.client.write_points(data))
     self.finder.build_index()
     # First level below the prefix.
     query = Query(prefix + '.*')
     nodes = list(self.finder.find_nodes(query))
     expected = sorted([b.split('.')[0] for b in written_branches])
     branches = sorted([n.name for n in nodes])
     self.assertEqual(branches,
                      expected,
                      msg="Got branches %s - wanted %s" % (
                          branches,
                          expected,
                      ))
     # Second level, with a wildcard on the branch name.
     query = Query(prefix + '.branch_node*.*')
     nodes = list(self.finder.find_nodes(query))
     expected = sorted([b.split('.')[1] for b in written_branches])
     branches = sorted([n.name for n in nodes])
     self.assertEqual(branches,
                      expected,
                      msg="Got branches %s - wanted %s" % (
                          branches,
                          expected,
                      ))
     # Third level, wildcards on both intermediate components.
     query = Query(prefix + '.branch_node*.sub_branch*.*')
     nodes = list(self.finder.find_nodes(query))
     expected = sorted([b.split('.')[2] for b in written_branches])
     branches = sorted([n.name for n in nodes])
     self.assertEqual(branches,
                      expected,
                      msg="Got branches list %s - wanted %s" % (
                          branches,
                          expected,
                      ))
Example #5
0
 def test_multi_fetch_data_multi_series_configured_aggregation_functions(self):
     """Test fetching data for multiple series with aggregation functions configured"""
     nodes = list(self.finder.find_nodes(Query(self.metric_prefix + ".agg_path.*")))
     paths = [node.path for node in nodes]
     # The agg_path.* suffixes should collectively map onto the full set
     # of default aggregation functions.
     aggregation_funcs = sorted(list(set(influxgraph.utils.get_aggregation_func(
         path, self.finder.aggregation_functions) for path in paths)))
     expected = sorted(DEFAULT_AGGREGATIONS.values())
     self.assertEqual(expected, aggregation_funcs,
                      msg="Expected aggregation functions %s for paths %s - got %s" % (
                          expected, paths, aggregation_funcs))
     time_info, data = self.finder.fetch_multi(nodes,
                                               int(self.start_time.strftime("%s")),
                                               int(self.end_time.strftime("%s")))
     self.assertTrue(nodes[0].path in data,
                     msg="Did not get data for requested series %s - got data for %s" % (
                         nodes[0].path, data.keys(),))
     # Fetching each aggregation suffix individually must also return data.
     for suffix in ['min', 'max', 'last', 'sum']:
         series = self.metric_prefix + ".agg_path.%s" % (suffix,)
         nodes = list(self.finder.find_nodes(Query(series)))
         time_info, data = self.finder.fetch_multi(nodes,
                                                   int(self.start_time.strftime("%s")),
                                                   int(self.end_time.strftime("%s")))
         self.assertTrue(series in data,
                         msg="Did not get data for requested series %s - got data for %s" % (
                             series, data.keys(),))
Example #6
0
 def test_data_with_fields(self):
     """Template with a trailing 'field*': every measurement/field pair
     becomes a metric path under the host tag value.

     Fields with a '.' in their name are written as well but only the
     single-level field names are asserted on below.
     """
     del self.finder
     template = "host.measurement.field*"
     self.config['influxdb']['templates'] = [template]
     measurements = ['cpu-0', 'cpu-1', 'cpu-2', 'cpu-3']
     fields = {'load': 1, 'idle': 1,
               'usage': 1, 'user': 1,
               'user.io': 1, 'idle.io': 1,
               'load.io': 1, 'usage.io': 1,
               }
     tags = {'host': 'my_host',
             'env': 'my_env',
             }
     # Expected node paths for the single-level field names only.
     cpu_metrics = ['.'.join(['my_host', m, f])
                    for m in measurements
                    for f in ['load', 'usage', 'user', 'idle']]
     self.client.drop_database(self.db_name)
     self.client.create_database(self.db_name)
     self.write_data(measurements, tags, fields)
     self.finder = influxgraph.InfluxDBFinder(self.config)
     query = Query('%s.*' % (tags['host']))
     nodes = sorted([n.name for n in self.finder.find_nodes(query)])
     expected = measurements
     self.assertEqual(nodes, expected,
                      msg="Got query %s result %s - wanted %s" % (
                          query.pattern, nodes, expected,))
     query = Query('%s.*.*' % (tags['host'],))
     nodes = list(self.finder.find_nodes(query))
     node_paths = sorted([n.path for n in nodes])
     expected = sorted(cpu_metrics)
     self.assertEqual(node_paths, expected,
                      msg="Got query %s result %s - wanted %s" % (
                          query.pattern, node_paths, expected,))
     time_info, data = self.finder.fetch_multi(nodes,
                                               int(self.start_time.strftime("%s")),
                                               int(self.end_time.strftime("%s")))
     for metric in cpu_metrics:
         self.assertTrue(metric in data,
                         msg="Did not get data for requested series %s - got data for %s" % (
                             metric, data.keys(),))
         datapoints = [v for v in data[metric] if v]
         self.assertTrue(len(datapoints) == self.num_datapoints,
                         msg="Expected %s datapoints for %s - got %s" % (
                             self.num_datapoints, metric, len(datapoints),))
     # Fetch a second time and verify identical counts.
     # NOTE(review): presumably exercises a cached code path - confirm
     # against the finder implementation.
     time_info, _data = self.finder.fetch_multi(nodes,
                                                int(self.start_time.strftime("%s")),
                                                int(self.end_time.strftime("%s")))
     for metric in cpu_metrics:
         datapoints = [v for v in _data[metric] if v]
         self.assertTrue(len(datapoints) == self.num_datapoints,
                         msg="Expected %s datapoints for %s - got %s" % (
                             self.num_datapoints, metric, len(datapoints),))
Example #7
0
 def test_find_leaf_nodes(self):
     """Test finding leaf nodes by wildcard"""
     prefix = 'branch_test_prefix'
     written_branches = [
         'branch_node1.sub_branch1.sub_branch2.sub_branch3',
         'branch_node2.sub_branch11.sub_branch22.sub_branch33'
     ]
     leaf_nodes = ['leaf_node1', 'leaf_node2']
     # Full series names: prefix.<branch path>.<leaf>
     written_series = [
         ".".join([
             prefix,
             branch,
             leaf_node,
         ]) for branch in written_branches for leaf_node in leaf_nodes
     ]
     # Two datapoints per series, 30 and 2 minutes before end time.
     data = [{
         "measurement": series,
         "tags": {},
         "time": _time,
         "fields": {
             "value": 1,
         }
     } for series in written_series for _time in [
         (self.end_time -
          datetime.timedelta(minutes=30)).strftime("%Y-%m-%dT%H:%M:%SZ"),
         (self.end_time -
          datetime.timedelta(minutes=2)).strftime("%Y-%m-%dT%H:%M:%SZ"),
     ]]
     self.assertTrue(self.client.write_points(data))
     self.finder.build_index()
     # Wildcard all the way down to the leaf level; both branches
     # contribute the same two leaf names.
     query = Query(".".join([
         prefix, "branch_node*", "sub_branch*", "sub_branch*",
         "sub_branch*", "leaf*"
     ]))
     nodes = list(self.finder.find_nodes(query))
     expected = sorted(leaf_nodes + leaf_nodes)
     found_leaves = sorted([n.name for n in nodes])
     self.assertEqual(found_leaves,
                      expected,
                      msg="Expected leaf node list '%s' - got %s" %
                      (expected, found_leaves))
     for node in nodes:
         self.assertTrue(node.is_leaf,
                         msg="Leaf node %s is not marked as leaf node" %
                         (node))
     # A non-existent path must yield no nodes.
     nodes = [
         node.name
         for node in self.finder.find_nodes(Query("fakeyfakeyfakefake.*"))
     ]
     self.assertEqual(nodes, [],
                      msg="Expected empty leaf node list - got %s" %
                      (nodes, ))
Example #8
0
 def test_get_series_pagination(self):
     """Paginated series retrieval returns all series regardless of the
     page limit used."""
     for limit in (5, 10):
         query = Query('*')
         series = self.finder.get_all_series(
             query, limit=limit)
         self.assertTrue(
             len(series) == len(self.series),
             msg="Did not get data for all series with page limit %s" % (
                 limit,))
Example #9
0
 def test_multi_fetch_non_existant_series(self):
     """Fetching series that were never written returns empty data."""
     start = int(self.start_time.strftime("%s"))
     end = int(self.end_time.strftime("%s"))
     # Build leaf nodes for two paths that do not exist in the database.
     nodes = []
     for fake_path in ('fake_path1', 'fake_path2'):
         reader = influxgraph.InfluxDBReader(
             InfluxDBClient(database=self.db_name), fake_path)
         nodes.append(
             influxgraph.classes.leaf.InfluxDBLeafNode(fake_path, reader))
     time_info, data = self.finder.fetch_multi(nodes, start, end)
     for metric_name in data:
         self.assertFalse(
             data[metric_name],
             msg="Expected no data for non-existant series %s - got %s" % (
                 metric_name,
                 data,
             ))
     # A find for a non-existent path yields no nodes, and fetching an
     # empty node list yields no data at all.
     fake_nodes = list(self.finder.find_nodes(Query('fake_pathy_path')))
     time_info, data = self.finder.fetch_multi(fake_nodes, start, end)
     self.assertFalse(data)
Example #10
0
 def test_multi_fetch_data_multi_series(self):
     """Test fetching data for multiple series by name"""
     nodes = list(
         self.finder.find_nodes(Query(self.metric_prefix + ".leaf*")))
     time_info, data = self.finder.fetch_multi(
         nodes, int(self.start_time.strftime("%s")),
         int(self.end_time.strftime("%s")))
     self.assertTrue(
         self.series1 in data and self.series2 in data,
         msg=
         "Did not get data for requested series %s and %s - got data for %s"
         % (
             self.series1,
             self.series2,
             data.keys(),
         ))
     self.assertEqual(
         time_info, (int(self.start_time.strftime("%s")),
                     int(self.end_time.strftime("%s")), self.step),
         msg="Time info and step do not match our requested values")
     # Each series has self.steps slots, self.num_datapoints of which are
     # non-null, ending on the value written for that series.
     for i, series in enumerate([self.series1, self.series2]):
         self.assertTrue(self.steps == len(data[series]),
                         msg="Expected %s datapoints, got %s instead" % (
                             self.steps,
                             len(data[series]),
                         ))
         datapoints = [v for v in data[series] if v]
         self.assertTrue(
             len(datapoints) == self.num_datapoints,
             msg="Expected %s datapoints for series %s - got %s" % (
                 self.num_datapoints,
                 series,
                 len(datapoints),
             ))
         self.assertTrue(datapoints[-1] == self.series_values[i])
Example #11
0
 def test_single_fetch_memcache_integration(self):
     """A single-series fetch with memcache configured stores the result
     in memcache, and a subsequent fetch returns the expected number of
     datapoints."""
     self.config['influxdb']['memcache'] = {'host': 'localhost'}
     del self.config['search_index']
     self.finder = influxgraph.InfluxDBFinder(self.config)
     node = list(self.finder.find_nodes(Query(self.series1)))[0]
     aggregation_func = get_aggregation_func(
         node.path, self.finder.aggregation_functions)
     memcache_key = gen_memcache_key(int(self.start_time.strftime("%s")),
                                     int(self.end_time.strftime("%s")),
                                     aggregation_func, [node.path])
     # Ensure no stale cache entry from a previous run skews the check.
     self.finder.memcache.delete(memcache_key)
     node.reader.fetch(int(self.start_time.strftime("%s")),
                       int(self.end_time.strftime("%s")))
     self.assertTrue(
         node.reader.memcache.get(memcache_key),
         msg="Expected data for %s to be in memcache after a fetch" %
         (node.path, ))
     time_info, data = node.reader.fetch(
         int(self.start_time.strftime("%s")),
         int(self.end_time.strftime("%s")))
     self.assertTrue(self.steps == len(data),
                     msg="Expected %s datapoints, got %s instead" % (
                         self.steps,
                         len(data),
                     ))
Example #12
0
 def test_configured_deltas(self):
     """A configured delta of 1 second for queries of one hour or less
     yields one datapoint per second over the queried hour (3601 points,
     endpoints inclusive)."""
     del self.finder
     config = {'influxdb': {'host': 'localhost',
                            'port': 8086,
                            'user': 'root',
                            'pass': 'root',
                            'db': self.db_name,
                            # Set data interval to 1 second for queries
                            # of one hour or less
                            'deltas': {3600: 1},
                            },
               }
     finder = influxgraph.InfluxDBFinder(config)
     self.assertTrue(finder.deltas)
     nodes = list(finder.find_nodes(Query(self.series1)))
     time_info, data = finder.fetch_multi(nodes,
                                          int(self.start_time.strftime("%s")),
                                          int(self.end_time.strftime("%s")))
     self.assertTrue(self.series1 in data,
                     msg="Did not get data for requested series %s - got data for %s" % (
                         self.series1, data.keys(),))
     self.assertTrue(len(data[self.series1]) == 3601,
                     msg="Expected exactly %s data points - got %s instead" % (
                         3601, len(data[self.series1])))
Example #13
0
 def test_fill_param_config(self):
     """A configured 'fill' value is adopted by the finder and used to
     fill gaps in fetched data."""
     fill_value = 0
     self.config['influxdb']['fill'] = fill_value
     self.finder = influxgraph.InfluxDBFinder(self.config)
     self.assertEqual(self.finder.fill_param, fill_value)
     start = int(self.start_time.strftime("%s"))
     end = int(self.end_time.strftime("%s"))
     nodes = list(self.finder.find_nodes(Query(self.series1)))
     time_info, data = self.finder.fetch_multi(nodes, start, end)
     self.assertTrue(
         self.series1 in data,
         msg="Did not get data for requested series %s - got data for %s" %
         (
             self.series1,
             data.keys(),
         ))
     self.assertEqual(
         time_info, (start, end, self.step),
         msg="Time info and step do not match our requested values")
     series_data = data[self.series1]
     self.assertTrue(len(series_data) == self.all_datapoints_num,
                     msg="Expected %s datapoints - got %s" % (
                         self.all_datapoints_num,
                         len(series_data),
                     ))
     # Trailing gap must have been filled with the configured value.
     self.assertTrue(series_data[-1] == fill_value)
Example #14
0
 def test_find_series_glob_expansion(self):
     """Test finding metric prefix by glob expansion"""
     pattern = '{%s}' % (self.metric_prefix)
     found = [node.name for node in self.finder.find_nodes(Query(pattern))]
     self.assertTrue(self.metric_prefix in found,
                     msg="Node list does not contain prefix '%s' - %s" % (
                         self.metric_prefix, found))
Example #15
0
 def test_get_all_series(self):
     """All series are returned from get_all_series even with a page
     limit of 1, which forces pagination through every page."""
     series = self.finder.get_all_series(cache=True, limit=1)
     self.assertTrue(len(series) == len(self.series),
                     msg="Got series list %s for root branch query - expected %s" % (
                         series, self.series,))
Example #16
0
 def test_index_load_from_file(self):
     """Index can be built from static series data stored as JSON on disk."""
     values = [['carbon.relays.host.dispatcher1.wallTime_us'],
               ['carbon.relays.host.metricsReceived'],
               ['carbon.relays.host.metricsDropped'],
               ['carbon.relays.host.metricsQueued'],
               ]
     # Payload shaped like an InfluxDB query response.
     data = {'results': [{'series': [{
         'columns': ['key'],
         'values': values,
         }]}]}
     _tempfile = tempfile.NamedTemporaryFile(mode='wt', delete=False)
     try:
         _tempfile.write(json.dumps(data))
     except Exception:
         # Remove the temp file on write failure, then re-raise.
         os.unlink(_tempfile.name)
         raise
     else:
         _tempfile.close()
     # Only the common root of the series paths appears at the top level.
     expected = ['carbon']
     try:
         self.finder.index.clear()
         self.finder.build_index(data=self.finder._read_static_data(_tempfile.name))
         self.assertEqual([n.name for n in self.finder.find_nodes(Query('*'))], expected)
     finally:
         os.unlink(_tempfile.name)
Example #17
0
 def test_find_series(self):
     """Test finding a series by name"""
     leaf_names = [
         node.name for node in self.finder.find_nodes(Query(self.series1))
         if node.is_leaf]
     wanted = [self.nodes[0]]
     self.assertEqual(leaf_names, wanted,
                      msg="Got node list %s - wanted %s" % (leaf_names, wanted,))
Example #18
0
 def test_find_branch(self):
     """Test getting branch of metric path"""
     # A query for a non-existent path yields no nodes.
     query = Query('fakeyfakeyfakefake')
     branches = list(self.finder.find_nodes(query))
     self.assertEqual(branches, [],
                      msg="Got branches list %s - wanted empty list" %
                      (branches, ))
     prefix = 'branch_test_prefix'
     written_branches = ['branch_node1', 'branch_node2']
     leaf_nodes = ['leaf_node1', 'leaf_node2']
     written_series = [
         ".".join([
             prefix,
             branch,
             leaf_node,
         ]) for branch in written_branches for leaf_node in leaf_nodes
     ]
     # Two datapoints per series, 30 and 2 minutes before end time.
     data = [{
         "measurement": series,
         "tags": {},
         "time": _time,
         "fields": {
             "value": 1,
         }
     } for series in written_series for _time in [
         (self.end_time -
          datetime.timedelta(minutes=30)).strftime("%Y-%m-%dT%H:%M:%SZ"),
         (self.end_time -
          datetime.timedelta(minutes=2)).strftime("%Y-%m-%dT%H:%M:%SZ"),
     ]]
     self.assertTrue(self.client.write_points(data))
     self.finder.build_index()
     # Test getting leaf nodes with wildcard
     query = Query(prefix + '.branch_node*.*')
     _nodes = list(self.finder.find_nodes(query))
     nodes = sorted([n.path for n in _nodes])
     expected = sorted(written_series)
     self.assertEqual(nodes,
                      expected,
                      msg="Got node list %s - wanted %s" % (
                          nodes,
                          expected,
                      ))
Example #19
0
 def test_named_branch_query(self):
     """A query for the bare metric prefix yields exactly one node, and
     that node is a branch, not a leaf."""
     found = list(self.finder.find_nodes(Query(self.metric_prefix)))
     names = [node.name for node in found]
     self.assertEqual(names, [self.metric_prefix],
                      msg="Expected node names %s, got %s" % (
                          [self.metric_prefix], names,))
     self.assertFalse(found[0].is_leaf,
                      msg="Root branch node incorrectly marked as leaf node")
Example #20
0
 def test_templated_index_find(self):
     """Walk the templated index level by level from the root."""
     # Root level: the metric prefix only.
     root_nodes = [n.name for n in self.finder.find_nodes(Query('*'))]
     root_expected = [self.metric_prefix]
     self.assertEqual(root_nodes, root_expected,
                      msg="Got root branch query result %s - wanted %s" % (
                          root_nodes, root_expected,))
     # Second level: the tag value for the second template path part.
     tag_value = self.tags[self.paths[1]]
     sub_nodes = [n.name for n in self.finder.find_nodes(
         Query("%s.*" % (self.metric_prefix,)))]
     sub_expected = [tag_value]
     self.assertEqual(sub_nodes, sub_expected,
                      msg="Got sub branch query result %s - wanted %s" % (
                          sub_nodes, sub_expected,))
     # Third level: the measurement names.
     leaf_names = sorted([n.name for n in self.finder.find_nodes(
         Query("%s.%s.*" % (self.metric_prefix, tag_value)))])
     leaf_expected = sorted(self.measurements)
     self.assertEqual(leaf_names, leaf_expected,
                      msg="Got sub branch query result %s - wanted %s" % (
                          leaf_names, leaf_expected,))
Example #21
0
 def test_multi_tag_values_multi_measurements(self):
     """The same measurements written under two different tag sets
     produce separate metric paths per env/host combination, each with a
     full set of datapoints."""
     measurements = ['cpu-0', 'cpu-1', 'cpu-2', 'cpu-3']
     fields = {'load': 1, 'idle': 1,
               'usage': 1, 'user': 1,
               }
     tags = {'host': 'my_host1',
             'env': 'my_env1',
             }
     # Two datapoints per measurement, 30 and 2 minutes before end time.
     data = [{
         "measurement": measurement,
         "tags": tags,
         "time": _time,
         "fields": fields,
         }
         for measurement in measurements
         for _time in [
             (self.end_time - datetime.timedelta(minutes=30)).strftime("%Y-%m-%dT%H:%M:%SZ"),
             (self.end_time - datetime.timedelta(minutes=2)).strftime("%Y-%m-%dT%H:%M:%SZ"),
             ]]
     self.client.drop_database(self.db_name)
     self.client.create_database(self.db_name)
     self.assertTrue(self.client.write_points(data))
     # Write the same points again under a second environment tag.
     tags_env2 = {'host': 'my_host1',
                  'env': 'my_env2',
                  }
     for d in data:
         d['tags'] = tags_env2
     self.assertTrue(self.client.write_points(data))
     template = "env.host.measurement.field*"
     self.config['influxdb']['templates'] = [template]
     self.finder = influxgraph.InfluxDBFinder(self.config)
     query = Query('*.*.*.*')
     nodes = list(self.finder.find_nodes(query))
     node_paths = sorted([n.path for n in nodes])
     tag_values = set(['.'.join([t['env'], t['host']])
                       for t in [tags, tags_env2]])
     # Expected: env.host.measurement.field for every combination,
     # excluding fields with a '.' in the name (none here).
     _metrics = ['.'.join([t, m, f])
                 for t in tag_values
                 for f in fields.keys() if not '.' in f
                 for m in measurements]
     expected = sorted(_metrics)
     self.assertEqual(node_paths, expected,
                      msg="Expected %s nodes - got %s" % (
                          len(expected), len(node_paths)))
     _, multi_tag_data = self.finder.fetch_multi(nodes,
                                       int(self.start_time.strftime("%s")),
                                       int(self.end_time.strftime("%s")))
     for metric in _metrics:
         datapoints = [v for v in multi_tag_data[metric] if v]
         self.assertTrue(len(datapoints) == self.num_datapoints,
                         msg="Expected %s datapoints for %s - got %s" % (
                             self.num_datapoints, metric, len(datapoints),))
Example #22
0
 def test_tagged_data_no_template_config(self):
     """With templates explicitly unset, only measurement names appear
     in the index."""
     del self.finder
     self.config['influxdb']['templates'] = None
     self.finder = influxgraph.InfluxDBFinder(self.config)
     found = sorted([n.name for n in self.finder.find_nodes(Query('*'))])
     wanted = sorted(self.measurements)
     self.assertEqual(found, wanted,
                      msg="Expected only measurements in index with "
                      "no templates configured, got %s" % (found,))
Example #23
0
 def test_single_fetch_data(self):
     """Test single fetch data for a series by name"""
     start = int(self.start_time.strftime("%s"))
     end = int(self.end_time.strftime("%s"))
     node = list(self.finder.find_nodes(Query(self.series1)))[0]
     time_info, data = node.reader.fetch(start, end)
     self.assertTrue(self.steps == len(data),
                     msg="Expected %s datapoints, got %s instead" % (
                         self.steps, len(data),))
     # Only the non-null slots count as written datapoints.
     non_null = [v for v in data if v]
     self.assertTrue(len(non_null) == self.num_datapoints,
                     msg="Expected %s datapoints - got %s" % (
                         self.num_datapoints, len(non_null),))
Example #24
0
 def test_field_data_part_or_no_template_match(self):
     """Series matching no configured template are not indexed; once a
     matching template is added, every template path level resolves."""
     del self.finder
     measurements = ['test']
     fields = {'field1': 1, 'field2': 2}
     tags = {'env': 'my_env',
             'region': 'my_region',
             'dc': 'dc1'
             }
     self.client.drop_database(self.db_name)
     self.client.create_database(self.db_name)
     self.write_data(measurements, tags, fields)
     # None of the written series carry a 'template_tag' tag, so nothing
     # matches this template and the index stays empty.
     self.config['influxdb']['templates'] = ['env.template_tag.measurement.field*']
     self.finder = influxgraph.InfluxDBFinder(self.config)
     query = Query('*')
     nodes = [n.name for n in self.finder.find_nodes(query)]
     expected = []
     self.assertEqual(nodes, expected)
     # Adding a second, matching template makes the series visible.
     self.config['influxdb']['templates'] = ['env.template_tag.measurement.field*',
                                             'env.region.measurement.field*']
     self.finder = influxgraph.InfluxDBFinder(self.config)
     query = Query('*')
     nodes = sorted([n.path for n in self.finder.find_nodes(query)])
     expected = [tags['env']]
     self.assertEqual(nodes, expected)
     query = Query('*.*')
     nodes = sorted([n.path for n in self.finder.find_nodes(query)])
     expected = sorted(['.'.join([tags['env'], tags['region']])])
     self.assertEqual(nodes, expected)
     query = Query('*.*.*')
     nodes = sorted([n.path for n in self.finder.find_nodes(query)])
     expected = sorted(['.'.join([tags['env'], tags['region'], measurements[0]])])
     self.assertEqual(nodes, expected)
     query = Query('*.*.*.*')
     nodes = sorted([n.path for n in self.finder.find_nodes(query)])
     expected = sorted(['.'.join([tags['env'], tags['region'], measurements[0], f])
                        for f in fields.keys()])
     self.assertEqual(nodes, expected)
Example #25
0
 def test_template_measurement_no_tags(self):
     """Data written both with and without tags: only the tagged series
     match the template and appear in the index."""
     template = "env.host.measurement.field*"
     del self.finder
     measurements = ['cpuusage']
     fields = {'value': 1}
     tags = {'host': 'my_host1',
             'env': 'my_env1',
             }
     self.client.drop_database(self.db_name)
     self.client.create_database(self.db_name)
     self.write_data(measurements, tags, fields)
     # Same measurements written again with no tags - these should not
     # match the template below.
     self.write_data(measurements, {}, fields)
     self.config['influxdb']['templates'] = [template]
     self.finder = influxgraph.InfluxDBFinder(self.config)
     nodes = [n.name for n in self.finder.find_nodes(Query('*'))]
     expected = [tags['env']]
     self.assertEqual(nodes, expected)
     cpu_nodes = list(self.finder.find_nodes(Query('my_env1.my_host1.*')))
     expected = measurements
     self.assertEqual([n.name for n in cpu_nodes], expected)
 def test_series_loader(self):
     """Background series loader populates memcache with query results
     after the configured loader interval, honouring the configured TTL
     and max value size."""
     query = Query('*')
     loader_memcache_key = influxgraph.utils.gen_memcache_pattern_key("_".join([
         query.pattern, str(self.default_nodes_limit), str(0)]))
     del self.finder
     _loader_interval = 2
     config = {'influxdb': {'host': 'localhost',
                            'port': 8086,
                            'user': 'root',
                            'pass': 'root',
                            'db': self.db_name,
                            'series_loader_interval': _loader_interval,
                            'memcache': {'host': 'localhost',
                                         'ttl': 60,
                                         'max_value': 20,
                                         },
                            'log_level': 0,
                            'fill': 'previous',
                            },}
     # Clear any loader mutex left over from a previous run; the memcache
     # client module may not be importable in all environments.
     try:
         _memcache = memcache.Client([config['influxdb']['memcache']['host']])
         _memcache.delete(SERIES_LOADER_MUTEX_KEY)
     except NameError:
         pass
     finder = influxgraph.InfluxDBFinder(config)
     time.sleep(_loader_interval/2.0)
     self.assertTrue(finder.memcache)
     self.assertEqual(finder.memcache_ttl, 60,
                      msg="Configured TTL of %s sec, got %s sec TTL instead" % (
                          60, finder.memcache_ttl,))
     self.assertEqual(finder.memcache.server_max_value_length, 1024**2*20,
                      msg="Configured max value of %s MB, got %s instead" % (
                          1024**2*20, finder.memcache.server_max_value_length,))
     # Give series loader more than long enough to finish
     time.sleep(_loader_interval + 2)
     if finder.memcache:
         self.assertTrue(finder.memcache.get(loader_memcache_key))
     del finder
     config['influxdb']['loader_startup_block'] = False
     finder = influxgraph.InfluxDBFinder(config)
     # Loader lock should be acquirable; tolerate failure since the
     # background loader may legitimately be holding it.
     try:
         self.assertTrue(_SERIES_LOADER_LOCK.acquire(block=False))
     except Exception:
         pass
     else:
         _SERIES_LOADER_LOCK.release()
Example #27
0
 def test_templated_data_query(self):
     """Fetching data for a template-generated series should return its datapoints."""
     serie = self.graphite_series[0]
     nodes = list(self.finder.find_nodes(Query(serie)))
     time_info, data = self.finder.fetch_multi(nodes,
                                               int(self.start_time.strftime("%s")),
                                               int(self.end_time.strftime("%s")))
     # assertIn gives clearer failure output than assertTrue(x in y)
     self.assertIn(serie, data,
                   msg="Did not get data for requested series %s - got data for %s" % (
                       serie, data.keys(),))
     self.assertEqual(time_info,
                      (int(self.start_time.strftime("%s")),
                       int(self.end_time.strftime("%s")),
                      self.step),
                      msg="Time info and step do not match our requested values")
     # Drop empty/None fill values before counting real datapoints
     datapoints = [v for v in data[serie] if v]
     # assertEqual over assertTrue(len(..) == ..) for better diagnostics
     self.assertEqual(len(datapoints), self.num_datapoints,
                      msg="Expected %s datapoints for %s - got %s" % (
                          self.num_datapoints, serie, len(datapoints),))
Example #28
0
 def test_multi_fetch_data(self):
     """Test fetching data for a single series by name"""
     nodes = list(self.finder.find_nodes(Query(self.series1)))
     time_info, data = self.finder.fetch_multi(nodes,
                                               int(self.start_time.strftime("%s")),
                                               int(self.end_time.strftime("%s")))
     # assertIn gives clearer failure output than assertTrue(x in y)
     self.assertIn(self.series1, data,
                   msg="Did not get data for requested series %s - got data for %s" % (
                       self.series1, data.keys(),))
     self.assertEqual(time_info,
                      (int(self.start_time.strftime("%s")),
                       int(self.end_time.strftime("%s")),
                      self.step),
                      msg="Time info and step do not match our requested values")
     # Drop empty/None fill values before counting real datapoints
     datapoints = [v for v in data[self.series1] if v]
     # assertEqual over assertTrue(len(..) == ..) for better diagnostics
     self.assertEqual(len(datapoints), self.num_datapoints,
                      msg="Expected %s datapoints - got %s" % (
                          self.num_datapoints, len(datapoints),))
Example #29
0
 def test_template_multiple_tags(self):
     """A filtered template with several tags should expose fields at the leaf.

     The template's leading "*.disk." part is a series filter; the rest
     maps path components to tags, with fields as the final level.
     """
     self.client.drop_database(self.db_name)
     self.client.create_database(self.db_name)
     template = "*.disk. host.measurement.path.fstype.field*"
     measurement = 'disk'
     tags = {'host': 'my_host',
             'path': '/',
             'fstype': 'ext4',
             }
     fields = {'free': 1,
               'used': 1,
               }
     self.write_data([measurement], tags, fields)
     self.config['influxdb']['templates'] = [template]
     self.finder = influxgraph.InfluxDBFinder(self.config)
     # Query the full tag path with a wildcard leaf to enumerate fields
     pattern = '.'.join([tags['host'], measurement,
                         tags['path'], tags['fstype'], '*'])
     found = list(self.finder.find_nodes(Query(pattern)))
     leaf_names = [node.name for node in found]
     self.assertEqual(sorted(leaf_names), sorted(fields.keys()))
Example #30
0
 def test_non_greedy_field(self):
     """A non-greedy 'field' template part should match single path components only.

     Writes multiple measurements with several fields each at two
     timestamps, then checks that the host.measurement.field template
     produces exactly one leaf per (measurement, field) pair.
     """
     measurements = ['cpu-0', 'cpu-1', 'cpu-2', 'cpu-3']
     fields = {'load': 1, 'idle': 1,
               'usage': 1, 'user': 1,
     }
     tags = {'host': 'my_host',
             'env': 'my_env',
             }
     data = [{
         "measurement": measurement,
         "tags": tags,
         "time": _time,
         "fields": fields,
         }
         for measurement in measurements
         for _time in [
             (self.end_time - datetime.timedelta(minutes=30)).strftime("%Y-%m-%dT%H:%M:%SZ"),
             (self.end_time - datetime.timedelta(minutes=2)).strftime("%Y-%m-%dT%H:%M:%SZ"),
             ]]
     self.client.drop_database(self.db_name)
     self.client.create_database(self.db_name)
     self.assertTrue(self.client.write_points(data))
     template = "host.measurement.field"
     self.config['influxdb']['templates'] = [template]
     self.finder = influxgraph.InfluxDBFinder(self.config)
     query = Query('%s.*.*' % (tags['host'],))
     nodes = list(self.finder.find_nodes(query))
     node_paths = sorted([n.path for n in nodes])
     # Non-greedy field: only field names without a '.' may appear as leaves
     _metrics = ['.'.join([tags['host'], m, f])
                 for f in fields.keys() if '.' not in f
                 for m in measurements]
     expected = sorted(_metrics)
     self.assertEqual(node_paths, expected,
                      msg="Expected nodes %s from template with non-greedy field - got %s" % (
                          expected, node_paths))