def test_raw_reader(self):
     """Iterate the raw reader to exhaustion and spot-check the final row."""
     print("testing delete_caches, raw_reader")
     dl = DataLogger("testdata")
     dl.setup("mysql", "performance", "2018-04-01")
     for record in dl.raw_reader():
         pass
     # after the loop `record` holds the last row yielded by raw_reader()
     assert record['bytes_received'] == '272517939'
 def test__getitem__(self):
     """Exercise every key form DataLogger.__getitem__ supports."""
     print("testing __getitem__")
     dl = DataLogger("testdata")
     dl.setup("mysql", "performance", "2018-04-01")
     # plain "caches" key -> dict
     cache_info = dl["caches"]
     print(cache_info)
     assert isinstance(cache_info, dict)
     # "tsa" -> TimeseriesArray, and ("tsa", index) -> single Timeseries
     tsarray = dl["tsa"]
     print(tsarray, type(tsarray))
     self.assertIsInstance(tsarray, datalogger4.TimeseriesArray)
     series = dl["tsa", ("nagios.tilak.cc", )]
     print(series)
     self.assertIsInstance(series, datalogger4.Timeseries)
     assert tsarray[("nagios.tilak.cc", )] == series
     # "tsastats" -> TimeseriesArrayStats, and ("tsastats", index) -> TimeseriesStats
     stats_array = dl["tsastats"]
     print(stats_array)
     self.assertIsInstance(stats_array, datalogger4.TimeseriesArrayStats)
     series_stats = dl["tsastats", ("nagios.tilak.cc", )]
     print(series_stats)
     self.assertIsInstance(series_stats, datalogger4.TimeseriesStats)
     assert stats_array[("nagios.tilak.cc", )] == series_stats
     # "qa" -> QuantileArray, and ("qa", index) -> plain dict
     quant_array = dl["qa"]
     print(quant_array)
     self.assertIsInstance(quant_array, datalogger4.QuantileArray)
     quantile = dl["qa", ("nagios.tilak.cc", )]
     print(quantile)
     assert isinstance(quantile, dict)
     assert quant_array[("nagios.tilak.cc", )] == quantile
 def test_read_meta_new(self):
     """
     Verify that a new-style yaml meta file, when present, agrees with the
     attributes DataLogger derived for this table (interval, delimiter,
     index/value/ts columns, datatypes, blacklist, header order).
     """
     dl = DataLogger("testdata")
     dl.setup("mysql", "performance", "2018-04-01")
     metadir = os.path.join(dl.basedir, dl.project, "meta")
     metafile = os.path.join(metadir, "%s.yaml" % dl.tablename)
     if os.path.isfile(metafile):
         # BUG FIX: the message was passed as a second positional argument
         # to print(), which printed a tuple instead of interpolating.
         print("loading yaml style file %s" % metafile)
         with open(metafile, "rt") as infile:
             # safe_load: yaml.load() without an explicit Loader is
             # deprecated and unsafe; this file only contains plain data.
             meta = yaml.safe_load(infile)
         self.assertEqual(meta["interval"], dl.interval)
         self.assertEqual(meta["delimiter"], dl.delimiter)
         description = meta["description"]
         # columns declared as index keys
         index_keynames = tuple([
             key for key in description
             if description[key]["coltype"] == "index"
         ])
         print("index_keynames:", index_keynames)
         self.assertEqual(index_keynames, dl.index_keynames)
         # columns declared as values (order-independent comparison)
         value_keynames = tuple([
             key for key in description
             if description[key]["coltype"] == "value"
         ])
         print("value_keynames:", value_keynames)
         self.assertEqual(sorted(value_keynames), sorted(dl.value_keynames))
         # exactly one column must carry the timestamp
         ts_keyname = [
             key for key in description
             if description[key]["coltype"] == "ts"
         ][0]
         print("ts_keyname:", ts_keyname)
         self.assertEqual(ts_keyname, dl.ts_keyname)
         # datatype mapping only applies to value columns
         datatypes = dict([(key, description[key]["datatype"])
                           for key in description
                           if description[key]["coltype"] == "value"])
         print("datatypes:", datatypes)
         self.assertEqual(datatypes, dl.datatypes)
         blacklist = tuple([
             key for key in description
             if description[key]["coltype"] == "blacklist"
         ])
         print("blacklist:", blacklist)
         # BUG FIX: this line duplicated the datatypes assertion above;
         # the computed blacklist was never checked against dl.blacklist.
         self.assertEqual(sorted(blacklist), sorted(dl.blacklist))
         # headers are ordered by their declared column position
         headers_unsorted = [(key, description[key]["colpos"])
                             for key in description
                             if description[key]["colpos"] is not None]
         headers = tuple([
             item[0]
             for item in sorted(headers_unsorted, key=lambda item: item[1])
         ])
         print("headers:", headers)
         self.assertEqual(headers, dl.headers)
         # label texts/units exist for every column; only printed here
         label_texts = dict([(key, description[key]["label_text"])
                             for key in description])
         print("label:", label_texts)
         label_units = dict([(key, description[key]["label_unit"])
                             for key in description])
         print("label units:", label_units)
     else:
         print("new yaml config file %s not found" % metafile)
# Exemple #4  (snippet-collection artifact, commented out so the file parses)
# 0
 def test_get_scatterdata(self):
     """Group tsastats by hostname and render scatter data as JSON."""
     dl = DataLogger("testdata")
     dl.setup("sanportperf", "fcIfC3AccountingTable", "2018-04-01")
     stats = dl["tsastats"]
     grouped = Advanced.tsastats_group_by(
         stats, index_keynames=("hostname", ))
     scatter = Advanced.get_scatter_data(
         grouped, ("fcIfC3InOctets", "fcIfC3OutOctets"), "avg")
     print(json.dumps(scatter, indent=4))
 def test_load_caches(self):
     """Delete caches, then load the tsa so caches get (re)created."""
     print("testing delete_caches, get_caches")
     dl = DataLogger("testdata")
     dl.setup("mysql", "performance", "2018-04-01")
     dl.delete_caches()
     print(dl.get_caches())
     # loading the tsa repopulates the cache files as a side effect
     dl.load_tsa()
     print(dl.get_caches())
# Exemple #6  (snippet-collection artifact, commented out so the file parses)
# 0
def tsa_group_by(tsa, datestring, index_keynames, group_func, interval):
    """
    Group the given TimeseriesArray by a subset of its index keys,
    aggregating colliding rows with group_func. Timestamps are first
    snapped to a regular grid of `interval` seconds anchored at the
    start of `datestring`, so rows from different series line up.

    parameters:
    tsa <TimeseriesArray>
    datestring <str> datestring to use to aggregate data TODO: get this from tsa
    index_keynames <tuple> could also be empty, to aggregate everything
    group_func <func> like lambda a, b : (a + b) / 2 to get averages
    interval <int> interval in seconds the timeseries values should appear

    returns:
    <TimeseriesArray>
    """
    # target tsa with the reduced index
    grouped = TimeseriesArray(index_keynames=index_keynames,
                              value_keynames=tsa.value_keynames,
                              ts_key=tsa.ts_key,
                              datatypes=tsa.datatypes)
    first_ts, _ = DataLogger.get_ts_for_datestring(datestring)
    ts_field = tsa.ts_key
    for row in tsa.export():
        # snap the timestamp to the nearest interval boundary
        slot = round((row[ts_field] - first_ts) / interval)
        row[ts_field] = int(first_ts + slot * interval)
        grouped.add(row, group_func)
    return grouped
 def test_total_stats(self):
     """Rebuild caches from scratch and dump the total stats as JSON."""
     print("testing delete_caches, load_total_stats")
     dl = DataLogger("testdata")
     dl.setup("mysql", "performance", "2018-04-01")
     dl.delete_caches()
     print(json.dumps(dl.load_total_stats(), indent=4))
# Exemple #8  (snippet-collection artifact, commented out so the file parses)
# 0
 def test_tsastats_group_by(self):
     """Group tsastats by hostname, then collapse everything to one total."""
     dl = DataLogger("testdata")
     dl.setup("sanportperf", "fcIfC3AccountingTable", "2018-04-01")
     stats = dl["tsastats"]
     assert len(stats) == 712
     # group by hostname: 712 (host, port) series collapse to 24 hosts
     by_host = Advanced.tsastats_group_by(
         stats, index_keynames=("hostname", ))
     assert len(by_host) == 24
     self.assertIsInstance(by_host,
                           datalogger4.TimeseriesArrayStats)
     self.assertIsInstance(by_host[('fcb-sr3-4gb-32', )],
                           datalogger4.TimeseriesStats)
     print(by_host[('fcb-sr3-4gb-32', )])
     # empty index: everything collapses into a single __total__ entry
     totals = Advanced.tsastats_group_by(stats,
                                         index_keynames=())
     assert len(totals) == 1
     self.assertIsInstance(totals, datalogger4.TimeseriesArrayStats)
     self.assertIsInstance(totals[('__total__', )],
                           datalogger4.TimeseriesStats)
     print(totals[('__total__', )])
 def test_convert(self):
     """
     load old style data, and dump new yaml file
     """
     dl = DataLogger("testdata")
     dl.setup("sanportperf", "fcIfC3AccountingTable", "2018-04-01")
     metadir = os.path.join(dl.basedir, dl.project, "meta")
     metafile = os.path.join(metadir, "%s.yaml" % dl.tablename)
     meta = {
         "interval": dl.interval,
         "description": {},
         "delimiter": dl.delimiter,
     }
     description = meta["description"]
     for position, column in enumerate(dl.headers):
         # classify the column by which DataLogger attribute claims it
         if column in dl.value_keynames:
             kind = "value"
         elif column in dl.index_keynames:
             kind = "index"
         elif column == dl.ts_keyname:
             kind = "ts"
         elif column in dl.blacklist:
             kind = "blacklist"
         else:
             kind = "unknown"
         description[column] = {
             "colpos": position,
             "coltype": kind,
             # None for columns without a declared datatype
             "datatype": dl.datatypes.get(column),
             "label_text": "some text to show as label text",
             "label_unit": "something/s"
         }
     print(yaml.dump(meta))
     # only write the file once; never overwrite an existing meta file
     if not os.path.isfile(metafile):
         print("writing %s" % metafile)
         with open(metafile, "wt") as outfile:
             outfile.write(yaml.dump(meta))
# Exemple #10  (snippet-collection artifact, commented out so the file parses)
# 0
 def test_tsa_group_by(self):
     """Group the raw tsa by hostname and to a single overall series."""
     dl = DataLogger("testdata")
     dl.setup("sanportperf", "fcIfC3AccountingTable", "2018-04-01")
     tsa = dl["tsa"]
     assert len(tsa) == 712
     print(tsa[('fcb-sr3-4gb-32', 'port-channel 2')])
     # pairwise mean as the aggregation function
     pairwise_avg = lambda a, b: (a + b) / 2
     by_host = Advanced.tsa_group_by(tsa,
                                     dl.datestring,
                                     index_keynames=("hostname", ),
                                     group_func=pairwise_avg,
                                     interval=dl.interval)
     assert len(by_host) == 24
     print(by_host[('fcb-sr3-4gb-32', )])
     # empty index tuple aggregates every series into one
     total = Advanced.tsa_group_by(tsa,
                                   dl.datestring,
                                   index_keynames=(),
                                   group_func=pairwise_avg,
                                   interval=dl.interval)
     assert len(total) == 1
     print(total[()])
 def test_add_table(self):
     """Create a table from a config dict, verify it, then delete it."""
     print("testing add_table, delete_table")
     # column descriptions: one timestamp, one index, one value column
     columns = {
         "ts_col": {
             "colpos": 0,
             "coltype": "ts",
             "datatype": None,
             "label_text": "unixtimestamp",
             "label_unit": "s"
         },
         "index_col": {
             "colpos": 1,
             "coltype": "index",
             "datatype": None,
             "label_text": "some index",
             "label_unit": "index text"
         },
         "value_col": {
             "colpos": 2,
             "coltype": "value",
             "datatype": "asis",
             "label_text": "some value",
             "label_unit": "some unit/s"
         }
     }
     config = {
         "delimiter": "\t",
         "description": columns,
         "index_keynames": ["index_col"],
         "interval": 300
     }
     dl = DataLogger("testdata")
     dl.add_table("testproject", "testtable", config)
     assert "testproject" in dl.get_projects()
     assert "testtable" in dl.get_tablenames("testproject")
     dl.delete_table("testproject", "testtable")
     assert "testtable" not in dl.get_tablenames("testproject")
 def test_load_tsastats(self):
     """Load tsastats from scratch for two different projects."""
     print("testing delete_caches, load_tsastats")
     dl = DataLogger("testdata")
     dl.setup("sanportperf", "fcIfC3AccountingTable", "2018-04-01")
     dl.delete_caches()
     dl.load_tsastats()
     # repeat for a second project/table combination
     dl = DataLogger("testdata")
     dl.setup("mysql", "performance", "2018-04-01")
     dl.delete_caches()
     dl.load_tsastats()
 def test__init__(self):
     """
     Exercise constructor / setup error paths: a nonexisting basedir,
     an unknown project and an unknown tablename should each raise
     AttributeError; the final block uses valid arguments.
     """
     try:
         DataLogger("/nonexisting")
     except AttributeError as exc:
         print("Expected Exception: %s" % exc)
     try:
         dl = DataLogger("testdata")
         dl.setup("unknownproject", self.tablename, "2018-04-01")
     except AttributeError as exc:
         print("Expected Exception: %s" % exc)
     try:
         # BUG FIX: the fresh DataLogger was not assigned, so setup()
         # was called on the stale instance from the previous block.
         dl = DataLogger("testdata")
         dl.setup("sanportperf", "unknowntablename", "2018-04-01")
     except AttributeError as exc:
         print("Expected Exception: %s" % exc)
     try:
         # BUG FIX: same missing assignment as above.
         dl = DataLogger("testdata")
         dl.setup("sanportperf", "fcIfC3AccountingTable", "2018-04-01")
     except AttributeError as exc:
         print("Expected Exception: %s" % exc)
 def setUp(self):
     """Prepare the fixture attributes shared by the test methods."""
     # default location of the test fixture data
     self.basedir = "testdata"
     # project / table / date combination used by most tests
     self.project = "mysql"
     self.tablename = "performance"
     self.datestring = "2018-04-01"
     self.datalogger = DataLogger(self.basedir)
 def test_generate_caches(self):
     """Caches should be empty after delete and populated after generate."""
     print("testing generate_caches")
     dl = DataLogger("testdata")
     dl.setup("mysql", "performance", "2018-04-01")
     dl.delete_caches()
     # immediately after deletion the ts cache has no keys
     assert not dl["caches"]["ts"]["keys"]
     dl.generate_caches()
     # re-setup with a fresh instance to re-read cache state from disk
     dl = DataLogger("testdata")
     dl.setup("mysql", "performance", "2018-04-01")
     assert dl["caches"]["ts"]["keys"]
 def test_read_meta_old(self):
     """
     Check the old-style meta (dl.meta, key "descriptions") against the
     DataLogger attributes, then write a new-style yaml meta file derived
     from those attributes if none exists yet.
     """
     # BUG FIX: message said "__getitem__" (copy/paste from another test)
     print("testing read_meta_old")
     dl = DataLogger("testdata")
     dl.setup("mysql", "performance", "2018-04-01")
     print(json.dumps(dl.meta, indent=4))
     meta = dl.meta
     if "descriptions" in meta:
         description = meta["descriptions"]
         # columns declared as index keys
         index_keynames = tuple([
             key for key in description
             if description[key]["coltype"] == "index"
         ])
         print("index_keynames:", index_keynames)
         self.assertEqual(index_keynames, dl.index_keynames)
         # columns declared as values (order-independent comparison)
         value_keynames = tuple([
             key for key in description
             if description[key]["coltype"] == "value"
         ])
         print("value_keynames:", value_keynames)
         self.assertEqual(sorted(value_keynames), sorted(dl.value_keynames))
         # exactly one column must carry the timestamp
         ts_keyname = [
             key for key in description
             if description[key]["coltype"] == "ts"
         ][0]
         print("ts_keyname:", ts_keyname)
         self.assertEqual(ts_keyname, dl.ts_keyname)
         # datatype mapping only applies to value columns
         datatypes = dict([(key, description[key]["datatype"])
                           for key in description
                           if description[key]["coltype"] == "value"])
         print("datatypes:", datatypes)
         self.assertEqual(datatypes, dl.datatypes)
         blacklist = tuple([
             key for key in description
             if description[key]["coltype"] == "blacklist"
         ])
         print("blacklist:", blacklist)
         # BUG FIX: this line duplicated the datatypes assertion above;
         # the computed blacklist was never checked against dl.blacklist.
         self.assertEqual(sorted(blacklist), sorted(dl.blacklist))
         # headers are ordered by their declared column position
         headers_unsorted = [(key, description[key]["colpos"])
                             for key in description
                             if description[key]["colpos"] is not None]
         headers = tuple([
             item[0]
             for item in sorted(headers_unsorted, key=lambda item: item[1])
         ])
         print("headers:", headers)
         self.assertEqual(headers, dl.headers)
         # label texts/units exist for every column; only printed here
         label_texts = dict([(key, description[key]["label_text"])
                             for key in description])
         print("label:", label_texts)
         label_units = dict([(key, description[key]["label_unit"])
                             for key in description])
         print("label units:", label_units)
     # dump yaml file"
     metadir = os.path.join(dl.basedir, dl.project, "meta")
     metafile = os.path.join(metadir, "%s.yaml" % dl.tablename)
     if not os.path.isfile(metafile):
         meta = {
             "interval": dl.interval,
             "description": {},
             "delimiter": dl.delimiter,
         }
         description = meta["description"]
         for colpos, header in enumerate(dl.headers):
             # classify each header by which DataLogger attribute claims it
             if header in dl.value_keynames:
                 coltype = "value"
             elif header in dl.index_keynames:
                 coltype = "index"
             elif header == dl.ts_keyname:
                 coltype = "ts"
             elif header in dl.blacklist:
                 coltype = "blacklist"
             else:
                 coltype = "unknown"
             if header in dl.datatypes:
                 datatype = dl.datatypes[header]
             else:
                 datatype = None
             description[header] = {
                 "colpos": colpos,
                 "coltype": coltype,
                 "datatype": datatype,
                 "label_text": "some text to show as label text",
                 "label_unit": "something/s"
             }
         print(yaml.dump(meta))
         print("writing %s" % metafile)
         with open(metafile, "wt") as outfile:
             outfile.write(yaml.dump(meta))