# Example no. 1
def report(datalogger, datestring):
    # get data, from datalogger, or dataloggerhelper
    print "Loading data"
    tsa = datalogger.load_tsa(datestring)
    print "calculating quantilles"
    # tsa_test = tsa.slice(("cpu.used.summation", ))
    starttime = time.time()
    qa = QuantillesArray(tsa)
    print "Duration Quantilles: %f" % (time.time()-starttime)
    starttime = time.time()
    qa.dump(open("/tmp/test_quantilles.json", "wb"))
    print "Duration dump: %f" % (time.time()-starttime)
    starttime = time.time()
    qa2 = QuantillesArray.load(open("/tmp/test_quantilles.json", "rb"))
    print "Duration load: %f" % (time.time()-starttime)
    assert qa == qa2
    qa3 = datalogger.load_quantilles(datestring)
    assert qa3 == qa
    print "Output"
    print qa2[("srvarthur1.tilak.cc","0")]
    #quantilles = Quantilles(tsa, "cpu.used.summation", maxx=None)
    quantilles = qa2["cpu.used.summation"]
    #quantilles = Quantilles(tsa, "datastore.read.average", maxx=None)
    quantilles.sort(2)
    print "most demanding CPU Cores"
    print quantilles.head(20)
    print "least demanding CPU Cores"
    print quantilles.tail(20)
# Example no. 2
def main():
    project = "nagios"
    for tablename in DataLogger.get_tablenames(BASEDIR, project):
        datalogger = DataLogger(BASEDIR, project, tablename)
        for datestring in datewalker("2015-04-01", "2015-09-23"):
            print datestring, tablename
            try:
                caches = datalogger.get_caches(datestring)
                #for cachetype, cachedata in caches.items():
                #    print "Caches for %s" % cachetype
                #    for key, filename in cachedata["keys"].items():
                #        print "\tfound %s in\n\t\t%s" % (key, filename)
                # there should be only one tsa file
                #print "Number of caches TimeseriesArray objects:", len(caches["tsa"]["keys"])
                #print "Number of caches TimeseriesArrayStats objects:", len(caches["tsastat"]["keys"])
                #print "Number of caches Timeseries objects:", len(caches["ts"]["keys"])
                #print "Number of caches TimeseriesStats objects:", len(caches["tsstat"]["keys"])
                if len(caches["tsa"]["keys"]) == 0:
                    print datestring, "TimeseriesArray cache missing"
                    datalogger.load_tsa(datestring)
                else:
                    #datalogger[datestring] # read from raw, and store tsa and ts caches
                    if len(caches["tsa"]["keys"]) != len(caches["tsastat"]["keys"]):
                        print datestring, "TimeseriesArrayStats caches missing"
                        datalogger.load_tsastats(datestring)
                    else:
                        if len(caches["ts"]["keys"]) != len(caches["tsstat"]["keys"]):
                            print datestring, "Number ob Timeseries and TimeseriesStats should be the same"
                        if len(caches["ts"]["keys"]) > len(caches["tsstat"]["keys"]):
                            print datestring, "some missing TimeseriesStats"
            except tilak_datalogger.DataLoggerRawFileMissing as exc:
                #logging.exception(exc)
                logging.info("%s no RAW Data available", datestring)
                pass
            except StandardError as exc:
                logging.exception(exc)
                pass