Example #1
def main():
    project = "vicenter"
    tablename = "virtualMachineMemoryStats"
    dataloggerweb = DataLoggerWeb(DATALOGGER_URL)
    # last business day as ISO "YYYY-MM-DD" datestring
    datestring = dataloggerweb.get_last_business_day_datestring()
    year, month, day = datestring.split("-")
    date1 = datetime.date(int(year), int(month), int(day))
    print date1
    # reference date one week earlier for the week-over-week comparison
    date2 = date1 - datetime.timedelta(days=7)
    print date2.isoformat()
    # report_group("vicenter", "virtualMachineCpuStats", datestring, date2.isoformat(), "cpu.used.summation")
    # report(project, tablename, datestring, date2.isoformat(), "mem.active.average")
    report(
        "vicenter", "virtualMachineDatastoreStats", datestring, date2.isoformat(), "datastore.totalReadLatency.average"
    )
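
The date handling above uses only the standard library plus the datestring returned by get_last_business_day_datestring(); a minimal, self-contained sketch of the same one-week-back arithmetic, assuming the ISO "YYYY-MM-DD" format that the split("-") implies (the sample date is only illustrative):

import datetime

def week_before(datestring):
    # parse an ISO "YYYY-MM-DD" datestring and return the date seven days earlier
    year, month, day = (int(part) for part in datestring.split("-"))
    return datetime.date(year, month, day) - datetime.timedelta(days=7)

print(week_before("2015-11-02").isoformat())  # -> 2015-10-26
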
Example #2
def main():
    #project = "vicenter"
    #tablename = "virtualMachineMemoryStats"
    dataloggerweb = DataLoggerWeb(DATALOGGER_URL)
    datestring = dataloggerweb.get_last_business_day_datestring()
    year, month, day = datestring.split("-")
    date1 = datetime.date(int(year), int(month), int(day))
    date2 = date1 - datetime.timedelta(days=7)
    print "Comparing %s with %s" % (date1, date2.isoformat())
    #report_group("vicenter", "virtualMachineCpuStats", datestring, date2.isoformat(), "cpu.used.summation")
    report("vicenter", "virtualMachineCpuStats", datestring, date2.isoformat(), "cpu.used.summation")
    report("vicenter", "virtualMachineMemoryStats", datestring, date2.isoformat(), "mem.active.average")
    report("vicenter", "virtualMachineDatastoreStats", datestring, date2.isoformat(), "datastore.totalReadLatency.average")
    report("vicenter", "virtualMachineDatastoreStats", datestring, date2.isoformat(), "datastore.write.average")
    report("vicenter", "virtualMachineNetworkStats", datestring, date2.isoformat(), "net.usage.average")
    report("sanportperf", "fcIfC3AccountingTable", datestring, date2.isoformat(), "fcIfC3InOctets")
Example #3
            # datalogger.get_tsa(project, tablename, datestring)
            datalogger.get_tsastats(project, tablename, datestring)
        else:
            # print("TSA filename : %s" % caches["tsa"]["keys"])
            if len(caches["tsastat"]["keys"]) == 0:
                print(suffix, "TSASTAT Archive missing, calling get_tsastats")
                datalogger.get_tsastats(project, tablename, datestring)
            else:
                # print("TSASTAT filename : %s" % caches["tsastat"]["keys"])
                if len(caches["ts"]["keys"]) == 0:
                    print(
                        suffix,
                        "there are no ts archives, something went wrong, or tsa is completely empty, calling get_tsastats",
                    )
                    datalogger.get_tsastats(project, tablename, datestring)
                else:
                    # print("TS filename : %s" % len(caches["ts"]["keys"]))
                    # print("TSSTAT filename : %s" % len(caches["tsstat"]["keys"]))
                    print(suffix, "All fine")


if __name__ == "__main__":
    datalogger = DataLoggerWeb()
    # for datestring in DataLogger.datewalker("2015-09-01", datalogger.get_last_business_day_datestring()):
    for datestring in datalogger.get_datewalk("2015-11-01", datalogger.get_last_business_day_datestring()):
        for project in datalogger.get_projects():
            for tablename in datalogger.get_tablenames(project):
                # datalogger = DataLogger(BASEDIR, project, tablename)
                main(project, tablename, datestring, datalogger)
    # cProfile.run("main()")
Example #4
                    # group values by function
                    grouped_value = group_funcs[stat_func](value, data[group_key][value_key][stat_func])
                    # store
                    data[group_key][value_key][stat_func] = grouped_value
    # get to same format as TimeseriesArrayStats.to_json returns
    outdata = [tsastat.index_keys, tsastat.value_keys, ]
    outdata.append([(key, json.dumps(value)) for key, value in data.items()])
    # use TimeseriesArrayStats.from_json to get to TimeseriesArrayStats
    # object
    new_tsastat = TimeseriesArrayStats.from_json(json.dumps(outdata))
    return new_tsastat

if __name__ == "__main__":
    datalogger = DataLoggerWeb(DATALOGGER_URL)
    #caches = datalogger.get_caches("sanportperf", "fcIfC3AccountingTable", datalogger.get_last_business_day_datestring())
    tsastats = datalogger.get_tsastats("sanportperf", "fcIfC3AccountingTable", datalogger.get_last_business_day_datestring())
    g_tsastat1 = groupby(tsastats, (u'hostname',))
    tsastats = datalogger.get_tsastats("sanportperf", "fcIfC3AccountingTable", datalogger.get_last_business_day_datestring())
    g_tsastat2 = tsastats.group_by_index_keys((u'hostname',))
    print(g_tsastat1.keys())
    print(g_tsastat2.keys())
    assert g_tsastat1 == g_tsastat2
    g_tsastat = groupby(tsastats, (u'ifDescr',))
    print(g_tsastat.keys())
    g_tsastat = groupby(tsastats, (u'hostname', u'ifDescr',))
    assert g_tsastat == tsastats
    print(g_tsastat.keys())
    #tsastats.remove_by_value(u'fcIfC3InOctets', "sum", 0.0)
    #csvdata = tsastats.to_csv("sum", u'fcIfC3OutOctets', reverse=True)
    #print("\n".join(csv_to_table(csvdata[:20])))
    #print("\n".join(csv_to_wiki(csvdata[:20])))