Example #1
def gen_caches(project, tablename, datestring):
    datalogger = DataLogger(basedir, project, tablename)
    caches = datalogger.get_caches(datestring)
    suffix = "%s/%s/%s\t" % (datestring, project, tablename)
    data = None
    if caches["tsa"]["raw"] is None:
        if len(caches["tsa"]["keys"]) == 0:
            logging.info("%s RAW Data not availabale maybe archived, tsa exists already", suffix)
        else:
            logging.debug("%s RAW Data is missing, no tsa archive exists", suffix)
    else:
        if len(caches["tsa"]["keys"]) == 0:
            logging.info("%s TSA Archive missing, calling get_tsa and load_tsastats", suffix)
            data = datalogger.load_tsa(datestring)
        else:
            if len(caches["tsastat"]["keys"]) == 0:
                logging.info("%s TSASTAT Archive missing, calling load_tsastats", suffix)
                data = datalogger.load_tsastats(datestring)
            else:
                if len(caches["ts"]["keys"]) == 0:
                    logging.info("%s there are no ts archives, something went wrong, or tsa is completely empty, calling load_tsastats", suffix)
                    data = datalogger.load_tsastats(datestring)
                else:
                    logging.debug("%s All fine", suffix)
            if caches["quantile"]["exists"] is not True:
                logging.info("%s Quantile archive is missing, calling load_quantile", suffix)
                data = datalogger.load_quantile(datestring)
    # drop references explicitly so the potentially large structures
    # can be freed as early as possible
    del data
    del caches
    del datalogger
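
A minimal driver sketch for gen_caches, assuming the static DataLogger helpers used in the unit-test example below (get_projects, get_tablenames, get_last_business_day_datestring); the basedir value is an assumption taken from those tests:

basedir = "/var/rrd"  # assumption: same basedir as in the unit tests below
datestring = DataLogger.get_last_business_day_datestring()
for project in DataLogger.get_projects(basedir):
    for tablename in DataLogger.get_tablenames(basedir, project):
        try:
            gen_caches(project, tablename, datestring)
        except Exception:
            # keep walking the remaining tables even if one fails
            logging.exception("cache generation failed for %s/%s", project, tablename)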
Example #2
    def get_scatter_data(args):
        """
        gets scatter plot data of two value_keys of the same tablename

        ex: Datalogger/{projectname}/{tablename}/{datestring}/{value_keyname1}/{value_keyname2}/{stat function name}

        value_keyname{1/2} has to be one of get_value_keynames
        stat function name has to be one of get_stat_func_names

        returns:
        json(highcharts data)
        """
        assert len(args) == 6
        project, tablename, datestring, value_key1, value_key2, stat_func_name = args
        logging.info("project : %s", project)
        logging.info("tablename : %s", tablename)
        logging.info("datestring : %s", datestring)
        logging.info("value_key1 : %s", value_key1)
        logging.info("value_key2 : %s", value_key2)
        datalogger = DataLogger(basedir, project, tablename)
        tsastats = datalogger.load_tsastats(datestring)
        hc_scatter_data = []
        for key, tsstat in tsastats.items():
            hc_scatter_data.append({
                "name" : str(key),
                # use the requested statistical function instead of hardcoding "avg"
                "data" : ((tsstat[value_key1][stat_func_name], tsstat[value_key2][stat_func_name]), )
            })
        return json.dumps(hc_scatter_data)
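
A hypothetical invocation sketch; the project, table, and value-key names below are illustrative, not part of the API:

# illustrative arguments: two value keys of the same table, compared by "avg"
args = ("haproxy", "http_host", "2015-11-30", "bytes_read", "bytes_written", "avg")
print(get_scatter_data(args))
# yields a JSON list of {"name": <index key>, "data": [[x, y]]} scatter points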
Example #3
 def sr_hrstorage_unused(args):
     """
     special report to get a report of unused SNMP Host Storage
     works only with snmp/hrStorageTable
     """
     datestring, storage_type = args[:2]
     datalogger = DataLogger(basedir, "snmp", "hrStorageTable")
     tsastat = datalogger.load_tsastats(datestring)
     data = []
     data.append(("hostname", "hrStorageDescr", "hrStorageSizeKb", "hrStorageUsedKb", "hrStorageNotUsedKbMin", "hrStorageNotUsedPct"))
     # iterate over a copy of the keys so entries can be deleted safely;
     # elif avoids a KeyError when both filters match the same key
     for index_key in list(tsastat.keys()):
         # example index_key: (u'srvcacdbp1.tilak.cc', u'Physical Memory',
         # u'HOST-RESOURCES-TYPES::hrStorageRam')
         if (u"HOST-RESOURCES-TYPES::%s" % storage_type) not in index_key:
             del tsastat[index_key]
         elif index_key[1][:4] in (u"/run", u"/dev", u"/sys"):
             del tsastat[index_key]
     for key, tsstat in tsastat.items():
         sizekb = tsstat["hrStorageSize"]["min"] * tsstat["hrStorageAllocationUnits"]["max"] / 1024
         usedkb = tsstat["hrStorageUsed"]["max"] * tsstat["hrStorageAllocationUnits"]["max"] / 1024
         notused = sizekb - usedkb
         notused_pct = 0.0
         try:
             notused_pct = 100.0 * notused / sizekb
         except ZeroDivisionError:
             pass
         data.append((key[0], key[1], "%0.2f" % sizekb, "%0.2f" % usedkb, "%0.2f" % notused, "%0.2f" % notused_pct))
     return json.dumps(data)
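
A hypothetical call; "hrStorageFixedDisk" is one plausible SNMP storage type (the function prepends the HOST-RESOURCES-TYPES:: prefix itself):

# illustrative: report fixed-disk storages and how much of each is unused
print(sr_hrstorage_unused(("2015-11-30", "hrStorageFixedDisk")))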
Example #4
    def get_monthly_ts(self, project, tablename, monthstring, args):
        """
        get monthly statistical values

        TODO: should be combined with get_lt_ts
        """
        index_key_enc = None
        value_keyname = None
        stat_func_name = "avg"
        if len(args) == 2:
            index_key_enc, value_keyname = args
        else:
            index_key_enc, value_keyname, stat_func_name = args
        if len(monthstring) != 7:
            web.internalerror()
            return "monthstring, has to be in YYYY-MM format"
        # index_key_enc is a base64-encoded tuple literal; parse it with
        # ast.literal_eval (needs `import ast`) rather than eval to avoid
        # executing arbitrary input, then convert to a unicode tuple
        index_key = tuple([unicode(key_value) for key_value in ast.literal_eval(base64.b64decode(index_key_enc))])
        logging.info("index_key : %s", index_key)
        logging.info("value_keyname : %s", value_keyname)
        logging.info("stat_func_name: %s", stat_func_name)
        datalogger = DataLogger(basedir, project, tablename)
        filterkeys = dict(zip(datalogger.index_keynames, index_key))
        ret_data = []
        for datestring in datalogger.monthwalker(monthstring):
            logging.debug("getting tsatstats for %s", monthstring)
            try:
                tsastats = datalogger.load_tsastats(datestring, filterkeys=filterkeys)
                ret_data.append([datestring, tsastats[index_key][value_keyname][stat_func_name]])
            except DataLoggerRawFileMissing as exc:
                logging.error("No Input File for datestring %s found, skipping this date", datestring)
            except DataLoggerLiveDataError as exc:
                logging.error("Reading from live data is not allowed, skipping this data, and ending loop")
                break
        return json.dumps(ret_data)
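
The index_key_enc argument used here (and in get_lt_ts and get_tsstat below) is the base64-encoded text representation of an index-key tuple, which the server parses back with ast.literal_eval. A minimal client-side encoding sketch; the key and value names are illustrative:

import base64

# illustrative index key; real keys come from the table's index_keynames
index_key = (u"srvwebprd1.example.com",)
index_key_enc = base64.b64encode(repr(index_key))  # Python 2: repr() yields a str
args = (index_key_enc, "hrStorageUsed", "avg")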
Example #5
    def get_tsastats_func(self, args):
        """
        return json data to render html table from it

        parameters:
        <b>project</b> project string
        <b>tablename</b> tablename string
        <b>datestring</b> datestring in YYYY-MM-DD form
        <b>stat_func_name</b> statistical function
        """
        project, tablename, datestring, stat_func_name = args
        datalogger = DataLogger(basedir, project, tablename)
        tsastats = datalogger.load_tsastats(datestring)
        return json.dumps(tsastats.to_csv(stat_func_name))
Example #6
    def sr_vicenter_unused_mem(args):
        """
        special report to find virtual machines which do not use their RAM entirely
        on these machines there is a possibility to save some virtual memory

        works only for VMware machines, specifically virtualMachineMemoryStats
        """
        datestring = args[0]
        datalogger = DataLogger(basedir, "vicenter", "virtualMachineMemoryStats")
        tsastat = datalogger.load_tsastats(datestring)
        tsastat_g = datalogger.tsastat_group_by(tsastat, ("hostname", ))
        data = []
        data.append(("hostname", "avg_active_max", "avg_granted_min", "avg_notused_min"))
        for key in tsastat_g.keys():
            not_used = tsastat_g[key]["mem.granted.average"]["min"] - tsastat_g[key]["mem.active.average"]["max"]
            data.append((key[0], "%0.2f" % tsastat_g[key]["mem.active.average"]["max"], "%0.3f" % tsastat_g[key]["mem.granted.average"]["min"], "%0.2f" % not_used))
        return json.dumps(data)
Example #7
    def sr_vicenter_unused_cpu_cores(args):
        """
        special report to find virtual machines which do not use their virtual cores entirely
        on these machines there is a possibility to save some virtual cores

        works only for VMware machines, specifically virtualMachineCpuStats
        """
        datestring = args[0]
        datalogger = DataLogger(basedir, "vicenter", "virtualMachineCpuStats")
        tsastat = datalogger.load_tsastats(datestring)
        tsastat_g = datalogger.tsastat_group_by(tsastat, ("hostname", ))
        data = []
        data.append(("hostname", "avg_idle_min", "avg_used_avg", "avg_used_max"))
        for key in tsastat_g.keys():
            num_cpu = sum([key[0] in index_key for index_key in tsastat.keys()])
            if num_cpu < 3:
                continue
            data.append((key[0], "%0.2f" % tsastat_g[key]["cpu.idle.summation"]["min"], "%0.2f" % tsastat_g[key]["cpu.used.summation"]["avg"], "%0.2f" % tsastat_g[key]["cpu.used.summation"]["max"]))
        return json.dumps(data)
Example #8
    def get_tsastats(self, args):
        """
        return exported TimeseriesArrayStats json formatted

        [
            list of index_keys,
            list of value_keys,
            list of [
                index_key : tsstat_dictionary
                ]
        ]

        returns:
        json(tsastats_dict)
        """
        project, tablename, datestring = args[:3]
        datalogger = DataLogger(basedir, project, tablename)
        tsastats = datalogger.load_tsastats(datestring)
        return tsastats.to_json()
Example #9
 def get_tsastats_table(self, args):
     """
     return html rendered table from tsastats data
     """
     def csv_to_table(csvdata, keys):
         outbuffer = []
         outbuffer.append("<thead><tr>")
         outbuffer.extend("<th>%s</th>" % header for header in csvdata[0])
         outbuffer.append("</tr></thead><tbody>")
         for values in csvdata[1:]:
             outbuffer.append("<tr>")
             outbuffer.extend("<td>%s</td>" % value for value in values[0:keys])
             outbuffer.extend("<td type=numeric>%0.2f</td>" % value for value in values[keys:])
             outbuffer.append("</tr>")
         outbuffer.append("</tbody>")
         return outbuffer
     project, tablename, datestring, stat_func_name = args
     datalogger = DataLogger(basedir, project, tablename)
     tsastats = datalogger.load_tsastats(datestring)
     return json.dumps("\n".join(csv_to_table(tsastats.to_csv(stat_func_name), len(tsastats.index_keys))))
Example #10
 def get_lt_ts(self, project, tablename, args):
     """
     get longtime statistical values
     """
     # datestringStart + "/" + datestringStop + "/" + Base64.encode(indexKey) + "/" + valueKeyname + "/" + statFuncName
     start, stop, index_key_enc, value_keyname, stat_func_name = args
     # parse the base64-encoded tuple literal with ast.literal_eval
     # (needs `import ast`) rather than eval, then convert to a unicode tuple
     index_key = tuple([unicode(key_value) for key_value in ast.literal_eval(base64.b64decode(index_key_enc))])
     datalogger = DataLogger(basedir, project, tablename)
     filterkeys = dict(zip(datalogger.index_keynames, index_key))
     ret_data = []
     for datestring in datalogger.datewalker(start, stop):
         try:
             tsastats = datalogger.load_tsastats(datestring, filterkeys=filterkeys)
             ret_data.append([datestring, tsastats[index_key][value_keyname][stat_func_name]])
         except DataLoggerRawFileMissing as exc:
             logging.error("No Input File for datestring %s found, skipping this date", datestring)
         except DataLoggerLiveDataError as exc:
             logging.error("Reading from live data is not allowed, skipping this data, and ending loop")
             break
     return json.dumps(ret_data)
Example #11
    def get_tsastats(self, project, tablename, datestring, args):
        """
        return exported TimeseriesArrayStats json formatted

        [
            list of index_keys,
            list of value_keys,
            list of [
                index_key : tsstat_dictionary
                ]
        ]

        if optional args is given, only one specific statistical function is returned

        returns:
        json(tsastats_dict)
        """
        logging.info("optional arguments received: %s", args)
        datalogger = DataLogger(basedir, project, tablename)
        tsastats = datalogger.load_tsastats(datestring)
        if len(args) > 0:
            return json.dumps(tsastats.to_csv(args[0]))
        return tsastats.to_json()
Example #12
    def sr_hrstorageram_unused(args):
        """
        special report to find servers which do not use their RAM entirely
        especially on virtual machines there is a huge saving potential

        works only for snmp data, especially hrStorageTable
        """
        datestring = args[0]
        datalogger = DataLogger(basedir, "snmp", "hrStorageTable")
        tsastat = datalogger.load_tsastats(datestring)
        data = []
        data.append(("hostname", "hrStorageSizeKb", "hrStorageUsedKb", "hrStorageNotUsedKbMin", "hrStorageNotUsedPct"))
        # iterate over a copy of the keys so entries can be deleted safely
        for index_key in list(tsastat.keys()):
            # (u'srvcacdbp1.tilak.cc', u'Physical Memory',
            # u'HOST-RESOURCES-TYPES::hrStorageRam')
            if u'HOST-RESOURCES-TYPES::hrStorageRam' not in index_key:
                del tsastat[index_key]
        for key, tsstat in datalogger.tsastat_group_by(tsastat, ("hostname", )).items():
            sizekb = tsstat["hrStorageSize"]["min"] * tsstat["hrStorageAllocationUnits"]["max"] / 1024
            usedkb = tsstat["hrStorageUsed"]["max"] * tsstat["hrStorageAllocationUnits"]["max"] / 1024
            notused = sizekb - usedkb
            # guard against empty storage entries, as in sr_hrstorage_unused
            notused_pct = 0.0
            try:
                notused_pct = 100.0 * notused / sizekb
            except ZeroDivisionError:
                pass
            data.append((key[0], "%0.2f" % sizekb, "%0.2f" % usedkb, "%0.2f" % notused, "%0.2f" % notused_pct))
        return json.dumps(data)
Example #13
    def get_tsstat(self, project, tablename, datestring, args):
        """
        return exported TimeseriesStats data

        [
            list of index_keys,
            list of value_keys,
            list of [
                index_key : tsstat_dictionary
                ]
        ]

        if optional args is given, only one specific statistical function is returned

        returns:
        json(tsastats_dict)
        """
        logging.info("optional arguments received: %s", args)
        if len(args) > 0:
            key_str = args[0]
            # parse the base64-encoded tuple literal with ast.literal_eval
            # (needs `import ast`) rather than eval, then convert to a unicode tuple
            key = tuple([unicode(key_value) for key_value in ast.literal_eval(base64.b64decode(key_str))])
            datalogger = DataLogger(basedir, project, tablename)
            tsastats = datalogger.load_tsastats(datestring)
            return json.dumps(tsastats[key].stats)
Example #14
class Test(unittest.TestCase):


    def setUp(self):
        self.basedir = "/var/rrd"
        self.datestring = "2015-11-30"
        self.project = DataLogger.get_projects(self.basedir)[0]
        self.tablename = DataLogger.get_tablenames(self.basedir, self.project)[0]
        self.datalogger = DataLogger(self.basedir, self.project, self.tablename)

    def test_simple(self):
        self.assertTrue(self.datalogger.project == self.project)
        self.assertTrue(self.datalogger.tablename == self.tablename)
        self.assertTrue(isinstance(self.datalogger.delimiter, basestring))
        self.assertTrue(isinstance(self.datalogger.ts_keyname, basestring))
        self.assertTrue(isinstance(self.datalogger.headers, tuple))
        self.assertTrue(isinstance(self.datalogger.value_keynames, tuple))
        self.assertTrue(all((keyname in self.datalogger.headers for keyname in self.datalogger.value_keynames)))
        self.assertTrue(isinstance(self.datalogger.index_keynames, tuple))
        self.assertTrue(all((keyname in self.datalogger.headers for keyname in self.datalogger.index_keynames)))
        self.assertTrue(isinstance(self.datalogger.blacklist, tuple))
        self.assertTrue(all((keyname in self.datalogger.headers for keyname in self.datalogger.blacklist)))
        self.assertTrue(isinstance(self.datalogger.raw_basedir, basestring))
        self.assertTrue(os.path.exists(self.datalogger.raw_basedir))
        self.assertTrue(os.path.isdir(self.datalogger.raw_basedir))
        self.assertTrue(isinstance(self.datalogger.global_cachedir, basestring))
        self.assertTrue(os.path.exists(self.datalogger.global_cachedir))
        self.assertTrue(os.path.isdir(self.datalogger.global_cachedir))
        # meta is something like this
        # {u'ts_keyname': u'ts',
        # 'stat_func_names': [u'count', ... ],
        # u'interval': 300,
        # u'blacklist': [],
        # u'headers': [u'ts', u'http_host', ... ],
        # u'delimiter': u'\t',
        # u'value_keynames': {
        #   u'actconn': u'asis',
        #   u'hits': u'asis',
        #   ...
        #   },
        # u'index_keynames': [u'http_host']}
        self.assertTrue(self.datalogger.meta["headers"] == list(self.datalogger.headers))
        self.assertTrue(self.datalogger.meta["value_keynames"].keys() == list(self.datalogger.value_keynames))
        self.assertTrue(self.datalogger.meta["index_keynames"] == list(self.datalogger.index_keynames))
        self.assertTrue(self.datalogger.meta["blacklist"] == list(self.datalogger.blacklist))
        self.assertTrue(self.datalogger.meta["delimiter"] == self.datalogger.delimiter)
        self.assertTrue(self.datalogger.meta["ts_keyname"] == self.datalogger.ts_keyname)
        self.assertTrue(isinstance(self.datalogger.meta["stat_func_names"], list))

    def test_statics(self):
        self.assertTrue(isinstance(DataLogger.get_user(self.basedir), basestring))
        self.assertTrue(isinstance(DataLogger.get_group(self.basedir), basestring))
        self.assertTrue(isinstance(DataLogger.get_yesterday_datestring(), basestring))
        lbd = DataLogger.get_last_business_day_datestring()
        self.assertTrue(isinstance(DataLogger.get_last_business_day_datestring(), basestring))
        self.assertTrue(isinstance(DataLogger.datestring_to_date(lbd), datetime.date))
        for datestring in DataLogger.datewalker("2016-01-01", "2016-02-29"):
            self.assertTrue(isinstance(datestring, basestring))
        for datestring in DataLogger.monthwalker("2016-02"):
            self.assertTrue(isinstance(datestring, basestring))
        self.assertEqual(list(DataLogger.monthwalker("2016-02"))[-1], "2016-02-29")
        self.assertTrue(isinstance(DataLogger.get_ts_for_datestring("2016-01-01"), tuple))
        self.assertTrue(isinstance(DataLogger.get_ts_for_datestring("2016-01-01")[0], float))
        self.assertTrue(isinstance(DataLogger.get_ts_for_datestring("2016-01-01")[1], float))


    def test_data(self):
        self.datalogger.load_tsa(self.datestring)
        self.datalogger.load_tsastats(self.datestring)
        self.datalogger.load_correlationmatrix(self.datestring)
        self.datalogger.load_quantile(self.datestring)
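
A minimal sketch of a module entry point so the tests can be run directly, assuming the standard unittest runner:

if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    unittest.main()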