Example #1
    def get_quantile_web(self, args):
        """
        return exported QuantileArray json formatted, special
        version for use in webpages to render with tablesorter

        in difference to get_quantile the value_keyname has to be given

        ex: Datalogger/get_quantile/{projectname}/{tablename}/{datestring}

        [
            dict of index_keys : dict of quantile,
            list of index_keys,
            list of value_names,
        ]

        returns:
        json(quantile_dict)
        """
        project, tablename, datestring, value_keyname = args[:4]
        datalogger = DataLogger(basedir, project, tablename)
        qa = datalogger.load_quantile(datestring)
        ret_data = []
        # build header
        ret_data.append(list(datalogger.index_keynames) + ["Q0", "Q1", "Q2", "Q3", "Q4"])
        # data part
        for k, v in qa[value_keyname].quantile.items():
            ret_data.append(list(k) + list(v.values()))
        return json.dumps(ret_data)
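
The rows above are exactly what the webpage receives; below is a standalone sketch of the expected payload shape, using made-up index keys and quantile values (the data and the http_host keyname are illustrative assumptions, only the row layout follows the code above):

import json

# hypothetical quantile data: one entry per index key, five quantile levels each
quantile = {
    ("srv1.example.com",): {0: 0.0, 1: 12.0, 2: 15.5, 3: 21.0, 4: 99.0},
    ("srv2.example.com",): {0: 0.0, 1: 3.0, 2: 4.5, 3: 8.0, 4: 40.0},
}
ret_data = [["http_host", "Q0", "Q1", "Q2", "Q3", "Q4"]]  # header row
for index_key, quants in quantile.items():
    # sort by quantile level to get a deterministic column order
    ret_data.append(list(index_key) + [quants[q] for q in sorted(quants)])
print(json.dumps(ret_data))
# [["http_host", "Q0", ..., "Q4"], ["srv1.example.com", 0.0, 12.0, 15.5, 21.0, 99.0], ...]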
Example #2
    def get_quantile(self, project, tablename, datestring, args):
        """
        return exported QuantileArray json formatted

        ex: Datalogger/get_quantile/{projectname}/{tablename}/{datestring}
        optionally a value_keyname can be appended to get a tablesorter
        friendly list of rows instead of the full export

        [
            dict of index_keys : dict of quantile,
            list of index_keys,
            list of value_names,
        ]

        returns:
        json(quantile_dict)
        """
        logging.info("optional arguments received: %s", args)
        datalogger = DataLogger(basedir, project, tablename)
        quantile = datalogger.load_quantile(datestring)
        if len(args) > 0:
            value_keyname = args[0]
            ret_data = []
            # build header
            ret_data.append(list(datalogger.index_keynames) + ["Q0", "Q1", "Q2", "Q3", "Q4"])
            # data part
            for k, v in quantile[value_keyname].quantile.items():
                ret_data.append(list(k) + list(v.values()))
            return json.dumps(ret_data)
        return quantile.to_json()
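
The header-and-rows construction here is the same loop used in get_quantile_web above; the following is a hedged sketch of how both handlers could share it (the helper name quantile_rows is an illustrative assumption, not part of the code base):

def quantile_rows(datalogger, quantile, value_keyname):
    # tablesorter friendly structure: one header row, then one row per index key
    rows = [list(datalogger.index_keynames) + ["Q0", "Q1", "Q2", "Q3", "Q4"]]
    for index_key, quants in quantile[value_keyname].quantile.items():
        rows.append(list(index_key) + list(quants.values()))
    return rows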
Example #3
def gen_caches(project, tablename, datestring):
    """
    generate any missing cache archives (tsa, tsastats, quantile) for the
    given project/tablename/datestring by calling the according load_* method
    """
    datalogger = DataLogger(basedir, project, tablename)
    caches = datalogger.get_caches(datestring)
    suffix = "%s/%s/%s\t" % (datestring, project, tablename)
    data = None
    if caches["tsa"]["raw"] is None:
        if len(caches["tsa"]["keys"]) == 0:
            logging.info("%s RAW Data not availabale maybe archived, tsa exists already", suffix)
        else:
            logging.debug("%s RAW Data is missing, no tsa archive exists", suffix)
    else:
        if len(caches["tsa"]["keys"]) == 0:
            logging.info("%s TSA Archive missing, calling get_tsa and load_tsastats", suffix)
            data = datalogger.load_tsa(datestring)
        else:
            if len(caches["tsastat"]["keys"]) == 0:
                logging.info("%s TSASTAT Archive missing, calling load_tsastats", suffix)
                data = datalogger.load_tsastats(datestring)
            else:
                if len(caches["ts"]["keys"]) == 0:
                    logging.info("%s there are no ts archives, something went wrong, or tsa is completely empty, calling load_tsastats", suffix)
                    data = datalogger.load_tsastats(datestring)
                else:
                    logging.debug("%s All fine", suffix)
            if caches["quantile"]["exists"] is not True:
                logging.info("%s Quantile archive is missing, calling load_quantile", suffix)
                data = datalogger.load_quantile(datestring)
    # drop the references explicitly so the potentially large cache objects
    # can be garbage collected right away
    del data
    del caches
    del datalogger
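
gen_caches fills in whatever is missing for a single day, so it is naturally driven in a loop over projects, tables and dates. Below is a minimal driver sketch, assuming the DataLogger static helpers exercised in the tests below (get_projects, get_tablenames, datewalker) are available; the function name gen_caches_for_range is an assumption:

def gen_caches_for_range(start_datestring, stop_datestring):
    # regenerate missing caches for every project/table combination
    # on every day in the given range; datewalker yields ISO datestrings
    for datestring in DataLogger.datewalker(start_datestring, stop_datestring):
        for project in DataLogger.get_projects(basedir):
            for tablename in DataLogger.get_tablenames(basedir, project):
                gen_caches(project, tablename, datestring)

# usage: gen_caches_for_range("2016-01-01", "2016-01-31")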
Example #4
    def get_quantile(self, args):
        """
        return exported QuantileArray json formatted

        ex: Datalogger/get_quantile/{projectname}/{tablename}/{datestring}

        [
            dict of index_keys : dict of quantile,
            list of index_keys,
            list of value_names,
        ]

        returns:
        json(quantile_dict)
        """
        project, tablename, datestring = args[:3]
        datalogger = DataLogger(basedir, project, tablename)
        quantile = datalogger.load_quantile(datestring)
        return quantile.to_json()
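
The docstring describes the export as a three part list; here is a hedged client-side sketch of unpacking it (the field order is taken from the docstring above, everything else is an assumption):

import json

def unpack_quantile_export(payload):
    # the export is documented as: quantile data keyed by index_keys,
    # the list of index key names and the list of value names
    quantile_data, index_keynames, value_names = json.loads(payload)
    return quantile_data, index_keynames, value_names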
Example #5
class Test(unittest.TestCase):

    def setUp(self):
        self.basedir = "/var/rrd"
        self.datestring = "2015-11-30"
        self.project = DataLogger.get_projects(self.basedir)[0]
        self.tablename = DataLogger.get_tablenames(self.basedir, self.project)[0]
        self.datalogger = DataLogger(self.basedir, self.project, self.tablename)

    def test_simple(self):
        self.assertTrue(self.datalogger.project == self.project)
        self.assertTrue(self.datalogger.tablename == self.tablename)
        self.assertTrue(isinstance(self.datalogger.delimiter, basestring))
        self.assertTrue(isinstance(self.datalogger.ts_keyname, basestring))
        self.assertTrue(isinstance(self.datalogger.headers, tuple))
        self.assertTrue(isinstance(self.datalogger.value_keynames, tuple))
        self.assertTrue(all((keyname in self.datalogger.headers for keyname in self.datalogger.value_keynames)))
        self.assertTrue(isinstance(self.datalogger.index_keynames, tuple))
        self.assertTrue(all((keyname in self.datalogger.headers for keyname in self.datalogger.index_keynames)))
        self.assertTrue(isinstance(self.datalogger.blacklist, tuple))
        self.assertTrue(all((keyname in self.datalogger.headers for keyname in self.datalogger.blacklist)))
        self.assertTrue(isinstance(self.datalogger.raw_basedir, basestring))
        self.assertTrue(os.path.exists(self.datalogger.raw_basedir))
        self.assertTrue(os.path.isdir(self.datalogger.raw_basedir))
        self.assertTrue(isinstance(self.datalogger.global_cachedir, basestring))
        self.assertTrue(os.path.exists(self.datalogger.global_cachedir))
        self.assertTrue(os.path.isdir(self.datalogger.global_cachedir))
        # meta is something like this
        # {u'ts_keyname': u'ts',
        # 'stat_func_names': [u'count', ... ],
        # u'interval': 300,
        # u'blacklist': [],
        # u'headers': [u'ts', u'http_host', ... ],
        # u'delimiter': u'\t',
        # u'value_keynames': {
        #   u'actconn': u'asis',
        #   u'hits': u'asis',
        #   ...
        #   },
        # u'index_keynames': [u'http_host']}
        self.assertTrue(self.datalogger.meta["headers"] == list(self.datalogger.headers))
        self.assertTrue(self.datalogger.meta["value_keynames"].keys() == list(self.datalogger.value_keynames))
        self.assertTrue(self.datalogger.meta["index_keynames"] == list(self.datalogger.index_keynames))
        self.assertTrue(self.datalogger.meta["blacklist"] == list(self.datalogger.blacklist))
        self.assertTrue(self.datalogger.meta["delimiter"] == self.datalogger.delimiter)
        self.assertTrue(self.datalogger.meta["ts_keyname"] == self.datalogger.ts_keyname)
        self.assertTrue(isinstance(self.datalogger.meta["stat_func_names"], list))

    def test_statics(self):
        self.assertTrue(isinstance(DataLogger.get_user(self.basedir), basestring))
        self.assertTrue(isinstance(DataLogger.get_group(self.basedir), basestring))
        self.assertTrue(isinstance(DataLogger.get_yesterday_datestring(), basestring))
        lbd = DataLogger.get_last_business_day_datestring()
        self.assertTrue(isinstance(DataLogger.get_last_business_day_datestring(), basestring))
        self.assertTrue(isinstance(DataLogger.datestring_to_date(lbd), datetime.date))
        for datestring in DataLogger.datewalker("2016-01-01", "2016-02-29"):
            self.assertTrue(isinstance(datestring, basestring))
        for datestring in DataLogger.monthwalker("2016-02"):
            self.assertTrue(isinstance(datestring, basestring))
        self.assertEqual(list(DataLogger.monthwalker("2016-02"))[-1], "2016-02-29")
        self.assertTrue(isinstance(DataLogger.get_ts_for_datestring("2016-01-01"), tuple))
        self.assertTrue(isinstance(DataLogger.get_ts_for_datestring("2016-01-01")[0], float))
        self.assertTrue(isinstance(DataLogger.get_ts_for_datestring("2016-01-01")[1], float))


    def test_data(self):
        self.datalogger.load_tsa(self.datestring)
        self.datalogger.load_tsastats(self.datestring)
        self.datalogger.load_correlationmatrix(self.datestring)
        self.datalogger.load_quantile(self.datestring)
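
test_data only checks that the load_* calls succeed without raising. A hedged sketch of the usual standalone entry point follows (not shown in the excerpt above; the unittest and logging imports are assumed to be present in the surrounding module):

if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    unittest.main()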