Code example #1
File: DataLogger.py — Project: gunny26/datalogger
    def import_tsa(self, datestring, tsa):
        """
        Store the given tsa in the global cache to make the data available
        through the normal frontends (web, api).

        Usually this is a modified existing tsa, extended by some keys, or
        filtered, or ...; the structure has to be predefined in meta data.

        parameters:
        datestring <str> date key used to locate the cache directory
        tsa <TimeseriesArrayLazy> object whose index/value keynames must
            match the ones configured for this DataLogger

        raises:
        StandardError if a TSA archive for this index already exists in cache
        """
        # the incoming tsa must match the structure defined in meta data
        assert self.__index_keynames == tsa.index_keynames
        assert self.__value_keynames == tuple(tsa.value_keynames)
        cachedir = self.__get_cachedir(datestring)
        cachefilename = os.path.join(cachedir, TimeseriesArrayLazy.get_dumpfilename(tsa.index_keynames))
        # guard clause: never overwrite an existing archive
        # NOTE(review): StandardError exists only in Python 2; kept for
        # backward compatibility with existing callers — confirm before
        # porting this module to Python 3.
        if os.path.isfile(cachefilename):
            raise StandardError("TSA Archive %s exists already in cache" % cachefilename)
        tsa.dump_split(cachedir)
        tsastats = TimeseriesArrayStats(tsa)
        tsastats.dump(cachedir)
        quantile = QuantileArray(tsa, tsastats)
        q_cachefilename = os.path.join(cachedir, "quantile.json")
        # use a context manager so the file handle is always closed
        # (the original passed a bare open() and leaked the handle)
        with open(q_cachefilename, "wb") as q_file:
            quantile.dump(q_file)
Code example #2
File: DataLogger.py — Project: gunny26/datalogger
 def fallback():
     """
     Fallback used when the cached statistics cannot be read: rebuild
     the full statistics from a freshly loaded tsa, persist them, then
     return the filtered view read back from the cache.
     """
     # load the complete tsa and derive full statistics from it
     full_tsa = self.load_tsa(datestring=datestring, filterkeys=None, timedelta=timedelta)
     full_stats = TimeseriesArrayStats(full_tsa)
     # persist the full statistics, then re-read only the requested subset
     full_stats.dump(cachedir)
     return TimeseriesArrayStats.load(cachedir, self.__index_keynames, filterkeys=filterkeys)
Code example #3
File: DataLogger.py — Project: gunny26/datalogger
 def fallback():
     """
     Fallback used when reading from cache data is not possible: rebuild
     the tsa from raw data, cache it (plus statistics and quantiles) and
     return it.
     """
     tsa = self.load_tsa_raw(datestring, timedelta)
     tsa.dump_split(cachedir) # save full data
     # read the data afterwards to make sure there is no problem
     if validate is True:
         tsa = TimeseriesArrayLazy.load_split(cachedir, self.__index_keynames, filterkeys=filterkeys, index_pattern=index_pattern, datatypes=self.__datatypes)
     # also generate TSASTATS and dump to cache directory
     tsastats = TimeseriesArrayStats(tsa) # generate full Stats
     tsastats.dump(cachedir) # save
     # and at last but not least quantile; use a context manager so the
     # file handle is always closed (the original passed a bare open()
     # and leaked the handle)
     quantile = QuantileArray(tsa, tsastats)
     cachefilename = os.path.join(cachedir, "quantile.json")
     with open(cachefilename, "wb") as q_file:
         quantile.dump(q_file)
     # finally return tsa
     return tsa