def __init__(self, **kwargs):
    """Connect to Redis and build a gauge Timeseries on that connection.

    Required kwargs: 'host', 'port', 'db' (Redis connection parameters).
    Optional kwarg: 'timeseries' -- a kairos intervals mapping; when absent,
    a default 5-second-step / 120-step (last 10 minutes) interval is used.

    On connection failure self.client and self.ts are left as None.
    """
    self.client = None
    try:
        self.client = redis.StrictRedis(
            host=kwargs['host'], port=kwargs['port'], db=kwargs['db'])
        # ping() forces an immediate round-trip so failures surface here
        # rather than on the first real command.
        self.client.ping()
        logging.debug('Redis host=%s,port=%s,db=%d- Connected!',
                      kwargs['host'], kwargs['port'], kwargs['db'])
    except Exception as ex:
        self.client = None
        # BUG FIX: the arguments were previously passed as (ex, host, port,
        # db), misaligning them with the format string -- '%d' received the
        # exception object and the trailing '%s' received the db number.
        # The format string expects host, port, db, then the error.
        logging.error("Redis host=%s,port=%s,db=%d- Error %s",
                      kwargs['host'], kwargs['port'], kwargs['db'], ex)

    self.ts = None
    if self.client is not None:
        logging.debug('Timeseries - Create')
        if 'timeseries' in kwargs:
            self.ts = Timeseries(self.client, type='gauge',
                                 intervals=kwargs['timeseries'])
        else:
            self.ts = Timeseries(
                self.client,
                type='gauge',
                intervals={
                    'seconds': {
                        'step': 5,          # 5 seconds
                        'steps': 120,       # last 10 minutes
                        'read_cast': float,
                    }
                })
def __init__(self, name, config):
    """Build a schema from *config*, consuming the keys it recognizes.

    Pops 'host', 'rolling', 'generator' and 'match' out of *config*,
    fills in defaults for 'type'/'write_func'/'read_func', then hands the
    remaining config straight to the Timeseries constructor.  The match
    patterns are compiled once here and the appropriate match method is
    bound so no isinstance/len checks happen at runtime.
    """
    self._count = 0
    self._name = name
    self._host = config.pop('host', 'sqlite:///:memory:')
    self._rolling = config.pop('rolling', 0)
    self._generator = config.pop('generator', None)

    # Fill in the defaults the Timeseries layer expects.
    for key, default in (('type', 'count'),
                         ('write_func', long_or_float),
                         ('read_func', long_or_float)):
        config.setdefault(key, default)
    self._transform = config.get('transform')

    # Compile the patterns and bind the matching strategy up front.
    # TODO: optimize this binding even further to reduce lookups at runtime
    patterns = config.pop('match', [])
    if not isinstance(patterns, (tuple, list)):
        # A bare pattern string.
        self._patterns = re.compile(patterns)
        self.match = self._match_single
    elif len(patterns) == 1:
        # A one-element sequence collapses to the single-pattern path.
        self._patterns = re.compile(patterns[0])
        self.match = self._match_single
    else:
        self._patterns = [re.compile(p) for p in patterns]
        self.match = self._match_list

    self.config = config
    self.timeseries = Timeseries(self._host, **config)

    # Bind some of the timeseries methods to this for convenience
    self.list = self.timeseries.list
    self.properties = self.timeseries.properties
    self.iterate = self.timeseries.iterate
def init_app(self, app):
    """Wire a minute-resolution 'series' timeseries onto *app*.

    Uses the app's shared Redis connection and registers this object as
    ``app.metrics``.
    """
    retention = {
        'minute': {
            'step': 60,        # 60 seconds
            'steps': 12 * 60,  # last 12 hours
        }
    }
    self.t = Timeseries(
        app.mxcache.redis_conn(),
        type='series',
        read_func=float,
        intervals=retention,
    )
    app.metrics = self
def start(ctx, argv):
    """
    Called once on script startup, before any other events.
    """
    global client
    global reqTable
    global db
    global respTable
    global timeSeries

    # Shared MongoDB handles for request/response logging.
    client = MongoClient()
    db = client.mydb
    reqTable = db.reqTable
    respTable = db.respTable

    # Histogram series: one-minute buckets kept for the last two hours.
    retention = {
        'minute': {
            'step': 60,
            'steps': 120,
        }
    }
    timeSeries = Timeseries(client, type='histogram',
                            read_func=float, intervals=retention)
    ctx.log("start")
def __init__(self, client, loadFromDB=LOAD_DB, bulk_insert=BULK_INSERT,
             track_users=T_USER, track_words=T_WORDS):
    """
    TODO: config persistence
    TODO: populate memory from database
    """
    # Redis server to send data to
    self._client = client
    self._log = setup_log('manager')

    # database
    if loadFromDB:
        try:
            self.dbclient = MongoClient(MONGO_URL)
            # BUG FIX: was 'dbclient[MONGO_DB]' -- a bare name that raised
            # NameError; the client is stored on self.
            self._DB = self.dbclient[MONGO_DB]
            self.load()
        except Exception:
            # Narrowed from a bare 'except:' so KeyboardInterrupt and
            # SystemExit still propagate; connection/load failures are
            # reported but non-fatal (best effort, as before).
            self._log.error('could not connect to MongoDB.')

    # Retention schedule shared by the event series.
    self._intervals = {
        'second': {
            'step': 1,                # one second
            'steps': 60 * 60 * 24,    # keep for 1 day
        },
        'minute': {
            'step': 60,               # 60 seconds
            'steps': 60 * 24 * 3,     # keep for 3 days
        },
        'hour': {
            'step': '1h',             # one hour
            'steps': 30 * 24          # keep for 1 month
        },
        'day': {
            'step': '1d',             # one day
            'steps': 90               # keep for 3 month
        }
    }

    # Insert batching state.
    self._inserts = defaultdict(Counter)
    self._insert_lock = threading.Lock()
    self._n_inserts = 0
    self.bulk_insert = bulk_insert
    self._bulk_size = 10000
    self._mini_batch_size = 16
    self._pipelined = 0
    self.track_users = track_users
    self.track_words = track_words

    # Series
    self.events = Timeseries(self._client, type='count',
                             intervals=self._intervals)
    # Online users, consider offline after 2 minutes
    self.users = Timeseries(
        self._client,
        type='count',
        intervals={'second2': {
            'step': 1,
            'steps': 20
        }})
    # Effective words, keep for 1 month
    self.words = Timeseries(
        self._client,
        type='histogram',
        intervals={'month': {
            'step': '30d',
            'steps': 3
        }})
# Minimal kairos example: a Mongo-backed histogram timeseries with
# one-minute buckets kept for the last two hours.
# NOTE(review): this is Python 2 code (print statement at the bottom).
from kairos import Timeseries
import pymongo

client = pymongo.MongoClient('localhost')
t = Timeseries(
    client,
    type='histogram',
    read_func=float,
    intervals={
        'minute': {
            'step': 60,    # 60 seconds
            'steps': 120,  # last 2 hours
        }
    })
# Earlier sample inserts, left disabled:
# t.insert('example', 3.14159)
# t.insert('example', 2.71828)
# t.insert('example', 2.71828)
# t.insert('example', 3.71828)
# t.insert('example', 4.71828)
# t.insert('example', 5.71828)
t.insert('example', 6.71828)
t.insert('example', 7.71828)
# Dump the per-minute histogram buckets for the 'example' series.
print t.get('example', 'minute')
def __init__(self, team_id, series_type):
    """Create a per-team timeseries of *series_type*.

    Uses the class-level ``client`` connection and ``INTERVALS`` retention
    schedule.
    """
    self.team_id = team_id
    self.series = Timeseries(
        self.client,
        type=series_type,
        intervals=self.INTERVALS,
    )