def read(self):
    """Read and validate datapoints for the configured metric.

    Pulls datapoints from the metric sink, sorts them by timestamp,
    rejects stale or insufficient data, fills gaps on the fixed
    ``interval`` grid, and returns the trailing window of
    ``period_length * seasons`` datapoints.

    Returns:
        The trimmed, gap-filled list of datapoints, or None when the
        data is missing, too old, or too short.
    """
    metric = self.params['metric']
    period_length = self.params['period_length']
    seasons = self.params['seasons']
    interval = self.params['interval']
    # gather data and assure requirements
    self._read_tdigest()
    data = [el for el in self.metric_sink.read(metric)]
    # BUG FIX: the original tested `not data[0]`, which raises
    # IndexError when the sink yields nothing. Check emptiness first,
    # then keep the original falsy-first-element check.
    if not data or not data[0]:
        self.logger.error('%s :: No Datapoints. Exiting' % self.service)
        return None
    data = sorted(data, key=lambda tup: tup.timestamp)
    # reject data whose newest point is older than 3 collection intervals
    if int(time()) - data[-1].timestamp > 3 * interval:
        self.logger.error('%s :: Datapoints are too old (%d sec). Exiting' % (
            self.service, (int(time()) - data[-1].timestamp)))
        return None
    data = insert_missing_datapoints(data, False, interval)
    # need one extra point because the slice below drops the newest one
    if len(data) < period_length * seasons + 1:
        self.logger.error(
            '%s :: Not enough (%d) datapoints. Exiting' % (
                self.service, len(data)))
        return None
    # keep exactly period_length * seasons points, excluding the newest
    data = data[-period_length * seasons - 1:-1]
    return data
def read(self):
    """Read and validate datapoints, restoring t-digest state first.

    Restores previously persisted t-digest centroids from the sink,
    then pulls datapoints for the configured metric, derives the time
    grid step, rejects stale or insufficient data, fills gaps with
    ``default``, and returns the trailing window of
    ``period_length * seasons`` datapoints.

    Returns:
        The trimmed, gap-filled list of datapoints, or None when no
        common step size is found or the data is too old / too short.
    """
    metric = self.params['metric']
    period_length = self.params['period_length']
    seasons = self.params['seasons']
    default = self.params['default']
    tdigest_json = [el for el in self.metric_sink.read(self.tdigest_key)]
    if tdigest_json:
        centroids = json.loads(tdigest_json[0])
        # plain loop: the original used a list comprehension purely
        # for its side effects
        for centroid in centroids:
            self.td.add(centroid[0], centroid[1])
    # gather data and assure requirements
    data = [el for el in self.metric_sink.read(metric)]
    # guard before indexing data[-1] below; an empty read would
    # otherwise raise IndexError
    if not data:
        self.logger.error(
            'Datapoints have no common time grid or are not enough. Exiting')
        return None
    data = sorted(data, key=lambda tup: tup.timestamp)
    step_size = find_step_size(data)
    if not step_size:
        self.logger.error(
            'Datapoints have no common time grid or are not enough. Exiting')
        return None
    # BUG FIX: operands were inverted (`data[-1].timestamp - int(time())`),
    # which is negative for any past datapoint, so the staleness check
    # could never fire. Age is now - newest, as in the sibling readers.
    if int(time()) - data[-1].timestamp > 2 * step_size:
        self.logger.error('Datapoints are too old (%d sec). Exiting' % (
            int(time()) - data[-1].timestamp))
        return None
    data = insert_missing_datapoints(data, default, step_size)
    if len(data) < period_length * seasons:
        self.logger.error(
            'Not enough (%d) datapoints. Exiting' % len(data))
        return None
    # keep exactly period_length * seasons points, excluding the newest
    data = data[-period_length * seasons - 1:-1]
    return data
def read(self):
    """Read and validate datapoints for the configured metric.

    Pulls datapoints from the metric sink, sorts them by timestamp,
    rejects stale or insufficient data, fills gaps on the fixed
    ``interval`` grid, and returns the trailing window of
    ``period_length * seasons`` datapoints.

    Returns:
        The trimmed, gap-filled list of datapoints, or None when the
        data is missing, too old, or too short.
    """
    metric = self.params['metric']
    period_length = self.params['period_length']
    seasons = self.params['seasons']
    interval = self.params['interval']
    # gather data and assure requirements
    self._read_tdigest()
    data = [el for el in self.metric_sink.read(metric)]
    # BUG FIX: the original tested `not data[0]`, which raises
    # IndexError when the sink yields nothing. Check emptiness first,
    # then keep the original falsy-first-element check.
    if not data or not data[0]:
        self.logger.error('%s :: No Datapoints. Exiting' % self.service)
        return None
    data = sorted(data, key=lambda tup: tup.timestamp)
    # reject data whose newest point is older than 3 collection intervals
    if int(time()) - data[-1].timestamp > 3 * interval:
        self.logger.error(
            '%s :: Datapoints are too old (%d sec). Exiting' % (self.service, (int(time()) - data[-1].timestamp)))
        return None
    data = insert_missing_datapoints(data, False, interval)
    # need one extra point because the slice below drops the newest one
    if len(data) < period_length * seasons + 1:
        self.logger.error('%s :: Not enough (%d) datapoints. Exiting' % (self.service, len(data)))
        return None
    # keep exactly period_length * seasons points, excluding the newest
    data = data[-period_length * seasons - 1:-1]
    return data
def test_insert_missing_datapoints(self):
    """Gaps on the 10-second grid are filled with the default value."""
    fill_value = 111
    step = 10
    # values at timestamps 10..100; 111 marks the filled-in gaps
    point_values = [1, 1, 1, 111, 1, 1, 111, 111, 111, 1]
    expected = [TimeSeriesTuple('_', ts, val)
                for ts, val in zip(range(10, 101, 10), point_values)]
    expect(insert_missing_datapoints(self.timeseries, fill_value, step)).to.be.equal(expected)
def test_insert_missing_datapoints(self):
    """Missing grid points are inserted carrying the default value."""
    gap_value = 111
    grid_step = 10
    # timestamps 40, 70, 80, 90 are absent from self.timeseries and
    # should come back holding the default (111)
    expected_series = [
        TimeSeriesTuple('_', 10, 1),
        TimeSeriesTuple('_', 20, 1),
        TimeSeriesTuple('_', 30, 1),
        TimeSeriesTuple('_', 40, gap_value),
        TimeSeriesTuple('_', 50, 1),
        TimeSeriesTuple('_', 60, 1),
        TimeSeriesTuple('_', 70, gap_value),
        TimeSeriesTuple('_', 80, gap_value),
        TimeSeriesTuple('_', 90, gap_value),
        TimeSeriesTuple('_', 100, 1),
    ]
    actual = insert_missing_datapoints(self.timeseries, gap_value, grid_step)
    expect(actual).to.be.equal(expected_series)