def put_metric_data(self, metric_key, timestamp, value, unit=None):
    """Fold one metric sample into its minute-bucket statistics and persist.

    The sample timestamp is truncated to the minute (``time_idx``); samples
    older than ``STATISTICS_TTL`` are dropped. The running SampleCount, Sum,
    Average, Minimum and Maximum for that minute are updated and written back
    to Cassandra with a TTL shrinking as the minute ages.

    :param metric_key: key used to fetch existing statistics from the DB
    :param timestamp: datetime of the sample
    :param value: sample value, converted to the metric's default unit
    :param unit: optional unit of *value* (see utils.to_default_unit)
    """

    def get_stats(tmp_stat):
        # Map the raw DB row onto the statistics columns; a missing row
        # raises IndexError and yields an all-NaN row instead.
        try:
            ret = dict(zip(self.COLUMNS,
                           map(lambda x: x.values()[0], tmp_stat)))
        except IndexError:
            storm.log("index %s is not in DB." % time_idx)
            ret = {
                "SampleCount": float("nan"),
                "Sum": float("nan"),
                "Average": float("nan"),
                "Minimum": float("nan"),
                "Maximum": float("nan"),
            }
        return ret

    time_idx = timestamp.replace(second=0, microsecond=0)
    time_diff = utils.utcnow() - time_idx
    if timedelta(seconds=self.STATISTICS_TTL) < time_diff:
        msg = "index %s is older than TTL. It doesn't need to insert DB"
        storm.log(msg % time_idx)
        return
    if time_idx not in self.df.index:
        self._reindex()
    value = utils.to_default_unit(value, unit)
    try:
        stat = self.df.ix[time_idx]
        # No None->NaN conversion is needed here: isnull() below already
        # treats None as missing. (The previous version looped over the
        # values rebinding the loop variable, which had no effect at all.)
    except KeyError:
        stat = self.cass.get_metric_statistics_for_key(metric_key, time_idx)
        stat = get_stats(stat)

    # Merge the new sample into the running aggregates; isnull() marks a
    # column with no data yet for this minute.
    stat["SampleCount"] = 1.0 if isnull(stat["SampleCount"]) \
        else stat["SampleCount"] + 1.0
    stat["Sum"] = value if isnull(stat["Sum"]) else stat["Sum"] + value
    stat["Average"] = stat["Sum"] / stat["SampleCount"]
    stat["Minimum"] = value \
        if (isnull(stat["Minimum"]) or stat["Minimum"] > value) \
        else stat["Minimum"]
    stat["Maximum"] = value \
        if (isnull(stat["Maximum"]) or stat["Maximum"] < value) \
        else stat["Maximum"]

    # insert into DB
    stat_dict = {
        "SampleCount": {time_idx: stat["SampleCount"]},
        "Sum": {time_idx: stat["Sum"]},
        "Average": {time_idx: stat["Average"]},
        "Minimum": {time_idx: stat["Minimum"]},
        "Maximum": {time_idx: stat["Maximum"]},
    }
    # Row expires when the minute itself falls out of the statistics window.
    ttl = self.STATISTICS_TTL - time_diff.total_seconds()
    # NOTE(review): the lookup above uses the metric_key argument but the
    # insert uses self.metric_key -- confirm these are always identical.
    self.cass.insert_stat(self.metric_key, stat_dict, ttl)
    storm.log("metric data inserted %s" % (self.metric_key))
def put_metric_data(self, metric_key, timestamp, value, unit=None):
    """Fold one metric sample into its minute-bucket statistics, persist
    them, and evaluate alarms for sufficiently recent samples.

    :param metric_key: key used to fetch existing statistics from the DB
    :param timestamp: datetime of the sample (truncated to the minute)
    :param value: sample value, converted to the metric's default unit
    :param unit: optional unit of *value* (see utils.to_default_unit)
    """

    def _missing(x):
        # math.isnan raises TypeError when given None, so treat None
        # explicitly as a missing value. (The previous version tried to
        # convert None to NaN with loops that only rebound their loop
        # variable -- a no-op that left the TypeError reachable.)
        return x is None or isnan(x)

    time_idx = timestamp.replace(second=0, microsecond=0)
    if timedelta(seconds=self.cass.STATISTICS_TTL) < (utils.utcnow() - time_idx):
        msg = "index %s is older than TTL. It doesn't need to insert DB"
        storm.log(msg % time_idx)
        return
    if time_idx not in self.df.index:
        self._reindex()
    value = utils.to_default_unit(value, unit)
    try:
        stat = self.df.ix[time_idx]
    except KeyError:
        stat = self.cass.get_metric_statistics_for_key(metric_key, time_idx)
        if [{}, {}, {}, {}, {}] == stat:
            # No row in the DB yet for this minute: start from all-NaN.
            storm.log("index %s is not in DB." % time_idx)
            stat = {
                "SampleCount": float("nan"),
                "Sum": float("nan"),
                "Average": float("nan"),
                "Minimum": float("nan"),
                "Maximum": float("nan"),
            }
        else:
            stat = dict(zip(self.cass.STATISTICS,
                            map(lambda x: x.values()[0], stat)))

    # Merge the new sample into the running aggregates; _missing() marks a
    # column with no data yet for this minute.
    stat["SampleCount"] = 1.0 if _missing(stat["SampleCount"]) \
        else stat["SampleCount"] + 1.0
    stat["Sum"] = value if _missing(stat["Sum"]) else stat["Sum"] + value
    stat["Average"] = stat["Sum"] / stat["SampleCount"]
    stat["Minimum"] = value \
        if _missing(stat["Minimum"]) or stat["Minimum"] > value \
        else stat["Minimum"]
    stat["Maximum"] = value \
        if _missing(stat["Maximum"]) or stat["Maximum"] < value \
        else stat["Maximum"]

    # insert into DB
    stat_dict = {
        "SampleCount": {time_idx: stat["SampleCount"]},
        "Sum": {time_idx: stat["Sum"]},
        "Average": {time_idx: stat["Average"]},
        "Minimum": {time_idx: stat["Minimum"]},
        "Maximum": {time_idx: stat["Maximum"]},
    }
    # NOTE(review): the lookup above uses the metric_key argument but the
    # insert uses self.metric_key -- confirm these are always identical.
    self.cass.insert_stat(self.metric_key, stat_dict)
    storm.log("metric data inserted %s" % (self.metric_key))
    # self.df.ix[time_idx] = stat

    # Only evaluate alarms for samples recent enough to affect them.
    now = utils.utcnow().replace(second=0, microsecond=0)
    timedelta_buf = now - time_idx
    if timedelta_buf <= timedelta(seconds=self.MAX_START_PERIOD):
        # check alarms
        self.check_alarms()
def put_metric_data(self, metric_key, timestamp, value, unit=None):
    """Fold one metric sample into its minute-bucket statistics and persist.

    Samples older than the statistics TTL, and samples without a value, are
    dropped. Otherwise the running SampleCount, Sum, Average, Minimum and
    Maximum for the sample's minute are updated and written back with the
    remaining TTL, and the metric's updated_timestamp is refreshed.

    :param metric_key: key used to fetch existing statistics from the DB
    :param timestamp: datetime of the sample (truncated to the minute)
    :param value: sample value, converted to the metric's default unit;
                  may be None, in which case the sample is ignored
    :param unit: optional unit of *value* (see utils.to_default_unit)
    """

    def get_stats(tmp_stat):
        # Map the raw DB row onto the statistics columns; a missing row
        # raises IndexError and yields an all-NaN row instead.
        try:
            ret = dict(zip(self.cass.STATISTICS,
                           map(lambda x: x.values()[0], tmp_stat)))
        except IndexError:
            LOG.debug("index %s is not in DB.", time_idx)
            ret = {'SampleCount': float('nan'),
                   'Sum': float('nan'),
                   'Average': float('nan'),
                   'Minimum': float('nan'),
                   'Maximum': float('nan')}
        return ret

    time_idx = timestamp.replace(second=0, microsecond=0)
    time_diff = utils.utcnow() - time_idx
    if timedelta(seconds=self.cass.statistics_ttl) < time_diff:
        msg = "index %s is older than TTL. It doesn't need to insert DB"
        LOG.debug(msg, time_idx)
        return
    if time_idx not in self.df.index:
        self._reindex()
    # `is None` instead of `== None`; the early return also makes the
    # previous else-branch redundant.
    if value is None:
        LOG.info("metric inputted without value")
        return
    value = utils.to_default_unit(value, unit)
    try:
        stat = self.df.ix[time_idx]
        # No None->NaN conversion is needed here: isnull() below already
        # treats None as missing. (The previous version looped over the
        # values rebinding the loop variable, which had no effect at all.)
    except KeyError:
        stat = self.cass.get_metric_statistics_for_key(metric_key, time_idx)
        stat = get_stats(stat)

    # Merge the new sample into the running aggregates; isnull() marks a
    # column with no data yet for this minute.
    stat['SampleCount'] = 1.0 if isnull(stat['SampleCount']) \
        else stat['SampleCount'] + 1.0
    stat['Sum'] = value if isnull(stat['Sum']) \
        else stat['Sum'] + value
    stat['Average'] = stat['Sum'] / stat['SampleCount']
    stat['Minimum'] = value \
        if (isnull(stat['Minimum']) or stat['Minimum'] > value) \
        else stat['Minimum']
    stat['Maximum'] = value \
        if (isnull(stat['Maximum']) or stat['Maximum'] < value) \
        else stat['Maximum']

    # insert into DB
    stat_dict = {
        'SampleCount': {time_idx: stat['SampleCount']},
        'Sum': {time_idx: stat['Sum']},
        'Average': {time_idx: stat['Average']},
        'Minimum': {time_idx: stat['Minimum']},
        'Maximum': {time_idx: stat['Maximum']}
    }
    # Row expires when the minute itself falls out of the statistics window;
    # Cassandra rejects non-positive TTLs, hence the guard.
    ttl = self.cass.statistics_ttl - time_diff.total_seconds()
    self.updated_timestamp = utils.utcnow()
    if ttl > 0.1:
        self.cass.insert_stat(self.metric_key, stat_dict, ttl)
    else:
        LOG.debug("ttl must be positive, ttl %s", ttl)
    # NOTE(review): the lookup above uses the metric_key argument but the
    # writes use self.metric_key -- confirm these are always identical.
    self.cass.update_metric(self.metric_key,
                            {'updated_timestamp': self.updated_timestamp})
    LOG.info("metric data inserted %s, time_idx %s", str(self), time_idx)
def put_metric_data(self, metric_key, timestamp, value, unit=None):
    """Fold one metric sample into its minute-bucket statistics and persist.

    Samples older than the statistics TTL, and samples without a value, are
    dropped. Otherwise the running SampleCount, Sum, Average, Minimum and
    Maximum for the sample's minute are updated and written back with the
    remaining TTL, and the metric's updated_timestamp is refreshed.

    :param metric_key: key used to fetch existing statistics from the DB
    :param timestamp: datetime of the sample (truncated to the minute)
    :param value: sample value, converted to the metric's default unit;
                  may be None, in which case the sample is ignored
    :param unit: optional unit of *value* (see utils.to_default_unit)
    """

    def get_stats(tmp_stat):
        # Map the raw DB row onto the statistics columns; a missing row
        # raises IndexError and yields an all-NaN row instead.
        try:
            ret = dict(
                zip(self.cass.STATISTICS,
                    map(lambda x: x.values()[0], tmp_stat)))
        except IndexError:
            LOG.debug("index %s is not in DB.", time_idx)
            ret = {
                'SampleCount': float('nan'),
                'Sum': float('nan'),
                'Average': float('nan'),
                'Minimum': float('nan'),
                'Maximum': float('nan')
            }
        return ret

    time_idx = timestamp.replace(second=0, microsecond=0)
    time_diff = utils.utcnow() - time_idx
    if timedelta(seconds=self.cass.statistics_ttl) < time_diff:
        msg = "index %s is older than TTL. It doesn't need to insert DB"
        LOG.debug(msg, time_idx)
        return
    if time_idx not in self.df.index:
        self._reindex()
    # `is None` instead of `== None`; the early return also makes the
    # previous else-branch redundant.
    if value is None:
        LOG.info("metric inputted without value")
        return
    value = utils.to_default_unit(value, unit)
    try:
        stat = self.df.ix[time_idx]
        # No None->NaN conversion is needed here: isnull() below already
        # treats None as missing. (The previous version looped over the
        # values rebinding the loop variable, which had no effect at all.)
    except KeyError:
        stat = self.cass.get_metric_statistics_for_key(
            metric_key, time_idx)
        stat = get_stats(stat)

    # Merge the new sample into the running aggregates; isnull() marks a
    # column with no data yet for this minute.
    stat['SampleCount'] = 1.0 if isnull(stat['SampleCount']) \
        else stat['SampleCount'] + 1.0
    stat['Sum'] = value if isnull(stat['Sum']) \
        else stat['Sum'] + value
    stat['Average'] = stat['Sum'] / stat['SampleCount']
    stat['Minimum'] = value \
        if (isnull(stat['Minimum']) or stat['Minimum'] > value) \
        else stat['Minimum']
    stat['Maximum'] = value \
        if (isnull(stat['Maximum']) or stat['Maximum'] < value) \
        else stat['Maximum']

    # insert into DB
    stat_dict = {
        'SampleCount': {
            time_idx: stat['SampleCount']
        },
        'Sum': {
            time_idx: stat['Sum']
        },
        'Average': {
            time_idx: stat['Average']
        },
        'Minimum': {
            time_idx: stat['Minimum']
        },
        'Maximum': {
            time_idx: stat['Maximum']
        }
    }
    # Row expires when the minute itself falls out of the statistics window;
    # Cassandra rejects non-positive TTLs, hence the guard.
    ttl = self.cass.statistics_ttl - time_diff.total_seconds()
    self.updated_timestamp = utils.utcnow()
    if ttl > 0.1:
        self.cass.insert_stat(self.metric_key, stat_dict, ttl)
    else:
        LOG.debug("ttl must be positive, ttl %s", ttl)
    # NOTE(review): the lookup above uses the metric_key argument but the
    # writes use self.metric_key -- confirm these are always identical.
    self.cass.update_metric(self.metric_key,
                            {'updated_timestamp': self.updated_timestamp})
    LOG.info("metric data inserted %s, time_idx %s", str(self), time_idx)
def put_metric_data(self, metric_key, timestamp, value, unit=None):
    """Fold one metric sample into its minute-bucket statistics, persist
    them, and evaluate alarms for sufficiently recent samples.

    :param metric_key: key used to fetch existing statistics from the DB
    :param timestamp: datetime of the sample (truncated to the minute)
    :param value: sample value, converted to the metric's default unit
    :param unit: optional unit of *value* (see utils.to_default_unit)
    """

    def _missing(x):
        # math.isnan raises TypeError when given None, so treat None
        # explicitly as a missing value. (The previous version tried to
        # convert None to NaN with loops that only rebound their loop
        # variable -- a no-op that left the TypeError reachable.)
        return x is None or isnan(x)

    def get_stats(tmp_stat):
        # Map the raw DB row onto the statistics columns; a missing row
        # raises IndexError and yields an all-NaN row instead.
        try:
            ret = dict(zip(self.cass.STATISTICS,
                           map(lambda x: x.values()[0], tmp_stat)))
        except IndexError:
            storm.log("index %s is not in DB." % time_idx)
            ret = {'SampleCount': float('nan'),
                   'Sum': float('nan'),
                   'Average': float('nan'),
                   'Minimum': float('nan'),
                   'Maximum': float('nan')}
        return ret

    time_idx = timestamp.replace(second=0, microsecond=0)
    if timedelta(seconds=self.cass.STATISTICS_TTL) < (utils.utcnow() - time_idx):
        msg = "index %s is older than TTL. It doesn't need to insert DB"
        storm.log(msg % time_idx)
        return
    if time_idx not in self.df.index:
        self._reindex()
    value = utils.to_default_unit(value, unit)
    try:
        stat = self.df.ix[time_idx]
    except KeyError:
        stat = self.cass.get_metric_statistics_for_key(metric_key, time_idx)
        stat = get_stats(stat)

    # Merge the new sample into the running aggregates; _missing() marks a
    # column with no data yet for this minute.
    stat['SampleCount'] = 1.0 if _missing(stat['SampleCount']) \
        else stat['SampleCount'] + 1.0
    stat['Sum'] = value if _missing(stat['Sum']) \
        else stat['Sum'] + value
    stat['Average'] = stat['Sum'] / stat['SampleCount']
    stat['Minimum'] = value \
        if _missing(stat['Minimum']) or stat['Minimum'] > value \
        else stat['Minimum']
    stat['Maximum'] = value \
        if _missing(stat['Maximum']) or stat['Maximum'] < value \
        else stat['Maximum']

    # insert into DB
    stat_dict = {
        'SampleCount': {time_idx: stat['SampleCount']},
        'Sum': {time_idx: stat['Sum']},
        'Average': {time_idx: stat['Average']},
        'Minimum': {time_idx: stat['Minimum']},
        'Maximum': {time_idx: stat['Maximum']}
    }
    # NOTE(review): the lookup above uses the metric_key argument but the
    # insert uses self.metric_key -- confirm these are always identical.
    self.cass.insert_stat(self.metric_key, stat_dict)
    storm.log("metric data inserted %s" % (self.metric_key))
    # self.df.ix[time_idx] = stat

    # Only evaluate alarms for samples recent enough to affect them.
    now = utils.utcnow().replace(second=0, microsecond=0)
    timedelta_buf = now - time_idx
    if timedelta_buf <= timedelta(seconds=self.MAX_START_PERIOD):
        # check alarms
        self.check_alarms()