  def checkStats(self, metricName, mn, mx):
    """Check that stats are computed correctly from the database"""
    engine = repository.engineFactory()
    with engine.begin() as conn:
      metricObj = (
        repository.getCustomMetricByName(conn,
                                         metricName,
                                         fields=[schema.metric.c.uid,
                                                 schema.metric.c.parameters]))
      stats = repository.getMetricStats(conn, metricObj.uid)

    self.assertSetEqual(set(stats.keys()), set(("min", "max")))
    self.assertAlmostEqual(stats["min"], mn)
    self.assertAlmostEqual(stats["max"], mx)
def getStatistics(metric):
  """Get aggregate statistics for an Autostack metric.

  The metric must belong to an Autostack or a ValueError will be raised. If AWS
  returns no stats and there is no data in the database, then an
  ObjectNotFoundError will be raised.

  :param metric: the Autostack metric to get statistics for
  :type metric: TODO

  :returns: metric statistics
  :rtype: dict {"min": minVal, "max": maxVal}

  :raises: ValueError if the metric doesn't belong to an Autostack

  :raises: htm-it.app.exceptions.ObjectNotFoundError if the metric or the
    corresponding autostack doesn't exist; this may happen if it got deleted by
    another process in the meantime.

  :raises: htm-it.app.exceptions.MetricStatisticsNotReadyError if there are no
    or insufficient samples at this time; this may also happen if the metric and
    its data were deleted by another process in the meantime
  """
  engine = repository.engineFactory()

  if metric.datasource != "autostack":
    raise ValueError(
      "Metric must belong to an Autostack but has datasource=%r"
      % metric.datasource)

  metricGetter = EC2InstanceMetricGetter()
  try:
    with engine.connect() as conn:
      autostack = repository.getAutostackFromMetric(conn, metric.uid)
      instanceMetricList = metricGetter.collectMetricStatistics(autostack,
                                                                metric)
  finally:
    metricGetter.close()

  n = 0
  mins = 0.0
  maxs = 0.0
  for instanceMetric in instanceMetricList:
    assert len(instanceMetric.records) == 1
    metricRecord = instanceMetric.records[0]
    stats = metricRecord.value

    if (not isinstance(stats["min"], numbers.Number) or
        math.isnan(stats["min"]) or
        not isinstance(stats["max"], numbers.Number) or
        math.isnan(stats["max"])):
      # CloudWatch gave us bogus data for this metric, so exclude it
      continue

    mins += stats["min"]
    maxs += stats["max"]
    n += 1

  if n == 0:
    # Fall back to metric_data when we don't get anything from AWS. This may
    # raise a MetricStatisticsNotReadyError if there is no data or not enough
    # data.
    with engine.connect() as conn:
      dbStats = repository.getMetricStats(conn, metric.uid)
    minVal = dbStats["min"]
    maxVal = dbStats["max"]
  else:
    minVal = mins / n
    maxVal = maxs / n

  # Add a 20% buffer on each side of the range
  buff = (maxVal - minVal) * 0.2
  minVal -= buff
  maxVal += buff

  return {"min": minVal, "max": maxVal}
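

# A minimal usage sketch (not part of the original module): it shows how a
# caller might consume getStatistics() and handle the exceptions documented in
# its docstring. The import path for the exception classes and the helper name
# getChartRange are illustrative assumptions, not confirmed APIs.
from htm.it.app.exceptions import (MetricStatisticsNotReadyError,
                                   ObjectNotFoundError)


def getChartRange(metric):
  """Return a (min, max) display range for an Autostack metric, or None if
  statistics are not available yet.
  """
  try:
    stats = getStatistics(metric)
  except (ObjectNotFoundError, MetricStatisticsNotReadyError):
    # Metric/autostack was deleted concurrently, or there aren't enough samples
    return None

  return (stats["min"], stats["max"])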