def test_get_stats_second_times_on_a_stat_buffer_with_aggregation_periode_of_1_while_3_measure_where_append_on_2_seconds_return_None():
    buffer = StatBuffer(1)
    buffer.append(M1, 'ab')
    buffer.append(M2, 'ab')
    buffer.append(M3, 'ab')
    buffer.get_stats('ab')
    assert buffer.get_stats('ab') is None


def test_get_stats_second_times_on_a_stat_buffer_with_aggregation_periode_of_1_while_4_measure_where_append_on_2_seconds_return_good_result_for_two_last_measure():
    buffer = StatBuffer(1)
    buffer.append(M1, 'ab')
    buffer.append(M2, 'ab')
    buffer.append(M3, 'ab')
    buffer.append(M4, 'ab')
    buffer.get_stats('ab')
    assert buffer.get_stats('ab') == {
        'mean': 3.5,
        'std': 0.5,
        'min': 3.0,
        'max': 4.0,
        'tags': {'t1': 'a', 't2': 'b'},
        'time': 4
    }


def test_get_stats_on_a_stat_buffer_with_aggregation_periode_of_1_while_3_measure_where_append_on_2_seconds_return_stats_on_two_first_results():
    buffer = StatBuffer(1)
    buffer.append(M1, 'ab')
    buffer.append(M2, 'ab')
    buffer.append(M3, 'ab')
    assert buffer.get_stats('ab') == {
        'mean': 1.5,
        'std': 0.5,
        'min': 1.0,
        'max': 2.0,
        'tags': {'t1': 'a', 't2': 'b'},
        'time': 2
    }


def test_get_stats_on_a_key_that_was_never_append_must_raise_KeyError():
    buffer = StatBuffer(3)
    buffer.append(M2, 'ab')
    with pytest.raises(KeyError):
        buffer.get_stats('qlksjdq')
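# The tests above rely on measure fixtures M1..M4 defined elsewhere in the test module.
# A minimal sketch of what they are assumed to look like, consistent with the expected
# statistics (values 1.0..4.0 at timestamps 1..4, all sharing the same tags); the exact
# field names are an assumption, not taken from this file:
#
#     M1 = {'tags': {'t1': 'a', 't2': 'b'}, 'time': 1, 'value': 1.0}
#     M2 = {'tags': {'t1': 'a', 't2': 'b'}, 'time': 2, 'value': 2.0}
#     M3 = {'tags': {'t1': 'a', 't2': 'b'}, 'time': 3, 'value': 3.0}
#     M4 = {'tags': {'t1': 'a', 't2': 'b'}, 'time': 4, 'value': 4.0}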
class PrometheusDB(BasePrometheusDB):
    """
    Database that exposes received data as metrics so that they can be scraped by a Prometheus instance

    Can only be used with a pusher actor
    """

    def __init__(self, report_type: Type[Report], port: int, address: str, metric_name: str,
                 metric_description: str, aggregation_periode: int, tags: List[str]):
        """
        :param address: address that exposes the metric
        :param port: port on which the metric is exposed
        :param metric_name: name of the exposed metric
        :param metric_description: short sentence that describes the metric
        :param aggregation_periode: number of seconds during which values are aggregated before statistics are computed on them
        :param tags: metadata used to tag the metric
        """
        BasePrometheusDB.__init__(self, report_type, port, address, metric_name, metric_description, tags)
        self.aggregation_periode = aggregation_periode
        self.final_tags = ['sensor', 'target'] + tags

        self.mean_metric = None
        self.std_metric = None
        self.min_metric = None
        self.max_metric = None

        self.exposed_measure = {}
        self.measure_for_current_period = {}
        self.current_period_end = 0
        self.buffer = StatBuffer(aggregation_periode)

    def __iter__(self):
        raise NotImplementedError()

    def _init_metrics(self):
        self.mean_metric = Gauge(self.metric_name + '_mean', self.metric_description + '(MEAN)', self.final_tags)
        self.std_metric = Gauge(self.metric_name + '_std', self.metric_description + '(STD)', self.final_tags)
        self.min_metric = Gauge(self.metric_name + '_min', self.metric_description + '(MIN)', self.final_tags)
        self.max_metric = Gauge(self.metric_name + '_max', self.metric_description + '(MAX)', self.final_tags)

    def _expose_data(self, key):
        aggregated_value = self.buffer.get_stats(key)
        if aggregated_value is None:
            return

        kwargs = {label: aggregated_value['tags'][label] for label in self.final_tags}
        try:
            self.mean_metric.labels(**kwargs).set(aggregated_value['mean'])
            self.std_metric.labels(**kwargs).set(aggregated_value['std'])
            self.min_metric.labels(**kwargs).set(aggregated_value['min'])
            self.max_metric.labels(**kwargs).set(aggregated_value['max'])
        except TypeError:
            # fall back to passing the label dict positionally if keyword labels are rejected
            self.mean_metric.labels(kwargs).set(aggregated_value['mean'])
            self.std_metric.labels(kwargs).set(aggregated_value['std'])
            self.min_metric.labels(kwargs).set(aggregated_value['min'])
            self.max_metric.labels(kwargs).set(aggregated_value['max'])

    def _report_to_measure_and_key(self, report):
        value = self.report_type.to_prometheus(report, self.tags)
        key = ''.join([str(value['tags'][tag]) for tag in self.final_tags])
        return key, value

    def _update_exposed_measure(self):
        # stop exposing measures whose key did not appear during the period that just ended
        updated_exposed_measure = {}
        for key in self.exposed_measure:
            if key not in self.measure_for_current_period:
                args = self.exposed_measure[key]
                self.mean_metric.remove(*args)
                self.std_metric.remove(*args)
                self.min_metric.remove(*args)
                self.max_metric.remove(*args)
            else:
                updated_exposed_measure[key] = self.exposed_measure[key]
        self.exposed_measure = updated_exposed_measure

    def _append_measure_from_old_period_to_buffer_and_expose_data(self):
        for old_key, old_measure_list in self.measure_for_current_period.items():
            for old_measure in old_measure_list:
                self.buffer.append(old_measure, old_key)
            self._expose_data(old_key)

    def _reinit_period(self, new_measure_time):
        self.current_period_end = new_measure_time + self.aggregation_periode
        self.measure_for_current_period = {}

    def save(self, report: Report):
        """
        Override from BaseDB

        :param report: Report to save
        """
        key, measure = self._report_to_measure_and_key(report)
        if measure['time'] > self.current_period_end:
            # the aggregation period is over: flush buffered measures, refresh the
            # exposed gauges and start a new period
            self._append_measure_from_old_period_to_buffer_and_expose_data()
            self._update_exposed_measure()
            self._reinit_period(measure['time'])

        if key not in self.exposed_measure:
            args = [measure['tags'][label] for label in self.final_tags]
            self.exposed_measure[key] = args

        if key not in self.measure_for_current_period:
            self.measure_for_current_period[key] = []
        self.measure_for_current_period[key].append(measure)

    def save_many(self, reports: List[Report]):
        """
        Save a batch of data

        :param reports: Batch of data.
        """
        for report in reports:
            self.save(report)
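# A minimal usage sketch of the class above, assuming that BasePrometheusDB.connect()
# starts the HTTP endpoint and calls _init_metrics(), and that a PowerReport-like report
# type provides the to_prometheus() conversion used by _report_to_measure_and_key().
# PowerReport and the argument values below are illustrative assumptions, not taken
# from this file:
#
#     db = PrometheusDB(report_type=PowerReport, port=8080, address='127.0.0.1',
#                       metric_name='power_consumption',
#                       metric_description='power consumption of the monitored targets',
#                       aggregation_periode=15, tags=['socket'])
#     db.connect()     # expose the *_mean/_std/_min/_max gauges over HTTP
#     db.save(report)  # measures are grouped per aggregation period, pushed to the
#                      # StatBuffer and reflected in the gauges once the period is over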
class PrometheusDB(BaseDB):
    """
    Database that exposes received data as metrics so that they can be scraped by a Prometheus instance

    Can only be used with a pusher actor
    """

    def __init__(self, port: int, address: str, metric_name: str,
                 metric_description: str, report_model: ReportModel, aggregation_periode: int):
        """
        :param address: address that exposes the metric
        :param port: port on which the metric is exposed
        :param metric_name: name of the exposed metric
        :param metric_description: short sentence that describes the metric
        :param report_model: model describing the received report
        :param aggregation_periode: number of seconds during which values are aggregated before statistics are computed on them
        """
        BaseDB.__init__(self)
        self.address = address
        self.port = port
        self.metric_name = metric_name
        self.metric_description = metric_description
        self.report_model = report_model

        self.mean_metric = None
        self.std_metric = None
        self.buffer = StatBuffer(aggregation_periode)

    def connect(self):
        """
        Start an HTTP server exposing one metric
        """
        self.mean_metric = Gauge(self.metric_name + '_mean', self.metric_description + '(MEAN)',
                                 self.report_model.get_tags())
        self.std_metric = Gauge(self.metric_name + '_std', self.metric_description + '(STD)',
                                self.report_model.get_tags())
        start_http_server(self.port, addr=self.address)

    def _expose_data(self, key):
        aggregated_value = self.buffer.get_stats(key)
        if aggregated_value is None:
            return

        kwargs = {label: aggregated_value['tags'][label] for label in self.report_model.get_tags()}
        self.mean_metric.labels(**kwargs).set(aggregated_value['mean'])
        self.std_metric.labels(**kwargs).set(aggregated_value['std'])

    def save(self, report: Report, report_model: ReportModel):
        """
        Override from BaseDB

        :param report: Report to save
        :param report_model: ReportModel
        """
        value = report_model.to_prometheus(report.serialize())
        key = ''.join([value['tags'][tag] for tag in self.report_model.get_tags()])
        self.buffer.append(value, key)
        self._expose_data(key)

    def save_many(self, reports: List[Report], report_model: ReportModel):
        """
        Save a batch of data

        :param reports: Batch of data.
        :param report_model: ReportModel
        """
        # all reports in the batch are assumed to share the tag key computed from the first report
        value = report_model.to_prometheus(reports[0].serialize())
        key = ''.join([value['tags'][tag] for tag in self.report_model.get_tags()])
        for report in reports:
            value = report_model.to_prometheus(report.serialize())
            self.buffer.append(value, key)
        self._expose_data(key)
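# Minimal usage sketch for this BaseDB variant, which only exposes mean and std gauges
# and labels them with the tags returned by the report model. PowerReportModel and the
# argument values below are illustrative assumptions, not taken from this file:
#
#     model = PowerReportModel()
#     db = PrometheusDB(port=8080, address='127.0.0.1',
#                       metric_name='power_consumption',
#                       metric_description='power consumption of the monitored targets',
#                       report_model=model, aggregation_periode=15)
#     db.connect()            # create the gauges and start the HTTP server
#     db.save(report, model)  # append the converted report to the StatBuffer and refresh the gauges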