def test_creation_datetime(self):
    """SummarySampleQuery accepts datetimes or epoch millis for its time filters."""
    window_start = time_utils.now() - datetime.timedelta(weeks=1)
    window_end = time_utils.now() - datetime.timedelta(days=1)
    after = time_utils.now() - datetime.timedelta(hours=1)
    window_start_millis = time_utils.epoch_millis(window_start)
    window_end_millis = time_utils.epoch_millis(window_end)
    after_millis = time_utils.epoch_millis(after)

    expected = (window_start_millis, window_end_millis, after_millis)

    # both input forms must produce identical proto requests
    for start_arg, end_arg, after_arg in [
        (window_start, window_end, after),                        # as datetime
        (window_start_millis, window_end_millis, after_millis),   # as millis
    ]:
        sample_query = SummarySampleQuery(
            time_window_start=start_arg,
            time_window_end=end_arg,
            created_after=after_arg,
        )
        request_filter = sample_query._to_proto_request().filter
        actual = (
            request_filter.time_window_start_at_millis,
            request_filter.time_window_end_at_millis,
            request_filter.created_at_after_millis,
        )
        assert actual == expected
def test_creation_datetime(self, client, strs, created_entities):
    """Notification channels accept datetimes or epoch millis for timestamps."""
    strs = iter(strs)
    channels = client.operations.notification_channels

    created_at = time_utils.now() - datetime.timedelta(weeks=1)
    updated_at = time_utils.now() - datetime.timedelta(days=1)
    created_at_millis = time_utils.epoch_millis(created_at)
    updated_at_millis = time_utils.epoch_millis(updated_at)

    # both input forms must persist the same millisecond values
    for created_arg, updated_arg in [
        (created_at, updated_at),                # as datetime
        (created_at_millis, updated_at_millis),  # as millis
    ]:
        channel = channels.create(
            next(strs),
            SlackNotificationChannel(next(strs)),
            created_at=created_arg,
            updated_at=updated_arg,
        )
        created_entities.append(channel)
        assert channel._msg.created_at_millis == created_at_millis
        assert channel._msg.updated_at_millis == updated_at_millis
def test_creation_override_datetimes(self, summary, strs):
    """Alert creation honors the private timestamp-override parameters."""
    strs = iter(strs)
    alerter = FixedAlerter(comparison.GreaterThan(0.7))

    created_at = time_utils.now() - datetime.timedelta(weeks=1)
    updated_at = time_utils.now() - datetime.timedelta(days=1)
    last_evaluated_at = time_utils.now() - datetime.timedelta(hours=1)
    expected = {
        "created_at_millis": time_utils.epoch_millis(created_at),
        "updated_at_millis": time_utils.epoch_millis(updated_at),
        "last_evaluated_at_millis": time_utils.epoch_millis(last_evaluated_at),
    }

    # datetime objects and raw epoch millis must behave identically
    for created_arg, updated_arg, evaluated_arg in [
        (created_at, updated_at, last_evaluated_at),
        tuple(expected.values()),
    ]:
        alert = summary.alerts.create(
            next(strs),
            alerter,
            _created_at=created_arg,
            _updated_at=updated_arg,
            _last_evaluated_at=evaluated_arg,
        )
        for field, value in expected.items():
            assert getattr(alert._msg, field) == value
def log_sample(self, data, labels, time_window_start, time_window_end, created_at=None):
    """Log a summary sample for this summary.

    Parameters
    ----------
    data
        A :mod:`VertaDataType <verta.data_types>` consistent with the type of
        this summary.
    labels : dict of str to str
        A mapping between label keys and values.
    time_window_start : datetime.datetime or int
        Either a timezone aware datetime object or unix epoch milliseconds.
    time_window_end : datetime.datetime or int
        Either a timezone aware datetime object or unix epoch milliseconds.
    created_at : datetime.datetime or int, optional
        Either a timezone aware datetime object or unix epoch milliseconds.
        Defaults to now, but offered as a parameter to permit backfilling of
        summary samples.

    Returns
    -------
    :class:`~verta.monitoring.summaries.summary_sample.SummarySample`
        A persisted summary sample.

    Raises
    ------
    TypeError
        If ``data`` is not a supported VertaDataType, or its type string does
        not match this summary's declared type.
    """
    # validate the sample against this summary's declared type before persisting
    if not isinstance(data, data_types._VertaDataType):
        raise TypeError(
            "expected a supported VertaDataType, found {}".format(
                type(data)))
    if data._type_string() != self.type:
        raise TypeError("expected a {}, found {}".format(
            self.type, data._type_string()))
    if not created_at:
        created_at = time_utils.now()
    content = json.dumps(data._as_dict())
    # all timestamps are sent over the wire as unix epoch milliseconds
    created_at_millis = time_utils.epoch_millis(created_at)
    window_start_millis = time_utils.epoch_millis(time_window_start)
    window_end_millis = time_utils.epoch_millis(time_window_end)
    msg = CreateSummarySample(
        summary_id=self.id,
        summary_type_name=data._type_string(),
        content=content,
        labels=labels,
        created_at_millis=created_at_millis,
        time_window_start_at_millis=window_start_millis,
        time_window_end_at_millis=window_end_millis,
    )
    endpoint = "/api/v1/summaries/createSample"
    response = self._conn.make_proto_request("POST", endpoint, body=msg)
    result_msg = self._conn.must_proto_response(response, SummarySampleProto)
    return SummarySample(self._conn, self._conf, result_msg)
def log_sample(
    self, data, labels, time_window_start, time_window_end, created_at=None
):
    """Log a summary sample for this summary.

    Parameters
    ----------
    data
        A :mod:`VertaDataType <verta.data_types>` consistent with the type of
        this summary.
    labels : dict of str to str
        A mapping between label keys and values.
    time_window_start : datetime.datetime or int
        Either a timezone aware datetime object or unix epoch milliseconds.
    time_window_end : datetime.datetime or int
        Either a timezone aware datetime object or unix epoch milliseconds.
    created_at : datetime.datetime or int, optional
        Either a timezone aware datetime object or unix epoch milliseconds.
        Defaults to now, but offered as a parameter to permit backfilling of
        summary samples.

    Returns
    -------
    :class:`~verta.monitoring.summaries.summary_sample.SummarySample`
        A persisted summary sample.

    Raises
    ------
    TypeError
        If ``data`` is not a supported VertaDataType, or its type string does
        not match this summary's declared type.
    """
    # validate the sample against this summary's declared type before persisting
    if not isinstance(data, data_types._VertaDataType):
        raise TypeError(
            "expected a supported VertaDataType, found {}".format(type(data))
        )
    if data._type_string() != self.type:
        raise TypeError(
            "expected a {}, found {}".format(self.type, data._type_string())
        )
    if not created_at:
        created_at = time_utils.now()
    content = json.dumps(data._as_dict())
    # all timestamps are sent over the wire as unix epoch milliseconds
    created_at_millis = time_utils.epoch_millis(created_at)
    window_start_millis = time_utils.epoch_millis(time_window_start)
    window_end_millis = time_utils.epoch_millis(time_window_end)
    msg = CreateSummarySample(
        summary_id=self.id,
        summary_type_name=data._type_string(),
        content=content,
        labels=labels,
        created_at_millis=created_at_millis,
        time_window_start_at_millis=window_start_millis,
        time_window_end_at_millis=window_end_millis,
    )
    endpoint = "/api/v1/summaries/createSample"
    response = self._conn.make_proto_request("POST", endpoint, body=msg)
    result_msg = self._conn.must_proto_response(response, SummarySampleProto)
    return SummarySample(self._conn, self._conf, result_msg)
def _update_last_evaluated_at(self, last_evaluated_at=None):
    """Persist a new ``last_evaluated_at`` timestamp (defaults to now)."""
    when = time_utils.now() if last_evaluated_at is None else last_evaluated_at
    self._update(
        _AlertService.Alert(
            last_evaluated_at_millis=time_utils.epoch_millis(when),
        )
    )
def summary_sample(client, summary):
    """Log and return a numeric sample covering the past hour."""
    window_end = time_utils.now()
    window_start = window_end - datetime.timedelta(hours=1)
    return summary.log_sample(
        data_types.NumericValue(3),
        {"foo": "bar"},
        window_start,
        window_end,
    )
def test_creation_datetime(self, monitored_entity, strs, created_entities):
    """Alert creation accepts datetimes or epoch millis for its timestamps."""
    strs = iter(strs)
    alerts = monitored_entity.alerts
    alerter = FixedAlerter(comparison.GreaterThan(0.7))
    sample_query = SummarySampleQuery()

    created_at = time_utils.now() - datetime.timedelta(weeks=1)
    updated_at = time_utils.now() - datetime.timedelta(days=1)
    last_evaluated_at = time_utils.now() - datetime.timedelta(hours=1)
    expected = {
        "created_at_millis": time_utils.epoch_millis(created_at),
        "updated_at_millis": time_utils.epoch_millis(updated_at),
        "last_evaluated_at_millis": time_utils.epoch_millis(last_evaluated_at),
    }

    # datetime objects and raw epoch millis must persist identically
    for created_arg, updated_arg, evaluated_arg in [
        (created_at, updated_at, last_evaluated_at),
        tuple(expected.values()),
    ]:
        alert = alerts.create(
            next(strs),
            alerter,
            sample_query,
            created_at=created_arg,
            updated_at=updated_arg,
            last_evaluated_at=evaluated_arg,
        )
        created_entities.append(alert)
        for field, value in expected.items():
            assert getattr(alert._msg, field) == value
def summary_sample(client, monitored_entity, created_entities):
    """Yield a logged numeric sample; delete its parent summary afterwards."""
    summaries = client.operations.summaries
    summary = summaries.create(
        _utils.generate_default_name(),
        data_types.NumericValue,
        monitored_entity,
    )
    window_end = time_utils.now()
    window_start = window_end - datetime.timedelta(hours=1)
    yield summary.log_sample(
        data_types.NumericValue(3),
        {"foo": "bar"},
        window_start,
        window_end,
    )

    # TODO: use `created_entities` if/when Summary reimplements delete()
    summaries.delete([summary])
def test_update_last_evaluated_at(self, summary):
    """_update_last_evaluated_at() advances the timestamp; an explicit one is stored.

    The explicit-timestamp assertion compares against a second-rounded value,
    matching the sibling tests of this helper, because the backend rounds
    stored timestamps to the nearest second.
    """
    name = _utils.generate_default_name()
    alerter = FixedAlerter(comparison.GreaterThan(0.7))
    alert = summary.alerts.create(name, alerter)
    alert._fetch_with_no_cache()
    initial = alert._msg.last_evaluated_at_millis

    # default argument: bumps the timestamp to "now"
    alert._update_last_evaluated_at()
    alert._fetch_with_no_cache()
    assert alert._msg.last_evaluated_at_millis > initial

    # explicit argument: stored as-is (modulo backend second-rounding)
    yesterday = time_utils.now() - datetime.timedelta(days=1)
    yesterday_millis = time_utils.epoch_millis(yesterday)
    # TODO: remove following line when backend stops round to nearest sec
    yesterday_millis = round(yesterday_millis, -3)
    alert._update_last_evaluated_at(yesterday)
    alert._fetch_with_no_cache()
    assert alert._msg.last_evaluated_at_millis == yesterday_millis
class TestNumericSummarySamples:
    """Tests for logging, finding, and aggregating numeric summary samples."""

    values = list(range(1, 5))
    # NOTE: a list, not a bare `map()` iterator — an iterator stored as a class
    # attribute would be silently exhausted after its first full iteration.
    numeric_values = [data_types.NumericValue(value) for value in values]
    now = time_utils.now()
    yesterday = now - timedelta(days=1)
    labels = {}

    @pytest.fixture(scope="class")
    def summary_entity(self, class_client):
        return class_client.monitoring.get_or_create_monitored_entity()

    @pytest.fixture(scope="class")
    def numeric_summary(self, class_client, summary_entity):
        return class_client.monitoring.summaries.create(
            "test_numeric_summary", data_types.NumericValue, summary_entity)

    @pytest.fixture(scope="class")
    def numeric_samples(self, class_client, numeric_summary):
        """Log one sample per value, all over the same one-day window."""
        created = []
        for numeric in self.numeric_values:
            logged = numeric_summary.log_sample(
                numeric, self.labels, self.yesterday, self.now)
            created.append(logged)
        return created

    def test_find_summary_samples(self, class_client, numeric_summary,
                                  numeric_samples):
        """Every sample logged by the fixture is returned by find_samples()."""
        found_samples = numeric_summary.find_samples()
        assert len(found_samples) == len(self.values)
        created_ids = [sample.id for sample in numeric_samples]
        found_ids = [sample.id for sample in found_samples]
        for id in found_ids:
            assert id in created_ids

    def test_aggregate_summary_samples(self, class_client, numeric_summary,
                                       numeric_samples):
        """A one-day sum aggregation collapses the samples into a single total."""
        found_samples = numeric_summary.find_samples(
            SummarySampleQuery(aggregation=Aggregation("1d", "sum")))
        assert len(found_samples) == 1
        aggregated_sample = found_samples[0]
        assert aggregated_sample.content == data_types.NumericValue(
            sum(self.values))
def test_update_last_evaluated_at(self, summary):
    """Updating with no argument bumps the timestamp; an explicit one is stored."""
    alert = summary.alerts.create(
        _utils.generate_default_name(),
        FixedAlerter(comparison.GreaterThan(0.7)),
    )
    alert._fetch_with_no_cache()
    before_update = alert._msg.last_evaluated_at_millis

    alert._update_last_evaluated_at()
    alert._fetch_with_no_cache()
    assert alert._msg.last_evaluated_at_millis > before_update

    yesterday = time_utils.now() - datetime.timedelta(days=1)
    yesterday_millis = time_utils.epoch_millis(yesterday)
    # TODO: remove following line when backend stops round to nearest sec
    yesterday_millis = round(yesterday_millis, -3)
    alert._update_last_evaluated_at(yesterday)
    alert._fetch_with_no_cache()
    assert alert._msg.last_evaluated_at_millis == yesterday_millis
def test_update_last_evaluated_at(self, monitored_entity, created_entities):
    """Same contract as the summary-level test, for alerts on a monitored entity."""
    alert = monitored_entity.alerts.create(
        _utils.generate_default_name(),
        FixedAlerter(comparison.GreaterThan(0.7)),
        SummarySampleQuery(),
    )
    created_entities.append(alert)
    alert._fetch_with_no_cache()
    before_update = alert._msg.last_evaluated_at_millis

    alert._update_last_evaluated_at()
    alert._fetch_with_no_cache()
    assert alert._msg.last_evaluated_at_millis > before_update

    yesterday = time_utils.now() - datetime.timedelta(days=1)
    yesterday_millis = time_utils.epoch_millis(yesterday)
    # TODO: remove following line when backend stops round to nearest sec
    yesterday_millis = round(yesterday_millis, -3)
    alert._update_last_evaluated_at(yesterday)
    alert._fetch_with_no_cache()
    assert alert._msg.last_evaluated_at_millis == yesterday_millis
def test_summary_labels(self, client):
    """End-to-end check of summary creation, sample logging, and label queries."""
    pytest.importorskip("scipy")
    summaries = client.operations.summaries
    monitored_entity = client.operations.get_or_create_monitored_entity()

    summary = summaries.create(
        "summary_v2_{}".format(generate_default_name()),
        data_types.DiscreteHistogram,
        monitored_entity,
    )
    assert isinstance(summary, Summary)

    summaries_for_monitored_entity = SummaryQuery(
        monitored_entities=[monitored_entity])
    retrieved_summaries = summaries.find(summaries_for_monitored_entity)
    assert len(retrieved_summaries) > 0
    assert all(isinstance(s, Summary) for s in retrieved_summaries)

    now = time_utils.now()
    yesterday = now - timedelta(days=1)

    # a sample matching the summary's declared type is accepted
    summary_sample = summary.log_sample(
        data_types.DiscreteHistogram(
            buckets=["hotdog", "not hotdog"], data=[100, 20]),
        labels={"env": "test", "color": "blue"},
        time_window_start=yesterday,
        time_window_end=now,
    )
    assert isinstance(summary_sample, SummarySample)

    # a sample of any other type is rejected
    float_histogram = data_types.FloatHistogram(
        bucket_limits=[1, 13, 25, 37, 49, 61],
        data=[15, 53, 91, 34, 7],
    )
    with pytest.raises(TypeError):
        summary.log_sample(
            float_histogram,
            labels={"env": "test", "color": "red"},
            time_window_start=yesterday,
            time_window_end=now,
        )

    labels = client.operations.labels
    retrieved_label_keys = labels.find_keys(
        summary_query=summaries_for_monitored_entity)
    assert len(retrieved_label_keys) > 0
    if retrieved_label_keys:
        retrieved_labels = labels.find_values(
            summary_query=summaries_for_monitored_entity,
            keys=retrieved_label_keys)
        for key in retrieved_label_keys:
            assert key in retrieved_labels

    # only the successfully-logged (blue) sample exists
    assert len(summary.find_samples()) == 1
    blue_samples = summary.find_samples(
        SummarySampleQuery(labels={"color": ["blue"]}),
    )
    assert len(blue_samples) == 1