def test_metrics_stats_can_add_values():
    """Events added to MetricsStats land in per-timestep frames, bounds are tracked,
    and lookups outside any populated frame return an empty frame."""
    stats = MetricsStats()
    first_ts = trunc_ts(datetime.utcnow(), stats.timestep)
    second_ts = first_ts + stats.timestep
    hole_ts = second_ts + 3 * stats.timestep  # frames in between stay empty
    end_ts = hole_ts + stats.timestep

    # For each frame start, keep only the generated events that fall inside
    # that frame's [start, start + timestep) window.
    batches = {
        start: [
            ev
            for ev in generate_events(10, start)
            if ev.timestamp < start + stats.timestep
        ]
        for start in (first_ts, second_ts, hole_ts)
    }

    for batch in batches.values():
        for ev in batch:
            stats.add(ev.timestamp, ev.name, ev.value)

    assert stats.min_ts == first_ts
    assert stats.max_ts == end_ts
    assert sorted(ts for ts, _ in stats.frames()) == [first_ts, second_ts, hole_ts]

    # Each populated frame matches the expected aggregate of its batch.
    for start, batch in batches.items():
        assert stats.frame(start) == _metrics_stats_frame(batch)

    # Probes outside (or between) populated frames yield an empty frame;
    # probes inside a populated window resolve to that frame.
    assert stats.frame(end_ts) == MetricsStatsFrame()
    assert stats.frame(first_ts - 0.2 * stats.timestep) == MetricsStatsFrame()
    assert stats.frame(first_ts + 0.8 * stats.timestep) == _metrics_stats_frame(batches[first_ts])
    assert stats.frame(second_ts + 1.1 * stats.timestep) == MetricsStatsFrame()
    assert stats.frame(hole_ts - 0.1 * stats.timestep) == MetricsStatsFrame()
    assert stats.frame(end_ts + 4.2 * stats.timestep) == MetricsStatsFrame()
def test_metrics_stats_merge_all_should_not_alter_source_frames():
    """MetricsStats.merge_all must treat its input frames as read-only."""
    stats = MetricsStats()
    for event in generate_events(50):
        stats.add(event.timestamp, event.name, event.value)

    frames = [frame for _, frame in stats.frames()]
    snapshot = deepcopy(frames)

    # The merge result is discarded on purpose: only the absence of
    # mutation on the source frames is under test here.
    MetricsStats.merge_all(frames)

    assert frames == snapshot
def test_metrics_stats_total_is_merge_of_all_frames():
    """stats.total equals an empty frame merged with every stored frame."""
    stats = MetricsStats()
    for event in generate_events(50):
        stats.add(event.timestamp, event.name, event.value)

    # Rebuild the expected total by folding every frame into a fresh one.
    expected = MetricsStatsFrame()
    for _, frame in stats.frames():
        expected.merge(frame)

    assert stats.total == expected
def test_load_metrics_from_kv_store_can_load_all_values(storage):
    """Every event stored through the collector is loaded back unchanged."""
    step = timedelta(seconds=5)
    clock = MockTimestamp()
    collector = KvStoreMetricsCollector(storage, clock)
    expected = MetricsStats(step)

    # Store each event at its own timestamp and mirror it into the
    # expected stats aggregate.
    for event in generate_events(10):
        clock.value = event.timestamp
        collector.store_event(event.name, event.value)
        expected.add(event.timestamp, event.name, event.value)

    assert load_metrics_from_kv_store(storage, step=step) == expected
def test_load_metrics_from_kv_store_can_filter_values(storage):
    """Only events with min_ts <= timestamp <= max_ts are loaded back
    when explicit bounds are passed to load_metrics_from_kv_store."""
    events = generate_events(10)
    step = timedelta(seconds=3)
    clock = MockTimestamp()
    collector = KvStoreMetricsCollector(storage, clock)
    expected = MetricsStats(step)

    # Pick bounds that cut off roughly the first and last third of events.
    ordered = sorted(ev.timestamp for ev in events)
    lower = ordered[len(events) // 3]
    upper = ordered[2 * len(events) // 3]

    for ev in events:
        clock.value = ev.timestamp
        collector.store_event(ev.name, ev.value)
        if lower <= ev.timestamp <= upper:
            expected.add(ev.timestamp, ev.name, ev.value)

    assert load_metrics_from_kv_store(storage, lower, upper, step) == expected
def test_kv_store_metrics_collector_store_all_data_in_order(storage: KeyValueStorage):
    """The collector persists every event, keyed so that raw storage
    iteration yields them in timestamp order."""
    ts = MockTimestamp()
    metrics = KvStoreMetricsCollector(storage, ts)
    events = generate_events(10)
    for e in events:
        ts.value = e.timestamp
        # store_event, not add_event: consistent with the collector API
        # exercised by the other KvStoreMetricsCollector tests in this file.
        metrics.store_event(e.name, e.value)
    stored_events = [KvStoreMetricsFormat.decode(k, v) for k, v in storage.iterator()]
    # Check that all events are stored
    assert len(stored_events) == len(events)
    # Check that all events are stored in correct order
    assert sorted(stored_events, key=lambda v: v.timestamp) == stored_events
    # Check that all events stored were in source events
    for ev in stored_events:
        assert ev in events
    # Check that all source events are in stored events
    for ev in events:
        assert ev in stored_events