def test_get_durations_for_project_multi_metric(
    store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster
) -> None:
    redis_cluster.set("symbolicate_event_low_priority:counter:10:42:111", 0)
    redis_cluster.hset("symbolicate_event_low_priority:histogram:10:42:222", 0, 123)

    durations = store.get_durations_for_project(42)

    assert list(durations) == [
        DurationHistogram(timestamp=222, histogram=BucketedDurations({0: 123}))
    ]


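# Key layout exercised throughout these tests, as inferred from the data the tests
# seed (not taken from the store implementation itself):
#   symbolicate_event_low_priority:counter:<bucket_size>:<project_id>:<timestamp>
#       plain string counter, seeded with SET and read back with GET
#   symbolicate_event_low_priority:duration:<bucket_size>:<project_id>:<timestamp>
#       hash mapping a duration-bucket lower bound to a count, written with HSET
#   store.symbolicate-event-lpq-selected
#       Redis set holding the project ids currently in the low-priority queue
# Some tests additionally seed "histogram:"-prefixed keys of the same shape.

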
def test_get_durations_for_project_missing_project(
    store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster
) -> None:
    redis_cluster.hset("symbolicate_event_low_priority:duration:10:53:111", 0, 123)

    durations = store.get_durations_for_project(project_id=42, timestamp=113)

    for hist in durations.histograms:
        assert hist.total_count() == 0


def test_get_counts_for_project_empty(
    store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster
) -> None:
    redis_cluster.set("symbolicate_event_low_priority:counter:10:42:111", 0)
    redis_cluster.delete("symbolicate_event_low_priority:counter:10:42:111")

    counts = store.get_counts_for_project(42)

    assert list(counts) == []


def test_get_counts_for_project_multi_metric(
    store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster
) -> None:
    redis_cluster.set("symbolicate_event_low_priority:counter:10:42:111", 0)
    redis_cluster.hset("symbolicate_event_low_priority:histogram:10:42:222:0", 0, 123)

    counts = store.get_counts_for_project(42)

    assert list(counts) == [
        BucketedCount(timestamp=111, count=0),
    ]


def test_remove_projects_from_lpq_all_members(
    store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster
) -> None:
    redis_cluster.sadd("store.symbolicate-event-lpq-selected", 1)
    redis_cluster.sadd("store.symbolicate-event-lpq-selected", 11)

    removed = store.remove_projects_from_lpq({1, 11})
    assert removed == 2

    remaining = redis_cluster.smembers("store.symbolicate-event-lpq-selected")
    assert remaining == set()


def test_get_durations_for_project_different_buckets(
    store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster
) -> None:
    redis_cluster.hset("symbolicate_event_low_priority:histogram:10:42:111", 0, 123)
    redis_cluster.hset("symbolicate_event_low_priority:histogram:5:42:111", 20, 456)

    durations = store.get_durations_for_project(42)

    assert list(durations) == [
        DurationHistogram(timestamp=111, histogram=BucketedDurations({0: 123}))
    ]


def test_get_durations_for_project_no_matching_keys(
    store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster
) -> None:
    redis_cluster.hset("symbolicate_event_low_priority:duration:10:53:111", 0, 123)

    durations = store.get_durations_for_project(42, 113)

    assert list(durations) == [
        DurationHistogram(timestamp=110, histogram=BucketedDurations(empty_histogram()))
    ]


def test_get_durations_for_project_different_bucket_sizes(
    store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster
) -> None:
    redis_cluster.hset("symbolicate_event_low_priority:duration:10:42:110", 0, 123)
    redis_cluster.hset("symbolicate_event_low_priority:duration:5:42:110", 20, 456)

    durations = store.get_durations_for_project(42, 113)

    # Only the entry under the store's 10-second bucket size should be counted;
    # the 5-second key must not contribute to the total.
    total = sum(h.total_count() for h in durations.histograms)
    assert total == 123


def test_get_durations_for_project_negative_count(
    store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster
) -> None:
    redis_cluster.hset("symbolicate_event_low_priority:duration:10:42:110", 0, -123)

    histogram = empty_histogram()
    histogram[0] = -123

    durations = store.get_durations_for_project(42, 113)

    assert list(durations) == [
        DurationHistogram(timestamp=110, histogram=BucketedDurations(histogram))
    ]


def test_increment_project_event_counter_same_bucket(
    store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster
) -> None:
    store.increment_project_event_counter(17, 1147)
    time.sleep(0.2)
    store.increment_project_event_counter(17, 1149)

    assert redis_cluster.get("symbolicate_event_low_priority:counter:10:17:1140") == "2"

    time.sleep(0.3)
    # the second insert should have refreshed the ttl
    assert redis_cluster.get("symbolicate_event_low_priority:counter:10:17:1140") == "2"

    time.sleep(0.2)
    # it should have expired by now
    assert redis_cluster.get("symbolicate_event_low_priority:counter:10:17:1140") is None


def test_get_durations_for_projects_with_gap(
    store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster
) -> None:
    store._duration_time_window = 40

    redis_cluster.hset("symbolicate_event_low_priority:duration:10:42:110", 20, 3)
    redis_cluster.hset("symbolicate_event_low_priority:duration:10:42:150", 30, 17)

    durations = store.get_durations_for_project(42, 154)

    assert durations.histograms[-1].total_count() == 17
    assert durations.histograms[-2].total_count() == 0
    assert durations.histograms[-3].total_count() == 0
    assert durations.histograms[-4].total_count() == 0
    assert durations.histograms[-5].total_count() == 3


def test_get_counts_for_projects_with_gap(
    store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster
) -> None:
    store._counter_time_window = 40

    redis_cluster.set("symbolicate_event_low_priority:counter:10:42:110", 3)
    redis_cluster.set("symbolicate_event_low_priority:counter:10:42:150", 17)

    counts = store.get_counts_for_project(42, 154)

    assert list(counts) == [
        BucketedCount(timestamp=110, count=3),
        BucketedCount(timestamp=120, count=0),
        BucketedCount(timestamp=130, count=0),
        BucketedCount(timestamp=140, count=0),
        BucketedCount(timestamp=150, count=17),
    ]


def test_get_counts_for_projects_with_gap(
    store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster
) -> None:
    store._counter_time_window = 40

    redis_cluster.set("symbolicate_event_low_priority:counter:10:42:110", 3)
    redis_cluster.set("symbolicate_event_low_priority:counter:10:42:150", 17)

    buckets = store.get_counts_for_project(project_id=42, timestamp=154)

    assert buckets.total_count() == 20
    assert buckets.counts[-1] == 17
    assert buckets.counts[-2] == 0
    assert buckets.counts[-3] == 0
    assert buckets.counts[-4] == 0
    assert buckets.counts[-5] == 3


def test_increment_project_event_counter_same_bucket(
    store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster
) -> None:
    store.increment_project_event_counter(17, 1147)
    store.increment_project_event_counter(17, 1149)

    assert redis_cluster.get("symbolicate_event_low_priority:counter:10:17:1140") == "2"


def test_get_durations_for_project_multi_metric(
    store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster
) -> None:
    redis_cluster.set("symbolicate_event_low_priority:counter:10:42:110", 0)
    redis_cluster.hset("symbolicate_event_low_priority:duration:10:42:220", 0, 123)

    histogram = empty_histogram()
    histogram[0] = 123

    durations = store.get_durations_for_project(42, 225)

    assert list(durations) == [
        DurationHistogram(timestamp=220, histogram=BucketedDurations(histogram))
    ]


def test_get_counts_for_project_empty(
    store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster
) -> None:
    redis_cluster.set("symbolicate_event_low_priority:counter:10:42:111", 0)
    redis_cluster.delete("symbolicate_event_low_priority:counter:10:42:111")

    counts = store.get_counts_for_project(42, 113)
    assert list(counts) == [BucketedCount(timestamp=110, count=0)]

    store._counter_time_window = 20

    counts = store.get_counts_for_project(42, 113)
    assert list(counts) == [
        BucketedCount(timestamp=90, count=0),
        BucketedCount(timestamp=100, count=0),
        BucketedCount(timestamp=110, count=0),
    ]


def test_get_durations_for_projects_with_gap(
    store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster
) -> None:
    store._duration_time_window = 40

    redis_cluster.hset("symbolicate_event_low_priority:duration:10:42:110", 20, 3)
    redis_cluster.hset("symbolicate_event_low_priority:duration:10:42:150", 30, 17)

    hist1 = empty_histogram()
    hist1[20] = 3
    hist2 = empty_histogram()
    hist2[30] = 17

    durations = store.get_durations_for_project(42, 154)

    assert list(durations) == [
        DurationHistogram(timestamp=110, histogram=hist1),
        DurationHistogram(timestamp=120, histogram=empty_histogram()),
        DurationHistogram(timestamp=130, histogram=empty_histogram()),
        DurationHistogram(timestamp=140, histogram=empty_histogram()),
        DurationHistogram(timestamp=150, histogram=hist2),
    ]


def test_get_durations_for_project_multi_key_multi_durations(
    store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster
) -> None:
    redis_cluster.hset("symbolicate_event_low_priority:duration:10:42:110", 0, 123)
    redis_cluster.hset("symbolicate_event_low_priority:duration:10:42:110", 10, 456)
    redis_cluster.hset("symbolicate_event_low_priority:duration:10:53:110", 0, 123)

    histogram = empty_histogram()
    histogram[0] = 123
    histogram[10] = 456

    durations = store.get_durations_for_project(42, 113)

    assert list(durations) == [
        DurationHistogram(timestamp=110, histogram=BucketedDurations(histogram)),
    ]


def test_get_lpq_projects_filled(
    store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster
) -> None:
    redis_cluster.sadd("store.symbolicate-event-lpq-selected", 1)

    in_lpq = store.get_lpq_projects()
    assert in_lpq == {1}


def test_add_project_to_lpq_unset(
    store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster
) -> None:
    added = store.add_project_to_lpq(1)
    assert added

    in_lpq = redis_cluster.smembers("store.symbolicate-event-lpq-selected")
    assert in_lpq == {"1"}


def test_projects_one_count(
    store: RedisRealtimeMetricsStore, redis_cluster: redis._RedisCluster
) -> None:
    redis_cluster.set("symbolicate_event_low_priority:counter:10:42:111", 0)

    candidates = store.projects()
    assert list(candidates) == [42]
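

# ----------------------------------------------------------------------------
# Context assumed by the tests above but not part of this excerpt: the `store`
# and `redis_cluster` pytest fixtures (typically defined in the module header or
# conftest.py), the imports for `time`, `redis`, `RedisRealtimeMetricsStore`,
# `BucketedCount`, `BucketedDurations`, and `DurationHistogram`, and the
# `empty_histogram()` helper. The sketch below shows one plausible shape for
# that helper; the bucket width and upper bound are assumptions, not the
# canonical definition.


def empty_histogram() -> dict[int, int]:
    # Assumed layout: one zeroed bucket per 10-second duration slot.
    return {duration: 0 for duration in range(0, 600, 10)}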