def test_cache_stats_provider(self):
    """Test ForwardMsgCache's CacheStatsProvider implementation."""
    cache = ForwardMsgCache()
    session = _create_mock_session()

    # An empty cache reports no stats.
    self.assertEqual([], cache.get_stats())

    # Add two dataframe messages to the cache.
    messages = []
    for data in ([1, 2, 3], [5, 4, 3, 2, 1, 0]):
        msg = _create_dataframe_msg(data)
        populate_hash_if_needed(msg)
        cache.add_message(msg, session, 0)
        messages.append(msg)

    # Each cached message contributes one CacheStat whose byte_length
    # is the message's serialized size.
    expected = [
        CacheStat(
            category_name="ForwardMessageCache",
            cache_name="",
            byte_length=msg.ByteSize(),
        )
        for msg in messages
    ]
    self.assertEqual(set(expected), set(cache.get_stats()))
def test_has_stats(self):
    """The stats endpoint returns an OpenMetrics text document."""
    self.mock_stats = [
        CacheStat(category_name="st.singleton", cache_name="foo", byte_length=128),
        CacheStat(category_name="st.memo", cache_name="bar", byte_length=256),
    ]

    response = self.fetch("/st-metrics")
    self.assertEqual(200, response.code)
    self.assertEqual(
        "application/openmetrics-text", response.headers.get("Content-Type")
    )

    # One metric line per CacheStat, wrapped in the OpenMetrics
    # TYPE/UNIT/HELP preamble and terminated by "# EOF".
    expected_lines = [
        "# TYPE cache_memory_bytes gauge",
        "# UNIT cache_memory_bytes bytes",
        "# HELP Total memory consumed by a cache.",
        'cache_memory_bytes{cache_type="st.singleton",cache="foo"} 128',
        'cache_memory_bytes{cache_type="st.memo",cache="bar"} 256',
        "# EOF",
        "",
    ]
    expected_body = "\n".join(expected_lines).encode("utf-8")
    self.assertEqual(expected_body, response.body)
def get_stats(self) -> List[CacheStat]:
    """Return one CacheStat per cached ForwardMsg.

    Each entry's byte_length is the serialized size of the cached
    protobuf message.
    """
    # The hash keys are not part of the stats, so iterate values only
    # (the original looped over .items() and ignored the key).
    return [
        CacheStat(
            category_name="ForwardMessageCache",
            cache_name="",
            byte_length=entry.msg.ByteSize(),
        )
        for entry in self._entries.values()
    ]
def get_stats(self) -> List[CacheStat]:
    """Return one CacheStat per entry in the in-memory memo cache.

    Holds the cache lock while gathering stats; len() on each cached
    value is cheap, so the lock is not held for long.
    """
    with self._mem_cache_lock:
        # Keys are not part of the stats, so iterate values only
        # (the original looped over .items() and ignored the key).
        return [
            CacheStat(
                category_name="st_memo",
                cache_name=self.display_name,
                byte_length=len(item_value),
            )
            for item_value in self._mem_cache.values()
        ]
def get_stats(self) -> List[CacheStat]:
    """Return one CacheStat per cached item, across all function caches."""
    with self._lock:
        # Shallow-clone our caches. We don't want to hold the global
        # lock during stats-gathering.
        function_caches = self._function_caches.copy()

    stats: List[CacheStat] = []
    for function_cache in function_caches.values():
        for cached_value in function_cache.cache:
            stats.append(
                CacheStat(
                    "st_cache",
                    function_cache.display_name,
                    asizeof(cached_value),
                )
            )
    return stats
def test_multiple_stats(self):
    """get_stats reports one entry per distinct cached singleton value."""

    @st.experimental_singleton
    def foo(count):
        return [3.14] * count

    @st.experimental_singleton
    def bar():
        return threading.Lock()

    # Populate the caches: two distinct foo entries (different args),
    # one bar entry — the second bar() call is a cache hit.
    foo(1)
    foo(53)
    bar()
    bar()

    # Cache names are derived from each function's module and qualname.
    foo_cache_name = f"{foo.__module__}.{foo.__qualname__}"
    bar_cache_name = f"{bar.__module__}.{bar.__qualname__}"

    expected = [
        CacheStat(
            category_name="st_singleton",
            cache_name=foo_cache_name,
            byte_length=get_byte_length([3.14]),
        ),
        CacheStat(
            category_name="st_singleton",
            cache_name=foo_cache_name,
            byte_length=get_byte_length([3.14] * 53),
        ),
        CacheStat(
            category_name="st_singleton",
            cache_name=bar_cache_name,
            byte_length=get_byte_length(bar()),
        ),
    ]

    # The order of these is non-deterministic, so check Set equality
    # instead of List equality
    self.assertEqual(set(expected), set(get_singleton_stats_provider().get_stats()))
def get_stats(self) -> List[CacheStat]:
    """Return one CacheStat per uploaded file, across all sessions/widgets."""
    with self._files_lock:
        # Gather every file from every (session, widget) bucket into
        # a single flat list.
        all_files: List[UploadedFileRec] = []
        for file_list in self._files_by_id.values():
            for file in file_list:
                all_files.append(file)

    stats: List[CacheStat] = []
    for file in all_files:
        stats.append(
            CacheStat(
                category_name="UploadedFileManager",
                cache_name="",
                byte_length=len(file.data),
            )
        )
    return stats
def test_cache_stats_provider(self):
    """Test CacheStatsProvider implementation."""
    # An empty manager reports no stats.
    self.assertEqual([], self.mgr.get_stats())

    # Add two files; the manager should report one stat per file,
    # with byte_length equal to each file's data size.
    self.mgr.add_file("session1", "widget1", FILE_1)
    self.mgr.add_file("session1", "widget2", FILE_2)

    expected = [
        CacheStat(
            category_name="UploadedFileManager",
            cache_name="",
            byte_length=len(file.data),
        )
        for file in (FILE_1, FILE_2)
    ]
    self.assertEqual(expected, self.mgr.get_stats())
def get_stats(self) -> List[CacheStat]:
    """Return one CacheStat per file held by the in-memory file manager."""
    # We operate on a copy of our dict, to avoid race conditions
    # with other threads that may be manipulating the cache.
    files_by_id = self._files_by_id.copy()

    # The file ids are not part of the stats, so iterate values only
    # (the original looped over .items() and ignored the id).
    return [
        CacheStat(
            category_name="st_in_memory_file_manager",
            cache_name="",
            byte_length=file.content_size,
        )
        for file in files_by_id.values()
    ]
def test_get_stats(self):
    """StatsManager.get_stats should return all providers' stats."""
    manager = StatsManager()
    providers = [MockStatsProvider(), MockStatsProvider()]
    for provider in providers:
        manager.register_provider(provider)

    # With no stats populated, the manager reports an empty list.
    self.assertEqual([], manager.get_stats())

    # Stats are concatenated in provider-registration order.
    providers[0].stats = [
        CacheStat("provider1", "foo", 1),
        CacheStat("provider1", "bar", 2),
    ]
    providers[1].stats = [
        CacheStat("provider2", "baz", 3),
        CacheStat("provider2", "qux", 4),
    ]
    self.assertEqual(providers[0].stats + providers[1].stats, manager.get_stats())
def get_stats(self) -> List[CacheStat]:
    """Return one CacheStat per value in the singleton cache."""
    # Shallow clone our cache. Computing item sizes is potentially
    # expensive, and we want to minimize the time we spend holding
    # the lock.
    with self._mem_cache_lock:
        mem_cache = self._mem_cache.copy()

    # Keys are not part of the stats, so iterate values only
    # (the original looped over .items() and ignored the key).
    return [
        CacheStat(
            category_name="st_singleton",
            cache_name=self.display_name,
            byte_length=asizeof.asizeof(item_value),
        )
        for item_value in mem_cache.values()
    ]
def test_protobuf_stats(self):
    """Stats requests are returned in OpenMetrics protobuf format if the
    request's Content-Type header is protobuf.
    """
    self.mock_stats = [
        CacheStat(
            category_name="st.singleton",
            cache_name="foo",
            byte_length=128,
        ),
        CacheStat(
            category_name="st.memo",
            cache_name="bar",
            byte_length=256,
        ),
    ]

    # Requests can have multiple Accept headers. Only one of them needs
    # to specify protobuf in order to get back protobuf.
    headers = HTTPHeaders()
    headers.add("Accept", "application/openmetrics-text")
    headers.add("Accept", "application/x-protobuf")
    headers.add("Accept", "text/html")

    response = self.fetch("/st-metrics", headers=headers)
    self.assertEqual(200, response.code)
    self.assertEqual(
        "application/x-protobuf", response.headers.get("Content-Type")
    )

    # The response body is a serialized MetricSet protobuf.
    metric_set = MetricSetProto()
    metric_set.ParseFromString(response.body)

    # Expected MetricSet, expressed as the dict form produced by
    # MessageToDict: one gauge family with one metric per CacheStat.
    expected = {
        "metricFamilies": [{
            "name": "cache_memory_bytes",
            "type": "GAUGE",
            "unit": "bytes",
            "help": "Total memory consumed by a cache.",
            "metrics": [
                {
                    "labels": [
                        {
                            "name": "cache_type",
                            "value": "st.singleton"
                        },
                        {
                            "name": "cache",
                            "value": "foo"
                        },
                    ],
                    "metricPoints": [{
                        "gaugeValue": {
                            "intValue": "128"
                        }
                    }],
                },
                {
                    "labels": [
                        {
                            "name": "cache_type",
                            "value": "st.memo"
                        },
                        {
                            "name": "cache",
                            "value": "bar"
                        },
                    ],
                    "metricPoints": [{
                        "gaugeValue": {
                            "intValue": "256"
                        }
                    }],
                },
            ],
        }]
    }
    self.assertEqual(expected, MessageToDict(metric_set))
def get_stats(self) -> List[CacheStat]:
    """Report the deep size of the whole session-state object as one stat."""
    return [CacheStat("st_session_state", "", asizeof(self))]