コード例 #1
0
ファイル: test_cache.py プロジェクト: samuelyi/synapse
    def test_cache_with_asterisk_in_name(self):
        """Some caches have asterisks in their name, test that they are set correctly."""

        config = {
            "caches": {
                "per_cache_factors": {"*cache_a*": 5, "cache_b": 6, "cache_c": 2}
            }
        }
        # os.environ-style mappings only ever contain strings; the CACHE_B
        # value was an int, which a real environment can never produce.
        self.config._environ = {
            "SYNAPSE_CACHE_FACTOR_CACHE_A": "2",
            "SYNAPSE_CACHE_FACTOR_CACHE_B": "3",
        }
        self.config.read_config(config, config_dir_path="", data_dir_path="")
        self.config.resize_all_caches()

        # Environment variable overrides the per-cache factor: 100 * 2.
        cache_a = LruCache(100)
        add_resizable_cache("*cache_a*", cache_resize_callback=cache_a.set_cache_factor)
        self.assertEqual(cache_a.max_size, 200)

        # Lookup matches case-insensitively: "*Cache_b*" picks up the env
        # override for cache_b: 100 * 3.
        cache_b = LruCache(100)
        add_resizable_cache("*Cache_b*", cache_resize_callback=cache_b.set_cache_factor)
        self.assertEqual(cache_b.max_size, 300)

        # No env override, so the per_cache_factors entry applies: 100 * 2.
        cache_c = LruCache(100)
        add_resizable_cache("*cache_c*", cache_resize_callback=cache_c.set_cache_factor)
        self.assertEqual(cache_c.max_size, 200)
コード例 #2
0
    def __get__(self, obj, owner):
        """Build the caching wrapper for the decorated method and memoize it on `obj`."""
        cache = LruCache(
            cache_name=self.orig.__name__,
            max_size=self.max_entries,
        )  # type: LruCache[CacheKey, Any]

        get_cache_key = self.cache_key_builder
        sentinel = LruCacheDescriptor._Sentinel.sentinel

        @functools.wraps(self.orig)
        def _wrapped(*args, **kwargs):
            invalidate_callback = kwargs.pop("on_invalidate", None)
            callbacks = (invalidate_callback,) if invalidate_callback else ()

            cache_key = get_cache_key(args, kwargs)

            ret = cache.get(cache_key, default=sentinel, callbacks=callbacks)
            # Compare by identity: `!=` would invoke the cached value's
            # __eq__, which may be expensive or could wrongly claim equality
            # with the sentinel object.
            if ret is not sentinel:
                return ret

            # Add our own `cache_context` to argument list if the wrapped function
            # has asked for one
            if self.add_cache_context:
                kwargs["cache_context"] = _CacheContext.get_instance(cache, cache_key)

            ret2 = self.orig(obj, *args, **kwargs)
            cache.set(cache_key, ret2, callbacks=callbacks)

            return ret2

        wrapped = cast(_CachedFunction, _wrapped)
        wrapped.cache = cache
        # Store the wrapper on the instance so subsequent attribute lookups
        # bypass this descriptor entirely.
        obj.__dict__[self.orig.__name__] = wrapped

        return wrapped
コード例 #3
0
    def test_evict(self):
        """Entries unread for 30 minutes are evicted; reads reset the clock."""
        setup_expire_lru_cache_entries(self.hs)

        lru = LruCache(5, clock=self.hs.get_clock())

        lru["key1"] = 1
        lru["key2"] = 2

        # 20 minutes pass and we touch key1 only.
        self.reactor.advance(20 * 60)
        self.assertEqual(lru.get("key1"), 1)

        # Another 20 minutes: key2 is now 40 minutes stale and is evicted,
        # while the recently-read key1 survives.
        self.reactor.advance(20 * 60)
        self.assertEqual(lru.get("key1"), 1)
        self.assertIsNone(lru.get("key2"))

        # Re-adding an expired key behaves like a fresh entry.
        lru["key2"] = 3
        self.assertEqual(lru.get("key2"), 3)

        self.reactor.advance(20 * 60)
        self.assertEqual(lru.get("key2"), 3)

        # key1 has now gone 40 minutes without a read and is dropped in turn.
        self.reactor.advance(20 * 60)
        self.assertIsNone(lru.get("key1"))
        self.assertEqual(lru.get("key2"), 3)
コード例 #4
0
ファイル: descriptors.py プロジェクト: varesa/synapse
    def __init__(self,
                 name,
                 max_entries=1000,
                 keylen=1,
                 tree=False,
                 iterable=False):
        """Set up the backing LruCache, pending-deferred map and metrics."""
        # A TreeCache supports invalidation by key prefix; a plain dict does not.
        backing = TreeCache if tree else dict
        self._pending_deferred_cache = backing()

        # With `iterable`, each cached value is weighted by its length rather
        # than counting as a single entry.
        size_fn = (lambda d: len(d)) if iterable else None
        self.cache = LruCache(
            max_size=max_entries,
            keylen=keylen,
            cache_type=backing,
            size_callback=size_fn,
            evicted_callback=self._on_evicted,
        )

        self.name = name
        self.keylen = keylen
        self.thread = None
        self.metrics = register_cache(
            "cache",
            name,
            self.cache,
            collect_callback=self._metrics_collection_callback,
        )
コード例 #5
0
    def __init__(self, name, max_entries=1000):
        """Create a dictionary cache whose entries are weighted by `len`."""
        self.cache = LruCache(
            max_size=max_entries,
            cache_name=name,
            size_callback=len,
        )  # type: LruCache[Any, DictionaryEntry]

        self.name = name
        self.sequence = 0
        self.thread = None
コード例 #6
0
 def test_setdefault(self):
     """`setdefault` only inserts when the key is absent; assignment overrides."""
     cache = LruCache(1)
     # First call inserts and returns the new value.
     self.assertEqual(cache.setdefault("key", 1), 1)
     self.assertEqual(cache.get("key"), 1)
     # Second call must NOT overwrite the existing entry.
     self.assertEqual(cache.setdefault("key", 2), 1)
     self.assertEqual(cache.get("key"), 1)
     cache["key"] = 2  # Make sure overriding works.
     self.assertEqual(cache.get("key"), 2)
コード例 #7
0
ファイル: dictionary_cache.py プロジェクト: mjattiot/synapse
    def __init__(self, name: str, max_entries: int = 1000):
        """Build the backing LruCache; each entry is weighted by its length."""
        self.cache: LruCache[KT, DictionaryEntry] = LruCache(
            max_size=max_entries,
            cache_name=name,
            size_callback=len,
        )

        self.name = name
        self.sequence = 0
        # NOTE(review): presumably records the owning thread — confirm at call sites.
        self.thread: Optional[threading.Thread] = None
コード例 #8
0
    def __init__(self, hs):
        """Capture frequently-used homeserver dependencies."""
        self.hs = hs
        self.clock = hs.get_clock()
        self.store = hs.get_datastore()
        self.state = hs.get_state_handler()
        self.TOKEN_NOT_FOUND_HTTP_STATUS = 401

        # Token cache sized relative to the global cache factor, exported
        # under the name "token_cache" for metrics.
        token_cache = LruCache(CACHE_SIZE_FACTOR * 10000)
        register_cache("token_cache", token_cache)
        self.token_cache = token_cache
コード例 #9
0
    def test_zero_size_drop_from_cache(self) -> None:
        """Test that `drop_from_cache` works correctly with 0-sized entries."""
        # Every entry reports size 0, so the cache's length stays at zero.
        cache: LruCache[str, List[int]] = LruCache(5, size_callback=lambda x: 0)
        cache["key1"] = []
        self.assertEqual(len(cache), 0)

        # Dropping the underlying node must still remove the entry.
        cache.cache["key1"].drop_from_cache()
        self.assertIsNone(
            cache.pop("key1"), "Cache entry should have been evicted but wasn't"
        )
コード例 #10
0
    def __init__(self, hs):
        """Stash homeserver dependencies and set up the token cache."""
        self.hs = hs
        self.clock = hs.get_clock()
        self.store = hs.get_datastore()
        self.state = hs.get_state_handler()

        # Token cache scaled by the global cache factor; registered for metrics.
        token_cache = LruCache(CACHE_SIZE_FACTOR * 10000)
        register_cache("cache", "token_cache", token_cache)
        self.token_cache = token_cache

        self._account_validity = hs.config.account_validity
コード例 #11
0
    def __init__(
        self,
        database: DatabasePool,
        db_conn: LoggingDatabaseConnection,
        hs: "HomeServer",
    ):
        """Initialise the store and the bounded client-IP last-seen cache."""
        super().__init__(database, db_conn, hs)

        # Maps a tuple key to an int — likely a last-seen timestamp, judging
        # by the name (NOTE(review): confirm against callers).
        self.client_ip_last_seen: LruCache[tuple, int] = LruCache(
            max_size=50000, cache_name="client_ip_last_seen"
        )
コード例 #12
0
    def __init__(self, name, max_entries=1000, keylen=1, tree=False):
        """Create the backing cache (tree-structured when `tree` is set)."""
        backing = TreeCache if tree else dict
        self.cache = LruCache(
            max_size=max_entries,
            keylen=keylen,
            cache_type=backing,
        )

        self.name = name
        self.keylen = keylen
        self.sequence = 0
        self.thread = None
        self.metrics = register_cache(name, self.cache)
コード例 #13
0
    def __init__(self, database: DatabasePool, db_conn, hs):
        super().__init__(database, db_conn, hs)

        # Only schedule the hourly pruning job when this process is
        # configured to run background tasks.
        if hs.config.run_background_tasks:
            hs.get_clock().looping_call(
                self._delete_old_forward_extrem_cache, 60 * 60 * 1000
            )

        # Cache of event ID to list of auth event IDs and their depths.
        self._event_auth_cache = LruCache(
            500000, "_event_auth_cache", size_callback=len
        )  # type: LruCache[str, List[Tuple[str, int]]]
コード例 #14
0
ファイル: test_cache.py プロジェクト: yvwvnacb/synapse
    def test_global_instantiated_after_config_load(self):
        """
        If a cache is instantiated after the config is read, it will be
        immediately resized to the correct size given the global factor if there
        is no per-cache factor.
        """
        t = TestConfig()
        t.read_config(
            {"caches": {"global_factor": 1.5}}, config_dir_path="", data_dir_path=""
        )

        # 100 * 1.5 -> 150, applied as soon as the cache registers itself.
        cache = LruCache(100)
        add_resizable_cache("foo", cache_resize_callback=cache.set_cache_factor)
        self.assertEqual(cache.max_size, 150)
コード例 #15
0
ファイル: dictionary_cache.py プロジェクト: skbaum/synapse
    def __init__(self, name, max_entries=1000):
        """Create the backing LruCache, a private sentinel, and register the cache."""
        self.cache = LruCache(max_size=max_entries)

        self.name = name
        self.sequence = 0
        self.thread = None

        # NOTE(review): presumably a unique "no entry" marker distinct from
        # any cached value — confirm against the lookup code.
        class Sentinel(object):
            __slots__ = []

        self.sentinel = Sentinel()
        caches_by_name[name] = self.cache
コード例 #16
0
    def __init__(self, name, max_entries=1000, keylen=1, lru=True):
        """Choose an LRU-evicting or plain insertion-ordered backing store."""
        if lru:
            # LruCache enforces its own size bound internally.
            self.cache = LruCache(max_size=max_entries)
            self.max_entries = None
        else:
            # Plain OrderedDict; presumably bounded by callers via
            # self.max_entries (NOTE(review): confirm).
            self.cache = OrderedDict()
            self.max_entries = max_entries

        self.name = name
        self.keylen = keylen
        self.sequence = 0
        self.thread = None
        caches_by_name[name] = self.cache
コード例 #17
0
    def test_eviction(self):
        """Filling past max_size evicts the least-recently-used entry."""
        cache = LruCache(2)
        cache[1] = 1
        cache[2] = 2

        self.assertEqual(cache.get(1), 1)
        self.assertEqual(cache.get(2), 2)

        # A third insert into a 2-entry cache pushes out the oldest key.
        cache[3] = 3

        self.assertEqual(cache.get(1), None)
        self.assertEqual(cache.get(2), 2)
        self.assertEqual(cache.get(3), 3)
コード例 #18
0
ファイル: test_cache.py プロジェクト: samuelyi/synapse
    def test_individual_instantiated_after_config_load(self):
        """
        If a cache is instantiated after the config is read, it will be
        immediately resized to the correct size given the per_cache_factor if
        there is one.
        """
        self.config.read_config(
            {"caches": {"per_cache_factors": {"foo": 2}}},
            config_dir_path="",
            data_dir_path="",
        )
        self.config.resize_all_caches()

        # Registering "foo" afterwards still picks up its factor: 100 * 2.
        cache = LruCache(100)
        add_resizable_cache("foo", cache_resize_callback=cache.set_cache_factor)
        self.assertEqual(cache.max_size, 200)
コード例 #19
0
ファイル: deferred_cache.py プロジェクト: samuelyi/synapse
    def __init__(
        self,
        name: str,
        max_entries: int = 1000,
        tree: bool = False,
        iterable: bool = False,
        apply_cache_factor_from_config: bool = True,
        prune_unread_entries: bool = True,
    ):
        """
        Args:
            name: The name of the cache
            max_entries: Maximum amount of entries that the cache will hold
            tree: Use a TreeCache instead of a dict as the underlying cache type
            iterable: If True, count each item in the cached object as an entry,
                rather than each cached object
            apply_cache_factor_from_config: Whether cache factors specified in the
                config file affect `max_entries`
            prune_unread_entries: If True, cache entries that haven't been read recently
                will be evicted from the cache in the background. Set to False to
                opt-out of this behaviour.
        """
        backing_type = TreeCache if tree else dict

        # _pending_deferred_cache maps from the key value to a `CacheEntry` object.
        self._pending_deferred_cache: Union[
            TreeCache, "MutableMapping[KT, CacheEntry]"
        ] = backing_type()

        def report_pending() -> None:
            # Export the number of in-flight lookups as a gauge.
            cache_pending_metric.labels(name).set(len(self._pending_deferred_cache))

        # When `iterable`, weight each entry by its length — with a floor of 1
        # so empty containers still occupy a slot. We trust that `VT` is
        # `Sized` whenever `iterable` is True.
        size_cb = None
        if iterable:
            size_cb = lambda d: len(cast(Sized, d)) or 1

        # cache is used for completed results and maps to the result itself, rather
        # than a Deferred.
        self.cache: LruCache[KT, VT] = LruCache(
            max_size=max_entries,
            cache_name=name,
            cache_type=backing_type,
            size_callback=size_cb,
            metrics_collection_callback=report_pending,
            apply_cache_factor_from_config=apply_cache_factor_from_config,
            prune_unread_entries=prune_unread_entries,
        )

        self.thread: Optional[threading.Thread] = None
コード例 #20
0
    def __init__(self, name, max_entries=1000, keylen=1, tree=False, iterable=False):
        """Set up the pending-deferred map, the result cache and its metrics."""
        backing = TreeCache if tree else dict
        self._pending_deferred_cache = backing()

        # With `iterable`, size each cached value by its length instead of
        # counting it as one entry.
        self.cache = LruCache(
            max_size=max_entries,
            keylen=keylen,
            cache_type=backing,
            size_callback=(lambda d: len(d)) if iterable else None,
        )

        self.name = name
        self.keylen = keylen
        self.sequence = 0
        self.thread = None
        self.metrics = register_cache(name, self.cache)
コード例 #21
0
    def __init__(self, name, max_entries=1000):
        """Length-weighted LruCache plus a private sentinel and a metrics hook."""
        self.cache = LruCache(max_size=max_entries, size_callback=len)

        self.name = name
        self.sequence = 0
        self.thread = None

        # NOTE(review): presumably a unique "no entry" marker distinct from
        # any cached value — confirm against the lookup code.
        class Sentinel(object):
            __slots__ = []

        self.sentinel = Sentinel()
        self.metrics = register_cache(name, self.cache)
コード例 #22
0
ファイル: auth.py プロジェクト: mjvaldez/synapse
    def __init__(self, hs):
        """Capture homeserver handles and auth-related configuration."""
        self.hs = hs
        self.clock = hs.get_clock()
        self.store = hs.get_datastore()
        self.state = hs.get_state_handler()

        # Fixed-size token cache, registered for metrics as "token_cache".
        token_cache = LruCache(10000)
        register_cache("cache", "token_cache", token_cache)
        self.token_cache = token_cache

        self._auth_blocking = AuthBlocking(self.hs)

        self._account_validity = hs.config.account_validity
        self._track_appservice_user_ips = hs.config.track_appservice_user_ips
        self._macaroon_secret_key = hs.config.macaroon_secret_key
コード例 #23
0
ファイル: client_ips.py プロジェクト: vadimzakirov/synapse
    def __init__(self, database: DatabasePool, db_conn, hs):
        # NOTE(review): the cache is created before super().__init__ —
        # presumably the parent constructor needs it; keep this ordering.
        self.client_ip_last_seen = LruCache(
            cache_name="client_ip_last_seen", max_size=50000
        )

        super().__init__(database, db_conn, hs)

        # (user_id, access_token, ip,) -> (user_agent, device_id, last_seen)
        self._batch_row_update = {}

        # Flush the batched rows every 5 seconds, and once more at shutdown.
        self._client_ip_looper = self._clock.looping_call(
            self._update_client_ips_batch, 5 * 1000
        )
        self.hs.get_reactor().addSystemEventTrigger(
            "before", "shutdown", self._update_client_ips_batch
        )
コード例 #24
0
async def main(reactor, loops):
    """
    Benchmark `loops` number of insertions into LruCache without eviction.
    """
    # Cache sized to hold every key, so no eviction work is measured.
    cache = LruCache(loops)

    started = perf_counter()
    for key in range(loops):
        cache[key] = True
    elapsed = perf_counter() - started

    return elapsed
コード例 #25
0
    def test_clear(self):
        """`clear` fires every entry's invalidation callback exactly once."""
        m1 = Mock()
        m2 = Mock()
        cache = LruCache(5)

        cache.set("key1", "value", callbacks=[m1])
        cache.set("key2", "value", callbacks=[m2])

        # Nothing has been invalidated yet.
        self.assertEqual(m1.call_count, 0)
        self.assertEqual(m2.call_count, 0)

        cache.clear()

        # Clearing invalidates both entries, firing each callback once.
        self.assertEqual(m1.call_count, 1)
        self.assertEqual(m2.call_count, 1)
コード例 #26
0
    def test_pop(self):
        """Popping fires the entry's callback; entries set without one stay silent."""
        m = Mock()
        cache = LruCache(1)

        cache.set("key", "value", callbacks=[m])
        self.assertFalse(m.called)

        # Removal invalidates the entry, firing the callback once.
        cache.pop("key")
        self.assertEqual(m.call_count, 1)

        # The replacement entry was set without a callback...
        cache.set("key", "value")
        self.assertEqual(m.call_count, 1)

        # ...so popping it again fires nothing further.
        cache.pop("key")
        self.assertEqual(m.call_count, 1)
コード例 #27
0
ファイル: test_cache.py プロジェクト: yvwvnacb/synapse
    def test_apply_cache_factor_from_config(self):
        """Caches can disable applying cache factor updates, mainly used by
        event cache size.
        """
        t = TestConfig()
        t.read_config(
            {"caches": {"event_cache_size": "10k"}},
            config_dir_path="",
            data_dir_path="",
        )

        # With factor application disabled, the configured "10k" (10240) sticks
        # even after the cache is registered as resizable.
        cache = LruCache(
            max_size=t.caches.event_cache_size,
            apply_cache_factor_from_config=False,
        )
        add_resizable_cache("event_cache", cache_resize_callback=cache.set_cache_factor)

        self.assertEqual(cache.max_size, 10240)
コード例 #28
0
ファイル: test_cache.py プロジェクト: samuelyi/synapse
    def test_global_instantiated_before_config_load(self):
        """
        If a cache is instantiated before the config is read, it will be given
        the default cache size in the interim, and then resized to the new
        default cache size once the config is loaded.
        """
        # Before any config is loaded, the interim default factor applies
        # (100 -> 50, i.e. a factor of 0.5).
        cache = LruCache(100)
        add_resizable_cache("foo", cache_resize_callback=cache.set_cache_factor)
        self.assertEqual(cache.max_size, 50)

        # Loading a global factor of 4 resizes the already-registered cache.
        self.config.read_config(
            {"caches": {"global_factor": 4}}, config_dir_path="", data_dir_path=""
        )
        self.config.resize_all_caches()

        self.assertEqual(cache.max_size, 400)
コード例 #29
0
    def __init__(self, hs: "HomeServer"):
        """Capture homeserver handles and auth-related configuration."""
        self.hs = hs
        self.clock = hs.get_clock()
        self.store = hs.get_datastore()
        self.state = hs.get_state_handler()
        self._account_validity_handler = hs.get_account_validity_handler()

        # Maps a token string to a (str, bool) pair, capped at 10k entries.
        self.token_cache: LruCache[str, Tuple[str, bool]] = LruCache(
            10000, "token_cache"
        )

        self._auth_blocking = AuthBlocking(self.hs)

        self._track_appservice_user_ips = hs.config.track_appservice_user_ips
        self._macaroon_secret_key = hs.config.macaroon_secret_key
        self._force_tracing_for_users = hs.config.tracing.force_tracing_for_users
コード例 #30
0
ファイル: test_lrucache.py プロジェクト: tetratorus/synapse
    def test_set(self):
        """Replacing a value fires the entry's callback; identical re-sets do not."""
        m = Mock()
        cache = LruCache(1)

        cache.set("key", "value", m)
        self.assertFalse(m.called)

        # Setting the same value again does not invalidate the entry.
        cache.set("key", "value")
        self.assertFalse(m.called)

        # A different value invalidates the original entry exactly once.
        cache.set("key", "value2")
        self.assertEqual(m.call_count, 1)

        # The callback was only attached to the first entry, so no more calls.
        cache.set("key", "value")
        self.assertEqual(m.call_count, 1)