Example #1
def test_lrucache_add_past_limit(max_count: int, max_size: int,
                                 test_tx_datacarrier) -> None:
    entries = []
    cache = LRUCache(max_count=max_count, max_size=max_size)
    # Add four entries to a cache that holds three; the fourth set must evict the oldest.
    for i in range(1, 1 + 4):
        k = chr(i).encode()
        v = test_tx_datacarrier
        entries.append((k, v))
        added, removals = cache.set(k, v)
        if i < 4:
            assert len(removals) == 0
        else:
            assert len(removals) == 1
            assert removals[0] == entries[0]
    assert cache.current_size == 3 * SIZEOF_TEST_TX_DATA
    assert cache.hits == 0
    assert cache.misses == 0

    # Test the first entry is a miss.
    cached_value = cache.get(entries[0][0])
    assert cached_value is None
    assert cache.hits == 0
    assert cache.misses == 1

    # Test the other entries are hits.
    for i, (k, v) in enumerate(entries):
        if i == 0:
            continue
        cached_value = cache.get(k)
        assert cached_value == v
        assert cache.hits == i
        assert cache.misses == 1
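
These examples exercise an LRUCache without showing the class itself. The following is a minimal sketch, assuming an OrderedDict-backed store and that an entry's size is its byte length, of the interface the assertions imply; it is not the real implementation, which (as Example #2 below shows) keeps a doubly-linked node chain behind the private _root and _cache attributes.

from collections import OrderedDict
from typing import Any, List, Optional, Tuple

class LRUCacheSketch:
    """Simplified model of the cache behaviour asserted in these tests."""

    def __init__(self, max_count: Optional[int] = None,
                 max_size: Optional[int] = None) -> None:
        self._data: "OrderedDict[bytes, Any]" = OrderedDict()
        self.max_count = max_count
        self.max_size = max_size
        self.current_size = 0
        self.hits = 0
        self.misses = 0

    def get(self, key: bytes) -> Optional[Any]:
        if key not in self._data:
            self.misses += 1
            return None
        self.hits += 1
        # A hit makes the entry the most recently used one.
        self._data.move_to_end(key)
        return self._data[key]

    def set(self, key: bytes, value: Any) -> Tuple[bool, List[Tuple[bytes, Any]]]:
        removals: List[Tuple[bytes, Any]] = []
        value_size = len(value)  # Assumption: an entry's size is its byte length.
        # Replacing an existing key surfaces the old entry as a removal first.
        if key in self._data:
            old_value = self._data.pop(key)
            self.current_size -= len(old_value)
            removals.append((key, old_value))
        # A value that cannot fit at all is rejected outright (Examples #8 and #9).
        if self.max_size is not None and value_size > self.max_size:
            return False, removals
        self._data[key] = value
        self.current_size += value_size
        # Evict least recently used entries until both limits are respected.
        while ((self.max_count is not None and len(self._data) > self.max_count) or
               (self.max_size is not None and self.current_size > self.max_size)):
            old_key, old_value = self._data.popitem(last=False)
            self.current_size -= len(old_value)
            removals.append((old_key, old_value))
        return True, removals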
Example #2
def test_lrucache_add_to_limit(max_count: int, max_size: int,
                               test_tx_datacarrier) -> None:
    entries = []
    cache = LRUCache(max_count=max_count, max_size=max_size)
    previous_added_node = cache._root
    for i in range(1, 1 + 3):
        k = chr(i).encode()
        v = test_tx_datacarrier
        entries.append((k, v))
        added, removals = cache.set(k, v)
        assert added
        assert len(removals) == 0
        # Before this set() the previously added node sat at the end of the chain, directly
        # before the root sentinel; now the node we just added does. Verify the ordering and
        # that both directional links are in place.
        current_node = cache._cache.get(k)
        assert current_node.previous == previous_added_node
        assert current_node.next == cache._root
        previous_added_node = current_node
    assert cache.current_size == 3 * SIZEOF_TEST_TX_DATA
    assert cache.hits == 0
    assert cache.misses == 0

    for i, (k, v) in enumerate(entries):
        cached_value = cache.get(k)
        assert cached_value == v
        assert cache.hits == i + 1
        assert cache.misses == 0

    v = cache.get(b'miss')
    assert v is None
    assert cache.misses == 1
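
Example #2 reaches into the private _cache dict and the _root sentinel to check the node chain. Below is a rough sketch of the structure those assertions imply: entries live in a dict keyed by the cache key, and each node sits in a circular doubly-linked chain anchored on a sentinel node, with the most recently used node directly before the sentinel. Only _root, _cache, previous and next appear in the test; the class name and helper here are assumptions for illustration.

class _Node:
    def __init__(self, key: bytes = b'', value: object = None) -> None:
        self.key = key
        self.value = value
        # A lone node links to itself, which is how the sentinel starts out.
        self.previous: "_Node" = self
        self.next: "_Node" = self

def _link_before_root(root: _Node, node: _Node) -> None:
    # Insert `node` just before the sentinel, i.e. at the most-recently-used end.
    tail = root.previous
    tail.next = node
    node.previous = tail
    node.next = root
    root.previous = node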
Example #3
def test_lrucache_add_past_limit_lru_ordering(max_count: int, max_size: int,
                                              test_tx_small) -> None:
    cache = LRUCache(max_count=max_count, max_size=max_size)
    cache.set(b'1', test_tx_small)
    cache.set(b'2', test_tx_small)
    cache.set(b'3', test_tx_small)
    assert cache.get(b'1') == test_tx_small
    assert cache.get(b'3') == test_tx_small
    added, removals = cache.set(b'4', test_tx_small)
    assert added
    assert removals == [(b'2', test_tx_small)]
    assert cache.get(b'3') == test_tx_small
    added, removals = cache.set(b'5', test_tx_small)
    assert added
    assert removals == [(b'1', test_tx_small)]
    added, removals = cache.set(b'6', test_tx_small)
    assert added
    assert removals == [(b'4', test_tx_small)]
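
For reference, the recency order the assertions above rely on, traced after each call (leftmost entry is the next eviction candidate):

    after set 1, 2, 3  ->  [1, 2, 3]
    after get 1        ->  [2, 3, 1]
    after get 3        ->  [2, 1, 3]
    after set 4        ->  [1, 3, 4]   (evicts 2)
    after get 3        ->  [1, 4, 3]
    after set 5        ->  [4, 3, 5]   (evicts 1)
    after set 6        ->  [3, 5, 6]   (evicts 4)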
Example #4
def test_lrucache_add_replacement(max_count: int, max_size: int) -> None:
    cache = LRUCache(max_count=max_count, max_size=max_size)
    added, removals = cache.set(b'1', b'2')
    assert added
    assert len(removals) == 0
    added, removals = cache.set(b'1', b'3')
    assert added
    assert removals == [(b'1', b'2')]
    assert cache.get(b'1') == b'3'
Example #5
def test_lrucache_count_empty(max_count: int, max_size: int) -> None:
    cache = LRUCache(max_count=max_count, max_size=max_size)
    assert cache.hits == 0
    assert cache.misses == 0
    assert cache.current_size == 0

    v = cache.get(b'2')
    assert v is None
    assert cache.hits == 0
    assert cache.misses == 1
Example #6
def test_lrucache_add_single(max_count: int, max_size: int) -> None:
    k = b'1'
    v = b'2'
    cache = LRUCache(max_count=max_count, max_size=max_size)
    added, removals = cache.set(k, v)
    assert added
    assert len(removals) == 0
    assert cache.current_size == 1

    cached_value = cache.get(k)
    assert cached_value == v
    assert cache.hits == 1
    assert cache.misses == 0

    # Ensure a second fetch works given the order shuffling.
    cached_value = cache.get(k)
    assert cached_value == v
    assert cache.hits == 2
    assert cache.misses == 0
Example #7
def test_lrucache_add_past_limit_lru_ordering(max_count: int,
                                              max_size: int) -> None:
    cache = LRUCache(max_count=max_count, max_size=max_size)
    cache.set(b'1', b'1')
    cache.set(b'2', b'2')
    cache.set(b'3', b'3')
    assert cache.get(b'1') == b'1'
    assert cache.get(b'3') == b'3'
    added, removals = cache.set(b'4', b'4')
    assert added
    assert removals == [(b'2', b'2')]
    assert cache.get(b'3') == b'3'
    added, removals = cache.set(b'5', b'5')
    assert added
    assert removals == [(b'1', b'1')]
    added, removals = cache.set(b'6', b'6')
    assert added
    assert removals == [(b'4', b'4')]
Example #8
def test_lrucache_size_add_replacement_fails() -> None:
    cache = LRUCache(max_size=10)
    added, removals = cache.set(b'1', b'2' * 10)
    assert added
    assert len(removals) == 0
    assert cache.current_size == 10
    added, removals = cache.set(b'1', b'3' * 11)
    assert not added
    assert removals == [(b'1', b'2' * 10)]
    assert cache.get(b'1') is None
    assert cache.current_size == 0
Example #9
def test_lrucache_size_add_replacement_fails(test_tx_small,
                                             test_tx_datacarrier) -> None:
    cache = LRUCache(max_size=SIZEOF_TEST_TX_SMALL)
    added, removals = cache.set(b'1', test_tx_small)
    assert added
    assert len(removals) == 0
    assert cache.current_size == SIZEOF_TEST_TX_SMALL

    # Same key, but the new value is larger than the cache's maximum size.
    added, removals = cache.set(b'1', test_tx_datacarrier)
    assert not added
    assert removals == [(b'1', test_tx_small)]
    assert cache.get(b'1') is None
    assert cache.current_size == 0
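
Examples #8 and #9 show why set() reports both an added flag and a removals list: a caller treating the cache as a layer over slower storage has to handle evicted entries and oversized values itself. A hypothetical caller pattern is sketched below; write_to_disk and cache_transaction are made-up names for illustration, not part of the cache API.

def write_to_disk(key: bytes, value: bytes) -> None:
    # Placeholder for whatever persistent store sits behind the cache.
    pass

def cache_transaction(cache, tx_hash: bytes, tx_bytes: bytes) -> None:
    added, removals = cache.set(tx_hash, tx_bytes)
    # Entries pushed out of the cache (or displaced by a failed replacement)
    # are surfaced to the caller rather than silently dropped.
    for removed_key, removed_value in removals:
        write_to_disk(removed_key, removed_value)
    if not added:
        # The value was larger than the cache allows; persist it directly.
        write_to_disk(tx_hash, tx_bytes)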
Example #10
def test_lrucache_add_to_limit(max_count: int, max_size: int) -> None:
    entries = []
    cache = LRUCache(max_count=max_count, max_size=max_size)
    for i in range(1, 1 + 3):
        k = chr(i).encode()
        v = chr(i * 2).encode()
        entries.append((k, v))
        added, removals = cache.set(k, v)
        assert added
        assert len(removals) == 0
    assert cache.current_size == 3
    assert cache.hits == 0
    assert cache.misses == 0

    for i, (k, v) in enumerate(entries):
        cached_value = cache.get(k)
        assert cached_value == v
        assert cache.hits == i + 1
        assert cache.misses == 0

    v = cache.get(b'miss')
    assert v is None
    assert cache.misses == 1
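
The snippets above reference parametrized max_count and max_size fixtures, transaction fixtures and SIZEOF_* constants that are defined elsewhere in the test module. A conftest.py sketch along the following lines would make them runnable; the payloads and the capacity of three entries are placeholders inferred from the assertions, not the original values.

import pytest

# Placeholder transaction payloads; the real fixtures presumably return
# parsed transaction objects rather than raw bytes.
TEST_TX_SMALL = b'\x01' * 50
TEST_TX_DATACARRIER = b'\x02' * 200
SIZEOF_TEST_TX_SMALL = len(TEST_TX_SMALL)
SIZEOF_TEST_TX_DATA = len(TEST_TX_DATACARRIER)

@pytest.fixture
def max_count() -> int:
    # A capacity of three entries matches the assertions in the examples.
    return 3

@pytest.fixture
def max_size() -> None:
    # With a count limit in force, the size limit is left unset here; the real
    # suite presumably also parametrizes a size-limited variant.
    return None

@pytest.fixture
def test_tx_small() -> bytes:
    return TEST_TX_SMALL

@pytest.fixture
def test_tx_datacarrier() -> bytes:
    return TEST_TX_DATACARRIER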