def test_offset_limit():
    """
    Check offset/limit pagination: for every (offset, limit) pair, the query
    must return exactly the corresponding slice of ENTRIES, projected on the
    requested attributes.
    """
    keys = ("a", "b", "c")
    for connector in CACHE_CONNECTORS:
        for start in range(len(ENTRIES)):
            for count in range(len(ENTRIES)):
                query = Query(attributes=keys, offset=start, limit=count)
                Log.info(query)
                obtained = connector.query(query)
                Log.info(pformat(obtained))
                # The result size is capped both by the limit and by the
                # number of entries remaining past the offset.
                assert len(obtained) == min(count, len(ENTRIES) - start), \
                    "Invalid #entries for %s:\n%s" % (str(query), pformat(obtained))
                # Missing attributes are exposed as None via dict.get().
                expected = [
                    {k: entry.get(k) for k in keys}
                    for entry in ENTRIES[start:start + count]
                ]
                assert obtained == expected, """ Got : %s\n Expected : %s\n """ % (obtained, expected)
def test_cache_rebase():
    """
    Check that rebasing StorageCacheConnector.base_dir only affects caches
    created after the rebase: connectors created beforehand keep resolving
    their cache filenames under the original base directory.
    """
    DUMMY_BASE_DIR = "/tmp/.minifold"

    def check_base_dir(cache_connectors, dummy_cache_connectors=()):
        # Each connector must build its cache filename under the base
        # directory that was in effect when the connector was created.
        # NOTE: default is an immutable tuple, not a shared mutable list.
        query = Query()
        for cache_connector in cache_connectors:
            cache_filename = cache_connector.make_cache_filename(query)
            Log.debug(cache_filename)
            assert cache_filename.startswith(DEFAULT_CACHE_STORAGE_BASE_DIR)
        for dummy_cache_connector in dummy_cache_connectors:
            cache_filename = dummy_cache_connector.make_cache_filename(query)
            Log.debug(cache_filename)
            assert cache_filename.startswith(DUMMY_BASE_DIR)

    # CACHE_CONNECTORS should be stored in DEFAULT_CACHE_STORAGE_BASE_DIR.
    check_base_dir(CACHE_CONNECTORS, ())

    try:
        # We now rebase the default cache directory to DUMMY_BASE_DIR.
        # Caches newly created should be stored in DUMMY_BASE_DIR but the caches
        # previously created should remain in their place.
        Log.info("Setting StorageCacheConnector.base_dir to [%s]" % DUMMY_BASE_DIR)
        StorageCacheConnector.base_dir = DUMMY_BASE_DIR
        dummy_cache_connectors = [
            cls(EntriesConnector(ENTRIES))
            for cls in STORAGE_CONNECTOR_CLASSES
        ]
        check_base_dir(CACHE_CONNECTORS, dummy_cache_connectors)
    finally:
        # Always restore the default base directory — even if an assertion
        # above fails — so the class-level state does not leak into other tests.
        Log.info("Setting StorageCacheConnector.base_dir to [%s]" % DEFAULT_CACHE_STORAGE_BASE_DIR)
        StorageCacheConnector.base_dir = DEFAULT_CACHE_STORAGE_BASE_DIR

    # Caches newly created should be stored in DEFAULT_CACHE_STORAGE_BASE_DIR
    # but the caches previously created should remain in their place.
    check_base_dir(CACHE_CONNECTORS, dummy_cache_connectors)
def __init__(
    self,
    load_entries,
    cache_filename: str,
    load_cache,
    save_cache,
    read_mode,
    write_mode,
    with_cache: bool = True
):
    """
    Constructor. Try to populate the connector from the cache file; on a
    cache miss (or a corrupted cache) fall back to load_entries() and, when
    caching is enabled, write the freshly loaded entries back to the cache.

    Args:
        load_entries: Callable returning the entries when the cache is unusable.
        cache_filename (str): Path of the cache file.
        load_cache: Callable reading entries from an open cache file object.
        save_cache: Callable writing entries to an open cache file object.
        read_mode: Mode passed to open() when reading the cache (e.g. "r"/"rb").
        write_mode: Mode passed to open() when writing the cache (e.g. "w"/"wb").
        with_cache (bool): If False, bypass the cache entirely.
    """
    loaded_from_cache = False
    if with_cache:
        try:
            with open(cache_filename, read_mode) as f:
                Log.info("%s: Loading cache from [%s]" % (type(self), cache_filename))
                entries = load_cache(f)
                Log.info("Loaded %d entries" % len(entries))
                loaded_from_cache = True
        except FileNotFoundError:
            # Cache miss: not an error — the cache gets rebuilt below.
            Log.debug("%s: Cache [%s] not found" % (type(self), cache_filename))
        except Exception as e:
            # Best-effort: an unreadable cache is logged, then rebuilt.
            Log.debug("%s: Cache [%s] corrupted" % (type(self), cache_filename))
            Log.error(e)

    if not loaded_from_cache:
        # Parse the input data since the cache was unusable or disabled.
        entries = load_entries()
        Log.info("Loaded %d entries" % len(entries))
        if with_cache:
            # Persist the freshly loaded entries for next time.
            Log.info("%s: Saving data into cache [%s]" % (type(self), cache_filename))
            mkdir(os.path.dirname(cache_filename))
            with open(cache_filename, write_mode) as f:
                save_cache(entries, f)

    super().__init__(entries)
def test_query_select_where():
    """
    Check SELECT a, c, d WHERE (a <= 100 && b > 20): the query must return
    the same expected rows both before caching (cold) and after (warm).
    """
    query = Query(
        attributes=["a", "c", "d"],
        filters=BinaryPredicate(
            BinaryPredicate("a", "<=", 100),
            "&&",
            BinaryPredicate("b", ">", 20)
        )
    )
    # Attributes absent from a matching entry are reported as None.
    expected = [
        {"a": 100, "c": 300, "d": None},
        {"a": 100, "c": None, "d": 400},
    ]
    for cache_connector in CACHE_CONNECTORS:
        Log.info("Clearing cache" + ("-" * 80))
        cache_connector.clear_query(query)

        Log.info("Non-cached query" + ("-" * 80))
        Log.info("Check if not cached")
        # PEP 8: test truth values directly instead of comparing with == False.
        assert not cache_connector.is_cached(query)
        Log.info("Query")
        result = cache_connector.query(query)
        assert result == expected

        Log.info("Cached query" + ("-" * 80))
        Log.info("Check if cached")
        assert cache_connector.is_cached(query)
        Log.info("Query")
        result = cache_connector.query(query)
        assert result == expected