def callback_read(self, query :Query) -> tuple:
    """
    Try to read the result of a query from the cache.

    Args:
        query: The ``Query`` instance whose cached result is looked up.

    Returns:
        The data loaded from the cache on a cache hit, ``None`` otherwise.
    """
    cache_filename = self.make_cache_filename(query)
    if not self.is_cached(query):
        return None
    with open(cache_filename, self.read_mode) as f:
        data = self.callback_load(f)
        Log.debug("Cache hit: [%s]" % cache_filename)
    return data
def check_base_dir(cache_connectors: list, dummy_cache_connectors: list = None):
    """
    Check that every connector builds its cache filenames under the
    expected base directory.

    Args:
        cache_connectors: Connectors whose cache filenames must start with
            ``DEFAULT_CACHE_STORAGE_BASE_DIR``.
        dummy_cache_connectors: Connectors whose cache filenames must start
            with ``DUMMY_BASE_DIR``. Defaults to no connectors.

    Raises:
        AssertionError: If a cache filename does not live under the
            expected base directory.
    """
    # Fix: the original default was `list()`, a mutable default argument
    # shared across calls; use the None-sentinel idiom instead.
    if dummy_cache_connectors is None:
        dummy_cache_connectors = []
    query = Query()
    for cache_connector in cache_connectors:
        cache_filename = cache_connector.make_cache_filename(query)
        Log.debug(cache_filename)
        assert cache_filename.startswith(DEFAULT_CACHE_STORAGE_BASE_DIR)
    for dummy_cache_connector in dummy_cache_connectors:
        cache_filename = dummy_cache_connector.make_cache_filename(query)
        Log.debug(cache_filename)
        assert cache_filename.startswith(DUMMY_BASE_DIR)
def test_minifold_config_loads():
    """Check that ``Config.loads`` exposes the expected DBLP connector keys and builds ``DblpConnector`` instances."""
    config = Config()
    config.loads(DEFAULT_MINIFOLD_CONFIG)
    expected_keys = {"dblp:dagstuhl", "dblp:uni-trier"}
    assert set(config.keys()) == expected_keys
    from minifold.dblp import DblpConnector
    Log.debug(Connector.subclasses)
    # Both configured keys must yield a DblpConnector.
    for key in ("dblp:dagstuhl", "dblp:uni-trier"):
        connector = config.make_connector(key)
        assert isinstance(connector, DblpConnector)
def test_offset_limit():
    """Exhaustively check that Query(offset, limit) slices ENTRIES like a list slice."""
    connector = EntriesConnector(ENTRIES)
    attributes = ["a", "b", "c"]
    num_entries = len(ENTRIES)
    for offset in range(num_entries):
        for limit in range(num_entries):
            query = Query(attributes=attributes, offset=offset, limit=limit)
            Log.debug(query)
            obtained = connector.query(query)
            Log.debug(pformat(obtained))
            # The connector cannot return more than `limit` entries, nor
            # more than the entries remaining past `offset`.
            assert len(obtained) == min(limit, num_entries - offset), \
                "Invalid #entries for %s:\n%s" % (str(query), pformat(obtained))
            expected = [
                {attribute: entry.get(attribute) for attribute in attributes}
                for entry in ENTRIES[offset : offset + limit]
            ]
            assert obtained == expected, """
            Got      : %s\n
            Expected : %s\n
            """ % (obtained, expected)
def __init__(
    self,
    load_entries,
    cache_filename :str,
    load_cache,
    save_cache,
    read_mode,
    write_mode,
    with_cache :bool = True
):
    """
    Constructor: fetch entries from a cache file when possible, otherwise
    load them from the source and (optionally) save them back to the cache.

    Args:
        load_entries: Callback (no argument) returning the entries when the
            cache cannot be used.
        cache_filename: Path of the cache file.
        load_cache: Callback (file object) deserializing entries from the cache.
        save_cache: Callback (entries, file object) serializing entries to the cache.
        read_mode: Mode passed to ``open`` when reading the cache.
        write_mode: Mode passed to ``open`` when writing the cache.
        with_cache: Pass ``False`` to bypass the cache entirely.
    """
    loaded_from_cache = False
    if with_cache:
        # Best-effort cache read: a missing or corrupted cache file is
        # logged and then ignored, falling back to load_entries().
        try:
            with open(cache_filename, read_mode) as f:
                Log.info("%s: Loading cache from [%s]" % (type(self), cache_filename))
                entries = load_cache(f)
                Log.info("Loaded %d entries" % len(entries))
                loaded_from_cache = True
        except FileNotFoundError:
            Log.debug("%s: Cache [%s] not found" % (type(self), cache_filename))
        except Exception as e:
            Log.debug("%s: Cache [%s] corrupted" % (type(self), cache_filename))
            Log.error(e)
    if not loaded_from_cache:
        # Cache miss (or cache disabled): load from the source.
        entries = load_entries()
        Log.info("Loaded %d entries" % len(entries))
        if with_cache:
            # Persist the freshly loaded entries for the next run.
            Log.info("%s: Saving data into cache [%s]" % (type(self), cache_filename))
            mkdir(os.path.dirname(cache_filename))
            with open(cache_filename, write_mode) as f:
                save_cache(entries, f)
    super().__init__(entries)
def clear_cache(self):
    """Remove this connector's whole cache directory, if it exists."""
    cache_dir = self.cache_dir
    # os.path.isdir() is already False for non-existent paths, so no
    # separate existence check is required.
    if os.path.isdir(cache_dir):
        Log.debug("StorageCacheConnector: Removing cache [%s]" % cache_dir)
        rm(cache_dir, recursive=True)
def clear_query(self, query :Query):
    """
    Remove the cache file associated with a given query, if any.

    Args:
        query: The ``Query`` instance whose cached result must be dropped.
    """
    cache_filename = self.make_cache_filename(query)
    if not os.path.exists(cache_filename):
        return
    Log.debug("StorageCacheConnector: Removing query [%s]" % cache_filename)
    rm(cache_filename)