def test_modulepickling_simulate_deleted_cache(tmpdir):
    """
    Tests loading from a cache file after it is deleted.

    According to macOS `dev docs`__,

        Note that the system may delete the Caches/ directory to free up disk
        space, so your app must be able to re-create or download these files
        as needed.

    It is possible that other supported platforms treat cache files the same
    way.

    __ https://developer.apple.com/library/content/documentation/FileManagement/Conceptual/FileSystemProgrammingGuide/FileSystemOverview/FileSystemOverview.html
    """
    grammar = load_grammar()
    module = 'fake parser'

    # Create the file
    path = tmpdir.dirname + '/some_path'
    with open(path, 'w'):
        pass
    io = file_io.FileIO(path)

    save_module(grammar._hashed, io, module, lines=[])
    assert load_module(grammar._hashed, io) == module

    unlink(_get_hashed_path(grammar._hashed, path))
    parser_cache.clear()

    cached2 = load_module(grammar._hashed, io)
    assert cached2 is None
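

# The test above deletes the on-disk pickle via ``_get_hashed_path``.  As a
# rough illustration only (not necessarily the cache module's actual naming
# scheme), such a helper usually derives a flat, stable file name from the
# grammar hash and the source path:

def _get_hashed_path_sketch(hashed_grammar, path, cache_directory):
    import hashlib
    import os
    # Hash the source path so arbitrary file system paths map to flat names.
    file_hash = hashlib.sha256(path.encode('utf-8')).hexdigest()
    return os.path.join(cache_directory, '%s-%s.pkl' % (hashed_grammar, file_hash))

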
def test_cache_last_used_update(diff_cache, use_file_io):
    p = '/path/last-used'
    parser_cache.clear()  # Clear, because then it's easier to find stuff.
    parse('somecode', cache=True, path=p)
    node_cache_item = next(iter(parser_cache.values()))[p]
    now = time.time()
    assert node_cache_item.last_used < now

    if use_file_io:
        f = _FixedTimeFileIO(p, 'code', node_cache_item.last_used - 10)
        parse(file_io=f, cache=True, diff_cache=diff_cache)
    else:
        parse('somecode2', cache=True, path=p, diff_cache=diff_cache)

    node_cache_item = next(iter(parser_cache.values()))[p]
    assert now < node_cache_item.last_used < time.time()
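

# ``_FixedTimeFileIO`` above is assumed to be a small test double, defined
# elsewhere in this module, that reports a fixed modification time instead of
# asking the file system.  A minimal sketch, assuming ``file_io`` exposes a
# ``KnownContentFileIO`` class that takes a path and its content:

class _FixedTimeFileIOSketch(file_io.KnownContentFileIO):
    def __init__(self, path, content, last_modified):
        super().__init__(path, content)
        self._last_modified = last_modified

    def get_last_modified(self):
        # Return the injected timestamp so the cache logic under test sees a
        # deterministic file age.
        return self._last_modified

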
def test_cache_limit():
    def cache_size():
        return sum(len(v) for v in parser_cache.values())

    try:
        parser_cache.clear()
        future_node_cache_item = _NodeCacheItem('bla', [], change_time=time.time() + 10e6)
        old_node_cache_item = _NodeCacheItem('bla', [], change_time=time.time() - 10e4)
        parser_cache['some_hash_old'] = {
            '/path/%s' % i: old_node_cache_item for i in range(300)
        }
        parser_cache['some_hash_new'] = {
            '/path/%s' % i: future_node_cache_item for i in range(300)
        }
        assert cache_size() == 600
        parse('somecode', cache=True, path='/path/somepath')
        assert cache_size() == 301
    finally:
        parser_cache.clear()
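

# The drop from 600 to 301 entries above assumes the cache garbage-collects
# items that have not been used recently once a size threshold is crossed.
# A rough sketch of that eviction step (the threshold and idle time below are
# illustrative values, not the library's actual constants):

def _evict_stale_items_sketch(cache, size_trigger=600, max_idle_seconds=600):
    total = sum(len(per_path) for per_path in cache.values())
    if total >= size_trigger:
        cutoff = time.time() - max_idle_seconds
        for grammar_hash, per_path in cache.items():
            # Keep only the items that were used after the cutoff.
            cache[grammar_hash] = {
                path: item for path, item in per_path.items()
                if item.last_used > cutoff
            }

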
def clear_time_caches(delete_all=False):
    """
    Medi caches many things that should be cleared after each completion
    finishes.

    :param delete_all: Also delete the caches that are normally kept, such as
        the parser cache, which is important for faster parsing.
    """
    global _time_caches

    if delete_all:
        for cache in _time_caches.values():
            cache.clear()
        parser_cache.clear()
    else:
        # Normally just kill the expired entries, not everything.
        for tc in _time_caches.values():
            # Check each time cache for expired entries.
            for key, (t, value) in list(tc.items()):
                if t < time.time():
                    # Delete the expired entry.
                    del tc[key]
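

# ``clear_time_caches`` assumes every cache in ``_time_caches`` maps a key to
# an ``(expiry_timestamp, value)`` tuple.  A hypothetical producer that
# registers such a cache and stores an entry valid for ``ttl`` seconds could
# look like this (illustrative only; the names are not the module's real API):

def _store_in_time_cache_sketch(name, ttl, key, value):
    cache = _time_caches.setdefault(name, {})
    # The entry expires once ``time.time()`` passes the stored timestamp,
    # which is exactly what the expiry loop above checks.
    cache[key] = (time.time() + ttl, value)
    return cache

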
def test_modulepickling_change_cache_dir(tmpdir):
    """
    ParserPickling should not save old cache when cache_directory is changed.

    See: `#168 <https://github.com/davidhalter/jedi/pull/168>`_
    """
    dir_1 = str(tmpdir.mkdir('first'))
    dir_2 = str(tmpdir.mkdir('second'))

    item_1 = _NodeCacheItem('bla', [])
    item_2 = _NodeCacheItem('bla', [])
    path_1 = 'fake path 1'
    path_2 = 'fake path 2'

    hashed_grammar = load_grammar()._hashed
    _save_to_file_system(hashed_grammar, path_1, item_1, cache_path=dir_1)
    parser_cache.clear()
    cached = load_stored_item(hashed_grammar, path_1, item_1, cache_path=dir_1)
    assert cached == item_1.node

    _save_to_file_system(hashed_grammar, path_2, item_2, cache_path=dir_2)
    cached = load_stored_item(hashed_grammar, path_1, item_1, cache_path=dir_2)
    assert cached is None
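

# ``load_stored_item`` above is assumed to be a small test helper that reads a
# previously pickled item back through the file-system layer.  A sketch, on
# the assumption that ``_load_from_file_system`` accepts the grammar hash, the
# path, a change time, and a cache directory:

def _load_stored_item_sketch(hashed_grammar, path, item, cache_path):
    # Pass a change time just before the item's own so the stored entry is
    # treated as up to date rather than stale.
    return _load_from_file_system(hashed_grammar, path, item.change_time - 1,
                                  cache_path=cache_path)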