Example 1
def test_modulepickling_simulate_deleted_cache(tmpdir):
    """
    Tests loading from a cache file after it is deleted.
    According to macOS `dev docs`__,

        Note that the system may delete the Caches/ directory to free up disk
        space, so your app must be able to re-create or download these files as
        needed.

    It is possible that other supported platforms treat cache files the same
    way.

    __ https://developer.apple.com/library/content/documentation/FileManagement/Conceptual/FileSystemProgrammingGuide/FileSystemOverview/FileSystemOverview.html
    """
    grammar = load_grammar()
    module = 'fake parser'

    # Create the file
    path = tmpdir.dirname + '/some_path'
    with open(path, 'w'):
        pass
    io = file_io.FileIO(path)

    try_to_save_module(grammar._hashed, io, module, lines=[])
    assert load_module(grammar._hashed, io) == module

    os.unlink(_get_hashed_path(grammar._hashed, path))
    parser_cache.clear()

    cached2 = load_module(grammar._hashed, io)
    assert cached2 is None
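
For orientation, the same cache machinery is reached through parso's public API; a minimal sketch, assuming an installed parso and a placeholder path (the file does not have to exist, the path only serves as the cache key):

import parso

# With cache=True and a path, parso keeps the parsed module in its in-memory
# parser_cache and pickles it to the on-disk cache, so a later parse of the
# same, unchanged file can be answered from the cache instead of re-parsing.
module = parso.parse("x = 1\n", cache=True, path="/tmp/example.py")
print(module.get_code())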
Example 2
def test_inactive_cache(tmpdir, isolated_parso_cache):
    parser_cache.clear()
    test_subjects = "abcdef"
    for path in test_subjects:
        parse('somecode', cache=True, path=os.path.join(str(tmpdir), path))
    raw_cache_path = os.path.join(isolated_parso_cache, _VERSION_TAG)
    assert os.path.exists(raw_cache_path)
    paths = os.listdir(raw_cache_path)
    a_while_ago = time.time() - _CACHED_FILE_MAXIMUM_SURVIVAL
    old_paths = set()
    for path in paths[:len(test_subjects) // 2]:  # mark half of the cache files as old
        os.utime(os.path.join(raw_cache_path, path), (a_while_ago, a_while_ago))
        old_paths.add(path)
    # nothing should be cleared while the lock is on
    assert os.path.exists(_get_cache_clear_lock().path)
    _remove_cache_and_update_lock()  # it shouldn't clear anything
    assert len(os.listdir(raw_cache_path)) == len(test_subjects)
    assert old_paths.issubset(os.listdir(raw_cache_path))

    os.utime(_get_cache_clear_lock().path, (a_while_ago, a_while_ago))
    _remove_cache_and_update_lock()
    assert len(os.listdir(raw_cache_path)) == len(test_subjects) // 2
    assert not old_paths.intersection(os.listdir(raw_cache_path))
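
The test backdates the pickled cache files with os.utime so they look older than _CACHED_FILE_MAXIMUM_SURVIVAL. A small standalone sketch of that aging trick; the helper name is ours, not parso's:

import os
import time

def age_file(path, seconds):
    # Rewind both the access and modification timestamps by `seconds`,
    # the same trick the test uses to mark cache pickles as stale.
    past = time.time() - seconds
    os.utime(path, (past, past))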
Example 3
def test_modulepickling_simulate_deleted_cache(tmpdir):
    """
    Tests loading from a cache file after it is deleted.
    According to macOS `dev docs`__,

        Note that the system may delete the Caches/ directory to free up disk
        space, so your app must be able to re-create or download these files as
        needed.

    It is possible that other supported platforms treat cache files the same
    way.

    __ https://developer.apple.com/library/content/documentation/FileManagement/Conceptual/FileSystemProgrammingGuide/FileSystemOverview/FileSystemOverview.html
    """
    grammar = load_grammar()
    module = 'fake parser'

    # Create the file
    path = tmpdir.dirname + '/some_path'
    with open(path, 'w'):
        pass

    save_module(grammar._hashed, path, module, [])
    assert load_module(grammar._hashed, path) == module

    unlink(_get_hashed_path(grammar._hashed, path))
    parser_cache.clear()

    cached2 = load_module(grammar._hashed, path)
    assert cached2 is None
Example 4
def test_permission_error(monkeypatch):
    def save(*args, **kwargs):
        nonlocal was_called
        was_called = True
        raise PermissionError

    was_called = False

    monkeypatch.setattr(cache, '_save_to_file_system', save)
    try:
        with pytest.warns(Warning):
            parse(path=__file__, cache=True, diff_cache=True)
        assert was_called
    finally:
        parser_cache.clear()
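
The pattern above, stubbing out the writer so it raises and asserting that the caller only warns, can be reproduced without parso; a self-contained sketch with purely illustrative names:

import warnings

def _save_to_disk(data):
    raise PermissionError  # stand-in for an unwritable cache directory

def parse_with_cache(code):
    # Mirror the behaviour the test expects from parso: a failed cache write
    # is downgraded to a warning instead of aborting the parse.
    try:
        _save_to_disk(code)
    except PermissionError:
        warnings.warn("could not write parser cache", RuntimeWarning)
    return "parsed: " + code

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    assert parse_with_cache("x = 1") == "parsed: x = 1"
    assert any(issubclass(w.category, RuntimeWarning) for w in caught)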
Example 5
def test_cache_last_used_update(diff_cache, use_file_io):
    p = '/path/last-used'
    parser_cache.clear()  # Clear, because then it's easier to find stuff.
    parse('somecode', cache=True, path=p)
    node_cache_item = next(iter(parser_cache.values()))[p]
    now = time.time()
    assert node_cache_item.last_used < now

    if use_file_io:
        f = _FixedTimeFileIO(p, 'code', node_cache_item.last_used - 10)
        parse(file_io=f, cache=True, diff_cache=diff_cache)
    else:
        parse('somecode2', cache=True, path=p, diff_cache=diff_cache)

    node_cache_item = next(iter(parser_cache.values()))[p]
    assert now < node_cache_item.last_used < time.time()
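
_FixedTimeFileIO is a helper defined elsewhere in the test module and not shown here; a plausible minimal definition, assuming it subclasses parso's file_io.KnownContentFileIO and pins the reported modification time:

from parso import file_io

class _FixedTimeFileIO(file_io.KnownContentFileIO):
    def __init__(self, path, content, last_modified):
        super().__init__(path, content)
        self._last_modified = last_modified

    def get_last_modified(self):
        # Report a fixed mtime so the cache sees the file as older than the
        # stored entry's last_used timestamp.
        return self._last_modified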
Example 6
def test_cache_limit():
    def cache_size():
        return sum(len(v) for v in parser_cache.values())

    try:
        parser_cache.clear()
        future_node_cache_item = _NodeCacheItem('bla', [], change_time=time.time() + 10e6)
        old_node_cache_item = _NodeCacheItem('bla', [], change_time=time.time() - 10e4)
        parser_cache['some_hash_old'] = {
            '/path/%s' % i: old_node_cache_item for i in range(300)
        }
        parser_cache['some_hash_new'] = {
            '/path/%s' % i: future_node_cache_item for i in range(300)
        }
        assert cache_size() == 600
        parse('somecode', cache=True, path='/path/somepath')
        assert cache_size() == 301
    finally:
        parser_cache.clear()
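
The jump from 600 cached modules down to 301 relies on parso sweeping stale in-memory entries once the cache grows past an internal threshold; a simplified illustration of such a time-based sweep, not parso's actual code:

import time

def sweep_old_entries(cache, maximum_survival):
    # Keep only entries used more recently than the cutoff; items stamped
    # with a time in the future (like future_node_cache_item above) survive,
    # which is why the 600 cached modules collapse to 300 plus the fresh parse.
    cutoff = time.time() - maximum_survival
    for hashed_grammar, path_to_item in cache.items():
        cache[hashed_grammar] = {
            path: item for path, item in path_to_item.items()
            if item.last_used > cutoff
        }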
Example 7
def clear_time_caches(delete_all=False):
    """ Jedi caches many things, that should be completed after each completion
    finishes.

    :param delete_all: Deletes also the cache that is normally not deleted,
        like parser cache, which is important for faster parsing.
    """
    global _time_caches

    if delete_all:
        for cache in _time_caches.values():
            cache.clear()
        parser_cache.clear()
    else:
        # normally just kill the expired entries, not all
        for tc in _time_caches.values():
            # check time_cache for expired entries
            for key, (t, value) in list(tc.items()):
                if t < time.time():
                    # delete expired entries
                    del tc[key]
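
As the loop above implies, every cache registered in _time_caches maps a key to an (expiry_timestamp, value) pair; a small sketch of that shape, with made-up keys and values:

import time

_example_time_caches = {'example_setting': {}}

# An entry stays valid until its expiry timestamp; clear_time_caches()
# deletes it once time.time() has moved past that timestamp.
_example_time_caches['example_setting'][('some_module', 42)] = (
    time.time() + 3.0,   # valid for roughly three more seconds
    'cached value',
)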
Example 8
def test_modulepickling_change_cache_dir(tmpdir):
    """
    ParserPickling should not reuse an old cache when cache_directory is changed.

    See: `#168 <https://github.com/davidhalter/jedi/pull/168>`_
    """
    dir_1 = str(tmpdir.mkdir('first'))
    dir_2 = str(tmpdir.mkdir('second'))

    item_1 = _NodeCacheItem('bla', [])
    item_2 = _NodeCacheItem('bla', [])
    path_1 = 'fake path 1'
    path_2 = 'fake path 2'

    hashed_grammar = load_grammar()._hashed
    _save_to_file_system(hashed_grammar, path_1, item_1, cache_path=dir_1)
    parser_cache.clear()
    cached = load_stored_item(hashed_grammar, path_1, item_1, cache_path=dir_1)
    assert cached == item_1.node

    _save_to_file_system(hashed_grammar, path_2, item_2, cache_path=dir_2)
    cached = load_stored_item(hashed_grammar, path_1, item_1, cache_path=dir_2)
    assert cached is None
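
load_stored_item is a helper from the same test module rather than part of parso's public API; a plausible definition, assuming it wraps parso.cache._load_from_file_system and passes a change time just below the item's own:

from parso.cache import _load_from_file_system

def load_stored_item(hashed_grammar, path, item, cache_path):
    # Ask the on-disk cache for the pickled module; passing a change time
    # just below the item's own makes the stored pickle count as up to date.
    return _load_from_file_system(hashed_grammar, path, item.change_time - 1,
                                  cache_path=cache_path)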