def test_decoder_cache_shrink_threadsafe(monkeypatch, tmpdir):
    """Tests that shrink handles files deleted by other processes."""
    cache_dir = str(tmpdir)
    solver_mock = SolverMock()
    another_solver = SolverMock('another_solver')

    cache = DecoderCache(cache_dir=cache_dir)
    cache.wrap_solver(solver_mock)(**get_solver_test_args())
    limit = cache.get_size()

    # Ensure differing time stamps (depending on the file system the timestamp
    # resolution might be as bad as 1 day).
    for filename in os.listdir(cache.cache_dir):
        path = os.path.join(cache.cache_dir, filename)
        timestamp = os.stat(path).st_atime
        timestamp -= 60 * 60 * 24 * 2  # 2 days
        os.utime(path, (timestamp, timestamp))

    cache.wrap_solver(another_solver)(**get_solver_test_args())
    cache_size = cache.get_size_in_bytes()
    assert cache_size > 0

    def raise_file_not_found(*args, **kwargs):
        raise OSError(errno.ENOENT, "File not found.")

    monkeypatch.setattr(cache, 'get_size_in_bytes', lambda: cache_size)
    monkeypatch.setattr('os.stat', raise_file_not_found)
    monkeypatch.setattr('os.remove', raise_file_not_found)
    monkeypatch.setattr('os.unlink', raise_file_not_found)

    cache.shrink(limit)

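
# Illustrative sketch only (the helper name below is an assumption for
# illustration, not part of nengo's cache API): the pattern the thread-safety
# test above exercises is that cache maintenance tolerates files which another
# process deleted between listing and removal, i.e. ENOENT is swallowed while
# any other error still propagates. Uses the module-level `os` and `errno`
# imports already required by the tests.
def _safe_remove(path):
    try:
        os.remove(path)
    except OSError as err:
        if err.errno != errno.ENOENT:
            raise  # only ignore "file not found"; re-raise everything else
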
def test_decoder_cache_shrinking(tmpdir):
    cache_dir = str(tmpdir)
    solver_mock = SolverMock()
    another_solver = SolverMock('another_solver')

    cache = DecoderCache(cache_dir=cache_dir)
    cache.wrap_solver(solver_mock)(**get_solver_test_args())

    # Ensure differing time stamps (depending on the file system the timestamp
    # resolution might be as bad as 1 day).
    for path in cache.get_files():
        timestamp = os.stat(path).st_atime
        timestamp -= 60 * 60 * 24 * 2  # 2 days
        os.utime(path, (timestamp, timestamp))

    cache.wrap_solver(another_solver)(**get_solver_test_args())

    cache_size = cache.get_size_in_bytes()
    assert cache_size > 0

    cache.shrink(cache_size - 1)

    # Check that the older cached result was removed.
    assert SolverMock.n_calls[solver_mock] == 1
    cache.wrap_solver(another_solver)(**get_solver_test_args())
    cache.wrap_solver(solver_mock)(**get_solver_test_args())
    assert SolverMock.n_calls[solver_mock] == 2
    assert SolverMock.n_calls[another_solver] == 1

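
# Minimal sketch of the eviction strategy the shrinking test above relies on
# (an assumption for illustration, not DecoderCache's actual implementation):
# delete files in ascending access-time order until the cache fits within
# `limit` bytes. This is why backdating the first solver's files by two days
# makes them the eviction candidates.
def _shrink_by_access_time(cache_dir, limit):
    paths = [os.path.join(cache_dir, name) for name in os.listdir(cache_dir)]
    paths.sort(key=lambda p: os.stat(p).st_atime)  # least recently used first
    total = sum(os.stat(p).st_size for p in paths)
    for path in paths:
        if total <= limit:
            break
        total -= os.stat(path).st_size
        os.remove(path)
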
def test_decoder_cache_size_includes_overhead(tmpdir):
    cache_dir = str(tmpdir)
    solver_mock = SolverMock()

    cache = DecoderCache(cache_dir=cache_dir)
    cache.wrap_solver(solver_mock)(**get_solver_test_args())

    fragment_size = get_fragment_size(cache_dir)
    actual_size = sum(os.stat(p).st_size for p in cache.get_files())
    assert actual_size % fragment_size != 0, (
        'Test succeeded by chance. Adjust get_solver_test_args() to produce '
        'data not aligned with the file system fragment size.')

    assert cache.get_size_in_bytes() % fragment_size == 0

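
# Minimal sketch of the size accounting the overhead test above checks (the
# helper name is illustrative, not the cache's API): a file occupies whole
# file-system fragments on disk, so its reported size is rounded up to the
# next multiple of the fragment size, e.g. _byte_align(10, 4096) == 4096.
def _byte_align(size, fragment_size):
    rest = size % fragment_size
    return size if rest == 0 else size + (fragment_size - rest)
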