def test_shrink_does_not_fail_if_lock_cannot_be_acquired(tmpdir):
    """shrink() must return gracefully when the index lock is already held."""
    cache = DecoderCache(cache_dir=str(tmpdir))
    cache._index._lock.timeout = 1.  # fail fast instead of blocking long
    with cache:
        cache.wrap_solver(SolverMock())(**get_solver_test_args())
        # Hold the lock ourselves so shrink() cannot acquire it.
        with cache._index._lock:
            cache.shrink(limit=0)
def test_warns_out_of_context(tmpdir):
    """Calling a wrapped solver outside the cache context warns but still solves."""
    cache = DecoderCache(cache_dir=str(tmpdir))
    mock = SolverMock()
    wrapped = cache.wrap_solver(mock)
    with pytest.warns(UserWarning):
        wrapped(**get_solver_test_args())
    assert SolverMock.n_calls[mock] == 1
def test_decoder_cache_shrink_threadsafe(monkeypatch, tmpdir):
    """Tests that shrink handles files deleted by other processes."""
    cache_dir = str(tmpdir)
    solver_mock = SolverMock()
    another_solver = SolverMock('another_solver')
    cache = DecoderCache(cache_dir=cache_dir)
    cache.wrap_solver(solver_mock)(**get_solver_test_args())
    limit = cache.get_size()

    # Ensure differing time stamps (depending on the file system the timestamp
    # resolution might be as bad as 1 day).
    for filename in os.listdir(cache.cache_dir):
        path = os.path.join(cache.cache_dir, filename)
        timestamp = os.stat(path).st_atime
        timestamp -= 60 * 60 * 24 * 2  # 2 days
        os.utime(path, (timestamp, timestamp))

    cache.wrap_solver(another_solver)(**get_solver_test_args())
    cache_size = cache.get_size_in_bytes()
    assert cache_size > 0

    def raise_file_not_found(orig_fn):
        # BUG FIX: previously os.stat/os.remove/os.unlink raised
        # unconditionally, which also broke the index lock-file handling that
        # shrink() depends on. Exempt '.lock' files so that only cache
        # entries appear to have been deleted by another process.
        def fn(filename, *args, **kwargs):
            if filename.endswith('.lock'):
                return orig_fn(filename, *args, **kwargs)
            raise OSError(errno.ENOENT, "File not found.")
        return fn

    monkeypatch.setattr(cache, 'get_size_in_bytes', lambda: cache_size)
    monkeypatch.setattr('os.stat', raise_file_not_found(os.stat))
    monkeypatch.setattr('os.remove', raise_file_not_found(os.remove))
    monkeypatch.setattr('os.unlink', raise_file_not_found(os.unlink))
    cache.shrink(limit)
def test_decoder_cache_with_E_argument_to_solver(tmpdir):
    """Weight solves (E argument) are cached and read back identically."""
    mock = SolverMock()
    cache = DecoderCache(cache_dir=str(tmpdir))

    weights1, info1 = cache.wrap_solver(mock)(
        **get_weight_solver_test_args())
    assert SolverMock.n_calls[mock] == 1

    weights2, info2 = cache.wrap_solver(mock)(
        **get_weight_solver_test_args())
    assert SolverMock.n_calls[mock] == 1  # read from cache?
    assert_equal(weights1, weights2)
    assert info1 == info2
def test_decoder_cache_size_includes_overhead(tmpdir):
    """Reported cache size is rounded up to whole file-system fragments."""
    cache_dir = str(tmpdir)
    mock = SolverMock()
    cache = DecoderCache(cache_dir=cache_dir)
    cache.wrap_solver(mock)(**get_solver_test_args())

    fragment_size = get_fragment_size(cache_dir)
    raw_size = sum(os.stat(p).st_size for p in cache.get_files())
    # If the raw size happened to align exactly, the test would prove nothing.
    assert raw_size % fragment_size != 0, (
        'Test succeeded by chance. Adjust get_solver_test_args() to produce '
        'date not aligned with the files system fragment size.')
    assert cache.get_size_in_bytes() % fragment_size == 0
def test_corrupted_decoder_cache(tmpdir):
    """A corrupted cache entry is treated as a miss, not an error."""
    cache_dir = str(tmpdir)
    cache = DecoderCache(cache_dir=cache_dir)
    mock = SolverMock()
    cache.wrap_solver(mock)(**get_solver_test_args())
    assert SolverMock.n_calls[mock] == 1

    # corrupt the cache
    for filename in os.listdir(cache_dir):
        with open(os.path.join(cache_dir, filename), 'w') as f:
            f.write('corrupted')

    cache.wrap_solver(mock)(**get_solver_test_args())
    assert SolverMock.n_calls[mock] == 2
def test_corrupted_decoder_cache_index(tmpdir):
    """A truncated index file must be detected and rewritten on load."""
    cache_dir = str(tmpdir)

    with DecoderCache(cache_dir=cache_dir):
        pass  # Initialize cache with required files
    assert len(os.listdir(cache_dir)) == 2  # index, index.lock

    # Write corrupted index
    with open(os.path.join(cache_dir, CacheIndex._INDEX), 'w') as f:
        f.write('(d')  # empty dict, but missing '.' at the end

    # Try to load index
    with DecoderCache(cache_dir=cache_dir):
        pass
    assert len(os.listdir(cache_dir)) == 2  # index, index.lock
def test_decoder_cache(tmpdir):
    """End-to-end caching: hit, miss on changed args, per-solver separation."""
    cache_dir = str(tmpdir)
    # Basic test, that results are cached.
    with DecoderCache(cache_dir=cache_dir) as cache:
        mock = SolverMock()
        dec1, info1 = cache.wrap_solver(mock)(**get_solver_test_args())
        assert SolverMock.n_calls[mock] == 1

        dec2, info2 = cache.wrap_solver(mock)(**get_solver_test_args())
        assert SolverMock.n_calls[mock] == 1  # result read from cache?
        assert_equal(dec1, dec2)
        assert info1 == info2

        # Changing an argument must miss the cache.
        solver_args = get_solver_test_args()
        solver_args['gain'] *= 2
        dec3, info3 = cache.wrap_solver(mock)(**solver_args)
        assert SolverMock.n_calls[mock] == 2
        assert np.any(dec1 != dec3)

        # Test that the cache does not load results of another solver.
        another_solver = SolverMock()
        cache.wrap_solver(another_solver)(**get_solver_test_args(
            solver=nengo.solvers.LstsqNoise()))
        assert SolverMock.n_calls[another_solver] == 1
def build_many_ensembles(cache_dir, RefSimulator):
    """Build a network with many decoder solves using the given cache dir."""
    with nengo.Network(seed=1) as model:
        for _ in range(100):
            nengo.Connection(nengo.Ensemble(10, 1), nengo.Ensemble(10, 1))
    with RefSimulator(model, model=nengo.builder.Model(
            dt=0.001, decoder_cache=DecoderCache(cache_dir=cache_dir))):
        pass
def test_decoder_cache(tmpdir):
    """Solver results are cached, keyed on inputs, and kept per solver."""
    cache_dir = str(tmpdir)
    n_eval_points, n_neurons, dims = 100, 10, 2
    activities = np.ones((n_eval_points, dims))
    targets = np.ones((n_eval_points, n_neurons))
    rng = np.random.RandomState(42)

    # Basic test, that results are cached.
    cache = DecoderCache(cache_dir=cache_dir)
    mock = SolverMock()
    dec1, info1 = cache.wrap_solver(mock)(activities, targets, rng)
    assert SolverMock.n_calls[mock] == 1
    dec2, info2 = cache.wrap_solver(mock)(activities, targets, rng)
    assert SolverMock.n_calls[mock] == 1  # result read from cache?
    assert_equal(dec1, dec2)
    assert info1 == info2

    dec3, info3 = cache.wrap_solver(mock)(2 * activities, targets, rng)
    assert SolverMock.n_calls[mock] == 2
    assert np.any(dec1 != dec3)

    # Test that the cache does not load results of another solver.
    another_solver = SolverMock('another_solver')
    cache.wrap_solver(another_solver)(activities, targets, rng)
    assert SolverMock.n_calls[another_solver] == 1
def test_decoder_cache_invalidation(tmpdir):
    """invalidate() forces the next solve to bypass cached results."""
    mock = SolverMock()
    # Basic test, that results are cached.
    cache = DecoderCache(cache_dir=str(tmpdir))
    cache.wrap_solver(mock)(**get_solver_test_args())
    assert SolverMock.n_calls[mock] == 1
    cache.invalidate()
    cache.wrap_solver(mock)(**get_solver_test_args())
    assert SolverMock.n_calls[mock] == 2
def test_decoder_cache_remove_orphans(tmpdir):
    """remove_orphans() deletes unpaired cache files, keeps pairs and extras."""
    cache_dir = str(tmpdir)
    cache = DecoderCache(cache_dir=cache_dir)
    assert os.listdir(cache_dir) == []

    # Create some files; one matching pair, two mismatches, and a random
    def touch(fname):
        path = os.path.join(cache_dir, fname)
        with open(path, 'w'):
            os.utime(path, None)

    files = ['a.npy', 'a.pkl', 'b.npy', 'c.pkl', 'rando.txt']
    for f in files:
        touch(f)
    assert sorted(os.listdir(cache_dir)) == files

    cache.remove_orphans()  # This should do
    files.remove('b.npy')
    files.remove('c.pkl')
    assert sorted(os.listdir(cache_dir)) == files
def test_cache_works(tmpdir, RefSimulator, seed):
    """Building a seeded model populates the decoder cache on disk."""
    cache_dir = str(tmpdir)
    model = nengo.Network(seed=seed)
    with model:
        nengo.Connection(nengo.Ensemble(10, 1), nengo.Ensemble(10, 1))
    assert len(os.listdir(cache_dir)) == 0
    with RefSimulator(model, model=nengo.builder.Model(
            dt=0.001, decoder_cache=DecoderCache(cache_dir=cache_dir))):
        assert len(os.listdir(cache_dir)) == 2  # legacy.txt and *.nco
def test_too_new_decoder_cache_index(tmp_path):
    """An index written by a newer version still lets solving proceed."""
    cache_dir = str(tmp_path)
    # Write index with super large version numbers
    with open(os.path.join(cache_dir, CacheIndex._INDEX), "wb") as f:
        pickle.dump((1000, 1000), f)

    with DecoderCache(cache_dir=cache_dir) as cache:
        mock = SolverMock()
        cache.wrap_solver(mock)(**get_solver_test_args())
        assert SolverMock.n_calls[mock] == 1
def test_too_new_decoder_cache_index(tmp_path):
    """A too-new index deactivates the cache with a warning; solving works."""
    cache_dir = str(tmp_path)
    # Write index with super large version numbers
    with open(os.path.join(cache_dir, CacheIndex._INDEX), "wb") as f:
        pickle.dump((1000, 1000), f)

    with pytest.warns(
            UserWarning, match="could not acquire lock and was deactivated"):
        with DecoderCache(cache_dir=cache_dir) as cache:
            mock = SolverMock()
            cache.wrap_solver(mock)(**get_solver_test_args())
            assert SolverMock.n_calls[mock] == 1
def test_decoder_cache(tmpdir):
    """Caching round-trip: hit, miss on changed gain, per-solver separation."""
    cache_dir = str(tmpdir)
    # Basic test, that results are cached.
    cache = DecoderCache(cache_dir=cache_dir)
    mock = SolverMock()
    dec1, info1 = cache.wrap_solver(mock)(**get_solver_test_args())
    assert SolverMock.n_calls[mock] == 1
    dec2, info2 = cache.wrap_solver(mock)(**get_solver_test_args())
    assert SolverMock.n_calls[mock] == 1  # result read from cache?
    assert_equal(dec1, dec2)
    assert info1 == info2

    # Changing an argument must miss the cache.
    solver_args = get_solver_test_args()
    solver_args['gain'] *= 2
    dec3, info3 = cache.wrap_solver(mock)(**solver_args)
    assert SolverMock.n_calls[mock] == 2
    assert np.any(dec1 != dec3)

    # Test that the cache does not load results of another solver.
    another_solver = SolverMock('another_solver')
    cache.wrap_solver(another_solver)(**get_solver_test_args())
    assert SolverMock.n_calls[another_solver] == 1
def test_decoder_cache(tmpdir):
    """Results are cached per input arguments and per solver instance."""
    cache_dir = str(tmpdir)
    n_eval_points, n_neurons, dims = 100, 10, 2
    activities = np.ones((n_eval_points, dims))
    targets = np.ones((n_eval_points, n_neurons))
    rng = np.random.RandomState(42)

    # Basic test, that results are cached.
    cache = DecoderCache(cache_dir=cache_dir)
    mock = SolverMock()
    dec1, info1 = cache.wrap_solver(mock)(activities, targets, rng)
    assert SolverMock.n_calls[mock] == 1
    dec2, info2 = cache.wrap_solver(mock)(activities, targets, rng)
    assert SolverMock.n_calls[mock] == 1  # result read from cache?
    assert_equal(dec1, dec2)
    assert info1 == info2

    dec3, info3 = cache.wrap_solver(mock)(2 * activities, targets, rng)
    assert SolverMock.n_calls[mock] == 2
    assert np.any(dec1 != dec3)

    # Test that the cache does not load results of another solver.
    another_solver = SolverMock('another_solver')
    cache.wrap_solver(another_solver)(activities, targets, rng)
    assert SolverMock.n_calls[another_solver] == 1
def test_corrupted_decoder_cache(tmpdir):
    """Corrupted cache files cause recomputation instead of failure."""
    cache_dir = str(tmpdir)
    n_eval_points, n_neurons, dims = 100, 10, 2
    activities = np.ones((n_eval_points, dims))
    targets = np.ones((n_eval_points, n_neurons))
    rng = np.random.RandomState(42)

    cache = DecoderCache(cache_dir=cache_dir)
    mock = SolverMock()
    cache.wrap_solver(mock)(activities, targets, rng)
    assert SolverMock.n_calls[mock] == 1

    # corrupt the cache
    for filename in os.listdir(cache_dir):
        with open(os.path.join(cache_dir, filename), 'w') as f:
            f.write('corrupted')

    cache.wrap_solver(mock)(activities, targets, rng)
    assert SolverMock.n_calls[mock] == 2
def test_decoder_cache_with_E_argument_to_solver(tmpdir):
    """Weight solves (E keyword) are cached like plain decoder solves."""
    mock = SolverMock()
    n_eval_points, n_neurons, n_post, dims = 100, 10, 5, 2
    activities = np.ones((n_eval_points, dims))
    targets = np.ones((n_eval_points, n_neurons))
    rng = np.random.RandomState(42)
    E = np.ones((dims, n_post))

    cache = DecoderCache(cache_dir=str(tmpdir))
    weights1, info1 = cache.wrap_solver(mock)(activities, targets, rng, E=E)
    assert SolverMock.n_calls[mock] == 1
    weights2, info2 = cache.wrap_solver(mock)(activities, targets, rng, E=E)
    assert SolverMock.n_calls[mock] == 1  # read from cache?
    assert_equal(weights1, weights2)
    assert info1 == info2
def test_cache_not_used_without_seed(tmpdir, RefSimulator):
    """Unseeded models must not write solver results into the cache."""
    cache_dir = str(tmpdir)
    model = nengo.Network()
    with model:
        nengo.Connection(nengo.Ensemble(10, 1), nengo.Ensemble(10, 1))
    assert len(os.listdir(cache_dir)) == 0
    with RefSimulator(model, model=nengo.builder.Model(
            dt=0.001, decoder_cache=DecoderCache(cache_dir=cache_dir))):
        # Only bookkeeping files, no cached decoders.
        assert len(os.listdir(cache_dir)) == 2  # index, index.lock
def test_decoder_cache_size_includes_overhead(tmpdir):
    """Cache size accounting rounds file sizes up to fragment boundaries."""
    cache_dir = str(tmpdir)
    mock = SolverMock()
    with DecoderCache(cache_dir=cache_dir) as cache:
        cache.wrap_solver(mock)(**get_solver_test_args())
        fragment_size = get_fragment_size(cache_dir)
        raw_size = sum(os.stat(p).st_size for p in cache.get_files())
        # If raw size aligned exactly, this test would prove nothing.
        assert raw_size % fragment_size != 0, (
            'Test succeeded by chance. Adjust get_solver_test_args() to '
            'produce date not aligned with the files system fragment size.')
        assert cache.get_size_in_bytes() % fragment_size == 0
def test_cache_works(tmp_path, Simulator, seed):
    """Building a seeded model creates cache files (index, lock, entry)."""
    cache_dir = str(tmp_path)
    model = nengo.Network(seed=seed)
    with model:
        nengo.Connection(nengo.Ensemble(10, 1), nengo.Ensemble(10, 1))
    assert len(os.listdir(cache_dir)) == 0
    with Simulator(
            model,
            model=nengo.builder.Model(
                dt=0.001, decoder_cache=DecoderCache(cache_dir=cache_dir)),
    ):
        assert len(os.listdir(cache_dir)) == 3  # index, index.lock, and *.nco
def test_readonly_cache(caplog, tmp_path):
    """A readonly cache refuses invalidation and only logs shrink attempts."""
    caplog.set_level(logging.INFO)
    cache_dir = str(tmp_path)
    # Hand-write a valid, empty index so the cache can be opened readonly.
    with open(os.path.join(cache_dir, CacheIndex._INDEX), "wb") as f:
        pickle.dump((CacheIndex.VERSION, pickle.HIGHEST_PROTOCOL), f)
        pickle.dump({}, f)

    with DecoderCache(readonly=True, cache_dir=cache_dir) as cache:
        cache.shrink()
        with pytest.raises(
                CacheIOError, match="Cannot invalidate a readonly cache."):
            cache.invalidate()
    assert len(caplog.records) == 1
    assert caplog.records[0].message == "Tried to shrink a readonly cache."
def test_decoder_cache_shrink_threadsafe(monkeypatch, tmpdir):
    """Tests that shrink handles files deleted by other processes."""
    cache_dir = str(tmpdir)
    mock = SolverMock()
    another_solver = SolverMock('another_solver')
    cache = DecoderCache(cache_dir=cache_dir)
    cache.wrap_solver(mock)(**get_solver_test_args())
    limit = cache.get_size()

    # Ensure differing time stamps (depending on the file system the timestamp
    # resolution might be as bad as 1 day).
    for filename in os.listdir(cache.cache_dir):
        path = os.path.join(cache.cache_dir, filename)
        timestamp = os.stat(path).st_atime - 60 * 60 * 24 * 2  # 2 days
        os.utime(path, (timestamp, timestamp))

    cache.wrap_solver(another_solver)(**get_solver_test_args())
    cache_size = cache.get_size_in_bytes()
    assert cache_size > 0

    def make_raiser(orig_fn):
        # Simulate another process having deleted the cache entries while
        # leaving the lock files functional.
        def patched(filename, *args, **kwargs):
            if filename.endswith('.lock'):
                return orig_fn(filename, *args, **kwargs)
            raise OSError(errno.ENOENT, "File not found.")
        return patched

    monkeypatch.setattr(cache, 'get_size_in_bytes', lambda: cache_size)
    monkeypatch.setattr('os.stat', make_raiser(os.stat))
    monkeypatch.setattr('os.remove', make_raiser(os.remove))
    monkeypatch.setattr('os.unlink', make_raiser(os.unlink))
    cache.shrink(limit)
def test_corrupted_decoder_cache(tmp_path):
    """Corrupting cached files forces the solver to run again."""
    with DecoderCache(cache_dir=str(tmp_path)) as cache:
        mock = SolverMock()
        cache.wrap_solver(mock)(**get_solver_test_args())
        assert SolverMock.n_calls[mock] == 1

        # corrupt the cache
        for path in cache.get_files():
            with open(path, "w", encoding="utf-8") as f:
                f.write("corrupted")

        cache.wrap_solver(mock)(**get_solver_test_args())
        assert SolverMock.n_calls[mock] == 2
def test_cache_performance(tmpdir, Simulator, seed):
    """A cache miss adds little overhead; a cache hit gives a clear speedup."""
    cache_dir = str(tmpdir)
    model = nengo.Network(seed=seed)
    with model:
        nengo.Connection(nengo.Ensemble(2000, 10), nengo.Ensemble(2000, 10))

    with Timer() as t_no_cache:
        Simulator(model, model=nengo.builder.Model(
            dt=0.001, decoder_cache=NoDecoderCache()))
    with Timer() as t_cache_miss:
        Simulator(model, model=nengo.builder.Model(
            dt=0.001, decoder_cache=DecoderCache(cache_dir=cache_dir)))
    with Timer() as t_cache_hit:
        Simulator(model, model=nengo.builder.Model(
            dt=0.001, decoder_cache=DecoderCache(cache_dir=cache_dir)))

    assert calc_relative_timer_diff(t_no_cache, t_cache_miss) < 0.1
    assert calc_relative_timer_diff(t_cache_hit, t_no_cache) > 0.4
def test_corrupted_decoder_cache(tmpdir):
    """Corrupted entries are treated as cache misses."""
    with DecoderCache(cache_dir=str(tmpdir)) as cache:
        mock = SolverMock()
        cache.wrap_solver(mock)(**get_solver_test_args())
        assert SolverMock.n_calls[mock] == 1

        # corrupt the cache
        for path in cache.get_files():
            with open(path, 'w') as f:
                f.write('corrupted')

        cache.wrap_solver(mock)(**get_solver_test_args())
        assert SolverMock.n_calls[mock] == 2
def test_decoder_cache_invalidation(tmpdir):
    """invalidate() clears cached results so the solver runs again."""
    mock = SolverMock()
    n_eval_points, n_neurons, dims = 100, 10, 2
    activities = np.ones((n_eval_points, dims))
    targets = np.ones((n_eval_points, n_neurons))
    rng = np.random.RandomState(42)

    # Basic test, that results are cached.
    cache = DecoderCache(cache_dir=str(tmpdir))
    cache.wrap_solver(mock)(activities, targets, rng)
    assert SolverMock.n_calls[mock] == 1
    cache.invalidate()
    cache.wrap_solver(mock)(activities, targets, rng)
    assert SolverMock.n_calls[mock] == 2
def test_cache_performance(tmpdir, Simulator):
    """A cache miss costs little; a cache hit speeds up a rebuild.

    BUG FIX: the ``Simulator`` fixture parameter was previously unused — the
    body hard-coded ``nengo.Simulator`` — so backend-provided simulators were
    never exercised. Also pass ``cache_dir`` by keyword, consistent with
    every other ``DecoderCache`` construction in this file.
    """
    cache_dir = str(tmpdir)
    model = nengo.Network(seed=1)
    with model:
        nengo.Connection(nengo.Ensemble(1500, 10), nengo.Ensemble(1500, 10))
    built_model = nengo.builder.Model(
        dt=0.001, seed=model.seed,
        decoder_cache=DecoderCache(cache_dir=cache_dir))

    with Timer() as t_no_cache:
        Simulator(model, caching=False)
    with Timer() as t_cache_miss:
        Simulator(model, model=built_model, caching=True)
    with Timer() as t_cache_hit:
        Simulator(model, model=built_model, caching=True)

    assert calc_relative_timer_diff(t_no_cache, t_cache_miss) < 0.1
    assert calc_relative_timer_diff(t_cache_hit, t_no_cache) > 0.75
def test_corrupted_decoder_cache(tmpdir):
    """After corruption, cached results are recomputed rather than loaded."""
    cache = DecoderCache(cache_dir=str(tmpdir))
    mock = SolverMock()
    cache.wrap_solver(mock)(**get_solver_test_args())
    assert SolverMock.n_calls[mock] == 1

    # corrupt the cache
    for path in cache.get_files():
        with open(path, 'w') as f:
            f.write('corrupted')

    cache.wrap_solver(mock)(**get_solver_test_args())
    assert SolverMock.n_calls[mock] == 2
def test_decoder_cache_shrinking(tmpdir):
    """shrink() evicts the least recently used entry down to the limit."""
    mock = SolverMock()
    another_solver = SolverMock('another_solver')
    n_eval_points, n_neurons, dims = 100, 10, 2
    activities = np.ones((n_eval_points, dims))
    targets = np.ones((n_eval_points, n_neurons))
    rng = np.random.RandomState(42)

    cache = DecoderCache(cache_dir=str(tmpdir))
    cache.wrap_solver(mock)(activities, targets, rng)
    limit = cache.get_size()

    # Ensure differing time stamps (depending on the file system the timestamp
    # resolution might be as bad as 1 day).
    for filename in os.listdir(cache.cache_dir):
        path = os.path.join(cache.cache_dir, filename)
        timestamp = os.stat(path).st_atime - 60 * 60 * 24 * 2  # 2 days
        os.utime(path, (timestamp, timestamp))

    cache.wrap_solver(another_solver)(activities, targets, rng)
    assert cache.get_size() > 0
    cache.shrink(limit)

    # check that older cached result was removed
    assert SolverMock.n_calls[mock] == 1
    cache.wrap_solver(another_solver)(activities, targets, rng)
    cache.wrap_solver(mock)(activities, targets, rng)
    assert SolverMock.n_calls[mock] == 2
    assert SolverMock.n_calls[another_solver] == 1
def test_decoder_cache_shrinking(tmpdir):
    """Shrinking just below the current size evicts only the oldest entry."""
    mock = SolverMock()
    another_solver = SolverMock('another_solver')
    cache = DecoderCache(cache_dir=str(tmpdir))
    cache.wrap_solver(mock)(**get_solver_test_args())

    # Ensure differing time stamps (depending on the file system the timestamp
    # resolution might be as bad as 1 day).
    for path in cache.get_files():
        timestamp = os.stat(path).st_atime - 60 * 60 * 24 * 2  # 2 days
        os.utime(path, (timestamp, timestamp))

    cache.wrap_solver(another_solver)(**get_solver_test_args())
    cache_size = cache.get_size_in_bytes()
    assert cache_size > 0
    cache.shrink(cache_size - 1)

    # check that older cached result was removed
    assert SolverMock.n_calls[mock] == 1
    cache.wrap_solver(another_solver)(**get_solver_test_args())
    cache.wrap_solver(mock)(**get_solver_test_args())
    assert SolverMock.n_calls[mock] == 2
    assert SolverMock.n_calls[another_solver] == 1