def test_memory_arg_nested():
    """Check that caching works when the argument is a nested function."""
    memory = Memory(cachedir=env['dir'], verbose=0)
    memory.clear(warn=False)
    # Counts how many times the cached function body actually executes.
    accum = {'value': 0}

    def func_1():
        return 1

    def func_2():
        return 2

    def decorator(func_):
        # Wrap func_ in another closure, the way a decorator would.
        def decorated():
            return func_()
        return decorated

    @memory.cache()
    def run_func(func):
        accum['value'] += 1
        return func()

    first = run_func(decorator(func_1))
    second = run_func(decorator(func_1))
    third = run_func(decorator(func_2))

    nose.tools.assert_equal(first, 1)
    nose.tools.assert_equal(second, 1)
    nose.tools.assert_equal(third, 2)
    # Only two distinct wrapped functions were passed in, so only two
    # real evaluations should have happened.
    nose.tools.assert_equal(accum['value'], 2)
def test_func_dir():
    # Test the creation of the memory cache directory for the function.
    memory = Memory(cachedir=env["dir"], verbose=0)
    memory.clear()
    path = __name__.split(".")
    path.append("f")
    path = os.path.join(env["dir"], "joblib", *path)

    g = memory.cache(f)
    # Test that the function directory is created on demand
    yield nose.tools.assert_equal, g._get_func_dir(), path
    yield nose.tools.assert_true, os.path.exists(path)

    # Test that the code is stored.
    # For the following test to be robust to previous execution, we clear
    # the in-memory store
    _FUNCTION_HASHES.clear()
    yield nose.tools.assert_false, g._check_previous_func_code()
    yield nose.tools.assert_true, os.path.exists(
        os.path.join(path, "func_code.py"))
    yield nose.tools.assert_true, g._check_previous_func_code()

    # Test the robustness to failure of loading previous results.
    # 'output_dir' (renamed from 'dir' to avoid shadowing the builtin)
    # is the directory holding the cached result of g(1).
    output_dir, _ = g.get_output_dir(1)
    result = g(1)
    yield nose.tools.assert_true, os.path.exists(output_dir)
    # Delete the stored output; the next call must recompute transparently.
    os.remove(os.path.join(output_dir, "output.pkl"))
    yield nose.tools.assert_equal, result, g(1)
def test_memory_warning_collision_detection():
    # Collisions that joblib cannot possibly detect (lambdas built with
    # eval, whose source cannot be introspected) must still warn.
    memory = Memory(cachedir=env['dir'], verbose=0)
    memory.clear()  # isolation from other tests
    a1 = eval('lambda x: x')
    a1 = memory.cache(a1)
    b1 = eval('lambda x: x+1')
    b1 = memory.cache(b1)

    if not hasattr(warnings, 'catch_warnings'):
        # catch_warnings is new in Python 2.6
        return

    with warnings.catch_warnings(record=True) as w:
        # Cause all warnings to always be triggered.
        warnings.simplefilter("always")
        # This is a temporary workaround until we get rid of
        # inspect.getargspec, see
        # https://github.com/joblib/joblib/issues/247
        warnings.simplefilter("ignore", DeprecationWarning)
        a1(1)
        b1(1)
        a1(0)

        yield nose.tools.assert_equal, len(w), 2
        yield nose.tools.assert_true, \
            "cannot detect" in str(w[-1].message).lower()
def test_func_dir(tmpdir):
    # Test the creation of the memory cache directory for the function.
    memory = Memory(cachedir=tmpdir.strpath, verbose=0)
    memory.clear()
    path = __name__.split('.')
    path.append('f')
    path = tmpdir.join('joblib', *path).strpath

    g = memory.cache(f)
    # Test that the function directory is created on demand
    assert g._get_func_dir() == path
    assert os.path.exists(path)

    # Test that the code is stored.
    # For the following test to be robust to previous execution, we clear
    # the in-memory store
    _FUNCTION_HASHES.clear()
    assert not g._check_previous_func_code()
    assert os.path.exists(os.path.join(path, 'func_code.py'))
    assert g._check_previous_func_code()

    # Test the robustness to failure of loading previous results.
    # 'output_dir' (renamed from 'dir' to avoid shadowing the builtin)
    # is the directory holding the cached result of g(1).
    output_dir, _ = g.get_output_dir(1)
    result = g(1)
    assert os.path.exists(output_dir)
    # Remove the stored output; the next call must recompute it.
    os.remove(os.path.join(output_dir, 'output.pkl'))
    assert result == g(1)
def test_func_dir():
    # Test the creation of the memory cache directory for the function.
    memory = Memory(cachedir=env['dir'], verbose=0)
    memory.clear()
    path = __name__.split('.')
    path.append('f')
    path = os.path.join(env['dir'], 'joblib', *path)

    g = memory.cache(f)
    # Test that the function directory is created on demand
    yield nose.tools.assert_equal, g._get_func_dir(), path
    yield nose.tools.assert_true, os.path.exists(path)

    # Test that the code is stored.
    # For the following test to be robust to previous execution, we clear
    # the in-memory store
    _FUNCTION_HASHES.clear()
    yield nose.tools.assert_false, \
        g._check_previous_func_code()
    yield nose.tools.assert_true, \
        os.path.exists(os.path.join(path, 'func_code.py'))
    yield nose.tools.assert_true, \
        g._check_previous_func_code()

    # Test the robustness to failure of loading previous results.
    # 'output_dir' (renamed from 'dir' to avoid shadowing the builtin)
    # is the directory holding the cached result of g(1).
    output_dir, _ = g.get_output_dir(1)
    result = g(1)
    yield nose.tools.assert_true, os.path.exists(output_dir)
    # Delete the stored output; the next call must recompute transparently.
    os.remove(os.path.join(output_dir, 'output.pkl'))
    yield nose.tools.assert_equal, result, g(1)
def check_identity_lazy(func, accumulator):
    """Check that `func`, once cached, behaves as a lazy identity.

    `accumulator` is a list that grows each time `func` is really
    evaluated, so its length counts non-cached calls.
    """
    memory = Memory(cachedir=env['dir'], verbose=0)
    memory.clear(warn=False)
    cached = memory.cache(func)
    # Each argument must be evaluated exactly once, however often called.
    for value in range(3):
        for _ in range(2):
            yield nose.tools.assert_equal, cached(value), value
            yield nose.tools.assert_equal, len(accumulator), value + 1
def check_identity_lazy(func, accumulator, cachedir):
    """Check that `func`, once cached, behaves as a lazy identity.

    `accumulator` is a list that grows each time `func` is really
    evaluated, so its length counts non-cached calls.
    """
    memory = Memory(cachedir=cachedir, verbose=0)
    memory.clear(warn=False)
    cached = memory.cache(func)
    # Each argument must be evaluated exactly once, however often called.
    for value in range(3):
        for _ in range(2):
            assert cached(value) == value
            assert len(accumulator) == value + 1
def test_memory_warning_lambda_collisions(tmpdir):
    # Check that multiple use of lambda will raise collisions
    memory = Memory(cachedir=tmpdir.strpath, verbose=0)
    memory.clear()  # isolation from other tests
    a = lambda x: x
    a = memory.cache(a)
    b = lambda x: x + 1
    b = memory.cache(b)

    with warns(JobLibCollisionWarning) as warninfo:
        for fn, arg, expected in ((a, 0, 0), (b, 1, 2), (a, 1, 1)):
            assert fn(arg) == expected

    # In recent Python versions, we can retrieve the code of lambdas,
    # thus nothing is raised
    assert len(warninfo) == 4
def test_memory_warning_collision_detection(tmpdir):
    # Collisions that joblib cannot possibly detect (lambdas built with
    # eval, whose source cannot be introspected) must still warn.
    memory = Memory(cachedir=tmpdir.strpath, verbose=0)
    memory.clear()  # isolation from other tests
    a1 = eval('lambda x: x')
    a1 = memory.cache(a1)
    b1 = eval('lambda x: x+1')
    b1 = memory.cache(b1)

    with warns(JobLibCollisionWarning) as warninfo:
        a1(1)
        b1(1)
        a1(0)

    assert len(warninfo) == 2
    assert "cannot detect" in str(warninfo[0].message).lower()
def test_memory_numpy():
    """Test memory caching of a function taking numpy arrays."""
    # Check with memmapping and without.
    for mmap_mode in (None, "r"):
        accumulator = list()

        def n(l=None):
            # Record every real (non-cached) evaluation.
            accumulator.append(1)
            return l

        memory = Memory(cachedir=env["dir"], mmap_mode=mmap_mode, verbose=0)
        memory.clear(warn=False)
        cached_n = memory.cache(n)

        rnd = np.random.RandomState(0)
        for i in range(3):
            a = rnd.random_sample((10, 10))
            # Repeated calls with the same array must hit the cache.
            for _ in range(3):
                yield nose.tools.assert_true, np.all(cached_n(a) == a)
                yield nose.tools.assert_equal, len(accumulator), i + 1
def test_memory_numpy_check_mmap_mode(tmpdir):
    """Check that mmap_mode is respected even at the first call"""
    memory = Memory(cachedir=tmpdir.strpath, mmap_mode='r', verbose=0)
    memory.clear(warn=False)

    @memory.cache()
    def twice(a):
        return a * 2

    a = np.ones(3)
    b = twice(a)  # first call: result stored, then reloaded as a memmap
    c = twice(a)  # second call: pure cache hit

    # Both the first-call and cache-hit results must be read-only memmaps.
    for out in (c, b):
        assert isinstance(out, np.memmap)
        assert out.mode == 'r'
def test_memory_numpy_check_mmap_mode():
    """Check that mmap_mode is respected even at the first call"""
    memory = Memory(cachedir=env['dir'], mmap_mode='r', verbose=0)
    memory.clear(warn=False)

    @memory.cache()
    def twice(a):
        return a * 2

    a = np.ones(3)
    b = twice(a)  # first call: result stored, then reloaded as a memmap
    c = twice(a)  # second call: pure cache hit

    # Both the first-call and cache-hit results must be read-only memmaps.
    for out in (c, b):
        nose.tools.assert_true(isinstance(out, np.memmap))
        nose.tools.assert_equal(out.mode, 'r')
def test_memory_numpy():
    """Test memory caching of a function taking numpy arrays."""
    # Check with memmapping and without.
    for mmap_mode in (None, 'r'):
        accumulator = list()

        def n(l=None):
            # Record every real (non-cached) evaluation.
            accumulator.append(1)
            return l

        memory = Memory(cachedir=env['dir'], mmap_mode=mmap_mode, verbose=0)
        memory.clear(warn=False)
        cached_n = memory.cache(n)

        rnd = np.random.RandomState(0)
        for i in range(3):
            a = rnd.random_sample((10, 10))
            # Repeated calls with the same array must hit the cache.
            for _ in range(3):
                yield nose.tools.assert_true, np.all(cached_n(a) == a)
                yield nose.tools.assert_equal, len(accumulator), i + 1
def test_memory_numpy_check_mmap_mode():
    """Check that mmap_mode is respected even at the first call"""
    memory = Memory(cachedir=env["dir"], mmap_mode="r", verbose=0)
    memory.clear(warn=False)

    @memory.cache()
    def twice(a):
        return a * 2

    a = np.ones(3)
    b = twice(a)  # first call: result stored, then reloaded as a memmap
    c = twice(a)  # second call: pure cache hit

    # Both the first-call and cache-hit results must be read-only memmaps.
    for out in (c, b):
        nose.tools.assert_true(isinstance(out, np.memmap))
        nose.tools.assert_equal(out.mode, "r")
def test_memory_warning_lambda_collisions():
    # Check that multiple use of lambda will raise collisions
    memory = Memory(cachedir=env["dir"], verbose=0)
    memory.clear()  # isolation from other tests
    a = lambda x: x
    a = memory.cache(a)
    b = lambda x: x + 1
    b = memory.cache(b)

    with warnings.catch_warnings(record=True) as w:
        # Cause all warnings to always be triggered.
        warnings.simplefilter("always")
        for expected, fn, arg in ((0, a, 0), (2, b, 1), (1, a, 1)):
            nose.tools.assert_equal(expected, fn(arg))

        # In recent Python versions, we can retrieve the code of lambdas,
        # thus nothing is raised
        nose.tools.assert_equal(len(w), 4)
def test_memory_numpy(tmpdir):
    """Test memory caching of a function taking numpy arrays."""
    # Check with memmapping and without.
    for mmap_mode in (None, 'r'):
        accumulator = list()

        def n(l=None):
            # Record every real (non-cached) evaluation.
            accumulator.append(1)
            return l

        memory = Memory(cachedir=tmpdir.strpath, mmap_mode=mmap_mode,
                        verbose=0)
        memory.clear(warn=False)
        cached_n = memory.cache(n)

        rnd = np.random.RandomState(0)
        for i in range(3):
            a = rnd.random_sample((10, 10))
            # Repeated calls with the same array must hit the cache.
            for _ in range(3):
                assert np.all(cached_n(a) == a)
                assert len(accumulator) == i + 1
def test_memory_arg_lambda():
    """Check caching when the cached function's argument is a lambda."""
    memory = Memory(cachedir=env['dir'], verbose=0)
    memory.clear(warn=False)
    # Counts how many times the cached function body actually executes.
    accum = {'value': 0}

    @memory.cache()
    def run_func(func):
        accum['value'] += 1
        return func()

    lambda_1 = lambda: 1
    lambda_2 = lambda: 2

    first = run_func(lambda_1)
    second = run_func(lambda_1)
    third = run_func(lambda_2)

    nose.tools.assert_equal(first, 1)
    nose.tools.assert_equal(second, 1)
    nose.tools.assert_equal(third, 2)
    # lambda_1's second use was a cache hit: only two real evaluations.
    nose.tools.assert_equal(accum['value'], 2)
def test_memory_warning_lambda_collisions():
    # Check that multiple use of lambda will raise collisions
    memory = Memory(cachedir=env['dir'], verbose=0)
    memory.clear()  # isolation from other tests
    a = lambda x: x
    a = memory.cache(a)
    b = lambda x: x + 1
    b = memory.cache(b)

    with warnings.catch_warnings(record=True) as w:
        # Cause all warnings to always be triggered.
        warnings.simplefilter("always")
        # This is a temporary workaround until we get rid of
        # inspect.getargspec, see
        # https://github.com/joblib/joblib/issues/247
        warnings.simplefilter("ignore", DeprecationWarning)
        for expected, fn, arg in ((0, a, 0), (2, b, 1), (1, a, 1)):
            nose.tools.assert_equal(expected, fn(arg))

        # In recent Python versions, we can retrieve the code of lambdas,
        # thus nothing is raised
        nose.tools.assert_equal(len(w), 4)
def test_memory_warning_collision_detection():
    # Collisions that joblib cannot possibly detect (lambdas built with
    # eval, whose source cannot be introspected) must still warn.
    memory = Memory(cachedir=env["dir"], verbose=0)
    memory.clear()  # isolation from other tests
    a1 = eval("lambda x: x")
    a1 = memory.cache(a1)
    b1 = eval("lambda x: x+1")
    b1 = memory.cache(b1)

    if not hasattr(warnings, "catch_warnings"):
        # catch_warnings is new in Python 2.6
        return

    with warnings.catch_warnings(record=True) as w:
        # Cause all warnings to always be triggered.
        warnings.simplefilter("always")
        a1(1)
        b1(1)
        a1(0)

        yield nose.tools.assert_equal, len(w), 2
        yield nose.tools.assert_true, \
            "cannot detect" in str(w[-1].message).lower()
def test_clear_memory_with_none_location():
    # Clearing a Memory with no backing store must be a silent no-op.
    mem = Memory(location=None)
    mem.clear()
def test_clear_memory_with_none_cachedir():
    # Clearing a Memory with no backing store must be a silent no-op.
    memory = Memory(cachedir=None)
    memory.clear()
def test_clear_memory_with_none_cachedir():
    # Clearing a Memory with no backing store must be a silent no-op.
    mem = Memory(cachedir=None)
    mem.clear()