def test_func_inspect_errors():
    """Check that func_inspect is robust and will work on weird objects."""
    assert get_func_name('a'.lower)[-1] == 'lower'
    assert get_func_code('a'.lower)[1:] == (None, -1)
    ff = lambda x: x
    # get_func_code reports the source file; normalize a possible .pyc path.
    expected_file = __file__.replace('.pyc', '.py')
    assert get_func_name(ff, win_characters=False)[-1] == '<lambda>'
    assert get_func_code(ff)[1] == expected_file
    # Simulate a function defined in __main__
    ff.__module__ = '__main__'
    assert get_func_name(ff, win_characters=False)[-1] == '<lambda>'
    assert get_func_code(ff)[1] == expected_file
def test_func_inspect_errors():
    """Check that func_inspect is robust and will work on weird objects."""
    assert get_func_name("a".lower)[-1] == "lower"
    assert get_func_code("a".lower)[1:] == (None, -1)
    ff = lambda x: x
    # Run the lambda checks twice: once as-is, once simulating a function
    # defined in __main__ (same assertions, same order as the flat version).
    for module_name in (None, "__main__"):
        if module_name is not None:
            ff.__module__ = module_name
        assert get_func_name(ff, win_characters=False)[-1] == "<lambda>"
        assert get_func_code(ff)[1] == __file__.replace(".pyc", ".py")
def test_func_inspect_errors():
    """Check that func_inspect is robust and will work on weird objects."""
    # nose is unmaintained and fails to import on modern Python; plain
    # asserts check exactly the same conditions and match the assert-style
    # variants of this test elsewhere in the file.
    assert get_func_name("a".lower)[-1] == "lower"
    assert get_func_code("a".lower)[1:] == (None, -1)
    ff = lambda x: x
    assert get_func_name(ff, win_characters=False)[-1] == "<lambda>"
    assert get_func_code(ff)[1] == __file__.replace(".pyc", ".py")
    # Simulate a function defined in __main__
    ff.__module__ = "__main__"
    assert get_func_name(ff, win_characters=False)[-1] == "<lambda>"
    assert get_func_code(ff)[1] == __file__.replace(".pyc", ".py")
def test_func_inspect_errors():
    """Check that func_inspect is robust and will work on weird objects."""
    # Converted from nose.tools.assert_equal: nose is unmaintained and does
    # not import on Python 3.10+; the assertions are unchanged.
    assert get_func_name('a'.lower)[-1] == 'lower'
    assert get_func_code('a'.lower)[1:] == (None, -1)
    ff = lambda x: x
    assert get_func_name(ff, win_characters=False)[-1] == '<lambda>'
    assert get_func_code(ff)[1] == __file__.replace('.pyc', '.py')
    # Simulate a function defined in __main__
    ff.__module__ = '__main__'
    assert get_func_name(ff, win_characters=False)[-1] == '<lambda>'
    assert get_func_code(ff)[1] == __file__.replace('.pyc', '.py')
def test_func_inspect_errors():
    """Check that func_inspect is robust and will work on weird objects."""
    # nose.tools.assert_equal replaced with plain asserts: nose is
    # unmaintained and incompatible with modern Python; same checks,
    # same order.
    assert get_func_name('a'.lower)[-1] == 'lower'
    assert get_func_code('a'.lower)[1:] == (None, -1)
    ff = lambda x: x
    assert get_func_name(ff, win_characters=False)[-1] == '<lambda>'
    assert get_func_code(ff)[1] == __file__.replace('.pyc', '.py')
    # Simulate a function defined in __main__
    ff.__module__ = '__main__'
    assert get_func_name(ff, win_characters=False)[-1] == '<lambda>'
    assert get_func_code(ff)[1] == __file__.replace('.pyc', '.py')
def test_special_source_encoding():
    """Check get_func_code on a function from a big5-encoded source file."""
    from joblib.test.test_func_inspect_special_encoding import big5_f

    code, path, line_no = get_func_code(big5_f)
    assert line_no == 5
    assert "def big5_f():" in code
    assert "test_func_inspect_special_encoding" in path
def test_special_source_encoding():
    """Check get_func_code on a function from a big5-encoded source file."""
    from joblib.test.test_func_inspect_special_encoding import big5_f

    # nose.tools helpers replaced with plain asserts: nose is unmaintained
    # and fails to import on Python 3.10+; the checks are unchanged.
    func_code, source_file, first_line = get_func_code(big5_f)
    assert first_line == 5
    assert "def big5_f():" in func_code
    assert "test_func_inspect_special_encoding" in source_file
def dec(func):
    """Attach versioning metadata to *func* as ``func.version_info``.

    The digest covers the dotted function name plus either the explicit
    ``version`` from the enclosing scope or, when that is ``None``, a
    base32 digest of the function's source code. Dependencies are
    deliberately not part of the hash (see comment below).
    """
    # Make hash. The function hash does not consider dependencies.
    _version = version
    h = hashlib.sha1()
    module, name = get_func_name(func)
    # hashlib.update() requires bytes on Python 3 -- encode explicitly.
    h.update(".".join(module + [name]).encode("UTF-8"))
    h.update(b"$")  # separator between the name and version components
    if version is None:
        # No manual version; use the hash of the contents as version
        src, source_file, lineno = get_func_code(func)
        digest = hashlib.sha1(src.encode("UTF-8")).digest()
        # Decode so that _version is a str in both branches.
        _version = base64.b32encode(digest).decode("ascii").lower()
    else:
        _version = str(version)
    h.update(_version.encode("UTF-8"))
    # Store information
    func.version_info = dict(
        version=_version,
        # '== False' kept on purpose (not 'not deps' / 'is False') to
        # preserve the original truth table for non-bool deps values.
        ignore_deps=deps == False,  # noqa: E712
        ignore_args=tuple(ignore),
        digest=h.digest(),
    )
    return func
def dec(func):
    """Attach versioning metadata to *func* as ``func.version_info``.

    Hashes the dotted function name together with either the explicit
    ``version`` from the enclosing scope or a base32 digest of the
    function's source. Dependencies are intentionally left out of the hash.
    """
    # Make hash. The function hash does not consider dependencies.
    _version = version
    h = hashlib.sha1()
    module, name = get_func_name(func)
    # Python 3: hashlib.update() only accepts bytes, so encode the str parts.
    h.update('.'.join(module + [name]).encode('UTF-8'))
    h.update(b'$')  # separator between name and version components
    if version is None:
        # No manual version; use the hash of the contents as version
        src, source_file, lineno = get_func_code(func)
        _version = base64.b32encode(
            hashlib.sha1(src.encode('UTF-8')).digest()
        ).decode('ascii').lower()  # decode: keep _version a str in both branches
    else:
        _version = str(version)
    h.update(_version.encode('UTF-8'))
    # Store information
    func.version_info = dict(
        version=_version,
        # '== False' preserved (not 'not deps') so non-bool deps values
        # keep their original truth table.
        ignore_deps=deps == False,  # noqa: E712
        ignore_args=tuple(ignore),
        digest=h.digest(),
    )
    return func
def _get_code():
    """Return the source code of big5_f, as extracted by get_func_code."""
    from joblib.test.test_func_inspect_special_encoding import big5_f

    source, _file, _line = get_func_code(big5_f)
    return source
def _lru_cache_wrapper(user_function, directory, maxsize, typed, _CacheInfo):
    """Build an LRU-caching wrapper for *user_function* backed by a
    ``diskcache.Cache`` stored under *directory*.

    Follows the structure of CPython's ``functools._lru_cache_wrapper``,
    with the in-memory dict replaced by a disk cache whose subdirectory is
    derived from the function's dotted name and a SHA-1 of its source code
    (so a code change invalidates the cache location).

    Parameters mirror the stdlib version: *maxsize* of 0 disables caching,
    ``None`` caches without bound or ordering, and a positive value enables
    recency-tracked eviction. *typed* is forwarded to ``_make_key``.
    Returns the wrapper function, with ``cache_info``/``cache_clear``
    (and the non-stdlib extra ``cache_get``) attached as attributes.
    """
    # Constants shared by all lru cache instances:
    sentinel = object()      # unique object used to signal cache misses
    make_key = _make_key     # build a key from the function arguments
    PREV, NEXT, KEY, RESULT = 0, 1, 2, 3   # names for the link fields

    # Cache subdirectory: "<module>.<name>_<sha1-of-source>" so each
    # function/source-version pair gets its own on-disk namespace.
    source_code = str(get_func_code(user_function)[0])
    code_hash = hashlib.sha1(source_code.encode()).hexdigest()
    module, name = get_func_name(user_function)
    module_str = '.'.join(module)
    subdir = f"{module_str}.{name}_{code_hash}"
    cache = diskcache.Cache(directory=directory + '/' + subdir)

    hits = misses = 0
    full = False
    cache_get = cache.get       # bound method to lookup a key or return None
    cache_len = cache.__len__   # get cache size without calling len()
    lock = RLock()              # because linkedlist updates aren't threadsafe
    root = []                   # root of the circular doubly linked list
    root[:] = [root, root, None, None]   # initialize by pointing to self

    if maxsize == 0:

        def wrapper(*args, **kwds):
            # No caching -- just a statistics update
            nonlocal misses
            misses += 1
            result = user_function(*args, **kwds)
            return result

    elif maxsize is None:

        def wrapper(*args, **kwds):
            # Simple caching without ordering or size limit
            nonlocal hits, misses
            key = make_key(args, kwds, typed)
            result = cache_get(key, sentinel)
            if result is not sentinel:
                hits += 1
                return result
            misses += 1
            result = user_function(*args, **kwds)
            cache[key] = result
            return result

    else:

        def wrapper(*args, **kwds):
            # Size limited caching that tracks accesses by recency
            # NOTE(review): this branch stores linked-list *links* (lists
            # with circular references) in the diskcache, whereas the
            # unbounded branch stores bare results. diskcache serializes
            # values, so the identity-based link surgery below presumably
            # only holds within one process/session -- verify against
            # diskcache's persistence semantics.
            nonlocal root, hits, misses, full
            key = make_key(args, kwds, typed)
            with lock:
                link = cache_get(key)
                if link is not None:
                    # Move the link to the front of the circular queue
                    link_prev, link_next, _key, result = link
                    link_prev[NEXT] = link_next
                    link_next[PREV] = link_prev
                    last = root[PREV]
                    last[NEXT] = root[PREV] = link
                    link[PREV] = last
                    link[NEXT] = root
                    hits += 1
                    return result
                misses += 1
            # The user function runs outside the lock (may be slow / reentrant).
            result = user_function(*args, **kwds)
            with lock:
                if key in cache:
                    # Getting here means that this same key was added to the
                    # cache while the lock was released. Since the link
                    # update is already done, we need only return the
                    # computed result and update the count of misses.
                    pass
                elif full:
                    # Use the old root to store the new key and result.
                    oldroot = root
                    oldroot[KEY] = key
                    oldroot[RESULT] = result
                    # Empty the oldest link and make it the new root.
                    # Keep a reference to the old key and old result to
                    # prevent their ref counts from going to zero during the
                    # update. That will prevent potentially arbitrary object
                    # clean-up code (i.e. __del__) from running while we're
                    # still adjusting the links.
                    root = oldroot[NEXT]
                    oldkey = root[KEY]
                    oldresult = root[RESULT]
                    root[KEY] = root[RESULT] = None
                    # Now update the cache dictionary.
                    del cache[oldkey]
                    # Save the potentially reentrant cache[key] assignment
                    # for last, after the root and links have been put in
                    # a consistent state.
                    cache[key] = oldroot
                else:
                    # Put result in a new link at the front of the queue.
                    last = root[PREV]
                    link = [last, root, key, result]
                    last[NEXT] = root[PREV] = cache[key] = link
                    # Use the cache_len bound method instead of the len() function
                    # which could potentially be wrapped in an lru_cache itself.
                    full = (cache_len() >= maxsize)
            return result

    def cache_info():
        """Report cache statistics"""
        with lock:
            return _CacheInfo(hits, misses, maxsize, cache_len())

    def cache_clear():
        """Clear the cache and cache statistics"""
        nonlocal hits, misses, full
        with lock:
            cache.clear()
            root[:] = [root, root, None, None]
            hits = misses = 0
            full = False

    # cache_get is exposed in addition to the stdlib-style attributes.
    wrapper.cache_get = cache_get
    wrapper.cache_info = cache_info
    wrapper.cache_clear = cache_clear
    return wrapper