def getter(func):
    """
    Getter decorator for functions which take as the first input a unique id
    list and return a heterogeneous list of values
    """
    #func_ = func
    func_ = default_decorator(func)
    @ut.accepts_scalar_input
    @ut.ignores_exc_tb
    def wrp_getter(*args, **kwargs):
        #if ut.DEBUG:
        #    print('[IN GETTER] args=%r' % (args,))
        #    print('[IN GETTER] kwargs=%r' % (kwargs,))
        if DEBUG_GETTERS or VERB_CONTROL:
            print('+------')
            print('[GET]: ' + get_funcname(func))
            funccall_str = ut.func_str(func, args, kwargs, packed=True)
            print('\n' + funccall_str + '\n')
            print('L------')
        return func_(*args, **kwargs)
    wrp_getter = ut.preserve_sig(wrp_getter, func)
    #wrp_getter = ut.on_exception_report_input(wrp_getter)
    return wrp_getter

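# A minimal standalone sketch of the scalar-input convenience that ut.accepts_scalar_input
# layers onto these getters: if the caller passes a single id instead of an id list, wrap
# it, call the list-based getter, and unwrap the single result. The decorator name, the
# FakeController class, and the example column below are hypothetical, not utool's API.
import functools


def accepts_scalar_input_sketch(func):
    @functools.wraps(func)
    def wrapper(self, id_input, *args, **kwargs):
        if isinstance(id_input, (list, tuple)):
            return func(self, list(id_input), *args, **kwargs)
        # Scalar case: wrap, delegate, unwrap
        return func(self, [id_input], *args, **kwargs)[0]
    return wrapper


class FakeController(object):
    """ Hypothetical stand-in for an ibs-style controller """
    names = {1: 'zebra_a', 2: 'zebra_b'}

    @accepts_scalar_input_sketch
    def get_annot_names(self, aid_list):
        return [self.names[aid] for aid in aid_list]


if __name__ == '__main__':
    ibs = FakeController()
    print(ibs.get_annot_names([1, 2]))  # ['zebra_a', 'zebra_b']
    print(ibs.get_annot_names(1))       # 'zebra_a'
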
def ignores_exc_tb_closure(func):
    if not ut.IGNORE_TRACEBACK:
        # if the global enforces that we should not ignore any tracebacks
        # then just return the original function without any modification
        return func
    #@wraps(func)
    def wrp_noexectb(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception:
            # Trim this decorator out of the traceback.
            # Remove two levels to remove this one as well
            exc_type, exc_value, exc_traceback = sys.exc_info()
            try:
                exc_traceback = exc_traceback.tb_next
                exc_traceback = exc_traceback.tb_next
                #exc_traceback = exc_traceback.tb_next
            except Exception:
                print('too many reraise')
            # The original used the Python-2-only three-argument raise syntax;
            # six.reraise behaves the same way and also parses on Python 3.
            six.reraise(exc_type, exc_value, exc_traceback)
    # outer_wrapper is a flag from the enclosing closure (not shown here)
    if outer_wrapper:
        wrp_noexectb = ut.preserve_sig(wrp_noexectb, func)
    return wrp_noexectb

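# A standalone sketch of what trimming exc_traceback.tb_next accomplishes: the leading
# traceback entry belongs to the wrapper's own try block, and dropping it makes the
# reported error start at the wrapped call instead. The helper and function names here
# are illustrative only; they are not part of utool.
import sys
import traceback


def trimmed_traceback_demo(func, *args, **kwargs):
    try:
        return func(*args, **kwargs)
    except Exception:
        exc_type, exc_value, exc_tb = sys.exc_info()
        # exc_tb's first entry is this wrapper's own frame; tb_next starts at
        # the wrapped call, which is the part ignores_exc_tb wants to keep.
        trimmed_tb = exc_tb.tb_next if exc_tb.tb_next is not None else exc_tb
        print(''.join(traceback.format_exception(exc_type, exc_value, trimmed_tb)))


def buggy_division(a, b):
    return a / b


if __name__ == '__main__':
    # Prints a traceback whose first frame is buggy_division, not the demo wrapper
    trimmed_traceback_demo(buggy_division, 1, 0)
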
def decor(func):
    @utool.accepts_scalar_input2([0, 4])
    def testwrp(*args, **kwargs):
        return func(*args, **kwargs)
    testwrp = utool.preserve_sig(testwrp, func, force=True)
    return testwrp

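# Context for preserve_sig/force=True: a plain nested wrapper loses the wrapped
# function's name, docstring, and (as far as introspection goes) its signature. Below is
# a minimal stdlib sketch of the metadata-preserving part using functools.wraps;
# rewriting the wrapper's actual call signature appears to be the extra work
# preserve_sig does and is not reproduced here. The example function is hypothetical.
import functools
import inspect


def passthrough(func):
    @functools.wraps(func)  # copies __name__, __doc__, __module__ and sets __wrapped__
    def wrapper(*args, **kwargs):
        return func(*args, **kwargs)
    return wrapper


@passthrough
def get_annot_bboxes(ibs, aid_list, config=None):
    """ Example getter docstring """
    return []


if __name__ == '__main__':
    print(get_annot_bboxes.__name__)            # get_annot_bboxes
    print(get_annot_bboxes.__doc__)             # Example getter docstring
    print(inspect.signature(get_annot_bboxes))  # (ibs, aid_list, config=None)
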
def test_decorator_module():
    import utool as ut
    import decorator
    ut.rrrr()

    def testdecor(func):
        @ut.on_exception_report_input
        @ut.accepts_scalar_input2([0])
        @ut.ignores_exc_tb
        def testwrp(*args, **kwargs):
            print('was wrapped')
            return func(*args, **kwargs)
        return testwrp

    preserving_testdecor = decorator.decorator(testdecor)

    def myfunction(self, listinput_, arg1, *args, **kwargs):
        " just a test function "
        return [x + 1 for x in listinput_]

    wrapper = testdecor(myfunction)
    orig_func = myfunction
    _wrp_preserve0 = preserving_testdecor(myfunction)
    _wrp_preserve1 = ut.preserve_sig(wrapper, orig_func, True)
    _wrp_preserve2 = ut.preserve_sig(wrapper, orig_func, False)
    print('___')
    print(ut.get_func_sourcecode(_wrp_preserve0))
    print('___')
    print(ut.get_func_sourcecode(_wrp_preserve1))
    print('___')
    print(ut.get_func_sourcecode(_wrp_preserve2))
    print('___')
    print('---')
    print(ut.get_docstr(_wrp_preserve0))
    print('---')
    print(ut.get_docstr(_wrp_preserve1))
    print('---')
    print(ut.get_docstr(_wrp_preserve2))
    print('---')
    print(ut.dict_str(_wrp_preserve2._utinfo))

def closure_cache_invalidator(writer_func):
    """
    writer_func is either a setter, deleter, or an adder; something that
    writes to the database.
    """
    if not API_CACHE and not force:
        return writer_func
    def wrp_cache_invalidator(self, *args, **kwargs):
        # the class must have a table_cache property
        colscache_ = self.table_cache[tblname]
        colnames_ = list(six.iterkeys(colscache_)) if colnames is None else colnames
        if DEBUG_API_CACHE:
            indenter = ut.Indenter('[%s]' % (tblname,))
            indenter.start()
            print('+------')
            print('INVALIDATING tblname=%r, colnames=%r, rowidx=%r, force=%r' %
                  (tblname, colnames, rowidx, force))
            print('self = %r' % (self,))
            print('args = %r' % (args,))
            print('kwargs = %r' % (kwargs,))
            print('colscache_ = ' + ut.dict_str(colscache_, truncate=1))
        # Clear the cache of any specified colname
        # when the invalidator is called
        if rowidx is None:
            for colname in colnames_:
                kwargs_cache_ = colscache_[colname]
                # We don't know the rowids so clear everything
                for cache_ in six.itervalues(kwargs_cache_):
                    cache_.clear()
        else:
            rowid_list = args[rowidx]
            for colname in colnames_:
                kwargs_cache_ = colscache_[colname]
                # We know the rowids to delete;
                # iterate over all getter kwargs values
                for cache_ in six.itervalues(kwargs_cache_):
                    ut.delete_dict_keys(cache_, rowid_list)
        # Perform the set/delete action
        if DEBUG_API_CACHE:
            print('After:')
            print('colscache_ = ' + ut.dict_str(colscache_, truncate=1))
            print('L__________')
        writer_result = writer_func(self, *args, **kwargs)
        if DEBUG_API_CACHE:
            indenter.stop()
        return writer_result
    wrp_cache_invalidator = ut.preserve_sig(wrp_cache_invalidator, writer_func)
    return wrp_cache_invalidator

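# A standalone sketch of the nested table_cache layout the invalidator assumes
# (table -> column -> kwargs-hash -> {rowid: value}) and of its two invalidation modes:
# clear everything when the affected rowids are unknown, or delete just the written
# rowids when rowidx points at them. The table and column names below are illustrative.
from collections import defaultdict


def make_table_cache():
    # table_cache[tblname][colname][kwargs_hash] -> {rowid: cached value}
    return defaultdict(lambda: defaultdict(lambda: defaultdict(dict)))


def invalidate(table_cache, tblname, rowid_list=None):
    colscache_ = table_cache[tblname]
    for kwargs_cache_ in colscache_.values():
        for cache_ in kwargs_cache_.values():
            if rowid_list is None:
                cache_.clear()            # unknown rowids: drop the whole column cache
            else:
                for rowid in rowid_list:  # known rowids: remove only those entries
                    cache_.pop(rowid, None)


if __name__ == '__main__':
    table_cache = make_table_cache()
    table_cache['annotations']['name'][None].update({1: 'zebra_a', 2: 'zebra_b'})
    invalidate(table_cache, 'annotations', rowid_list=[1])
    print(table_cache['annotations']['name'][None])  # {2: 'zebra_b'}
    invalidate(table_cache, 'annotations')
    print(table_cache['annotations']['name'][None])  # {}
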
def deleter(func):
    func_ = default_decorator(func)
    @ut.accepts_scalar_input
    @ut.ignores_exc_tb
    def wrp_deleter(*args, **kwargs):
        if VERB_CONTROL:
            print('[DELETE]: ' + get_funcname(func))
            builtins.print('\n' + ut.func_str(func, args, kwargs) + '\n')
        return func_(*args, **kwargs)
    wrp_deleter = ut.preserve_sig(wrp_deleter, func)
    return wrp_deleter

def getter_numpy_vector_output(func):
    """
    Getter decorator for functions which take as the first input a unique id
    list and return a heterogeneous list of values
    """
    #getter_func = getter_vector_output(func)
    func_ = default_decorator(func)
    @ut.accepts_numpy
    @ut.accepts_scalar_input_vector_output
    @ut.ignores_exc_tb
    def getter_numpy_vector_wrp(*args, **kwargs):
        return func_(*args, **kwargs)
    getter_numpy_vector_wrp = ut.preserve_sig(getter_numpy_vector_wrp, func)
    return getter_numpy_vector_wrp

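# The accepts_numpy layer is not shown in this section; the sketch below is only an
# assumption about its intent suggested by the name: let the id input arrive as a numpy
# array, hand the wrapped list-based getter plain Python ids, and return the result as
# an array again. Treat every name here as hypothetical, not utool's implementation.
import numpy as np


def accepts_numpy_sketch(list_getter):
    def wrapper(self, id_input, *args, **kwargs):
        is_array = isinstance(id_input, np.ndarray)
        id_list = id_input.tolist() if is_array else id_input
        vals = list_getter(self, id_list, *args, **kwargs)
        return np.array(vals) if is_array else vals
    return wrapper


class FakeNumpyController(object):
    sizes = {1: 10, 2: 20, 3: 30}

    @accepts_numpy_sketch
    def get_annot_sizes(self, aid_list):
        return [self.sizes[aid] for aid in aid_list]


if __name__ == '__main__':
    ibs = FakeNumpyController()
    print(ibs.get_annot_sizes([1, 2]))               # [10, 20]
    print(ibs.get_annot_sizes(np.array([1, 2, 3])))  # [10 20 30]
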
def getter_vector_output(func):
    """
    Getter decorator for functions which take as the first input a unique id
    list and return a homogeneous list of values
    """
    @ut.accepts_scalar_input_vector_output
    @ut.ignores_exc_tb
    def getter_vector_wrp(*args, **kwargs):
        return func(*args, **kwargs)
    getter_vector_wrp = ut.preserve_sig(getter_vector_wrp, func)
    return getter_vector_wrp

def remote_api_wrapper(func):
    def remote_api_call(ibs, *args, **kwargs):
        if REMOTE_PROXY_URL is None:
            return func(ibs, *args, **kwargs)
        else:
            # __code__ works on Python 2.6+ and Python 3 (func_code is Python-2-only)
            co_varnames = func.__code__.co_varnames
            if co_varnames[0] == 'ibs':
                co_varnames = tuple(co_varnames[1:])
            kwargs_ = dict(zip(co_varnames, args))
            kwargs.update(kwargs_)
            kwargs.pop('ibs', None)
            return api_remote_ibeis(REMOTE_PROXY_URL, func, REMOTE_PROXY_PORT, **kwargs)
    remote_api_call = ut.preserve_sig(remote_api_call, func)
    return remote_api_call

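# Standalone sketch of the positional-to-keyword mapping the remote proxy path performs
# above: read the wrapped function's parameter names from its code object, zip them with
# the positional args, and ship everything as keyword arguments. api_remote_ibeis and
# the proxy globals belong to the surrounding module and are not reproduced here; note
# the sketch restricts co_varnames to co_argcount actual parameters, whereas the
# original reads all of co_varnames (which also contains local variable names).
def args_to_kwargs(func, args, kwargs):
    co_varnames = func.__code__.co_varnames[:func.__code__.co_argcount]
    if co_varnames and co_varnames[0] == 'ibs':
        co_varnames = co_varnames[1:]
    merged = dict(kwargs)
    merged.update(dict(zip(co_varnames, args)))
    merged.pop('ibs', None)
    return merged


def get_annot_names_example(ibs, aid_list, distinguish_unknowns=True):
    # body irrelevant; only the signature matters for the mapping
    return None


if __name__ == '__main__':
    print(args_to_kwargs(get_annot_names_example,
                         ([1, 2, 3],), {'distinguish_unknowns': False}))
    # {'distinguish_unknowns': False, 'aid_list': [1, 2, 3]}
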
def getter_numpy(func):
    """
    Getter decorator for functions which take as the first input a unique id
    list and return a heterogeneous list of values
    """
    #getter_func = getter(func)
    @ut.accepts_numpy
    @ut.accepts_scalar_input
    @ut.ignores_exc_tb
    def getter_numpy_wrp(*args, **kwargs):
        return func(*args, **kwargs)
    getter_numpy_wrp = ut.preserve_sig(getter_numpy_wrp, func)
    #getter_numpy_wrp = ut.on_exception_report_input(getter_numpy_wrp)
    return getter_numpy_wrp

def setter(func):
    func_ = default_decorator(func)
    @ut.accepts_scalar_input2(argx_list=[0, 1], outer_wrapper=False)
    @ut.ignores_exc_tb
    def wrp_setter(*args, **kwargs):
        if DEBUG_SETTERS or VERB_CONTROL:
            print('+------')
            print('[SET]: ' + get_funcname(func))
            print('[SET]: called by: ' + ut.get_caller_name(range(1, 7)))
            funccall_str = ut.func_str(func, args, kwargs, packed=True)
            print('\n' + funccall_str + '\n')
            print('L------')
            #builtins.print('\n' + funccall_str + '\n')
            #print('set: funcname=%r, args=%r, kwargs=%r' % (get_funcname(func), args, kwargs))
        return func_(*args, **kwargs)
    wrp_setter = ut.preserve_sig(wrp_setter, func)
    #wrp_setter = ut.on_exception_report_input(wrp_setter)
    return wrp_setter

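# Standalone sketch of why the setter asks accepts_scalar_input2 for argx_list=[0, 1]:
# both the rowid argument and the new-value argument may arrive as scalars and need to
# be wrapped together before the list-based setter runs. Argument positions here are
# counted after self; every name is hypothetical and this is not utool's implementation.
def accepts_scalar_input2_sketch(argx_list):
    def closure(func):
        def wrapper(self, *args, **kwargs):
            args = list(args)
            for argx in argx_list:
                if not isinstance(args[argx], (list, tuple)):
                    args[argx] = [args[argx]]
            return func(self, *args, **kwargs)
        return wrapper
    return closure


class FakeSetterController(object):
    def __init__(self):
        self.names = {}

    @accepts_scalar_input2_sketch(argx_list=[0, 1])
    def set_annot_names(self, aid_list, name_list):
        for aid, name in zip(aid_list, name_list):
            self.names[aid] = name


if __name__ == '__main__':
    ibs = FakeSetterController()
    ibs.set_annot_names([1, 2], ['zebra_a', 'zebra_b'])  # list form
    ibs.set_annot_names(3, 'zebra_c')                    # scalar form, both wrapped
    print(ibs.names)  # {1: 'zebra_a', 2: 'zebra_b', 3: 'zebra_c'}
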
def adder(func):
    func_ = default_decorator(func)
    @ut.accepts_scalar_input
    @ut.ignores_exc_tb
    def wrp_adder(*args, **kwargs):
        if DEBUG_ADDERS or VERB_CONTROL:
            print('+------')
            print('[ADD]: ' + get_funcname(func))
            funccall_str = ut.func_str(func, args, kwargs, packed=True)
            print('\n' + funccall_str + '\n')
            print('L------')
        if VERB_CONTROL:
            print('[ADD]: ' + get_funcname(func))
            builtins.print('\n' + ut.func_str(func, args, kwargs) + '\n')
        return func_(*args, **kwargs)
    wrp_adder = ut.preserve_sig(wrp_adder, func)
    #wrp_adder = ut.on_exception_report_input(wrp_adder)
    return wrp_adder

def closure_getter_cacher(getter_func):
    if not API_CACHE and not force:
        # Turn off the API cache
        return getter_func

    def debug_cache_hits(ismiss_list, rowid_list):
        num_miss = sum(ismiss_list)
        num_total = len(rowid_list)
        num_hit = num_total - num_miss
        print('\n[get] %s.%s %d / %d cache hits' %
              (tblname, colname, num_hit, num_total))

    def assert_cache_hits(ibs, ismiss_list, rowid_list, kwargs_hash, **kwargs):
        cached_rowid_list = ut.filterfalse_items(rowid_list, ismiss_list)
        cache_ = ibs.table_cache[tblname][colname][kwargs_hash]
        # Load cached values for each rowid
        cache_vals_list = ut.dict_take_list(cache_, cached_rowid_list, None)
        db_vals_list = getter_func(ibs, cached_rowid_list, **kwargs)
        # Assert everything is valid
        msg_fmt = ut.codeblock(
            '''
            [assert_cache_hits] tblname = %r
            [assert_cache_hits] colname = %r
            [assert_cache_hits] cfgkeys = %r
            [assert_cache_hits] CACHE INVALID: %r != %r
            '''
        )
        msg = msg_fmt % (tblname, colname, cfgkeys, cache_vals_list, db_vals_list)
        try:
            list1 = cache_vals_list
            list2 = db_vals_list
            assert ut.lists_eq(list1, list2), msg
            #if isinstance(db_vals_list, list):
            #    assert cache_vals_list == db_vals_list, msg
            #else:
            #    assert np.all(cache_vals_list == db_vals_list), msg
        except AssertionError as ex:
            raise ex
        except Exception as ex2:
            print(type(cache_vals_list))
            print(type(db_vals_list))
            ut.printex(ex2)
            ut.embed()
            raise

    if False:
        #@profile  # cannot profile this because it is already being profiled by
        def wrp_getter_cacher(ibs, rowid_list, **kwargs):
            """ Wrapper function that caches rowid values in a dictionary """
            # HACK: take debug out of kwargs
            debug_ = kwargs.pop('debug', False)
            if cfgkeys is not None:
                kwargs_hash = ut.get_dict_hashid(
                    [kwargs.get(key, None) for key in cfgkeys])
            else:
                kwargs_hash = None
            # There are 3 levels of caches: all caches for this table, all caches
            # for this column, and all caches for this kwargs configuration
            cache_ = ibs.table_cache[tblname][colname][kwargs_hash]
            # Load cached values for each rowid
            vals_list = [cache_.get(rowid, None) for rowid in rowid_list]
            # Mark rowids with cache misses
            ismiss_list = [val is None for val in vals_list]
            if debug or debug_:
                debug_cache_hits(ismiss_list, rowid_list)
            # HACK: debug these getters by asserting that the information
            # in the cache is correct
            if ASSERT_API_CACHE:
                assert_cache_hits(ibs, ismiss_list, rowid_list, kwargs_hash, **kwargs)
            # END HACK
            if any(ismiss_list):
                miss_indices = ut.list_where(ismiss_list)
                miss_rowids = ut.compress(rowid_list, ismiss_list)
                # call wrapped function
                miss_vals = getter_func(ibs, miss_rowids, **kwargs)
                # overwrite missed output
                for index, val in zip(miss_indices, miss_vals):
                    vals_list[index] = val  # Output write
                # cache save
                for rowid, val in zip(miss_rowids, miss_vals):
                    cache_[rowid] = val  # Cache write
            return vals_list
    else:
        def handle_cache_misses(ibs, getter_func, rowid_list, ismiss_list,
                                vals_list, cache_, kwargs):
            miss_indices = ut.list_where(ismiss_list)
            miss_rowids = ut.compress(rowid_list, ismiss_list)
            # call wrapped function
            miss_vals = getter_func(ibs, miss_rowids, **kwargs)
            # overwrite missed output
            for index, val in zip(miss_indices, miss_vals):
                vals_list[index] = val  # Output write
            # cache save
            for rowid, val in zip(miss_rowids, miss_vals):
                cache_[rowid] = val  # Cache write

        def wrp_getter_cacher(ibs, rowid_list, **kwargs):
            """ Wrapper function that caches rowid values in a dictionary """
            kwargs.pop('debug', False)
            kwargs_hash = (
                None if cfgkeys is None else
                ut.get_dict_hashid([kwargs.get(key, None) for key in cfgkeys])
            )
            # There are 3 levels of caches: all caches for this table, caches for
            # this column, and caches for this kwargs configuration
            cache_ = ibs.table_cache[tblname][colname][kwargs_hash]
            # Load cached values for each rowid
            vals_list = [cache_.get(rowid, None) for rowid in rowid_list]
            # Mark rowids with cache misses
            ismiss_list = [val is None for val in vals_list]
            if any(ismiss_list):
                handle_cache_misses(ibs, getter_func, rowid_list, ismiss_list,
                                    vals_list, cache_, kwargs)
            return vals_list

    wrp_getter_cacher = ut.preserve_sig(wrp_getter_cacher, getter_func)
    return wrp_getter_cacher

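# Standalone sketch of the read-through pattern wrp_getter_cacher implements: look every
# rowid up in a dict first, batch the misses into one call to the real getter, then
# write the fetched values back so later calls hit the cache. As in the original, None
# doubles as the miss sentinel, so a legitimately stored None would always be refetched.
# The fake "database" getter below is illustrative only.
def cached_batch_get(cache_, rowid_list, fetch_func):
    vals_list = [cache_.get(rowid, None) for rowid in rowid_list]
    ismiss_list = [val is None for val in vals_list]
    if any(ismiss_list):
        miss_indices = [idx for idx, miss in enumerate(ismiss_list) if miss]
        miss_rowids = [rowid_list[idx] for idx in miss_indices]
        miss_vals = fetch_func(miss_rowids)  # one batched call for all misses
        for index, rowid, val in zip(miss_indices, miss_rowids, miss_vals):
            vals_list[index] = val           # output write
            cache_[rowid] = val              # cache write
    return vals_list


if __name__ == '__main__':
    db = {1: 'zebra_a', 2: 'zebra_b', 3: 'zebra_c'}
    calls = []

    def fetch_from_db(rowids):
        calls.append(list(rowids))
        return [db[rowid] for rowid in rowids]

    cache_ = {}
    print(cached_batch_get(cache_, [1, 2], fetch_from_db))     # misses both
    print(cached_batch_get(cache_, [1, 2, 3], fetch_from_db))  # only 3 misses
    print(calls)                                               # [[1, 2], [3]]
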
def ider(func):
    """ This function returns ids subject to conditions """
    ider_func = default_decorator(func)
    ider_func = ut.preserve_sig(ider_func, func)
    return ider_func
