import types

def register_decorators_on_module_funcs(modules, decorators):
    '''Automatically register the given decorators on every function in the
    given modules.

    A function is skipped if it sets its __nodeco__ attribute to True or if
    its name starts with an underscore.
    eg:
        def func():
            pass
        func.__nodeco__ = True
    '''
    if not isinstance(modules, (list, tuple)):
        modules = [modules]
    if not isinstance(decorators, (list, tuple)):
        decorators = [decorators]
    for m in modules:
        for funcname, func in vars(m).iteritems():
            # only wrap public functions that are defined in the module itself
            if (isinstance(func, types.FunctionType)
                    and not funcname.startswith('_')
                    and func.__module__ == m.__name__):
                if getattr(func, '__nodeco__', False):
                    continue
                for deco in decorators:
                    app_logger.debug('register %s on %s.%s'
                                     % (deco.__name__, m.__name__, funcname))
                    func = deco(func)
                vars(m)[funcname] = func
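# A minimal usage sketch, not from the original source: `log_calls` and the
# `api.users` / `api.orders` modules are illustrative names only.
import functools

def log_calls(func):
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        app_logger.debug('calling %s' % func.__name__)
        return func(*args, **kwargs)
    return wrapper

# from api import users, orders
# register_decorators_on_module_funcs([users, orders], [log_calls])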
def handle_message(data):
    # route email/sms notifications to their own queues; anything else goes
    # to a shared 'other_method' queue
    method_name = data['notify_method'].lower()
    if method_name not in ['send_email', 'send_sms']:
        method_name = 'other_method'
    task = notify.apply_async(args=[data], queue=method_name)
    app_logger.debug(u'handle task:%r' % task.id)
    return task
def notify(self, data):
    '''Dynamically dispatch the notification according to the payload.'''
    # bound Celery task: `self` is the task instance (see self.request.id)
    notify_module = notify_modules.import_module('tof')
    notify_method = getattr(notify_module, data['notify_method'])
    app_logger.debug(u'start call %s.%s'
                     % (notify_module.__name__, notify_method.__name__))
    if settings.FAKE_HANDLE_TASK:
        # fake mode: do not send anything, just simulate a successful result
        app_logger.info(u'fake handle message:%r' % data)
        result = utils.do_fake_task(result=(0, 'fake-result'))
    else:
        result = notify_method(data['send_to'], data['title'], data['content'])
    app_logger.debug(u'%r:notify complete:%r' % (self.request.id, result))
    return result
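# For reference, a payload of the shape handle_message/notify expect; the
# field names come from the functions above, the values are invented.
data = {
    'notify_method': 'send_email',   # routed to the 'send_email' queue
    'send_to': 'user@example.com',
    'title': 'build finished',
    'content': 'job finished successfully',
}
# handle_message(data)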
def _connect(self):
    from utils.log import app_logger
    retries = 4
    sleep = 0.1
    for i in range(retries):
        try:
            conn = super(AutoReconnectionMySQLDatabase, self)._connect()
            app_logger.debug(
                'AutoReconnectionMySQLDatabase Connected. Connect times:%d' % i)
            return conn
        except Exception as e:
            # on a connection error, wait briefly and try again
            app_logger.warning('%s. Connect times:%d' % (str(e), i))
            time.sleep(sleep)
    # every attempt failed: re-raise the last connection error
    # (Python 2 keeps `e` bound after the except block)
    raise e
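# A sketch of how the class might be declared, assuming peewee's
# MySQLDatabase as the base class; the source only shows _connect(), and the
# connection parameters below are invented.
from peewee import MySQLDatabase

class AutoReconnectionMySQLDatabase(MySQLDatabase):
    '''MySQLDatabase that retries transient connection failures.'''
    # _connect() is overridden as shown above

# db = AutoReconnectionMySQLDatabase('notify_db', host='127.0.0.1',
#                                    user='app', password='secret')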
def wrapper(*func_args, **func_kwargs):
    if not settings.CACHED_CALL:
        return func(*func_args, **func_kwargs)
    if namespace == 'views':
        # only cache GET views, keyed by URL path + query string
        if request.method == 'GET':
            url = urlparse.urlsplit(request.url)
            key = _generate_key(namespace, tag,
                                url_path=url.path, url_query=url.query)
        else:
            return func(*func_args, **func_kwargs)
    elif namespace == 'funcs':
        # plain functions are keyed by function name + call arguments
        params = '%s&%s' % (str(func_args), str(func_kwargs))
        funcname = get_func_name(func)
        key = _generate_key(namespace, tag, funcname=funcname, params=params)
    try:
        data = redis.get(key)
    except Exception as e:
        # if redis is unavailable, fall back to calling the function directly
        app_logger.exception(e)
        return func(*func_args, **func_kwargs)
    else:
        if data is not None:
            app_logger.debug(u'data from cache:%r' % key)
            return cPickle.loads(data)
        start_time = time.time()
        result = func(*func_args, **func_kwargs)
        exec_time = (time.time() - start_time) * 1000
        # only cache calls slower than cached_over_ms milliseconds
        if exec_time > cached_over_ms:
            try:
                redis.set(key, cPickle.dumps(result), ex=expire)
                app_logger.debug(u'cached:%r' % key)
            except Exception as e:
                app_logger.exception(e)
        return result
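# The wrapper above closes over func, namespace, tag, expire and
# cached_over_ms, so it presumably sits inside a decorator factory along
# these lines; the name `cached_call` and the default values are assumptions.
import functools

def cached_call(namespace='funcs', tag='', expire=600, cached_over_ms=0):
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*func_args, **func_kwargs):
            # body as shown above
            return func(*func_args, **func_kwargs)
        return wrapper
    return decorator

# @cached_call(namespace='funcs', tag='report', cached_over_ms=50)
# def build_report(day):
#     ...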
def celery_persist_request_id(headers, **kwargs):
    # copy the current web request id into the task message headers so the
    # worker can log under the same request id
    request_id = current_request_id()
    headers['request_id'] = request_id
    app_logger.debug('Forwarding request_id %r to worker.' % request_id)
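# The (headers, **kwargs) signature matches a Celery `before_task_publish`
# receiver; a sketch of the registration, which the source does not show, so
# the signal choice is an assumption.
from celery.signals import before_task_publish

before_task_publish.connect(celery_persist_request_id)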