def execute_command(func, self, *args, **options):
    """Time a redis command, then record the call on the redis debug panel.

    Runs ``func(self, *args, **options)``, measures wall-clock duration, and
    (when the debug toolbar is active for this request) records the command,
    its result, and the calling function's location.  Returns the command's
    result unchanged.
    """
    redis_command = args[0]
    started_at = datetime.now()
    outcome = func(self, *args, **options)
    finished_at = datetime.now()
    elapsed_ms = ms_from_timedelta(finished_at - started_at)

    # TODO: find a better way to get the calling function's info.
    frames = _get_func_info()
    record_data = {
        'func': frames[4][3],
        'func_path': "{}:{}".format(frames[4][1], frames[4][2]),
        'command': redis_command,
        'result': outcome,
        'start_time': started_at,
        'stop_time': finished_at,
        'duration': elapsed_ms,
        'is_slow': None,
    }

    # TODO: is there a better way to log this?
    toolbar = DebugToolbarMiddleware.get_current()
    if not toolbar:
        return outcome
    panel = toolbar.get_panel(RedisDebugPanel)
    panel.record(**record_data)
    return outcome
def execute(self, sql, params=()):
    """Execute *sql* on the wrapped cursor, then pretty-print the rendered
    query and its execution time to stdout.

    The print happens in a ``finally`` block so the query is shown even when
    execution raises.
    """
    starttime = datetime.now()
    try:
        return self.cursor.execute(sql, params)
    finally:
        raw_sql = self.db.ops.last_executed_query(self.cursor, sql, params)
        execution_time = datetime.now() - starttime
        # Python 3 print() calls replace the old Python 2 print statements
        # (which are a SyntaxError on Python 3).  `end=' '` reproduces the
        # trailing-comma behaviour: query and timing stay on one line.
        print(sqlparse.format(raw_sql, reindent=True), end=' ')
        print(' [%.2fms]' % (ms_from_timedelta(execution_time),))
        print()
def execute(self, sql, params=()):
    """Run *sql* and dump the reindented query plus its duration to stdout.

    Printing is done in ``finally`` so it also happens when execute raises.
    """
    t0 = datetime.now()
    try:
        return self.cursor.execute(sql, params)
    finally:
        rendered = self.db.ops.last_executed_query(self.cursor, sql, params)
        elapsed = datetime.now() - t0
        print(sqlparse.format(rendered, reindent=True))
        print(' [%.2fms]' % (ms_from_timedelta(elapsed),))
        print()
def record(func, *args, **kwargs):
    """Profile a cache call and record it on the cache panel.

    Times ``func(*args, **kwargs)``, optionally captures a tidied stacktrace
    and the template node that triggered the call, then records the call on
    the toolbar's cache panel.  When the toolbar is inactive for this request
    the wrapped function is called directly with no overhead.  Returns the
    wrapped function's result.
    """
    djdt = DebugToolbarMiddleware.get_current()
    if not djdt:
        return func(*args, **kwargs)
    panel = djdt.get_panel(BasePanel)

    # Get stacktrace
    if ENABLE_STACKTRACES:
        stacktrace = tidy_stacktrace(reversed(get_stack()))
    else:
        stacktrace = []

    # Get template info: walk up the call stack looking for a template
    # node's render() frame.
    template_info = None
    cur_frame = sys._getframe().f_back
    try:
        while cur_frame is not None:
            if cur_frame.f_code.co_name == 'render':
                node = cur_frame.f_locals['self']
                if isinstance(node, Node):
                    template_info = get_template_info(node.source)
                break
            cur_frame = cur_frame.f_back
    except Exception:
        # Best-effort only — frame inspection must never break the cache
        # call.  Narrowed from a bare `except:` so SystemExit and
        # KeyboardInterrupt still propagate.
        pass
    del cur_frame

    # Find args
    cache_args = None
    # first arg is self, do we have another
    if len(args) > 1:
        cache_args = args[1]
        # is it a dictionary (most likely multi)
        if isinstance(cache_args, dict):
            # just use its keys
            cache_args = cache_args.keys()

    # the clock starts now
    start = datetime.now()
    try:
        return func(*args, **kwargs)
    finally:
        # the clock stops now
        duration = ms_from_timedelta(datetime.now() - start)
        call = {
            'function': func.__name__,
            'args': cache_args,
            'duration': duration,
            'stacktrace': stacktrace,
            'template_info': template_info,
        }
        panel.record(**call)
def execute(self, sql, params=()):
    """Execute *sql* and print the formatted query with its timing.

    A ``UnicodeEncodeError`` while rendering/printing the query is swallowed
    (a placeholder message is printed instead) so logging can never break
    query execution.
    """
    starttime = datetime.now()
    try:
        return self.cursor.execute(sql, params)
    finally:
        try:
            raw_sql = self.db.ops.last_executed_query(self.cursor, sql, params)
            execution_time = datetime.now() - starttime
            # Python 3 print() calls replace the old Python 2 print
            # statements (SyntaxError on Python 3).  `end=' '` reproduces
            # the trailing-comma "stay on one line" behaviour.
            print(sqlparse.format(raw_sql, reindent=True), end=' ')
            print(" [%.2fms]" % (ms_from_timedelta(execution_time),))
            print()
        except UnicodeEncodeError:
            print("UnicodeEncodeError")
def execute(self, sql, params=()):
    """Execute *sql*, timing it and recording query metadata on the SQL logger.

    Collects duration, an optional tidied stacktrace, JSON-encoded params,
    the originating template node (if any), and — for psycopg2 connections —
    transaction state, then hands everything to ``self.logger.record``.
    """
    start = datetime.now()
    try:
        return self.cursor.execute(sql, params)
    finally:
        stop = datetime.now()
        duration = ms_from_timedelta(stop - start)
        enable_stacktraces = getattr(settings, 'DEBUG_TOOLBAR_CONFIG', {}).get('ENABLE_STACKTRACES', True)
        if enable_stacktraces:
            stacktrace = tidy_stacktrace(reversed(get_stack()))
        else:
            stacktrace = []
        _params = ''
        try:
            # list() is required on Python 3: json.dumps cannot serialize a
            # bare map object, which previously made this always raise and
            # fall through to the empty string.
            _params = json.dumps(list(map(self._decode, params)))
        except Exception:
            pass  # object not JSON serializable
        # Walk up the stack looking for the template node whose render()
        # issued this query.
        template_info = None
        cur_frame = sys._getframe().f_back
        try:
            while cur_frame is not None:
                if cur_frame.f_code.co_name == 'render':
                    node = cur_frame.f_locals['self']
                    if isinstance(node, Node):
                        template_info = get_template_info(node.source)
                    break
                cur_frame = cur_frame.f_back
        except Exception:
            # Narrowed from a bare `except:`; frame inspection is
            # best-effort and must never break query execution.
            pass
        del cur_frame
        alias = getattr(self.db, 'alias', 'default')
        conn = self.db.connection
        # HACK: avoid imports
        if conn:
            engine = conn.__class__.__module__.split('.', 1)[0]
        else:
            engine = 'unknown'
        params = {
            'engine': engine,
            'alias': alias,
            'sql': self.db.ops.last_executed_query(self.cursor, sql, self._quote_params(params)),
            'duration': duration,
            'raw_sql': sql,
            'params': _params,
            'stacktrace': stacktrace,
            'start_time': start,
            'stop_time': stop,
            'is_slow': (duration > SQL_WARNING_THRESHOLD),
            'is_select': sql.lower().strip().startswith('select'),
            'template_info': template_info,
        }
        if engine == 'psycopg2':
            # If an erroneous query was ran on the connection, it might
            # be in a state where checking isolation_level raises an
            # exception.
            try:
                iso_level = conn.isolation_level
            except conn.InternalError:
                iso_level = 'unknown'
            params.update({
                'trans_id': self.logger.get_transaction_id(alias),
                'trans_status': conn.get_transaction_status(),
                'iso_level': iso_level,
                'encoding': conn.encoding,
            })
        # We keep `sql` to maintain backwards compatibility
        self.logger.record(**params)
def execute(self, sql, params=()):
    """Execute *sql* on the wrapped cursor and record timing/metadata.

    Gathers duration, optional stacktrace, JSON-encoded params, template
    origin, and psycopg2 transaction state, then records the call via
    ``self.logger.record``.
    """
    start = datetime.now()
    try:
        return self.cursor.execute(sql, params)
    finally:
        stop = datetime.now()
        duration = ms_from_timedelta(stop - start)
        enable_stacktraces = getattr(settings, 'DEBUG_TOOLBAR_CONFIG', {}).get('ENABLE_STACKTRACES', True)
        if enable_stacktraces:
            stacktrace = tidy_stacktrace(reversed(get_stack()))
        else:
            stacktrace = []
        _params = ''
        try:
            # Materialize the map: on Python 3 json.dumps raises on a map
            # object, so without list() this always ended up as ''.
            _params = json.dumps(list(map(self._decode, params)))
        except Exception:
            pass  # object not JSON serializable
        # Find the template node (if any) whose render() triggered this query.
        template_info = None
        cur_frame = sys._getframe().f_back
        try:
            while cur_frame is not None:
                if cur_frame.f_code.co_name == 'render':
                    node = cur_frame.f_locals['self']
                    if isinstance(node, Node):
                        template_info = get_template_info(node.source)
                    break
                cur_frame = cur_frame.f_back
        except Exception:
            # Best-effort; narrowed from bare `except:` so interrupts propagate.
            pass
        del cur_frame
        alias = getattr(self.db, 'alias', 'default')
        conn = self.db.connection
        # HACK: avoid imports
        if conn:
            engine = conn.__class__.__module__.split('.', 1)[0]
        else:
            engine = 'unknown'
        params = {
            'engine': engine,
            'alias': alias,
            'sql': self.db.ops.last_executed_query(
                self.cursor, sql, self._quote_params(params)),
            'duration': duration,
            'raw_sql': sql,
            'params': _params,
            'stacktrace': stacktrace,
            'start_time': start,
            'stop_time': stop,
            'is_slow': (duration > SQL_WARNING_THRESHOLD),
            'is_select': sql.lower().strip().startswith('select'),
            'template_info': template_info,
        }
        if engine == 'psycopg2':
            # If an erroneous query was ran on the connection, it might
            # be in a state where checking isolation_level raises an
            # exception.
            try:
                iso_level = conn.isolation_level
            except conn.InternalError:
                iso_level = 'unknown'
            params.update({
                'trans_id': self.logger.get_transaction_id(alias),
                'trans_status': conn.get_transaction_status(),
                'iso_level': iso_level,
                'encoding': conn.encoding,
            })
        # We keep `sql` to maintain backwards compatibility
        self.logger.record(**params)
def execute(self, sql, params=()):
    """Execute *sql*, collect timing/metadata, and record it on the SQL logger.

    Also computes a hash of (SECRET_KEY + sql + params) used to authorize
    re-execution of the query from the toolbar UI.
    """
    # Hide this frame from debug-toolbar stack traces.
    __traceback_hide__ = True
    start = datetime.now()
    try:
        return self.cursor.execute(sql, params)
    finally:
        stop = datetime.now()
        duration = ms_from_timedelta(stop - start)
        enable_stacktraces = getattr(settings, 'DEBUG_TOOLBAR_CONFIG', {}) \
            .get('ENABLE_STACKTRACES', True)
        if enable_stacktraces:
            stacktrace = tidy_stacktrace(reversed(get_stack()))
        else:
            stacktrace = []
        _params = ''
        try:
            _params = simplejson.dumps(
                [force_unicode(x, strings_only=True) for x in params]
            )
        except TypeError:
            pass  # object not JSON serializable
        # Walk the stack for the template node whose render() ran this query.
        template_info = None
        cur_frame = sys._getframe().f_back
        try:
            while cur_frame is not None:
                if cur_frame.f_code.co_name == 'render':
                    node = cur_frame.f_locals['self']
                    if isinstance(node, Node):
                        template_info = get_template_info(node.source)
                    break
                cur_frame = cur_frame.f_back
        except Exception:
            # Narrowed from bare `except:`; best-effort inspection only.
            pass
        del cur_frame
        alias = getattr(self.db, 'alias', 'default')
        conn = connections[alias].connection
        # HACK: avoid imports
        if conn:
            engine = conn.__class__.__module__.split('.', 1)[0]
        else:
            engine = 'unknown'
        params = {
            'engine': engine,
            'alias': alias,
            'sql': self.db.ops.last_executed_query(self.cursor, sql,
                                                   self._quote_params(params)),
            'duration': duration,
            'raw_sql': sql,
            'params': _params,
            'hash': sha_constructor(settings.SECRET_KEY
                                    + smart_str(sql)
                                    + _params).hexdigest(),
            'stacktrace': stacktrace,
            'start_time': start,
            'stop_time': stop,
            'is_slow': (duration > SQL_WARNING_THRESHOLD),
            'is_select': sql.lower().strip().startswith('select'),
            'template_info': template_info,
        }
        if engine == 'psycopg2':
            # If an erroneous query was ran on the connection, it might be in
            # a state where checking isolation_level raises an exception —
            # guard it like the sibling implementations do.
            try:
                iso_level = conn.isolation_level
            except conn.InternalError:
                iso_level = 'unknown'
            params.update({
                'trans_id': self.logger.get_transaction_id(alias),
                'trans_status': conn.get_transaction_status(),
                'iso_level': iso_level,
                'encoding': conn.encoding,
            })
        # We keep `sql` to maintain backwards compatibility
        self.logger.record(**params)
def execute(self, sql, params=()):
    """Execute *sql*, timing it, and record query metadata on the SQL logger.

    Collects: duration, optional tidied stacktrace, JSON-encoded params, the
    template node (if any) whose render() issued the query, a hash over
    (SECRET_KEY + sql + params), and psycopg2 transaction state.
    """
    # Marker honored by debug-toolbar's stack tidier to skip this frame.
    __traceback_hide__ = True
    start = datetime.now()
    try:
        return self.cursor.execute(sql, params)
    finally:
        stop = datetime.now()
        duration = ms_from_timedelta(stop - start)
        enable_stacktraces = getattr(settings, 'DEBUG_TOOLBAR_CONFIG', {}).get('ENABLE_STACKTRACES', True)
        if enable_stacktraces:
            stacktrace = tidy_stacktrace(reversed(get_stack()))
        else:
            stacktrace = []
        _params = ''
        try:
            _params = simplejson.dumps(
                [force_unicode(x, strings_only=True) for x in params]
            )
        except TypeError:
            pass  # object not JSON serializable
        # Walk up the call stack looking for a template node's render()
        # frame so the panel can attribute this query to a template.
        template_info = None
        cur_frame = sys._getframe().f_back
        try:
            while cur_frame is not None:
                if cur_frame.f_code.co_name == 'render':
                    node = cur_frame.f_locals['self']
                    if isinstance(node, Node):
                        template_info = get_template_info(node.source)
                    break
                cur_frame = cur_frame.f_back
        except:
            # Best-effort only: never let frame inspection break the query.
            pass
        del cur_frame
        alias = getattr(self.db, 'alias', 'default')
        conn = connections[alias].connection
        # HACK: avoid imports
        if conn:
            engine = conn.__class__.__module__.split('.', 1)[0]
        else:
            engine = 'unknown'
        params = {
            'engine': engine,
            'alias': alias,
            'sql': self.db.ops.last_executed_query(self.cursor, sql, self._quote_params(params)),
            'duration': duration,
            'raw_sql': sql,
            # Hash authorizes re-running this exact query from the toolbar UI.
            'hash': sha1(settings.SECRET_KEY
                         + smart_str(sql)
                         + _params).hexdigest(),
            'stacktrace': stacktrace,
            'start_time': start,
            'stop_time': stop,
            'is_slow': (duration > SQL_WARNING_THRESHOLD),
            'is_select': sql.lower().strip().startswith('select'),
            'template_info': template_info,
        }
        if engine == 'psycopg2':
            from psycopg2.extensions import TRANSACTION_STATUS_INERROR
            params.update({
                'trans_id': self.logger.get_transaction_id(alias),
                'trans_status': conn.get_transaction_status(),
                # Reading isolation_level raises if the transaction is in an
                # error state, so report "" in that case.
                'iso_level': conn.isolation_level if not conn.get_transaction_status() == TRANSACTION_STATUS_INERROR else "",
                'encoding': conn.encoding,
            })
        # We keep `sql` to maintain backwards compatibility
        self.logger.record(**params)
def execute(self, sql, params=()):
    """Time *sql* on the wrapped cursor and hand the collected metadata to
    the SQL logger — duration, stacktrace, encoded params, template origin,
    a SECRET_KEY-based hash, and psycopg2 transaction state.
    """
    t_begin = datetime.now()
    try:
        return self.cursor.execute(sql, params)
    finally:
        t_end = datetime.now()
        elapsed = ms_from_timedelta(t_end - t_begin)

        toolbar_config = getattr(settings, "DEBUG_TOOLBAR_CONFIG", {})
        if toolbar_config.get("ENABLE_STACKTRACES", True):
            trace = tidy_stacktrace(reversed(get_stack()))
        else:
            trace = []

        encoded_params = ""
        try:
            encoded_params = json.dumps(map(self._decode, params))
        except Exception:
            pass  # object not JSON serializable

        # Attribute the query to a template node, if one is on the stack.
        template_info = None
        frame = sys._getframe().f_back
        try:
            while frame is not None:
                if frame.f_code.co_name == "render":
                    candidate = frame.f_locals["self"]
                    if isinstance(candidate, Node):
                        template_info = get_template_info(candidate.source)
                    break
                frame = frame.f_back
        except:
            pass
        del frame

        db_alias = getattr(self.db, "alias", "default")
        conn = connections[db_alias].connection
        # HACK: avoid imports
        engine = conn.__class__.__module__.split(".", 1)[0] if conn else "unknown"

        query_data = {
            "engine": engine,
            "alias": db_alias,
            "sql": self.db.ops.last_executed_query(self.cursor, sql, self._quote_params(params)),
            "duration": elapsed,
            "raw_sql": sql,
            "params": encoded_params,
            "hash": sha1(settings.SECRET_KEY + smart_str(sql) + encoded_params).hexdigest(),
            "stacktrace": trace,
            "start_time": t_begin,
            "stop_time": t_end,
            "is_slow": (elapsed > SQL_WARNING_THRESHOLD),
            "is_select": sql.lower().strip().startswith("select"),
            "template_info": template_info,
        }

        if engine == "psycopg2":
            from psycopg2.extensions import TRANSACTION_STATUS_INERROR

            # isolation_level raises when the transaction is in an error
            # state, so report "" instead in that case.
            if conn.get_transaction_status() == TRANSACTION_STATUS_INERROR:
                iso = ""
            else:
                iso = conn.isolation_level
            query_data.update(
                {
                    "trans_id": self.logger.get_transaction_id(db_alias),
                    "trans_status": conn.get_transaction_status(),
                    "iso_level": iso,
                    "encoding": conn.encoding,
                }
            )

        # We keep `sql` to maintain backwards compatibility
        self.logger.record(**query_data)