Example #1
    def wrapper(self, key, *args, **kwargs):
        stacktrace = tidy_stacktrace(reversed(get_stack()[1:]))
        call = {
            'function': func.__name__,
            'args': repr_value(args),
            'stacktrace': render_stacktrace(stacktrace)
        }
        _get_calls().append(call)

        if isinstance(key, dict):
            call['key'] = key.keys()
        else:
            call['key'] = key

        value = None
        try:
            # the clock starts now
            call['start'] = datetime.now()
            value = func(self, key, *args, **kwargs)
        finally:
            # the clock stops now
            dur = datetime.now() - call['start']
            call['duration'] = ((dur.seconds * 1000)
                + (dur.microseconds / 1000.0))
            if func.__name__.startswith('get'):
                default = kwargs.get('default')
                if value is None or (default is not None and value == default):
                    call['miss'] = 1
                else:
                    call['hit'] = 1

        call['value'] = repr_value(value)
        return value
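
The wrapper above reads func, repr_value, and _get_calls from an enclosing scope, so it is presumably returned by a decorator factory and patched onto the cache backend's methods. A minimal sketch of that surrounding pattern, using hypothetical names not taken from the source:

import functools

def track(func):
    # Hypothetical factory: returns a wrapper like the one above, closing
    # over the original cache method `func`.
    @functools.wraps(func)
    def wrapper(self, key, *args, **kwargs):
        # ... timing / stacktrace bookkeeping as in the example ...
        return func(self, key, *args, **kwargs)
    return wrapper

# e.g. SomeCacheBackend.get = track(SomeCacheBackend.get)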
Example #2
    def _store_api_info(self,
                        sender,
                        time_taken=0,
                        method=None,
                        url=None,
                        response=None,
                        args=None,
                        kwargs=None,
                        **kw):

        time_taken *= 1000
        self.total_time += time_taken

        # use debug-toolbar utilities to get & render stacktrace
        # skip last two entries, which are in eulfedora.debug_panel
        if dt_settings.get_config().get('ENABLE_STACKTRACES', False):
            stacktrace = tidy_stacktrace(reversed(get_stack()))[:-2]
        else:
            stacktrace = []

        try:
            method_name = method.__name__.upper()
        except AttributeError:
            method_name = method

        self.api_calls.append({
            'time': time_taken,
            'method': method_name,
            'url': url,
            'args': args,
            'kwargs': kwargs,
            'response': response,
            'stack': render_stacktrace(stacktrace)
        })
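
_store_api_info has the shape of a Django signal receiver: it takes sender plus keyword arguments and a catch-all **kw. A hedged sketch of how a panel might wire it up, assuming a custom api_called signal; the signal name and panel class are illustrative, not from the source:

import django.dispatch

# Hypothetical signal carrying the arguments the handler expects.
api_called = django.dispatch.Signal()

class ApiPanelSketch:
    def __init__(self):
        self.total_time = 0
        self.api_calls = []
        api_called.connect(self._store_api_info)

    def _store_api_info(self, sender, time_taken=0, **kwargs):
        # Stub; the real body is the example above.
        self.total_time += time_taken * 1000

# api_called.send(sender=SomeApiClient, time_taken=0.01, method="GET", url=url, response=resp)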
Example #3
    def wrapped(self, *args, **kwargs):
        t = time.time()
        value = method(self, *args, **kwargs)
        t = time.time() - t

        if dt_settings.CONFIG['ENABLE_STACKTRACES']:
            stacktrace = tidy_stacktrace(reversed(get_stack()))
        else:
            stacktrace = []

        template_info = None
        cur_frame = sys._getframe().f_back
        try:
            while cur_frame is not None:
                if cur_frame.f_code.co_name == 'render':
                    node = cur_frame.f_locals['self']
                    if isinstance(node, Node):
                        template_info = get_template_info(node.source)
                        break
                cur_frame = cur_frame.f_back
        except Exception:
            pass
        del cur_frame
        cache_called.send(sender=self.__class__,
                          time_taken=t,
                          name=method.__name__,
                          return_value=value,
                          args=args,
                          kwargs=kwargs,
                          trace=stacktrace,
                          template_info=template_info,
                          backend=self.cache)
        return value
Example #4
    def _record(self, method, sql, params):
        start_time = time()
        try:
            return method(sql, params)
        finally:
            stop_time = time()
            duration = (stop_time - start_time) * 1000
            if dt_settings.get_config()["ENABLE_STACKTRACES"]:
                stacktrace = tidy_stacktrace(reversed(get_stack()))
            else:
                stacktrace = []
            _params = ""
            try:
                _params = json.dumps(self._decode(params))
            except TypeError:
                pass  # object not JSON serializable

            template_info = get_template_info()

            alias = getattr(self.db, "alias", "default")
            conn = self.db.connection
            vendor = getattr(conn, "vendor", "unknown")

            params = {
                "vendor": vendor,
                "alias": alias,
                "sql": self.db.ops.last_executed_query(
                    self.cursor, sql, self._quote_params(params)
                ),
                "duration": duration,
                "raw_sql": sql,
                "params": _params,
                "raw_params": params,
                "stacktrace": stacktrace,
                "start_time": start_time,
                "stop_time": stop_time,
                "is_slow": duration > dt_settings.get_config()["SQL_WARNING_THRESHOLD"],
                "is_select": sql.lower().strip().startswith("select"),
                "template_info": template_info,
            }

            if vendor == "postgresql":
                # If an erroneous query was run on the connection, it might
                # be in a state where checking isolation_level raises an
                # exception.
                try:
                    iso_level = conn.isolation_level
                except conn.InternalError:
                    iso_level = "unknown"
                params.update(
                    {
                        "trans_id": self.logger.get_transaction_id(alias),
                        "trans_status": conn.get_transaction_status(),
                        "iso_level": iso_level,
                        "encoding": conn.encoding,
                    }
                )

            # We keep `sql` to maintain backwards compatibility
            self.logger.record(**params)
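
_record receives the real cursor method as its first argument, which matches the cursor-wrapper pattern used by django-debug-toolbar's SQL panel. A hedged sketch of the wrapper that would feed it; the attribute names follow the example, but the class itself is illustrative:

class CursorWrapperSketch:
    def __init__(self, cursor, db, logger):
        self.cursor = cursor  # real DB-API cursor
        self.db = db          # Django DatabaseWrapper (provides .ops, .alias, .connection)
        self.logger = logger  # panel-side recorder exposing record(**kwargs)

    def execute(self, sql, params=None):
        return self._record(self.cursor.execute, sql, params)

    def executemany(self, sql, param_list):
        return self._record(self.cursor.executemany, sql, param_list)

    # _record as defined in the example above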
Example #5
    def _store_api_info(self, sender, time_taken=0, method=None, url=None, response=None, args=None, kwargs=None, **kw):

        time_taken *= 1000
        self.total_time += time_taken

        # use debug-toolbar utilities to get & render stacktrace
        # skip last two entries, which are in eulfedora.debug_panel
        if dt_settings.get_config().get("ENABLE_STACKTRACES", False):
            stacktrace = tidy_stacktrace(reversed(get_stack()))[:-2]
        else:
            stacktrace = []

        try:
            method_name = method.__name__.upper()
        except AttributeError:
            method_name = method

        self.api_calls.append(
            {
                "time": time_taken,
                "method": method_name,
                "url": url,
                "args": args,
                "kwargs": kwargs,
                "response": response,
                "stack": render_stacktrace(stacktrace),
            }
        )
Example #6
    def wrapper(self, key, *args, **kwargs):
        stacktrace = tidy_stacktrace(reversed(get_stack()[1:]))
        call = {"function": func.__name__, "args": repr_value(args), "stacktrace": render_stacktrace(stacktrace)}
        _get_calls().append(call)

        if isinstance(key, dict):
            call["key"] = key.keys()
        else:
            call["key"] = key

        value = None
        try:
            # the clock starts now
            call["start"] = datetime.now()
            value = func(self, key, *args, **kwargs)
        finally:
            # the clock stops now
            dur = datetime.now() - call["start"]
            call["duration"] = (dur.seconds * 1000) + (dur.microseconds / 1000.0)
            if func.__name__.startswith("get"):
                default = kwargs.get("default")
                if value is None or value == default:
                    call["miss"] = 1
                else:
                    call["hit"] = 1

        call["value"] = repr_value(value)
        return value
Example #7
    def wrapped(self, *args, **kwargs):
        t = time.time()
        value = method(self, *args, **kwargs)
        t = time.time() - t

        if dt_settings.CONFIG['ENABLE_STACKTRACES']:
            stacktrace = tidy_stacktrace(reversed(get_stack()))
        else:
            stacktrace = []

        template_info = None
        cur_frame = sys._getframe().f_back
        try:
            while cur_frame is not None:
                if cur_frame.f_code.co_name == 'render':
                    node = cur_frame.f_locals['self']
                    if isinstance(node, Node):
                        template_info = get_template_info(node.source)
                        break
                cur_frame = cur_frame.f_back
        except Exception:
            pass
        del cur_frame
        cache_called.send(sender=self.__class__, time_taken=t,
                          name=method.__name__, return_value=value,
                          args=args, kwargs=kwargs, trace=stacktrace,
                          template_info=template_info, backend=self.cache)
        return value
Example #8
    def test_deprecated_functions(self):
        with self.assertWarns(DeprecationWarning):
            stack = get_stack()
        self.assertEqual(stack[0][1], __file__)
        with self.assertWarns(DeprecationWarning):
            stack_trace = tidy_stacktrace(reversed(stack))
        self.assertEqual(stack_trace[-1][0], __file__)
Example #9
    def wrapper(self, key, *args, **kwargs):
        stacktrace = tidy_stacktrace(reversed(get_stack()[1:]))
        call = {
            'function': func.__name__,
            'args': repr_value(args),
            'stacktrace': render_stacktrace(stacktrace)
        }
        _get_calls().append(call)

        if isinstance(key, dict):
            call['key'] = key.keys()
        else:
            call['key'] = key

        value = None
        try:
            # the clock starts now
            call['start'] = datetime.now()
            value = func(self, key, *args, **kwargs)
        finally:
            # the clock stops now
            dur = datetime.now() - call['start']
            call['duration'] = ((dur.seconds * 1000)
                + (dur.microseconds / 1000.0))
            if func.__name__.startswith('get'):
                default = kwargs.get('default')
                if value is None or value == default:
                    call['miss'] = 1
                else:
                    call['hit'] = 1

        call['value'] = repr_value(value)
        return value
Example #10
    def _record_call(self, cache, name, original_method, args, kwargs):
        # Some cache backends implement certain cache methods in terms of other cache
        # methods (e.g. get_or_set() in terms of get() and add()).  In order to only
        # record the calls made directly by the user code, set the _djdt_recording flag
        # here to cause the monkey patched cache methods to skip recording additional
        # calls made during the course of this call.
        cache._djdt_recording = True
        t = time.time()
        value = original_method(*args, **kwargs)
        t = time.time() - t
        cache._djdt_recording = False

        if dt_settings.get_config()["ENABLE_STACKTRACES"]:
            stacktrace = tidy_stacktrace(reversed(get_stack()))
        else:
            stacktrace = []

        self._store_call_info(
            name=name,
            time_taken=t,
            return_value=value,
            args=args,
            kwargs=kwargs,
            trace=stacktrace,
            template_info=get_template_info(),
            backend=cache,
        )
        return value
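
The _djdt_recording flag only has an effect if the monkey-patched cache methods check it before recording. A hedged sketch of what that patching might look like; the helper name is hypothetical:

import functools

def patch_cache_method(panel, cache, name):
    original_method = getattr(cache, name)

    @functools.wraps(original_method)
    def wrapper(*args, **kwargs):
        if getattr(cache, "_djdt_recording", False):
            # Nested call made internally by another cache method: skip recording.
            return original_method(*args, **kwargs)
        return panel._record_call(cache, name, original_method, args, kwargs)

    setattr(cache, name, wrapper)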
Example #11
    def __init__(self, request, response, kwargs):
        self.request = request
        self.response = response
        self.kwargs = kwargs
        if dt_settings.get_config()['ENABLE_STACKTRACES']:
            self.raw_stacktrace = tidy_stacktrace(reversed(get_stack()[5:]))
        else:
            self.raw_stacktrace = []
Example #12
    def _record(self, method, sql, params):
        start_time = time()
        try:
            return method(sql, params)
        finally:
            stop_time = time()
            duration = (stop_time - start_time) * 1000
            if dt_settings.CONFIG['ENABLE_STACKTRACES']:
                stacktrace = tidy_stacktrace(reversed(get_stack()))
            else:
                stacktrace = []
            _params = ''
            try:
                _params = json.dumps(list(map(self._decode, params)))
            except Exception:
                pass  # object not JSON serializable

            template_info = get_template_info()

            alias = getattr(self.db, 'alias', 'default')
            conn = self.db.connection
            vendor = getattr(conn, 'vendor', 'unknown')

            params = {
                'vendor': vendor,
                'alias': alias,
                'sql': self.db.ops.last_executed_query(
                    self.cursor, sql, self._quote_params(params)),
                'duration': duration,
                'raw_sql': sql,
                'params': _params,
                'stacktrace': stacktrace,
                'start_time': start_time,
                'stop_time': stop_time,
                'is_slow': duration > dt_settings.CONFIG['SQL_WARNING_THRESHOLD'],
                'is_select': sql.lower().strip().startswith('select'),
                'template_info': template_info,
            }

            if vendor == 'postgresql':
                # If an erroneous query was run on the connection, it might
                # be in a state where checking isolation_level raises an
                # exception.
                try:
                    iso_level = conn.isolation_level
                except conn.InternalError:
                    iso_level = 'unknown'
                params.update(
                    {
                        'trans_id': self.logger.get_transaction_id(alias),
                        'trans_status': conn.get_transaction_status(),
                        'iso_level': iso_level,
                        'encoding': conn.encoding,
                    }
                )

            # We keep `sql` to maintain backwards compatibility
            self.logger.record(**params)
Example #13
    def record_call(self, name, keys):
        start = time.time()
        yield
        call_time = time.time() - start
        # trim the stack to remove our wrapper methods
        stack = get_stack()[3:]
        trace = render_stacktrace(tidy_stacktrace(reversed(stack)))
        self.calls.append(CallInfo(name, keys, call_time, trace))
        self.total_time += call_time
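
Because record_call yields exactly once in the middle, it is presumably decorated with contextlib.contextmanager and used as a with-block around each call being measured. A stripped-down sketch of that pattern; the real CallInfo and stack handling come from the source project:

import time
from contextlib import contextmanager

class PanelSketch:
    def __init__(self):
        self.calls = []
        self.total_time = 0

    @contextmanager
    def record_call(self, name, keys):
        start = time.time()
        yield
        call_time = time.time() - start
        self.calls.append((name, keys, call_time))
        self.total_time += call_time

# usage:
# with panel.record_call("get_many", keys):
#     values = cache.get_many(keys)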
Example #14
    def _record(self, method, sql, params):
        start_time = time()
        try:
            return method(sql, params)
        finally:
            stop_time = time()
            duration = (stop_time - start_time) * 1000
            if dt_settings.get_config()['ENABLE_STACKTRACES']:
                stacktrace = tidy_stacktrace(reversed(get_stack()))
            else:
                stacktrace = []
            _params = ''
            try:
                _params = json.dumps(list(map(self._decode, params)))
            except Exception:
                pass  # object not JSON serializable

            template_info = get_template_info()

            alias = getattr(self.db, 'alias', 'default')
            conn = self.db.connection
            vendor = getattr(conn, 'vendor', 'unknown')

            params = {
                'vendor': vendor,
                'alias': alias,
                'sql': self.db.ops.last_executed_query(
                    self.cursor, sql, self._quote_params(params)),
                'duration': duration,
                'raw_sql': sql,
                'params': _params,
                'stacktrace': stacktrace,
                'start_time': start_time,
                'stop_time': stop_time,
                'is_slow': duration > dt_settings.get_config()['SQL_WARNING_THRESHOLD'],
                'is_select': sql.lower().strip().startswith('select'),
                'template_info': template_info,
            }

            if vendor == 'postgresql':
                # If an erroneous query was run on the connection, it might
                # be in a state where checking isolation_level raises an
                # exception.
                try:
                    iso_level = conn.isolation_level
                except conn.InternalError:
                    iso_level = 'unknown'
                params.update({
                    'trans_id': self.logger.get_transaction_id(alias),
                    'trans_status': conn.get_transaction_status(),
                    'iso_level': iso_level,
                    'encoding': conn.encoding,
                })

            # We keep `sql` to maintain backwards compatibility
            self.logger.record(**params)
Example #15
    def _make_call_dict(self, depth, method, url, data, headers):
        trace = tidy_stacktrace(reversed(get_stack()))[1:-depth] or []
        data_s = u""

        if data:
            for k, v in data.iteritems():
                data_s += u"%s: %s\n" % (k, v)
        else:
            data_s = u"None"

        return {'method': method, 'url': url, 'trace': trace,
                'data': data_s, 'headers': render_headers(headers)}
Example #16
def record(func, *args, **kwargs):
    djdt = DebugToolbarMiddleware.get_current()
    if not djdt:
        return func(*args, **kwargs)

    panel = djdt.get_panel(BasePanel)

    # Get stacktrace
    if ENABLE_STACKTRACES:
        stacktrace = tidy_stacktrace(reversed(get_stack()))
    else:
        stacktrace = []

    # Get template info
    template_info = None
    cur_frame = sys._getframe().f_back
    try:
        while cur_frame is not None:
            if cur_frame.f_code.co_name == 'render':
                node = cur_frame.f_locals['self']
                if isinstance(node, Node):
                    template_info = get_template_info(node.source)
                    break
            cur_frame = cur_frame.f_back
    except:
        pass
    del cur_frame

    # Find args
    cache_args = None
    # first arg is self, do we have another
    if len(args) > 1:
        cache_args = args[1]
        # is it a dictionary (most likely multi)
        if isinstance(cache_args, dict):
            # just use its keys
            cache_args = cache_args.keys()

    # the clock starts now
    start = datetime.now()
    try:
        return func(*args, **kwargs)
    finally:
        # the clock stops now
        duration = ms_from_timedelta(datetime.now() - start)
        call = {
            'function': func.__name__,
            'args': cache_args,
            'duration': duration,
            'stacktrace': stacktrace,
            'template_info': template_info,
        }
        panel.record(**call)
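
The comment "first arg is self, do we have another" suggests record() is handed the original unbound method as func and the cache instance as the first positional argument. A hedged sketch of how a backend class could be instrumented with it; the helper name is hypothetical:

import functools

def instrument(cache_class, method_name):
    original = getattr(cache_class, method_name)

    @functools.wraps(original)
    def patched(self, *args, **kwargs):
        # record() is the function above; it sees the instance as args[0]
        # and the key (if any) as args[1].
        return record(original, self, *args, **kwargs)

    setattr(cache_class, method_name, patched)

# e.g. instrument(LocMemCache, "get"); instrument(LocMemCache, "get_many")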
    def make_call_dict(self, depth, *args, **kwargs):
        debug_config = getattr(settings, 'DEBUG_TOOLBAR_CONFIG', {})
        enable_stack = debug_config.get('ENABLE_STACKTRACES', True)

        trace = enable_stack and tidy_stacktrace(reversed(get_stack()))[:-depth - 1] or []

        # prepare arguments for display
        arguments = map(repr, args[2:])
        options = ("%s=%s" % (k, repr(v)) for k, v in kwargs.items())

        return {'function': args[0],
                'key': len(args) > 1 and args[1] or '',
                'args': ' , '.join(chain(arguments, options)),
                'trace': trace}
    def wrapped(self, *args, **kwargs):
        t = time.time()
        value = method(self, *args, **kwargs)
        t = time.time() - t

        if dt_settings.get_config()['ENABLE_STACKTRACES']:
            stacktrace = tidy_stacktrace(reversed(get_stack()))
        else:
            stacktrace = []

        template_info = get_template_info()
        cache_called.send(sender=self.__class__, time_taken=t,
                          name=method.__name__, return_value=value,
                          args=args, kwargs=kwargs, trace=stacktrace,
                          template_info=template_info, backend=self.cache)
        return value
Example #20
    def wrapped(self, *args, **kwargs):
        t = time.time()
        value = method(self, *args, **kwargs)
        t = time.time() - t

        if dt_settings.CONFIG['ENABLE_STACKTRACES']:
            stacktrace = tidy_stacktrace(reversed(get_stack()))
        else:
            stacktrace = []

        template_info = get_template_info()
        cache_called.send(sender=self.__class__, time_taken=t,
                          name=method.__name__, return_value=value,
                          args=args, kwargs=kwargs, trace=stacktrace,
                          template_info=template_info, backend=self.cache)
        return value
Example #21
    def make_call_dict(self, depth, *args, **kwargs):
        debug_config = getattr(settings, 'DEBUG_TOOLBAR_CONFIG', {})
        enable_stack = debug_config.get('ENABLE_STACKTRACES', True)

        trace = enable_stack and tidy_stacktrace(reversed(
            get_stack()))[:-depth - 1] or []

        # prepare arguments for display
        arguments = map(repr, args[2:])
        options = ("%s=%s" % (k, repr(v)) for k, v in kwargs.items())

        return {
            'function': args[0],
            'key': len(args) > 1 and args[1] or '',
            'args': ' , '.join(chain(arguments, options)),
            'trace': trace
        }
Example #22
    def __init__(self, method, full_url, path, body, status_code, response,
                 duration):
        if not body:
            self.body = ""  # Python 3 TypeError if None
        else:
            self.body = _pretty_json(body)
            if isinstance(self.body, bytes):
                self.body = self.body.decode("ascii", "ignore")
        self.method = method
        self.full_url = full_url
        self.path = path
        self.status_code = status_code
        self.response = _pretty_json(response)
        self.duration = round(duration * 1000, 2)
        self.hash = hashlib.md5(
            self.full_url.encode("ascii", "ignore") +
            self.body.encode("ascii", "ignore")).hexdigest()
        self.stacktrace = tidy_stacktrace(reversed(get_stack()))
Example #23
    def __init__(self,
                 domain,
                 response,
                 result,
                 request,
                 start_time=None,
                 end_time=None):
        self.domain = domain
        self.request = request
        self.result = result
        self.response = response
        self.start_time = start_time
        self.end_time = end_time

        if dt_settings.get_config()['ENABLE_STACKTRACES']:
            # cut the first 6 lines, exactly until the original operation call
            self.raw_stacktrace = tidy_stacktrace(reversed(get_stack()[6:]))
        else:
            self.raw_stacktrace = []
Example #24
    def _record(self, method, sql, params):
        alias = self.db.alias
        vendor = self.db.vendor

        if vendor == "postgresql":
            # The underlying DB connection (as opposed to Django's wrapper)
            conn = self.db.connection
            initial_conn_status = conn.status

        start_time = time()
        try:
            return method(sql, params)
        finally:
            stop_time = time()
            duration = (stop_time - start_time) * 1000
            if dt_settings.get_config()["ENABLE_STACKTRACES"]:
                stacktrace = tidy_stacktrace(reversed(get_stack()))
            else:
                stacktrace = []
            _params = ""
            try:
                _params = json.dumps(self._decode(params))
            except TypeError:
                pass  # object not JSON serializable
            template_info = get_template_info()

            # Sql might be an object (such as psycopg Composed).
            # For logging purposes, make sure it's str.
            sql = str(sql)

            params = {
                "vendor": vendor,
                "alias": alias,
                "sql": self.db.ops.last_executed_query(
                    self.cursor, sql, self._quote_params(params)
                ),
                "duration": duration,
                "raw_sql": sql,
                "params": _params,
                "raw_params": params,
                "stacktrace": stacktrace,
                "start_time": start_time,
                "stop_time": stop_time,
                "is_slow": duration > dt_settings.get_config()["SQL_WARNING_THRESHOLD"],
                "is_select": sql.lower().strip().startswith("select"),
                "template_info": template_info,
            }

            if vendor == "postgresql":
                # If an erroneous query was run on the connection, it might
                # be in a state where checking isolation_level raises an
                # exception.
                try:
                    iso_level = conn.isolation_level
                except conn.InternalError:
                    iso_level = "unknown"
                # PostgreSQL does not expose any sort of transaction ID, so it is
                # necessary to generate synthetic transaction IDs here.  If the
                # connection was not in a transaction when the query started, and was
                # after the query finished, a new transaction definitely started, so get
                # a new transaction ID from logger.new_transaction_id().  If the query
                # was in a transaction both before and after executing, make the
                # assumption that it is the same transaction and get the current
                # transaction ID from logger.current_transaction_id().  There is an edge
                # case where Django can start a transaction before the first query
                # executes, so in that case logger.current_transaction_id() will
                # generate a new transaction ID since one does not already exist.
                final_conn_status = conn.status
                if final_conn_status == STATUS_IN_TRANSACTION:
                    if initial_conn_status == STATUS_IN_TRANSACTION:
                        trans_id = self.logger.current_transaction_id(alias)
                    else:
                        trans_id = self.logger.new_transaction_id(alias)
                else:
                    trans_id = None

                params.update({
                    "trans_id": trans_id,
                    "trans_status": conn.get_transaction_status(),
                    "iso_level": iso_level,
                })

            # We keep `sql` to maintain backwards compatibility
            self.logger.record(**params)
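
The comment block above describes synthesizing transaction IDs on the logger. A hedged sketch of what new_transaction_id() and current_transaction_id() might look like; the real logger lives in the source project:

import uuid

class SQLLoggerSketch:
    def __init__(self):
        self._transaction_ids = {}

    def new_transaction_id(self, alias):
        # Fresh synthetic ID whenever a new transaction is detected.
        trans_id = uuid.uuid4().hex
        self._transaction_ids[alias] = trans_id
        return trans_id

    def current_transaction_id(self, alias):
        # Reuse the current ID; fall back to a new one if Django opened the
        # transaction before any query was recorded on this alias.
        return self._transaction_ids.get(alias) or self.new_transaction_id(alias)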
Example #25
    def execute(self, sql, params=()):
        start = datetime.now()
        try:
            return self.cursor.execute(sql, params)
        finally:
            stop = datetime.now()
            duration = ms_from_timedelta(stop - start)
            enable_stacktraces = getattr(settings,
                'DEBUG_TOOLBAR_CONFIG', {}).get('ENABLE_STACKTRACES', True)
            if enable_stacktraces:
                stacktrace = tidy_stacktrace(reversed(get_stack()))
            else:
                stacktrace = []
            _params = ''
            try:
                _params = json.dumps(map(self._decode, params))
            except Exception:
                pass  # object not JSON serializable

            template_info = None
            cur_frame = sys._getframe().f_back
            try:
                while cur_frame is not None:
                    if cur_frame.f_code.co_name == 'render':
                        node = cur_frame.f_locals['self']
                        if isinstance(node, Node):
                            template_info = get_template_info(node.source)
                            break
                    cur_frame = cur_frame.f_back
            except:
                pass
            del cur_frame

            alias = getattr(self.db, 'alias', 'default')
            conn = self.db.connection
            # HACK: avoid imports
            if conn:
                engine = conn.__class__.__module__.split('.', 1)[0]
            else:
                engine = 'unknown'

            params = {
                'engine': engine,
                'alias': alias,
                'sql': self.db.ops.last_executed_query(
                    self.cursor, sql, self._quote_params(params)),
                'duration': duration,
                'raw_sql': sql,
                'params': _params,
                'stacktrace': stacktrace,
                'start_time': start,
                'stop_time': stop,
                'is_slow': (duration > SQL_WARNING_THRESHOLD),
                'is_select': sql.lower().strip().startswith('select'),
                'template_info': template_info,
            }

            if engine == 'psycopg2':
                # If an erroneous query was run on the connection, it might
                # be in a state where checking isolation_level raises an
                # exception.
                try:
                    iso_level = conn.isolation_level
                except conn.InternalError:
                    iso_level = 'unknown'
                params.update({
                    'trans_id': self.logger.get_transaction_id(alias),
                    'trans_status': conn.get_transaction_status(),
                    'iso_level': iso_level,
                    'encoding': conn.encoding,
                })

            # We keep `sql` to maintain backwards compatibility
            self.logger.record(**params)
Example #26
    def execute(self, sql, params=()):
        __traceback_hide__ = True
        start = datetime.now()
        try:
            return self.cursor.execute(sql, params)
        finally:
            stop = datetime.now()
            duration = ms_from_timedelta(stop - start)
            enable_stacktraces = getattr(settings,
                                        'DEBUG_TOOLBAR_CONFIG', {}) \
                                    .get('ENABLE_STACKTRACES', True)
            if enable_stacktraces:
                stacktrace = tidy_stacktrace(reversed(get_stack()))
            else:
                stacktrace = []
            _params = ''
            try:
                _params = simplejson.dumps(
                        [force_unicode(x, strings_only=True) for x in params]
                            )
            except TypeError:
                pass  # object not JSON serializable

            template_info = None
            cur_frame = sys._getframe().f_back
            try:
                while cur_frame is not None:
                    if cur_frame.f_code.co_name == 'render':
                        node = cur_frame.f_locals['self']
                        if isinstance(node, Node):
                            template_info = get_template_info(node.source)
                            break
                    cur_frame = cur_frame.f_back
            except:
                pass
            del cur_frame

            alias = getattr(self.db, 'alias', 'default')
            conn = connections[alias].connection
            # HACK: avoid imports
            if conn:
                engine = conn.__class__.__module__.split('.', 1)[0]
            else:
                engine = 'unknown'

            params = {
                'engine': engine,
                'alias': alias,
                'sql': self.db.ops.last_executed_query(self.cursor, sql,
                                                self._quote_params(params)),
                'duration': duration,
                'raw_sql': sql,
                'params': _params,
                'hash': sha_constructor(settings.SECRET_KEY \
                                        + smart_str(sql) \
                                        + _params).hexdigest(),
                'stacktrace': stacktrace,
                'start_time': start,
                'stop_time': stop,
                'is_slow': (duration > SQL_WARNING_THRESHOLD),
                'is_select': sql.lower().strip().startswith('select'),
                'template_info': template_info,
            }

            if engine == 'psycopg2':
                params.update({
                    'trans_id': self.logger.get_transaction_id(alias),
                    'trans_status': conn.get_transaction_status(),
                    'iso_level': conn.isolation_level,
                    'encoding': conn.encoding,
                })

            # We keep `sql` to maintain backwards compatibility
            self.logger.record(**params)
Example #27
    def _after_execute(self, conn, clause, multiparams, params, result):
        logger = self.loggers.get(current_thread().ident)
        if not logger:
            return

        try:
            start_time = clause.start_time
        except AttributeError:
            start_time = self.tmp.pop(id(clause))
        stop_time = time()
        duration = (stop_time - start_time) * 1000
        config = dt_settings.get_config()

        try:
            raw_compiled = clause.compile(dialect=self.engine.dialect,
                                          compile_kwargs={})

        except AttributeError:
            try:
                parameters = _distill_params(multiparams, params)
            except InvalidRequestError:
                parameters = []
            raw_sql = " ".join(six.text_type(clause).splitlines())

        else:
            try:
                ctx = CursorlessExecutionContext._init_compiled(
                    self.engine.dialect,
                    conn,
                    conn._Connection__connection,
                    raw_compiled,
                    _distill_params(multiparams, params),
                )
            except Exception:
                parameters = []
                raw_sql = " ".join(six.text_type(clause).splitlines())
            else:
                parameters = ctx.parameters
                raw_sql = " ".join(ctx.statement.splitlines())

        try:
            sql = " ".join(
                six.text_type(
                    clause.compile(dialect=self.engine.dialect,
                                   compile_kwargs={"literal_binds":
                                                   True})).splitlines())
        except (CompileError, TypeError, NotImplementedError, AttributeError):
            # not all queries support literal_binds
            sql = raw_sql

        if config["ENABLE_STACKTRACES"]:
            stacktrace = tidy_stacktrace(reversed(get_stack()))
        else:
            stacktrace = []

        template_info = get_template_info()

        params = {
            "vendor": conn.dialect.name,
            "alias": self.alias,
            "sql": sql,
            "duration": duration,
            "raw_sql": raw_sql,
            "params": json.dumps([
                list(i) if isinstance(i, (list, tuple)) else i
                for i in parameters if i
            ]),
            "raw_params": tuple(
                tuple(i.items() if isinstance(i, dict) else i)
                for i in parameters
            ),
            "stacktrace": stacktrace,
            "start_time": start_time,
            "stop_time": stop_time,
            "is_slow": duration > config["SQL_WARNING_THRESHOLD"],
            "is_select": sql.lower().strip().startswith("select"),
            "template_info": template_info,
        }

        logger.record(**params)

        return params
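
The handler's signature matches SQLAlchemy 1.x's after_execute connection event (the six and _distill_params usage above points at that era). A hedged sketch of how such a panel might register it, plus a companion before_execute hook to stamp the start time read back above; engine creation and names are illustrative:

from time import time

from sqlalchemy import create_engine, event

engine = create_engine("sqlite://")
start_times = {}

def before_execute(conn, clause, multiparams, params):
    # Mirrors the example's self.tmp bookkeeping: remember when the statement
    # started so after_execute can compute a duration.
    start_times[id(clause)] = time()

event.listen(engine, "before_execute", before_execute)
# The panel's _after_execute (above) is registered the same way:
# event.listen(engine, "after_execute", panel._after_execute)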
Example #28
    def execute(self, sql, params=()):
        start = datetime.now()
        try:
            return self.cursor.execute(sql, params)
        finally:
            stop = datetime.now()
            duration = ms_from_timedelta(stop - start)
            enable_stacktraces = getattr(settings, 'DEBUG_TOOLBAR_CONFIG',
                                         {}).get('ENABLE_STACKTRACES', True)
            if enable_stacktraces:
                stacktrace = tidy_stacktrace(reversed(get_stack()))
            else:
                stacktrace = []
            _params = ''
            try:
                _params = json.dumps(map(self._decode, params))
            except Exception:
                pass  # object not JSON serializable

            template_info = None
            cur_frame = sys._getframe().f_back
            try:
                while cur_frame is not None:
                    if cur_frame.f_code.co_name == 'render':
                        node = cur_frame.f_locals['self']
                        if isinstance(node, Node):
                            template_info = get_template_info(node.source)
                            break
                    cur_frame = cur_frame.f_back
            except:
                pass
            del cur_frame

            alias = getattr(self.db, 'alias', 'default')
            conn = self.db.connection
            # HACK: avoid imports
            if conn:
                engine = conn.__class__.__module__.split('.', 1)[0]
            else:
                engine = 'unknown'

            params = {
                'engine': engine,
                'alias': alias,
                'sql': self.db.ops.last_executed_query(self.cursor, sql,
                                                       self._quote_params(params)),
                'duration': duration,
                'raw_sql': sql,
                'params': _params,
                'stacktrace': stacktrace,
                'start_time': start,
                'stop_time': stop,
                'is_slow': (duration > SQL_WARNING_THRESHOLD),
                'is_select': sql.lower().strip().startswith('select'),
                'template_info': template_info,
            }

            if engine == 'psycopg2':
                # If an erroneous query was run on the connection, it might
                # be in a state where checking isolation_level raises an
                # exception.
                try:
                    iso_level = conn.isolation_level
                except conn.InternalError:
                    iso_level = 'unknown'
                params.update({
                    'trans_id': self.logger.get_transaction_id(alias),
                    'trans_status': conn.get_transaction_status(),
                    'iso_level': iso_level,
                    'encoding': conn.encoding,
                })

            # We keep `sql` to maintain backwards compatibility
            self.logger.record(**params)
Example #29
    def execute(self, sql, params=()):
        __traceback_hide__ = True
        start = datetime.now()
        try:
            return self.cursor.execute(sql, params)
        finally:
            stop = datetime.now()
            duration = ms_from_timedelta(stop - start)
            enable_stacktraces = getattr(settings,
                'DEBUG_TOOLBAR_CONFIG', {}).get('ENABLE_STACKTRACES', True)
            if enable_stacktraces:
                stacktrace = tidy_stacktrace(reversed(get_stack()))
            else:
                stacktrace = []
            _params = ''
            try:
                _params = simplejson.dumps(
                        [force_unicode(x, strings_only=True) for x in params]
                            )
            except TypeError:
                pass  # object not JSON serializable

            template_info = None
            cur_frame = sys._getframe().f_back
            try:
                while cur_frame is not None:
                    if cur_frame.f_code.co_name == 'render':
                        node = cur_frame.f_locals['self']
                        if isinstance(node, Node):
                            template_info = get_template_info(node.source)
                            break
                    cur_frame = cur_frame.f_back
            except:
                pass
            del cur_frame

            alias = getattr(self.db, 'alias', 'default')
            conn = connections[alias].connection
            # HACK: avoid imports
            if conn:
                engine = conn.__class__.__module__.split('.', 1)[0]
            else:
                engine = 'unknown'

            params = {
                'engine': engine,
                'alias': alias,
                'sql': self.db.ops.last_executed_query(self.cursor, sql,
                                                self._quote_params(params)),
                'duration': duration,
                'raw_sql': sql,
                'params': _params,
                'hash': sha1(settings.SECRET_KEY \
                                        + smart_str(sql) \
                                        + _params).hexdigest(),
                'stacktrace': stacktrace,
                'start_time': start,
                'stop_time': stop,
                'is_slow': (duration > SQL_WARNING_THRESHOLD),
                'is_select': sql.lower().strip().startswith('select'),
                'template_info': template_info,
            }

            if engine == 'psycopg2':
                from psycopg2.extensions import TRANSACTION_STATUS_INERROR
                params.update({
                    'trans_id': self.logger.get_transaction_id(alias),
                    'trans_status': conn.get_transaction_status(),
                    'iso_level': conn.isolation_level if not conn.get_transaction_status() == TRANSACTION_STATUS_INERROR else "",
                    'encoding': conn.encoding,
                })

            # We keep `sql` to maintain backwards compatibility
            self.logger.record(**params)
Example #30
    def execute(self, sql, params=()):
        start = datetime.now()
        try:
            return self.cursor.execute(sql, params)
        finally:
            stop = datetime.now()
            duration = ms_from_timedelta(stop - start)
            enable_stacktraces = getattr(settings, "DEBUG_TOOLBAR_CONFIG", {}).get("ENABLE_STACKTRACES", True)
            if enable_stacktraces:
                stacktrace = tidy_stacktrace(reversed(get_stack()))
            else:
                stacktrace = []
            _params = ""
            try:
                _params = json.dumps(map(self._decode, params))
            except Exception:
                pass  # object not JSON serializable

            template_info = None
            cur_frame = sys._getframe().f_back
            try:
                while cur_frame is not None:
                    if cur_frame.f_code.co_name == "render":
                        node = cur_frame.f_locals["self"]
                        if isinstance(node, Node):
                            template_info = get_template_info(node.source)
                            break
                    cur_frame = cur_frame.f_back
            except:
                pass
            del cur_frame

            alias = getattr(self.db, "alias", "default")
            conn = connections[alias].connection
            # HACK: avoid imports
            if conn:
                engine = conn.__class__.__module__.split(".", 1)[0]
            else:
                engine = "unknown"

            params = {
                "engine": engine,
                "alias": alias,
                "sql": self.db.ops.last_executed_query(self.cursor, sql, self._quote_params(params)),
                "duration": duration,
                "raw_sql": sql,
                "params": _params,
                "hash": sha1(settings.SECRET_KEY + smart_str(sql) + _params).hexdigest(),
                "stacktrace": stacktrace,
                "start_time": start,
                "stop_time": stop,
                "is_slow": (duration > SQL_WARNING_THRESHOLD),
                "is_select": sql.lower().strip().startswith("select"),
                "template_info": template_info,
            }

            if engine == "psycopg2":
                from psycopg2.extensions import TRANSACTION_STATUS_INERROR

                params.update(
                    {
                        "trans_id": self.logger.get_transaction_id(alias),
                        "trans_status": conn.get_transaction_status(),
                        "iso_level": conn.isolation_level
                        if not conn.get_transaction_status() == TRANSACTION_STATUS_INERROR
                        else "",
                        "encoding": conn.encoding,
                    }
                )

            # We keep `sql` to maintain backwards compatibility
            self.logger.record(**params)
Example #31
    def _record(self, method, sql, params):
        start_time = time()
        try:
            return method(sql, params)
        finally:
            stop_time = time()
            duration = (stop_time - start_time) * 1000
            if dt_settings.get_config()["ENABLE_STACKTRACES"]:
                stacktrace = tidy_stacktrace(reversed(get_stack()))
            else:
                stacktrace = []
            _params = ""
            try:
                _params = json.dumps(self._decode(params))
            except TypeError:
                pass  # object not JSON serializable
            template_info = get_template_info()

            alias = getattr(self.db, "alias", "default")
            conn = self.db.connection
            vendor = getattr(conn, "vendor", "unknown")

            # Sql might be an object (such as psycopg Composed).
            # For logging purposes, make sure it's str.
            sql = str(sql)

            params = {
                "vendor": vendor,
                "alias": alias,
                "sql": self.db.ops.last_executed_query(
                    self.cursor, sql, self._quote_params(params)
                ),
                "duration": duration,
                "raw_sql": sql,
                "params": _params,
                "raw_params": params,
                "stacktrace": stacktrace,
                "start_time": start_time,
                "stop_time": stop_time,
                "is_slow": duration > dt_settings.get_config()["SQL_WARNING_THRESHOLD"],
                "is_select": sql.lower().strip().startswith("select"),
                "template_info": template_info,
            }

            if vendor == "postgresql":
                # If an erroneous query was run on the connection, it might
                # be in a state where checking isolation_level raises an
                # exception.
                try:
                    iso_level = conn.isolation_level
                except conn.InternalError:
                    iso_level = "unknown"
                params.update(
                    {
                        "trans_id": self.logger.get_transaction_id(alias),
                        "trans_status": conn.get_transaction_status(),
                        "iso_level": iso_level,
                        "encoding": conn.encoding,
                    }
                )

            # We keep `sql` to maintain backwards compatibility
            self.logger.record(**params)
Example #32
    def execute(self, sql, params=()):
        start_time = time()
        try:
            return self.cursor.execute(sql, params)
        finally:
            stop_time = time()
            duration = (stop_time - start_time) * 1000
            if dt_settings.CONFIG["ENABLE_STACKTRACES"]:
                stacktrace = tidy_stacktrace(reversed(get_stack()))
            else:
                stacktrace = []
            _params = ""
            try:
                _params = json.dumps(list(map(self._decode, params)))
            except Exception:
                pass  # object not JSON serializable

            template_info = None
            cur_frame = sys._getframe().f_back
            try:
                while cur_frame is not None:
                    if cur_frame.f_code.co_name == "render":
                        node = cur_frame.f_locals["self"]
                        if isinstance(node, Node):
                            template_info = get_template_info(node.source)
                            break
                    cur_frame = cur_frame.f_back
            except Exception:
                pass
            del cur_frame

            alias = getattr(self.db, "alias", "default")
            conn = self.db.connection
            # HACK: avoid imports
            if conn:
                engine = conn.__class__.__module__.split(".", 1)[0]
            else:
                engine = "unknown"

            params = {
                "engine": engine,
                "alias": alias,
                "sql": self.db.ops.last_executed_query(self.cursor, sql, self._quote_params(params)),
                "duration": duration,
                "raw_sql": sql,
                "params": _params,
                "stacktrace": stacktrace,
                "start_time": start_time,
                "stop_time": stop_time,
                "is_slow": duration > dt_settings.CONFIG["SQL_WARNING_THRESHOLD"],
                "is_select": sql.lower().strip().startswith("select"),
                "template_info": template_info,
            }

            if engine == "psycopg2":
                # If an erroneous query was run on the connection, it might
                # be in a state where checking isolation_level raises an
                # exception.
                try:
                    iso_level = conn.isolation_level
                except conn.InternalError:
                    iso_level = "unknown"
                params.update(
                    {
                        "trans_id": self.logger.get_transaction_id(alias),
                        "trans_status": conn.get_transaction_status(),
                        "iso_level": iso_level,
                        "encoding": conn.encoding,
                    }
                )

            # We keep `sql` to maintain backwards compatibility
            self.logger.record(**params)
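
Several of the examples above run SQL parameters through a self._decode(...) helper before JSON-encoding them. A hedged sketch of what such a helper typically does, loosely based on django-debug-toolbar's approach and simplified to a standalone function:

def _decode(param):
    # Make SQL parameters JSON-friendly: recurse into containers, decode bytes.
    if isinstance(param, dict):
        return {_decode(k): _decode(v) for k, v in param.items()}
    if isinstance(param, (list, tuple)):
        return [_decode(p) for p in param]
    if isinstance(param, bytes):
        try:
            return param.decode("utf-8")
        except UnicodeDecodeError:
            return "(encoded string)"
    return param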