def test_nested_wrapper_invoked(self):
    """Calls made under two stacked wrappers are all seen by the inner one."""
    outermost = self.mock_wrapper()
    innermost = self.mock_wrapper()
    with connection.execute_wrapper(outermost):
        with connection.execute_wrapper(innermost):
            self.call_execute(connection)
            self.assertEqual(innermost.call_count, 1)
            self.call_executemany(connection)
            self.assertEqual(innermost.call_count, 2)
def flags(request):
    """
    Change the flags of articles.

    :query read: One of "true" or "false".
    :query fave: One of "true" or "false".
    :query article: One or more article IDs.
    """
    if request.method != "POST":
        return HttpResponseNotAllowed(["POST"])

    # Table-driven parsing replaces two copies of the same if/elif chain.
    # Values other than "true"/"false" are ignored, as before.
    truth_values = {"true": True, "false": False}
    updates = {}
    for flag in ("read", "fave"):
        if flag in request.POST:
            value = request.POST[flag]
            if value in truth_values:
                updates[flag] = truth_values[value]

    qs = articles_for_request(request)
    if updates:
        # Log (but don't suppress) DB errors raised by the bulk update.
        with connection.execute_wrapper(log_on_error):
            qs.update(**updates)

    data = {
        "articlesById": {article.id: json_for_article(article) for article in qs.all()},
    }
    return HttpResponse(json.dumps(data), content_type="application/json")
def middleware(request):
    """Capture per-query tracebacks and EXPLAIN data for selected requests.

    Requests that should_capture() rejects pass straight through.  For
    captured requests, every query gets its traceback and EXPLAIN summary
    attached, and the aggregate is persisted via save_request().
    """
    if not should_capture(request):
        return get_response(request)

    logger = TracebackLogger()
    with connection.execute_wrapper(logger):
        response = get_response(request)

    queries = connection.queries.copy()
    # Tracebacks were recorded in query order, so zip pairs them up.
    for tb, q in zip(logger.tracebacks, queries):
        q['traceback'] = tb
        explain = get_explain(q['sql'])
        q['explain'] = explain
        # Cost/rows/width live on the first line of EXPLAIN output.  The
        # regex may not match every statement/backend, so guard against a
        # None result instead of raising AttributeError (bug fix).
        result = explain_cost_r.search(explain.split('\n')[0])
        if result is not None:
            q['explain_cost'] = result.group("cost")
            q['explain_rows'] = result.group("rows")
            q['explain_width'] = result.group("width")

    total_duration = sum(float(q['time']) for q in queries)
    request_data = {
        'method': request.method,
        'created_at': datetime.now().isoformat(),
        'endpoint': request.build_absolute_uri(),
        'total_time': total_duration,
        'total_queries': len(queries),
        'paths': json.dumps(sys.path[1:]),
        'queries': queries,
    }
    if queries:
        save_request(request_data)
    return response
def execute_graphql_request(self, request: HttpRequest, data: dict):
    """Parse and execute a GraphQL request, tracing all DB calls.

    Returns the parse error if parsing fails, otherwise the document's
    execution result; any exception during execution is converted into an
    invalid ExecutionResult instead of propagating.
    """
    query, variables, operation_name = self.get_graphql_params(
        request, data)
    document, error = self.parse_query(query)
    if error:
        return error
    extra_options: Dict[str, Optional[Any]] = {}
    if self.executor:
        # We only include it optionally since
        # executor is not a valid argument in all backends
        extra_options["executor"] = self.executor
    try:
        # Route every DB call made during execution through the tracer.
        with connection.execute_wrapper(tracing_wrapper):
            return document.execute(  # type: ignore
                root=self.get_root_value(),
                variables=variables,
                operation_name=operation_name,
                context=request,
                middleware=self.middleware,
                **extra_options,
            )
    except Exception as e:
        # Surface any execution failure as an invalid ExecutionResult.
        return ExecutionResult(errors=[e], invalid=True)
def test_wrapper_gets_sql(self):
    """The wrapper receives exactly the SQL string given to the cursor."""
    spy = self.mock_wrapper()
    statement = "SELECT 'aloha'" + connection.features.bare_select_suffix
    with connection.execute_wrapper(spy):
        with connection.cursor() as cursor:
            cursor.execute(statement)
    positional, _ = spy.call_args
    self.assertEqual(positional[1], statement)
def flags(request):
    """
    Change the flags of articles.

    :query read: One of "true" or "false".
    :query fave: One of "true" or "false".
    :query article: One or more article IDs.
    """
    if request.method != 'POST':
        return HttpResponseNotAllowed(['POST'])

    # Table-driven parsing replaces two copies of the same if/elif chain.
    # Values other than 'true'/'false' are ignored, as before.
    truth_values = {'true': True, 'false': False}
    updates = {}
    for flag in ('read', 'fave'):
        if flag in request.POST:
            value = request.POST[flag]
            if value in truth_values:
                updates[flag] = truth_values[value]

    qs = articles_for_request(request)
    if updates:
        # Log (but don't suppress) DB errors raised by the bulk update.
        with connection.execute_wrapper(log_on_error):
            qs.update(**updates)

    data = {
        'articlesById': {article.id: json_for_article(article) for article in qs.all()},
    }
    return HttpResponse(json_encoder.encode(data), content_type='application/json')
def wrapper(*args, **kwargs):
    """Invoke the wrapped function while recording its queries, then log them."""
    query_log = QueryLogger()
    with connection.execute_wrapper(query_log):
        outcome = func(*args, **kwargs)
    logging.getLogger(log_name).info(str(query_log))
    return outcome
def execute_graphql_request(self, request: HttpRequest, data: dict):
    """Parse and execute a GraphQL request inside a tracing span.

    Pure-introspection queries are served from (and written to) the cache
    when DEBUG is off; everything else executes against the document with
    DB calls routed through ``tracing_wrapper``.
    """
    with opentracing.global_tracer().start_active_span(
            "graphql_query") as scope:
        span = scope.span
        span.set_tag(opentracing.tags.COMPONENT, "graphql")
        query, variables, operation_name = self.get_graphql_params(
            request, data)
        document, error = self.parse_query(query)
        if error:
            return error
        if document is not None:
            raw_query_string = document.document_string
            span.set_tag("graphql.query", raw_query_string)
            span.set_tag("graphql.query_fingerprint",
                         query_fingerprint(document))
            try:
                query_contains_schema = self.check_if_query_contains_only_schema(
                    document)
            except GraphQLError as e:
                return ExecutionResult(errors=[e], invalid=True)
        extra_options: Dict[str, Optional[Any]] = {}
        if self.executor:
            # We only include it optionally since
            # executor is not a valid argument in all backends
            extra_options["executor"] = self.executor
        try:
            with connection.execute_wrapper(tracing_wrapper):
                response = None
                # FIX: use short-circuiting boolean `and` instead of bitwise
                # `&`, which only worked because both operands were bools.
                should_use_cache_for_scheme = query_contains_schema and (
                    not settings.DEBUG)
                if should_use_cache_for_scheme:
                    key = generate_cache_key(raw_query_string)
                    response = cache.get(key)
                # NOTE(review): a falsy cached value would be re-executed
                # here; cache.get() returns None on a miss, so this is fine
                # as long as cached responses are always truthy.
                if not response:
                    response = document.execute(  # type: ignore
                        root=self.get_root_value(),
                        variables=variables,
                        operation_name=operation_name,
                        context=request,
                        middleware=self.middleware,
                        **extra_options,
                    )
                    if should_use_cache_for_scheme:
                        cache.set(key, response)
                return response
        except Exception as e:
            span.set_tag(opentracing.tags.ERROR, True)
            # In the graphql-core version that we are using,
            # the Exception is raised for too big integers value.
            # As it's a validation error we want to raise GraphQLError instead.
            if str(e).startswith(INT_ERROR_MSG) or isinstance(
                    e, ValueError):
                e = GraphQLError(str(e))
            return ExecutionResult(errors=[e], invalid=True)
def __call__(self, request: HttpRequest) -> HttpResponse:
    """Count DB queries made while handling *request*; warn past a threshold."""
    # Bail out early if this middleware was installed more than once.
    if getattr(request, 'querycountmiddleware', None) not in (None, self):
        msg = "QueryCountMiddleware appears to be already initialised (did you include QueryCountMiddleware multiple times?)"
        warnings.warn(msg, RuntimeWarning)
        return self.get_response(request)

    request.querycountmiddleware = self
    request.querycountmiddleware_query_count = 0
    request.QUERY_COUNT_WARNING_THRESHOLD = getattr(
        settings, 'QUERY_COUNT_WARNING_THRESHOLD',
        DEFAULT_QUERY_COUNT_WARNING_THRESHOLD)

    counter = QueryCounter()
    with connection.execute_wrapper(counter):
        response = self.get_response(request)

    total = counter.count
    threshold = getattr(request, 'QUERY_COUNT_WARNING_THRESHOLD', 0)
    if threshold and total >= threshold:
        logger.warning(
            f'excessive query count: request "{request.method} {request.path}" ran {total} queries'
        )
    return response
def test_database_queried(self):
    """Queries wrapped by execute_wrapper still reach the database."""
    spy = self.mock_wrapper()
    with connection.execute_wrapper(spy), connection.cursor() as cursor:
        statement = "SELECT 17" + connection.features.bare_select_suffix
        cursor.execute(statement)
        rows = cursor.fetchall()
        self.assertEqual(list(rows), [(17,)])
        self.call_executemany(connection)
def __call__(self, request):
    """Handle the request, instrumenting DB calls when the backend supports it.

    connection.execute_wrapper only exists on Django >= 2.0.  The original
    code detected that by catching AttributeError around the whole block,
    which also swallowed AttributeErrors raised inside create_http_event()
    and then ran it a SECOND time.  Probing for the attribute explicitly
    avoids that double execution (bug fix).
    """
    if hasattr(connection, "execute_wrapper"):
        db_wrapper = HoneyDBWrapper()
        with connection.execute_wrapper(db_wrapper):
            response = self.create_http_event(request)
    else:
        response = self.create_http_event(request)
    return response
def test_wrapper_invoked(self):
    """A single execute() reaches the wrapper with the expected arguments."""
    spy = self.mock_wrapper()
    with connection.execute_wrapper(spy):
        self.call_execute(connection)
    self.assertTrue(spy.called)
    positional, _ = spy.call_args
    _, sql, params, many, context = positional
    self.assertIn("SELECT", sql)
    self.assertIsNone(params)
    self.assertIs(many, False)
    self.assertEqual(context["connection"], connection)
def test_wrapper_invoked_many(self):
    """executemany() reaches the wrapper with many=True and a param list."""
    spy = self.mock_wrapper()
    with connection.execute_wrapper(spy):
        self.call_executemany(connection)
    self.assertTrue(spy.called)
    positional, _ = spy.call_args
    _, sql, param_list, many, context = positional
    self.assertIn("DELETE", sql)
    self.assertIsInstance(param_list, (list, tuple))
    self.assertIs(many, True)
    self.assertEqual(context["connection"], connection)
def get(self, request, *args, **kwargs):
    """Return a user's info plus their aggregated shop items as JSON.

    Queries issued while building the payload are captured by QueryLogger
    for efficacy inspection.
    """
    user = request.GET["user"]
    ql = QueryLogger()
    # Get query efficacy
    with connection.execute_wrapper(ql):
        found_user = Users.objects.get(username=user)
        ar_shopitems = found_user.shoplist_set.all().values().annotate(
            total=Count('item_name')).order_by('total')
        # list() materialises the queryset directly; the previous identity
        # comprehension copied it element by element for no benefit.
        dict_ar = list(ar_shopitems)
    result = {
        'user_info': model_to_dict(found_user),
        'shop_items': dict_ar
    }
    # NOTE(review): debug leftover — consider logging instead of print().
    print(ql.queries)
    return JsonResponse(result, status=200)
def execute_graphql_request(
        self,
        request,
        data,
        query,
        variables,
        operation_name,
):
    """Validate and execute a GraphQL query inside a tracing span.

    Raises HttpError for an empty query or for non-query operations sent
    via GET; returns an ExecutionResult for parse/validation/execution
    errors, otherwise the schema's execution result.
    """
    if not query:
        raise HttpError(
            HttpResponseBadRequest("Must provide query string."))
    with opentracing.global_tracer().start_active_span(
            "graphql_query") as scope:
        span = scope.span
        span.set_tag(opentracing.tags.COMPONENT, "GraphQL")
        try:
            document = parse(query)
        except GraphQLError as e:
            return ExecutionResult(errors=[e], data=dict(invalid=True))
        if request.method.lower() == "get":
            # GET may only carry read-only (query) operations.
            operation_ast = get_operation_ast(document, operation_name)
            if operation_ast and operation_ast.operation != OperationType.QUERY:
                raise HttpError(
                    HttpResponseNotAllowed(
                        ["POST"],
                        "Can only perform a {} operation from a POST request."
                        .format(operation_ast.operation.value),
                    ))
        validation_errors = validate(self.schema.graphql_schema, document)
        if validation_errors:
            return ExecutionResult(data=None, errors=validation_errors)
        try:
            # Trace every DB call made while executing the query.
            with connection.execute_wrapper(tracing_wrapper):
                return self.schema.execute(
                    source=query,
                    root_value=self.get_root_value(request),
                    variable_values=variables,
                    operation_name=operation_name,
                    context_value=self.get_context(request),
                    middleware=self.get_middleware(request),
                )
        except GraphQLError as e:
            span.set_tag(opentracing.tags.ERROR, True)
            return ExecutionResult(errors=[e])
def inner_func(*args, **kwargs):
    """Run the wrapped function while logging its queries, then print query
    stats plus a best-effort description of the call target."""
    func_info = ['Target:']
    query_logger = QueryLogger()
    with connection.execute_wrapper(query_logger):
        result = func(*args, **kwargs)
        query_logger.count()
    try:
        if len(args) == 1:
            # Single positional arg — presumably a management-command-style
            # object; report its module.  TODO confirm against callers.
            cmd, = args
            func_info.append(cmd.__module__)
        elif len(args) > 1:
            # View-style call: second positional arg looks like the request.
            _, request, *_ = args
            if request.path:
                func_info.append(request.path)
            if request.resolver_match:
                func_info.append(request.resolver_match._func_path)
    except ValueError:
        # Unpacking failed; fall back to the wrapped function's name.
        func_info.append(func.__qualname__)
    query_logger.print_stats()
    print(' '.join(func_info))
    return result
def execute_graphql_request(self, request: HttpRequest, data: dict):
    """Parse and execute a GraphQL request inside an opentracing span."""
    with opentracing.global_tracer().start_active_span(
            "graphql_query") as scope:
        span = scope.span
        span.set_tag(opentracing.tags.COMPONENT, "GraphQL")
        query, variables, operation_name = self.get_graphql_params(
            request, data)
        document, error = self.parse_query(query)
        if error:
            return error
        if document is not None:
            raw_query_string = document.document_string
            span.set_tag("graphql.query", raw_query_string)
            try:
                self.check_if_query_contains_only_schema(document)
            except GraphQLError as e:
                return ExecutionResult(errors=[e], invalid=True)
        extra_options: Dict[str, Optional[Any]] = {}
        if self.executor:
            # We only include it optionally since
            # executor is not a valid argument in all backends
            extra_options["executor"] = self.executor
        try:
            # Trace every DB call made while executing the document.
            with connection.execute_wrapper(tracing_wrapper):
                return document.execute(  # type: ignore
                    root=self.get_root_value(),
                    variables=variables,
                    operation_name=operation_name,
                    context=request,
                    middleware=self.middleware,
                    **extra_options,
                )
        except Exception as e:
            # Mark the span as failed and convert to an ExecutionResult.
            span.set_tag(opentracing.tags.ERROR, True)
            return ExecutionResult(errors=[e], invalid=True)
def execute_graphql_request(self, request: HttpRequest, data: dict):
    """Execute a GraphQL request inside an opentracing span, logging a
    length-limited copy of the query string."""
    with ot.global_tracer().start_active_span(
            operation_name="graphql_query") as scope:
        span = scope.span
        span.set_tag(ot_tags.COMPONENT, "graphql_query")
        query, variables, operation_name = self.get_graphql_params(
            request, data)
        document, error = self.parse_query(query)
        if error:
            return error
        if document is not None:
            # Log at most OPENTRACING_MAX_QUERY_LENGTH_LOG characters.
            span.log_kv({
                "query": document.document_string[:settings.
                                                  OPENTRACING_MAX_QUERY_LENGTH_LOG]
            })
        extra_options: Dict[str, Optional[Any]] = {}
        if self.executor:
            # We only include it optionally since
            # executor is not a valid argument in all backends
            extra_options["executor"] = self.executor
        try:
            # Trace every DB call made while executing the document.
            with connection.execute_wrapper(tracing_wrapper):
                return document.execute(  # type: ignore
                    root=self.get_root_value(),
                    variables=variables,
                    operation_name=operation_name,
                    context=request,
                    middleware=self.middleware,
                    **extra_options,
                )
        except Exception as e:
            span.set_tag(ot_tags.ERROR, True)
            return ExecutionResult(errors=[e], invalid=True)
def process_view(self, request, callback, callback_args, callback_kwargs):
    """Profile API views and record a traceback for every query they run.

    Non-API requests (no 'api' in the path) fall through returning None,
    letting Django continue with normal view processing.
    """
    if 'api' in request.path:
        logger = TracebackLogger()
        self.prof.enable()
        # Run the view under the profiler with query tracebacks captured.
        with connection.execute_wrapper(logger):
            response = self.prof.runcall(
                callback, request, *callback_args, **callback_kwargs
            )
        self.prof.disable()
        queries = connection.queries.copy()
        self.create_traceback(
            request, callback, logger.tracebacks, queries
        )
        return response
def _test_duplicate_mail(self, mail):
    """Parsing the same mail twice must raise DuplicateMailError without
    surfacing any database-level errors."""
    db_errors = []

    def record_query_errors(execute, sql, params, many, context):
        # Record any exception a query raises, then re-raise it so the
        # normal error flow is unchanged.
        try:
            return execute(sql, params, many, context)
        except Exception as exc:
            db_errors.append(exc)
            raise

    _parse_mail(mail)
    with self.assertRaises(DuplicateMailError):
        with connection.execute_wrapper(record_query_errors):
            # If we see any database errors from the duplicate insert
            # (typically an IntegrityError), the insert will abort the
            # current transaction. This atomic() ensures that we can
            # recover, and perform subsequent queries.
            with atomic():
                _parse_mail(mail)
    self.assertEqual(db_errors, [])
def get_response_handler(self, request: HttpRequest) -> HttpResponse:
    """Handle the request while timing it, attaching a Response-Time header
    for bulk requests or a Server-Timing header (including DB execution
    time) otherwise."""
    start_time = time.time()
    ql = QueryTimingLogger()
    with connection.execute_wrapper(ql):
        response = super().get_response_handler(request)
    # Optional per-stage timings attached by downstream handlers.
    if hasattr(response, 'timings'):
        response_durations = response.timings  # type: ignore
    else:
        response_durations = None
    total_time = self._round_time(time.time() - start_time)
    if getattr(request, 'is_bulk', False):
        # Bulk requests get a single aggregate header.
        response['Response-Time'] = str(total_time)
    else:
        if response_durations:
            # Render each named stage via the duration formatter.
            response_durations = f', {", ".join(map(self.__duration_handler, response_durations.items()))}'
        else:
            response_durations = ""
        response_durations += f', db_execution_time;dur={self._round_time(ql.queries_time)}'
        response[
            'Server-Timing'] = f'total;dur={total_time}{response_durations or ""}'
    return response
def __call__(self, request):
    """Route every query made while handling *request* through QueryWrapper."""
    query_wrapper = QueryWrapper(request)
    with connection.execute_wrapper(query_wrapper):
        response = self.get_response(request)
    return response
def __call__(self, request):
    """Handle the request with all DB calls routed through the Tikibar wrapper."""
    db_wrapper = TikibarDatabaseWrapper()
    with connection.execute_wrapper(db_wrapper):
        response = super().__call__(request)
    return response
def assert_max_queries(self, num, check_duration=True):
    """Context manager body asserting at most *num* queries run in the block.

    With num == -1 the count is only printed.  Also reports when *num*
    leaves a lot of unused margin, when the block is suspiciously slow,
    when it runs very many queries, and (optionally) where queries
    originate from.
    """
    tracer = MyQueryTracer()
    try:
        with connection.execute_wrapper(tracer):
            yield
    finally:
        if check_duration:
            duration = datetime.datetime.now() - tracer.started_at
            duration_seconds = duration.seconds
        else:
            duration_seconds = 0
        count = len(tracer.trace)
        if num == -1:  # pragma: no cover
            print('Operation resulted in %s queries' % count)
        elif count > num:  # pragma: no cover
            queries = 'Captured queries:'
            prefix = '\n '
            limit = 200  # limit the number of queries we print
            for i, call in enumerate(tracer.trace, start=1):
                if i > 1:
                    queries += '\n'
                queries += prefix + str(call['now'])
                queries += '\n [%d] ' % i
                queries += self._reformat_sql(prefix, call['sql'])
                queries += prefix + '%s µs' % call['duration_us']
                queries += '\n'
                for fname, linenr, base in call['stack']:
                    queries += prefix + '%s:%s %s' % (fname, linenr, base)
                # for
                limit -= 1
                if limit <= 0:
                    break
            # for
            msg = "Too many queries: %s; maximum %d. " % (count, num)
            self.fail(msg=msg + queries)
        if count <= num:
            # check whether the maximum could be lowered
            if num > 20:
                ongebruikt = num - count
                if ongebruikt / num > 0.25:  # pragma: no cover
                    self.fail(msg="Maximum (%s) has a lot of margin. Can be set as low as %s" % (num, count))
        if duration_seconds > 1.5:  # pragma: no cover
            print("Operation took suspiciously long: %.2f seconds (%s queries took %.2f ms)" % (
                duration_seconds, len(tracer.trace), tracer.total_duration_us / 1000.0))
        if len(tracer.trace) > 500:  # pragma: no cover
            print("Operation required a lot of database interactions: %s queries" % len(tracer.trace))
        if REPORT_QUERY_ORIGINS:  # pragma: no cover
            # sort by call count
            counts = list()
            for msg, count in tracer.stack_counts.items():
                tup = (count, msg)
                counts.append(tup)
            # for
            counts.sort(reverse=True)  # highest first
            first = True
            for count, msg in counts:
                if count > 1:
                    if first:
                        first = False
                        print('-----')
                    print('%5s %s' % (count, msg[7:]))
def _base_set(self, mode, key, value, timeout=DEFAULT_TIMEOUT):
    # Cache writes run under the override_readonly wrapper — presumably to
    # lift a read-only guard on the connection; confirm against the
    # wrapper's definition.
    with connection.execute_wrapper(override_readonly):
        super()._base_set(mode, key, value, timeout)
def __call__(self, request):
    """Trace DB calls on Django 2+; on older versions just delegate."""
    if django.VERSION >= (2,):  # pragma: NO COVER
        with connection.execute_wrapper(_trace_db_call):
            response = super(OpencensusMiddleware, self).__call__(request)
        return response
    return super(OpencensusMiddleware, self).__call__(request)
def _base_delete_many(self, keys):
    # Cache deletions run under the override_readonly wrapper — presumably
    # to lift a read-only guard on the connection; confirm against the
    # wrapper's definition.
    with connection.execute_wrapper(override_readonly):
        super()._base_delete_many(keys)
def start_count(self):
    """Reset query count to 0 and return context manager for wrapping db queries."""
    self.count = 0
    # The instance itself serves as the execute_wrapper callable.
    return connection.execute_wrapper(self)
def wrapper(*args, **kwargs):
    # Trace the call as a custom service (service and method both named
    # after the wrapped function) and route DB calls through `dt`
    # (defined in the enclosing scope — presumably the Dynatrace DB
    # wrapper; confirm).
    with oneagent.get_sdk().trace_custom_service(function.__name__,
                                                 function.__name__):
        with connection.execute_wrapper(dt):
            return function(*args, **kwargs)
def execute_graphql_request(self, request: HttpRequest, data: dict):
    """Parse, cost-validate and execute a GraphQL request with tracing,
    observability reporting, and caching of pure-introspection queries."""
    with opentracing.global_tracer().start_active_span(
            "graphql_query") as scope:
        span = scope.span
        span.set_tag(opentracing.tags.COMPONENT, "graphql")
        span.set_tag(
            opentracing.tags.HTTP_URL,
            request.build_absolute_uri(request.get_full_path()),
        )
        query, variables, operation_name = self.get_graphql_params(
            request, data)
        query_cost = 0
        document, error = self.parse_query(query)
        # Report the operation to the observability layer regardless of
        # whether parsing succeeded.
        with observability.report_gql_operation() as operation:
            operation.query = document
            operation.name = operation_name
            operation.variables = variables
        if error:
            return error
        if document is not None:
            raw_query_string = document.document_string
            span.set_tag("graphql.query", raw_query_string)
            span.set_tag("graphql.query_fingerprint",
                         query_fingerprint(document))
            try:
                query_contains_schema = self.check_if_query_contains_only_schema(
                    document)
            except GraphQLError as e:
                return ExecutionResult(errors=[e], invalid=True)
            # Reject queries above the configured complexity budget.
            query_cost, cost_errors = validate_query_cost(
                schema,
                document,
                variables,
                COST_MAP,
                settings.GRAPHQL_QUERY_MAX_COMPLEXITY,
            )
            span.set_tag("graphql.query_cost", query_cost)
            if settings.GRAPHQL_QUERY_MAX_COMPLEXITY and cost_errors:
                result = ExecutionResult(errors=cost_errors, invalid=True)
                return set_query_cost_on_result(result, query_cost)
        extra_options: Dict[str, Optional[Any]] = {}
        if self.executor:
            # We only include it optionally since
            # executor is not a valid argument in all backends
            extra_options["executor"] = self.executor
        try:
            with connection.execute_wrapper(tracing_wrapper):
                response = None
                # NOTE(review): bitwise `&` works here only because both
                # operands are bools; `and` would be the idiomatic choice.
                should_use_cache_for_scheme = query_contains_schema & (
                    not settings.DEBUG)
                if should_use_cache_for_scheme:
                    key = generate_cache_key(raw_query_string)
                    response = cache.get(key)
                if not response:
                    response = document.execute(  # type: ignore
                        root=self.get_root_value(),
                        variables=variables,
                        operation_name=operation_name,
                        context=get_context_value(request),
                        middleware=self.middleware,
                        **extra_options,
                    )
                    if should_use_cache_for_scheme:
                        cache.set(key, response)
                if app := getattr(request, "app", None):
                    span.set_tag("app.name", app.name)
                return set_query_cost_on_result(response, query_cost)
        except Exception as e:
            span.set_tag(opentracing.tags.ERROR, True)
            if app := getattr(request, "app", None):
                span.set_tag("app.name", app.name)
            # In the graphql-core version that we are using,
            # the Exception is raised for too big integers value.
            # As it's a validation error we want to raise GraphQLError instead.
            if str(e).startswith(INT_ERROR_MSG) or isinstance(
                    e, ValueError):
                e = GraphQLError(str(e))
            return ExecutionResult(errors=[e], invalid=True)
def __init__(self):
    # Collector that records queries; the context manager installs it as
    # an execute_wrapper when entered (entry happens elsewhere).
    self.query_collector = QueryCollector()
    self.connection_ctx_manager = connection.execute_wrapper(self.query_collector)