async def test_request_lifecycle_signals_dispatched_with_thread_sensitive(self):
    class Recorder:
        """Collect the thread each signal dispatch runs on."""

        threads = []

        def __call__(self, **kwargs):
            self.threads.append(threading.current_thread())

    recorder = Recorder()
    request_started.connect(recorder)
    request_finished.connect(recorder)
    # Run a plain request through the ASGI application.
    application = get_asgi_application()
    scope = self.async_request_factory._base_scope(path="/")
    communicator = ApplicationCommunicator(application, scope)
    await communicator.send_input({"type": "http.request"})
    start_msg = await communicator.receive_output()
    self.assertEqual(start_msg["type"], "http.response.start")
    self.assertEqual(start_msg["status"], 200)
    body_msg = await communicator.receive_output()
    self.assertEqual(body_msg["type"], "http.response.body")
    self.assertEqual(body_msg["body"], b"Hello World!")
    # Allow response.close() to run to completion.
    await communicator.wait()
    # AsyncToSync must have dispatched both signals on one and the same
    # thread.
    started_on, finished_on = recorder.threads
    self.assertEqual(started_on, finished_on)
    request_started.disconnect(recorder)
    request_finished.disconnect(recorder)
async def test_request_lifecycle_signals_dispatched_with_thread_sensitive(self):
    class Recorder:
        """Collect the thread each signal dispatch runs on."""

        threads = []

        def __call__(self, **kwargs):
            self.threads.append(threading.current_thread())

    recorder = Recorder()
    request_started.connect(recorder)
    request_finished.connect(recorder)
    # Run a plain request through the ASGI application.
    application = get_asgi_application()
    scope = self.async_request_factory._base_scope(path='/')
    communicator = ApplicationCommunicator(application, scope)
    await communicator.send_input({'type': 'http.request'})
    start_msg = await communicator.receive_output()
    self.assertEqual(start_msg['type'], 'http.response.start')
    self.assertEqual(start_msg['status'], 200)
    body_msg = await communicator.receive_output()
    self.assertEqual(body_msg['type'], 'http.response.body')
    self.assertEqual(body_msg['body'], b'Hello World!')
    # Allow response.close() to run to completion.
    await communicator.wait()
    # With no current executor at this point, SyncToAsync falls back to its
    # single_thread_executor; both signals must have run on that thread.
    target_thread = next(iter(SyncToAsync.single_thread_executor._threads))
    started_on, finished_on = recorder.threads
    self.assertEqual(started_on, target_thread)
    self.assertEqual(finished_on, target_thread)
    request_started.disconnect(recorder)
    request_finished.disconnect(recorder)
def __call__(self, environ):
    """Handle one WSGI request, emulating a real WSGI server.

    Fires request_started/request_finished manually, temporarily
    disconnecting close_old_connections around each send so the test
    database connection is not closed, and calls ``response.close()`` on
    completion.

    Fix: removed the unused function-level ``from django.conf import
    settings`` import (``settings`` was never referenced in the body).
    """
    # Set up middleware if needed. We couldn't do this earlier, because
    # settings weren't available.
    if self._request_middleware is None:
        self.load_middleware()
    request_started.disconnect(close_old_connections)
    request_started.send(sender=self.__class__)
    request_started.connect(close_old_connections)
    request = WSGIRequest(environ)
    # Sneaky little hack so that we can easily get round
    # CsrfViewMiddleware. This makes life easier, and is probably required
    # for backwards compatibility with external tests against admin views.
    request._dont_enforce_csrf_checks = not self.enforce_csrf_checks
    response = self.get_response(request)
    # We're emulating a WSGI server; we must call the close method on
    # completion.
    if response.streaming:
        response.streaming_content = closing_iterator_wrapper(
            response.streaming_content, response.close
        )
    else:
        request_finished.disconnect(close_old_connections)
        response.close()  # will fire request_finished
        request_finished.connect(close_old_connections)
    return response
def __call__(self, environ):
    """Handle one WSGI request, emulating a real WSGI server.

    Fires request_started/request_finished manually, temporarily
    disconnecting close_old_connections around each send so the test
    database connection is not closed, and calls ``response.close()`` on
    completion.

    Fix: removed the unused function-level ``from django.conf import
    settings`` import (``settings`` was never referenced in the body).
    """
    # Set up middleware if needed. We couldn't do this earlier, because
    # settings weren't available.
    if self._request_middleware is None:
        self.load_middleware()
    request_started.disconnect(close_old_connections)
    request_started.send(sender=self.__class__)
    request_started.connect(close_old_connections)
    request = WSGIRequest(environ)
    # Sneaky little hack so that we can easily get round
    # CsrfViewMiddleware. This makes life easier, and is probably required
    # for backwards compatibility with external tests against admin views.
    request._dont_enforce_csrf_checks = not self.enforce_csrf_checks
    response = self.get_response(request)
    # We're emulating a WSGI server; we must call the close method on
    # completion.
    if response.streaming:
        response.streaming_content = closing_iterator_wrapper(
            response.streaming_content, response.close
        )
    else:
        request_finished.disconnect(close_old_connections)
        response.close()  # will fire request_finished
        request_finished.connect(close_old_connections)
    return response
def register_handlers(client):
    """Wire *client* into Django's request/exception signals.

    Each connect is preceded by a disconnect on the same dispatch_uid, so
    repeated registration stays idempotent. Celery exception tracking is
    hooked as well when celery is importable.
    """
    from django.core.signals import got_request_exception, request_started, request_finished
    from elasticapm.contrib.django.handlers import exception_handler

    # Exception capture.
    got_request_exception.disconnect(dispatch_uid=ERROR_DISPATCH_UID)
    got_request_exception.connect(
        partial(exception_handler, client),
        dispatch_uid=ERROR_DISPATCH_UID,
        weak=False,
    )

    # Transaction start on request start.
    request_started.disconnect(dispatch_uid=REQUEST_START_DISPATCH_UID)
    request_started.connect(
        lambda sender, *args, **kwargs: (
            client.begin_transaction('request') if _should_start_transaction(client) else None
        ),
        dispatch_uid=REQUEST_START_DISPATCH_UID,
        weak=False,
    )

    # Transaction end on request finish.
    request_finished.disconnect(dispatch_uid=REQUEST_FINISH_DISPATCH_UID)
    request_finished.connect(
        lambda sender, **kwargs: (
            client.end_transaction() if _should_start_transaction(client) else None
        ),
        dispatch_uid=REQUEST_FINISH_DISPATCH_UID,
        weak=False,
    )

    # If we can import celery, register ourselves as exception handler.
    try:
        import celery  # noqa F401
        from elasticapm.contrib.celery import register_exception_tracking

        try:
            register_exception_tracking(client)
        except Exception as e:
            client.logger.exception('Failed installing django-celery hook: %s' % e)
    except ImportError:
        client.logger.debug("Not instrumenting Celery, couldn't import")
def __enter__(self):
    """Start capturing queries: force the debug cursor on and record the
    current position in the connection's query log."""
    # Save the previous debug-cursor flag so it can be restored on exit.
    self.force_debug_cursor = self.connection.force_debug_cursor
    self.connection.force_debug_cursor = True
    self.initial_queries = len(self.connection.queries_log)
    self.final_queries = None
    # Keep request_started from flushing queries_log mid-capture.
    request_started.disconnect(reset_queries)
    return self
def verbose_cursor():
    """Generator context manager that patches in a verbose cursor factory.

    Yields the log object produced by ``make_verbose_cursor`` and always
    restores the original ``BaseDatabaseWrapper.cursor`` and the
    ``reset_queries`` signal receiver on exit.

    Fix: the original had no try/finally, so an exception in the with-body
    leaked the patched cursor and left ``reset_queries`` disconnected.
    """
    old_method = BaseDatabaseWrapper.cursor
    l, _, new_method = make_verbose_cursor()
    # Prevent request_started from clearing captured queries mid-block.
    request_started.disconnect(reset_queries)
    BaseDatabaseWrapper.cursor = new_method
    try:
        yield l
    finally:
        # Restore global state even if the with-body raised.
        BaseDatabaseWrapper.cursor = old_method
        request_started.connect(reset_queries)
def process_request(self, _):
    """On request start, begin query capture when a test-case container is
    registered (mirrors django.test.utils.CaptureQueriesContext)."""
    if RequestQueryCountManager.get_testcase_container():
        # Took from django.test.utils.CaptureQueriesContext
        self.force_debug_cursor = self.connection.force_debug_cursor
        self.connection.force_debug_cursor = True
        self.initial_queries = len(self.connection.queries_log)
        self.final_queries = None
        # Stop request_started from resetting the query log mid-request.
        request_started.disconnect(reset_queries)
def __enter__(self):
    """Start capturing queries on this connection.

    Forces the debug cursor on, establishes the connection first so setup
    queries are excluded, and records the starting query-log length.
    """
    # Save the previous debug-cursor flag so it can be restored on exit.
    self.force_debug_cursor = self.connection.force_debug_cursor
    self.connection.force_debug_cursor = True
    # Run any initialization queries if needed so that they won't be
    # included as part of the count.
    self.connection.ensure_connection()
    self.initial_queries = len(self.connection.queries_log)
    self.final_queries = None
    # Keep request_started from flushing queries_log mid-capture.
    request_started.disconnect(reset_queries)
    return self
def __enter__(self):  # entering the 'with' block
    """Start query capture: enable the debug cursor, make sure the
    connection is open, and remember the current query-log length."""
    # Save the previous debug-cursor flag so it can be restored on exit.
    self.force_debug_cursor = self.connection.force_debug_cursor
    self.connection.force_debug_cursor = True
    self.connection.ensure_connection()
    self.initial_queries = len(self.connection.queries_log)
    request_started.disconnect(
        reset_queries
    )  # apparently prevents queries_log from being emptied
    return self
def _model_checks(app_configs, **kwargs):
    """System-check entry point: run the model initialization checks now.

    Also detaches the deferred request_started receiver registered under
    the same dispatch uid (see the matching ``_request_started`` handler),
    since the checks are being run eagerly here. Returns the collected
    check messages.
    """
    from django.apps import apps
    from django.core.signals import request_started

    request_started.disconnect(dispatch_uid=_REQUEST_STARTED_UID)
    configs = apps.get_app_configs() if app_configs is None else app_configs
    issues = []
    _initialize(configs, issues)
    return issues
def queries(count=None, using=None):
    """
    A context manager that captures the queries that were made.

    :param count: assert this number of queries were made
    :param using: alias of the database to monitor

    .. note:: The `list` of queries is not populated until after the
        context manager exits.

    Usage::

        with queries() as qs:
            User.objects.count()
        assert len(qs) == 5

        # The same could be rewritten as
        with queries(count=5):
            User.objects.count()
    """
    conn = connections[DEFAULT_DB_ALIAS if using is None else using]
    # For compatibility with Django 1.2, apply necessary patching.
    patches = []
    if not hasattr(conn, "use_debug_cursor"):
        patches.append(hacks.django12_debug_cursor(conn))
    with utils.nested(*patches):
        # A debug cursor saves all the queries to conn.queries; in case one
        # isn't already being used, restore the current state afterwards.
        had_debug_cursor = conn.use_debug_cursor
        conn.use_debug_cursor = True
        start = len(conn.queries)
        captured = []
        # Keep request_started from resetting conn.queries mid-capture.
        request_started.disconnect(reset_queries)
        try:
            yield captured
        finally:
            request_started.connect(reset_queries)
            conn.use_debug_cursor = had_debug_cursor
            captured[:] = conn.queries[start:]
        if count is not None:
            assert len(captured) == count
def do_benchmark():
    """Generator context manager yielding per-request wall-clock durations.

    Hooks request_started/request_finished, appends one elapsed time per
    completed request, and always disconnects both handlers on exit.
    """
    start_times = []
    durations = []

    def on_started(*args, **kwargs):
        start_times.append(time())

    def on_finished(*args, **kwargs):
        durations.append(time() - start_times[-1])

    request_started.connect(on_started)
    request_finished.connect(on_finished)
    try:
        yield durations
    finally:
        request_started.disconnect(on_started)
        request_finished.disconnect(on_finished)
def _request_started(sender, **kwargs):
    """One-shot request_started receiver: run the model initialization
    checks on the first request and log each resulting message as
    critical."""
    from logging import getLogger

    from django.apps import apps
    from django.core.signals import request_started

    # Detach ourselves first so the checks only ever run once.
    request_started.disconnect(dispatch_uid=_REQUEST_STARTED_UID)
    issues = []
    _initialize(apps.get_app_configs(), issues)
    if issues:
        log = getLogger(__name__)
        for issue in issues:
            log.critical(issue)
async def __call__(self, scope):
    """ASGI entry point for the test handler: emulate a server round trip.

    Sends request_started/request_finished manually (with
    close_old_connections temporarily disconnected so the test database
    connection survives), builds an ASGIRequest from ``scope`` plus an
    optional ``_body_file``, runs the async middleware chain, and closes
    the response the way a real server would.
    """
    # Set up middleware if needed. We couldn't do this earlier, because
    # settings weren't available.
    if self._middleware_chain is None:
        self.load_middleware(is_async=True)
    # Extract body file from the scope, if provided.
    if "_body_file" in scope:
        body_file = scope.pop("_body_file")
    else:
        body_file = FakePayload("")
    request_started.disconnect(close_old_connections)
    # Signal receivers are sync; dispatch them off the event loop.
    await sync_to_async(request_started.send, thread_sensitive=False)(sender=self.__class__, scope=scope)
    request_started.connect(close_old_connections)
    request = ASGIRequest(scope, body_file)
    # Sneaky little hack so that we can easily get round
    # CsrfViewMiddleware. This makes life easier, and is probably required
    # for backwards compatibility with external tests against admin views.
    request._dont_enforce_csrf_checks = not self.enforce_csrf_checks
    # Request goes through middleware.
    response = await self.get_response_async(request)
    # Simulate behaviors of most Web servers.
    conditional_content_removal(request, response)
    # Attach the originating ASGI request to the response so that it could
    # be later retrieved.
    response.asgi_request = request
    # Emulate a server by calling the close method on completion.
    if response.streaming:
        response.streaming_content = await sync_to_async(
            closing_iterator_wrapper, thread_sensitive=False)(
            response.streaming_content,
            response.close,
        )
    else:
        request_finished.disconnect(close_old_connections)
        # Will fire request_finished.
        await sync_to_async(response.close, thread_sensitive=False)()
        request_finished.connect(close_old_connections)
    return response
def __call__(self, environ):
    """WSGI entry point for the test handler: emulate a server round trip.

    Fires request_started/request_finished manually (with
    close_old_connections detached so the test database connection is
    preserved), runs the middleware chain, and closes the response like a
    real WSGI server would.
    """
    # Middleware is loaded lazily because settings were not available
    # at construction time.
    if self._middleware_chain is None:
        self.load_middleware()

    request_started.disconnect(close_old_connections)
    request_started.send(sender=self.__class__, environ=environ)
    request_started.connect(close_old_connections)

    request = WSGIRequest(environ)
    # Sneaky little hack so that we can easily get round
    # CsrfViewMiddleware. This makes life easier, and is probably required
    # for backwards compatibility with external tests against admin views.
    request._dont_enforce_csrf_checks = not self.enforce_csrf_checks

    # Request goes through middleware.
    response = self.get_response(request)
    # Simulate behaviors of most Web servers.
    conditional_content_removal(request, response)
    # Attach the originating request so it can be retrieved later.
    response.wsgi_request = request

    # We're emulating a WSGI server; the close method must run on
    # completion.
    if response.streaming:
        response.streaming_content = closing_iterator_wrapper(
            response.streaming_content, response.close
        )
    else:
        request_finished.disconnect(close_old_connections)
        response.close()  # will fire request_finished
        request_finished.connect(close_old_connections)
    return response
def templates_do_not_do_queries(app_configs, **kwargs):
    """Check that project-added context processors execute no DB queries.

    Runs each non-default TEMPLATE_CONTEXT_PROCESSORS entry against a dummy
    request while counting connection.queries, and returns a Warning per
    processor that issued queries.
    """
    # Only inspect processors the project added beyond Django's defaults.
    extra_processors = set(settings.TEMPLATE_CONTEXT_PROCESSORS) - set(
        global_settings.TEMPLATE_CONTEXT_PROCESSORS)
    request = RequestFactory().get('/')
    issues = []
    # emulates .django.test.utils.CaptureQueriesContext.__enter__
    connection.use_debug_cursor = True
    request_started.disconnect(reset_queries)
    msg_template = Template('executed {{ count }} quer{{ count|pluralize:"y,ies" }}')
    for processor in extra_processors:
        count_before = len(connection.queries)
        # Fetch and execute the context processor, emulating
        # django.template.context.RequestContext.__init__
        data = import_string(processor)(request)
        # call any callables, and hope that __repr__ is defined and
        # causes an evaluation.
        for key, value in data.items():
            if callable(value):
                value = value()
            repr(value)
        executed = len(connection.queries) - count_before
        if executed > 0:
            msg = msg_template.render(Context({'count': executed}))
            issues.append(
                checks.Warning(msg, obj=processor, id="morechecks.W001"))
    # emulates .django.test.utils.CaptureQueriesContext.__exit__
    connection.use_debug_cursor = False
    request_started.connect(reset_queries)
    return issues
def setUp(self):
    """Stop dispatching close_old_connections on request_started for the
    duration of this test (a tearDown reconnecting it is not visible here
    — confirm)."""
    request_started.disconnect(close_old_connections)
def tearDown(self):
    """Detach the signal receivers this test registered (presumably in
    setUp — confirm) so they don't leak into later tests."""
    request_started.disconnect(self.register_started)
    request_finished.disconnect(self.register_finished)
import logging
def create_badges(sender, **kwargs):
    """One-shot request_started receiver: detach itself, then build all
    country badges."""
    # Disconnect first so badge creation runs at most once per
    # registration under this dispatch uid.
    request_started.disconnect(create_badges, dispatch_uid=uid)
    CountryBadge.create_all()
def __enter__(self):
    """Begin query capture: enable the (legacy) use_debug_cursor flag and
    record the current length of connection.queries."""
    # Save the previous flag so it can be restored on exit.
    self.old_debug_cursor = self.connection.use_debug_cursor
    self.connection.use_debug_cursor = True
    self.starting_queries = len(self.connection.queries)
    # Keep request_started from clearing connection.queries mid-capture.
    request_started.disconnect(reset_queries)
    return self
async def __call__(self, scope, receive, send):
    """ASGI entry point: run the parent handler with close_old_connections
    temporarily detached from request_started, reconnecting it afterwards
    even if the request errors."""
    request_started.disconnect(close_old_connections)
    try:
        await super().__call__(scope, receive, send)
    finally:
        request_started.connect(close_old_connections)
def uninstall(self):
    """Detach this object's signal receivers and, when a celery handler
    was set up, uninstall it too."""
    request_started.disconnect(self.before_request)
    got_request_exception.disconnect(self.exception_handler)
    # celery_handler may be None/absent when celery integration wasn't
    # installed.
    if self.celery_handler:
        self.celery_handler.uninstall()
from __future__ import unicode_literals
def __enter__(self):
    """Begin query capture by turning settings.DEBUG on (query logging is
    tied to DEBUG here — confirm against the Django version in use) and
    recording the current length of connection.queries."""
    # Save the previous DEBUG value so it can be restored on exit.
    self.old_debug = settings.DEBUG
    settings.DEBUG = True
    self.starting_queries = len(self.connection.queries)
    # Keep request_started from emptying connection.queries mid-capture.
    request_started.disconnect(reset_queries)
    return self
def setUp(self):
    """Build the application under test with close_old_connections
    detached from request_started."""
    request_started.disconnect(close_old_connections)
    # NOTE(review): "lgi" is unusual — confirm get_lgi_application is the
    # intended factory (vs. get_wsgi_application/get_asgi_application).
    self.application = get_lgi_application()
def __enter__(self):
    """Begin counting queries: force the debug cursor on and record the
    current query count; the previous cursor flag is saved for exit.

    Fix: return self so ``with ... as ctx`` binds the context object,
    consistent with the other __enter__ implementations of this pattern
    (the original implicitly returned None).
    """
    self.force_debug_cursor = self.connection.force_debug_cursor
    self.connection.force_debug_cursor = True
    self.starting_count = len(self.connection.queries)
    # Keep request_started from resetting the query log while counting.
    request_started.disconnect(reset_queries)
    return self
def handler(**kwargs):
    """Signal receiver that re-raises the deferred exception, then detaches
    itself."""
    self.raise_deferred_exception()
    # NOTE(review): if raise_deferred_exception actually raises, the
    # disconnect below never runs — confirm that is intended.
    request_started.disconnect(handler)