def _prepare_app(broker_url=None, execute=None):
    """Build a throwaway Celery app patched into the airflow celery
    executor, yield it, and clear the kombu event loop on exit.

    :param broker_url: broker URL to use; defaults to the
        ``[celery] BROKER_URL`` airflow config entry.
    :param execute: task callable; defaults to the unwrapped
        ``celery_executor.execute_command``.
    """
    broker_url = broker_url or conf.get('celery', 'BROKER_URL')
    execute = execute or celery_executor.execute_command.__wrapped__

    test_config = dict(celery_executor.celery_configuration)
    test_config.update({'broker_url': broker_url})
    test_app = Celery(broker_url, config_source=test_config)
    test_execute = test_app.task(execute)

    patch_app = mock.patch('airflow.executors.celery_executor.app', test_app)
    patch_execute = mock.patch(
        'airflow.executors.celery_executor.execute_command', test_execute)

    # Pre-create the result-backend tables when the backend has them.
    # Otherwise SQLAlchemy (via Celery) races in subprocesses: it checks
    # which tables exist and then issues CREATE TABLE rather than
    # CREATE TABLE IF NOT EXISTS, so one subprocess can die with a
    # "Table already exists" error.
    result_session_factory = getattr(test_app.backend, 'ResultSession', None)
    if result_session_factory is not None:
        result_session_factory().close()

    with patch_app, patch_execute:
        try:
            yield test_app
        finally:
            # Clear the event loop to tear down each celery instance.
            set_event_loop(None)
def handle(self, queue, *args, **options):
    """Consume *queue* with a kombu Worker, restarting behind the celery
    circuit breaker and exiting cleanly on Ctrl-C.

    Verbosity maps onto the logging level: each verbosity step lowers
    the threshold by 10 from ERROR (40).
    """
    logging.basicConfig(
        level=40 - (options['verbosity'] * 10),
        format='%(asctime)s %(name)-12s: %(levelname)-8s %(message)s')

    # TODO: Ensure that failed processing does not requeue task into
    # work queue
    set_event_loop(Hub())

    connection_kwargs = {
        'transport_options': conf.broker_transport_options,
    }
    with app.connection(**connection_kwargs) as conn:
        logger.info("Launching worker")
        worker = Worker(conn, queues=[Queue(queue)])
        worker.connect_max_retries = 1

        while True:
            try:
                breakers.celery.call(worker.run)
            except KeyboardInterrupt:
                logger.info("KeyboardInterrupt, exiting. Bye!")
                break
            except breakers.celery.expected_errors:
                # Circuit breaker tripped; back off before retrying.
                rest_val = 5
                logger.warning(
                    "Open circuit detected. "
                    "Sleeping for %s seconds and then will try again.",
                    rest_val)
                time.sleep(rest_val)
def scoped_pyramid_app(celery_app, db_init_and_wipe):
    """Yield a Pyramid test config wired to *celery_app*; tear down the
    config and the celery event loop afterwards."""
    from .testing import integration_test_settings
    from pyramid import testing

    config = testing.setUp(settings=integration_test_settings())
    # Register the routes for reverse generation of urls.
    config.include('cnxpublishing.views')

    # Tack the pyramid config on the celery app.
    # See cnxpublishing.tasks.includeme
    config.registry.celery_app = celery_app
    config.registry.celery_app.conf['pyramid_config'] = config
    config.scan('cnxpublishing.subscribers')

    # Celery only creates the tables once per session. Handing it a new
    # SessionManager makes celery create the tables again (as a side
    # effect of the new session manager), since we start with an empty
    # database.
    from celery.backends.database.session import SessionManager
    celery_app.backend.ResultSession(SessionManager())

    # Initialize the authentication policy.
    from openstax_accounts.stub import main
    main(config)
    config.commit()

    yield config

    testing.tearDown()
    # Force celery to create a new event loop.
    # See https://github.com/celery/celery/issues/4088
    from kombu.asynchronous import set_event_loop
    set_event_loop(None)
def hub(request):
    """Fixture: install a fresh kombu ``Hub`` as the global event loop,
    yield it, and restore the previous loop on teardown.

    Fix: the original only restored when a previous loop existed, so
    when ``_prev_hub`` was ``None`` the fixture's Hub leaked into the
    global event loop for subsequent tests. We now restore
    unconditionally — ``set_event_loop(None)`` clears the loop, matching
    the teardown used elsewhere in this codebase.
    """
    from kombu.asynchronous import Hub, get_event_loop, set_event_loop
    _prev_hub = get_event_loop()
    hub = Hub()
    set_event_loop(hub)
    yield hub
    # Always restore, even when there was no previous loop, so the
    # fixture's Hub never outlives the test that created it.
    set_event_loop(_prev_hub)
def create(self, w):
    """Attach an event-loop hub to worker *w*.

    Reuses the current global loop when one exists; otherwise builds a
    hub of the connection's required class (falling back to ``_Hub``),
    driven by the worker's timer, and installs it globally.
    """
    w.hub = get_event_loop()
    if w.hub is None:
        hub_cls = getattr(w._conninfo, 'requires_hub', None) or _Hub
        w.hub = set_event_loop(hub_cls(w.timer))
    self._patch_thread_primitives(w)
    return self
def create(self, w):
    """Ensure worker *w* has an event-loop hub.

    An already-installed global loop is reused; otherwise a new hub —
    the connection's ``requires_hub`` class when set, else ``_Hub`` —
    is constructed around the worker's timer and registered globally.
    """
    existing_loop = get_event_loop()
    if existing_loop is not None:
        w.hub = existing_loop
    else:
        required_hub = getattr(w._conninfo, 'requires_hub', None)
        if not required_hub:
            required_hub = _Hub
        w.hub = set_event_loop(required_hub(w.timer))
    self._patch_thread_primitives(w)
    return self
def _prepare_app(self, broker_url=None, execute=None):
    """Yield a temporary Celery app patched into the airflow celery
    executor; clear the kombu event loop on exit.

    :param broker_url: broker URL; defaults to ``[celery] BROKER_URL``.
    :param execute: task callable; defaults to the unwrapped
        ``celery_executor.execute_command``.
    """
    broker_url = broker_url or conf.get('celery', 'BROKER_URL')
    execute = execute or celery_executor.execute_command.__wrapped__

    # Copy the executor config, overriding only the broker URL.
    test_config = dict(celery_executor.celery_configuration,
                       broker_url=broker_url)
    test_app = Celery(broker_url, config_source=test_config)
    test_execute = test_app.task(execute)

    patch_app = mock.patch(
        'airflow.executors.celery_executor.app', test_app)
    patch_execute = mock.patch(
        'airflow.executors.celery_executor.execute_command', test_execute)

    with patch_app, patch_execute:
        try:
            yield test_app
        finally:
            # Clear event loop to tear down each celery instance
            set_event_loop(None)
def _prepare_app(self, broker_url=None, execute=None):
    """Yield a scratch Celery app wired into the airflow celery executor
    via mock patches, then clear the kombu event loop.

    :param broker_url: broker URL; defaults to ``[celery] BROKER_URL``
        from the airflow configuration.
    :param execute: task callable; defaults to the unwrapped
        ``celery_executor.execute_command``.
    """
    if broker_url is None:
        broker_url = configuration.conf.get('celery', 'BROKER_URL')
    if execute is None:
        execute = celery_executor.execute_command.__wrapped__

    test_config = dict(celery_executor.celery_configuration)
    test_config['broker_url'] = broker_url
    test_app = Celery(broker_url, config_source=test_config)
    test_execute = test_app.task(execute)

    with mock.patch('airflow.executors.celery_executor.app', test_app), \
            mock.patch('airflow.executors.celery_executor.execute_command',
                       test_execute):
        try:
            yield test_app
        finally:
            # Clear event loop to tear down each celery instance
            set_event_loop(None)
def create(self, w):
    """Give worker *w* the current event loop, installing a fresh
    ``_Hub`` built on the worker's timer when none exists yet."""
    loop = get_event_loop()
    if loop is None:
        loop = set_event_loop(_Hub(w.timer))
    w.hub = loop
    self._patch_thread_primitives(w)
    return self