def bootstrap_worker(sender, **kwargs):
    """Bootstrap the web application for this worker and wire up Sentry
    reporting on task failure."""
    bootstrapped = sender.app.webapp_bootstrap()
    sender.app.request = bootstrapped
    # Report task failures and ERROR-level log records to Sentry.
    register_signal(bootstrapped.sentry)
    register_logger_signal(bootstrapped.sentry, loglevel=logging.ERROR)
def init_celery():
    """
    Initialize Celery, and make our app instance available as `celery_app`
    for use by the `celery` command.

    Also hooks the Sentry client into Celery's error and logging signals.
    """
    from django.conf import settings
    from raven import Client
    from raven.contrib.celery import register_signal, register_logger_signal

    from amo import celery

    # I think `manage.py celery` relies on this global? We typically don't run
    # celery like that anymore though.
    global celery_app
    celery_app = celery.app

    # Hook up Sentry in celery.
    client = Client(settings.SENTRY_DSN)

    # Register a custom filter to filter out duplicate logs.
    # BUG FIX: this was previously called twice, which attached two Sentry
    # log handlers and sent every event twice. After upgrading raven we can
    # specify loglevel=logging.INFO to override the default (ERROR).
    register_logger_signal(client)

    # Hook into the Celery error handler.
    register_signal(client)
def make_celery(app, celery):
    """From http://flask.pocoo.org/docs/0.10/patterns/celery/"""
    # Register our custom serializer type before updating the configuration.
    from kombu.serialization import register
    from doorman.celery_serializer import djson_dumps, djson_loads

    register(
        'djson', djson_dumps, djson_loads,
        content_type='application/x-djson', content_encoding='utf-8'
    )

    # Actually update the config.
    celery.config_from_object(app.config)

    # Register the Sentry client when a DSN is configured.
    if app.config.get('SENTRY_DSN'):
        client = Client(app.config['SENTRY_DSN'])
        register_logger_signal(client)  # filter out duplicate logs
        register_signal(client)         # hook into the Celery error handler

    base_task = celery.Task

    class ContextTask(base_task):
        """Run every task inside a Flask application context."""
        abstract = True

        def __call__(self, *args, **kwargs):
            with app.app_context():
                return base_task.__call__(self, *args, **kwargs)

    celery.Task = ContextTask
    return celery
def register_handlers():
    """Connect Sentry's exception handler to Django's request-exception
    signal and, when django-celery is installed, to Celery's signals."""
    from django.core.signals import got_request_exception

    # HACK: support Sentry's internal communication
    if 'sentry' in django_settings.INSTALLED_APPS:
        from django.db import transaction

        # FIX: Django >= 1.6 removed `commit_on_success` in favour of
        # `atomic`; fall back so this works on both (consistent with the
        # other register_handlers implementations in this file).
        if hasattr(transaction, 'atomic'):
            commit_on_success = transaction.atomic
        else:
            commit_on_success = transaction.commit_on_success

        @commit_on_success
        def wrap_sentry(request, **kwargs):
            # Roll back any half-finished transaction before reporting so
            # Sentry's own writes are not mixed into it.
            if transaction.is_dirty():
                transaction.rollback()
            return sentry_exception_handler(request, **kwargs)

        exception_handler = wrap_sentry
    else:
        exception_handler = sentry_exception_handler

    # Connect to Django's internal signal handler
    got_request_exception.connect(exception_handler)

    # If Celery is installed, register a signal handler
    if 'djcelery' in django_settings.INSTALLED_APPS:
        from raven.contrib.celery import register_signal, register_logger_signal

        try:
            register_signal(client)
        except Exception:
            logger.exception('Failed installing error handling for Celery')

        try:
            register_logger_signal(client)
        except Exception:
            logger.exception('Failed installing logging handler for Celery')
def create_celery_app(app=None, config="worker"):
    """Adapted from http://flask.pocoo.org/docs/0.10/patterns/celery/
    (added the wsgi_environ stuff).
    """
    flask_app = app or create_app(config=config)
    celery.main = flask_app.import_name
    celery.conf["BROKER_URL"] = flask_app.config["CELERY_BROKER_URL"]
    celery.conf.update(flask_app.config)
    base_task = celery.Task

    class ContextTask(base_task):
        abstract = True

        def __call__(self, *args, **kwargs):
            # A task enqueued from a web request may carry the WSGI environ
            # so it can be replayed inside a request context.
            environ = kwargs.pop("wsgi_environ", None)
            with flask_app.app_context():
                if environ:
                    with flask_app.request_context(environ):
                        return base_task.__call__(self, *args, **kwargs)
                return base_task.__call__(self, *args, **kwargs)

    celery.Task = ContextTask
    register_logger_signal(sentry.client)
    register_signal(sentry.client)
    return celery
def configure_error_handling(sender, **kargs):
    """Rollbar and Sentry integration.

    Based on https://www.mattlayman.com/blog/2017/django-celery-rollbar/
    """
    # Only wire up error reporting inside an actual worker process.
    if not os.environ.get('CELERY_WORKER_RUNNING'):
        return

    from django.conf import settings

    if HAS_ROLLBAR and hasattr(settings, 'ROLLBAR'):
        rollbar.init(**settings.ROLLBAR)

        def celery_base_data_hook(request, data):
            data['framework'] = 'celery'

        rollbar.BASE_DATA_HOOK = celery_base_data_hook

        @task_failure.connect
        def handle_task_failure(**kw):
            rollbar.report_exc_info(extra_data=kw)

    if HAS_RAVEN and hasattr(settings, 'RAVEN_CONFIG'):
        raven_cfg = settings.RAVEN_CONFIG
        client = Client(
            raven_cfg['dsn'],
            release=raven_cfg.get('release'),
            environment=raven_cfg.get('environment'),
        )
        register_signal(client, ignore_expected=True)
        register_logger_signal(client, loglevel=logging.ERROR)
def on_configure(self):
    """Hook Sentry into Celery when the raven Django app is installed."""
    if 'raven.contrib.django.raven_compat' not in settings.INSTALLED_APPS:
        return
    sentry_client = raven.Client(**settings.RAVEN_CONFIG)
    register_logger_signal(sentry_client)  # filter out duplicate logs
    register_signal(sentry_client)         # hook into the Celery error handler
def create_app(config_name):
    """Application factory: build and configure the Flask app."""
    app = Flask(__name__)
    app.config.from_object(config[config_name])
    config[config_name].init_app(app)
    celery.conf.update(app.config)

    db.init_app(app)
    login_manager.init_app(app)
    mail.init_app(app)

    if not app.config['DEBUG']:
        dsn = app.config['SENTRY_DSN']
        # https://docs.getsentry.com/hosted/clients/python/integrations/flask/
        sentry.init_app(app, dsn=dsn)
        # https://docs.getsentry.com/hosted/clients/python/integrations/celery/
        client = Client(dsn)
        register_logger_signal(client)
        register_signal(client)

    # Attach the application blueprints.
    from .admin import admin as admin_blueprint
    app.register_blueprint(admin_blueprint, url_prefix='/admin')

    from .api_v1 import api_module as api_blueprint
    app.register_blueprint(api_blueprint, url_prefix='/api/v1')

    from .auth import auth as auth_blueprint
    app.register_blueprint(auth_blueprint, url_prefix='/auth')

    from .public import public as public_blueprint
    app.register_blueprint(public_blueprint)

    return app
def on_configure(self):
    """Build a raven client from settings and hook it into Celery."""
    sentry = RavenClient(SETTINGS.get('sentry_dsn'))
    register_logger_signal(sentry)  # filter out duplicate logs
    register_signal(sentry)         # hook into the Celery error handler
def on_configure(self):
    """Hook the configured raven client into Celery's signals."""
    sentry = raven.Client(settings.RAVEN_URL)
    register_logger_signal(sentry)  # filter out duplicate logs
    register_signal(sentry)         # hook into the Celery error handler
def on_configure(self):
    """Create a raven client for SENTRY_LINK and hook it into Celery."""
    sentry = raven.Client(SENTRY_LINK)
    register_logger_signal(sentry)  # filter out duplicate logs
    register_signal(sentry)         # hook into the Celery error handler
def on_configure(self):
    """Hook Sentry into Celery when enabled in settings."""
    if not settings.SENTRY_ENABLED:
        return
    import raven
    from raven.contrib.celery import register_signal, register_logger_signal

    sentry = raven.Client(settings.RAVEN_CONFIG["dsn"])
    register_logger_signal(sentry)  # filter out duplicate logs
    register_signal(sentry)         # hook into the Celery error handler
def on_configure(self):
    """Hook Sentry into Celery when RAVEN_CONFIG provides a DSN."""
    if not (hasattr(settings, 'RAVEN_CONFIG') and settings.RAVEN_CONFIG['dsn']):
        return
    import raven
    from raven.contrib.celery import (register_signal,
                                      register_logger_signal)

    sentry = raven.Client(settings.RAVEN_CONFIG['dsn'])
    register_logger_signal(sentry)  # filter out duplicate logs
    register_signal(sentry)         # hook into the Celery error handler
def init_app(self, app):
    """Attach the app, load its config and, when configured, a Sentry client."""
    self.app = app
    self.config_from_object(app.config)
    dsn = app.config.get('SENTRY_DSN')
    if dsn is not None:
        self.sentry = raven.Client(dsn)
        register_logger_signal(self.sentry)  # filter out duplicate logs
        register_signal(self.sentry)         # hook into the Celery error handler
def __init__(self, app):
    """Set up a Sentry client for *app* when a DSN is configured."""
    if not app.config.get('SENTRY_DSN'):
        self.sentry = None
        return
    app.config.setdefault('SENTRY_NAME', app.config.get('SERVER_NAME'))
    self.sentry = Sentry(app, register_signal=False, wrap_wsgi=False,
                         logging=True, level=logging.WARNING)
    register_logger_signal(self.sentry.client)  # filter out duplicate logs
    register_signal(self.sentry.client)         # hook into the Celery error handler
def on_configure(self):
    """Hook Sentry into Celery when a worker DSN is configured."""
    # BUG FIX: getattr() without a default raises AttributeError when the
    # setting is absent (e.g. on a dev server), defeating the guard; a None
    # default makes it skip Sentry setup instead of crashing.
    if not getattr(settings, 'CELERY_RAVEN_DSN', None):
        # don't send raven reports from the dev server
        return
    client = raven.Client(settings.CELERY_RAVEN_DSN)
    # register a custom filter to filter out duplicate logs
    register_logger_signal(client)
    # hook into the Celery error handler
    register_signal(client)
def on_configure(self):  # pylint: disable=method-hidden
    """Register Celery error/log handlers when a Sentry client exists."""
    if client:
        register_logger_signal(client)  # filter out duplicate logs
        register_signal(client)         # hook into the Celery error handler
def on_configure(self):
    """Wire the Django raven client into Celery's signals."""
    from raven.contrib.celery import register_logger_signal, register_signal
    from raven.contrib.django.models import client

    register_logger_signal(client)  # filter out duplicate logs
    register_signal(client)         # hook into the Celery error handler
def on_configure(self):
    """Create a Sentry client from the environment and hook it into Celery."""
    dsn = os.getenv('SENTRY_DSN')
    env = os.getenv('SENTRY_ENVIRONMENT')
    sentry = raven.Client(dsn, environment=env)
    # Always ensure you import register_logger_signal, register_signal and
    # not their parent modules.
    register_logger_signal(sentry)  # filter out duplicate logs
    register_signal(sentry)         # hook into the Celery error handler
def on_configure(self):
    """When enabled, attach a release-tagged Sentry client to Celery."""
    if not settings.PDB_ENABLE_SENTRY:  # ignore for coverage
        return
    sentry_client = raven.Client(settings.PDB_SENTRY_DSN)
    # NOTE(review): os.path.dirname(os.pardir) evaluates to '', so the git
    # SHA is looked up relative to the current working directory — confirm
    # this is the intended repository root.
    sentry_client.release = raven.fetch_git_sha(os.path.dirname(os.pardir))
    register_logger_signal(sentry_client)  # filter out duplicate logs
    register_signal(sentry_client)         # hook into the Celery error handler
def ready(self):
    """Configure the Celery app once Django is ready."""
    # Using a string here means the worker will not have to pickle the
    # object when using Windows.
    app.config_from_object('django.conf:settings')
    app.autodiscover_tasks(lambda: settings.INSTALLED_APPS, force=True)

    if hasattr(settings, 'RAVEN_CONFIG'):
        # Celery signal registration
        sentry_client = Client(dsn=settings.RAVEN_CONFIG['dsn'])
        register_signal(sentry_client)
def on_configure(self):
    """Create a default-configured raven client and hook it into Celery."""
    import raven
    from raven.contrib.celery import register_signal, register_logger_signal

    sentry = raven.Client()
    register_logger_signal(sentry)  # filter out duplicate logs
    register_signal(sentry)         # hook into the Celery error handler
def on_configure(self):
    """Attach Sentry reporting when a DSN was provided."""
    if not self.sentry_dsn:
        return
    sentry_client = Client(self.sentry_dsn)
    register_logger_signal(sentry_client)  # filter out duplicate logs
    register_signal(sentry_client)         # hook into the Celery error handler
    add_sentry_handler_to_celery_task_logger(
        sentry_client, self.sentry_handler_log_level)
def on_configure(self):
    """Hook Sentry into Celery; do nothing when raven isn't configured."""
    try:
        dsn = settings.RAVEN_CONFIG['dsn']
    except (AttributeError, KeyError):
        # RAVEN_CONFIG missing, or present without a 'dsn' key: run without
        # Sentry. FIX: the original only caught AttributeError, so a
        # RAVEN_CONFIG dict lacking 'dsn' crashed with KeyError; the try
        # body is also narrowed so registration errors are no longer
        # silently swallowed.
        return
    client = raven.Client(dsn)
    # register a custom filter to filter out duplicate logs
    register_logger_signal(client)
    # hook into the Celery error handler
    register_signal(client)
def on_configure(self):
    """Attach Sentry when SENTRY_DSN is set; otherwise defer to the base class."""
    dsn = getattr(settings, 'SENTRY_DSN', None)
    if dsn is None:
        return super(MyCelery, self).on_configure()
    sentry = raven.Client(dsn)
    register_logger_signal(sentry)  # filter out duplicate logs
    register_signal(sentry)         # hook into the Celery error handler
def register_handlers():
    """Connect Sentry's handler to Django and, optionally, to Celery."""
    from django.core.signals import got_request_exception

    # Connect to Django's internal signal handler.
    got_request_exception.connect(sentry_exception_handler)

    # If django-celery is installed, report task failures too.
    if 'djcelery' in django_settings.INSTALLED_APPS:
        from raven.contrib.celery import register_signal
        register_signal(client)
def bootstrap_pyramid(signal, sender):
    """Bootstrap the Pyramid app for this worker and optionally attach Sentry."""
    from pyramid.paster import bootstrap

    ini_path = getattr(celeryconfig, 'DD_PYRAMID_INI')
    sender.app.settings = bootstrap(ini_path)['registry'].settings

    # Set up sentry logging when a DSN is configured.
    dsn = getattr(celeryconfig, 'DD_SENTRY_DSN', None)
    if dsn is None:
        return
    from raven import Client
    from raven.contrib.celery import register_signal
    register_signal(Client(dsn=dsn))
def on_configure(self):
    """Create a raven client with detailed stack traces and hook it into Celery."""
    sentry = raven.Client(sentry_url, auto_log_stacks=True)
    register_logger_signal(sentry)  # filter out duplicate logs
    register_signal(sentry)         # hook into the Celery error handler
def register_handlers():
    """Connect the raven client to Django's request signals and, when
    django-celery is installed, to Celery's failure/logging signals."""
    from django.core.signals import got_request_exception, request_started

    def before_request(*args, **kwargs):
        # Start a fresh raven context for every incoming request.
        client.context.activate()

    request_started.connect(before_request, weak=False)

    # HACK: support Sentry's internal communication
    if 'sentry' in settings.INSTALLED_APPS:
        from django.db import transaction

        # Django 1.6
        if hasattr(transaction, 'atomic'):
            commit_on_success = transaction.atomic
        else:
            commit_on_success = transaction.commit_on_success

        @commit_on_success
        def wrap_sentry(request, **kwargs):
            # Roll back any half-finished transaction before reporting, so
            # Sentry's own writes are not mixed into it.
            if transaction.is_dirty():
                transaction.rollback()
            return sentry_exception_handler(request, **kwargs)

        exception_handler = wrap_sentry
    else:
        exception_handler = sentry_exception_handler

    # Connect to Django's internal signal handler
    got_request_exception.connect(exception_handler, weak=False)

    # If Celery is installed, register a signal handler
    if 'djcelery' in settings.INSTALLED_APPS:
        try:
            # Celery < 2.5? is not supported
            from raven.contrib.celery import (
                register_signal, register_logger_signal)
        except ImportError:
            logger.exception('Failed to install Celery error handler')
        else:
            try:
                register_signal(client)
            except Exception:
                logger.exception('Failed to install Celery error handler')

            try:
                # Log level resolution: RAVEN_CONFIG['celery_loglevel'],
                # then settings.SENTRY_CELERY_LOGLEVEL, then logging.ERROR.
                ga = lambda x, d=None: getattr(settings, 'SENTRY_%s' % x, d)
                options = getattr(settings, 'RAVEN_CONFIG', {})
                loglevel = options.get('celery_loglevel',
                                       ga('CELERY_LOGLEVEL', logging.ERROR))
                register_logger_signal(client, loglevel=loglevel)
            except Exception:
                logger.exception('Failed to install Celery error handler')
def ready(self):
    """Configure Celery once the Django app registry is ready."""
    # Using a string here means the worker will not have to
    # pickle the object when using Windows.
    app.config_from_object("django.conf:settings")
    app.autodiscover_tasks(lambda: settings.INSTALLED_APPS, force=True)

    if hasattr(settings, "RAVEN_CONFIG"):
        # Celery signal registration
        from raven import Client
        from raven.contrib.celery import register_signal

        sentry_client = Client(dsn=settings.RAVEN_CONFIG["dsn"])
        register_signal(sentry_client)
def flask_app(self):
    """Return the Flask app, creating it via the factory on first use."""
    if has_app_context():
        return unwrap(flask_current_app)

    self.flask_app_factory = symbol_by_name(self.flask_app_factory)
    app = self.flask_app_factory()

    if "sentry" in app.extensions:
        from raven.contrib.celery import register_logger_signal, register_signal

        sentry_client = app.extensions["sentry"].client
        sentry_client.tags["process_type"] = "celery task"
        register_signal(sentry_client)
        register_logger_signal(sentry_client)

    register_after_fork(app, self._setup_after_fork)
    return app
def init_sentry(app):
    """Attach Sentry to the app with manual logging configuration."""
    sentry = IndicoSentry(dsn=config.SENTRY_DSN, wrap_wsgi=False,
                          register_signal=True, logging=False)
    sentry.init_app(app)
    # Set up logging manually and exclude uncaught indico exceptions: those
    # are logged manually in the flask error handler logic so we get the
    # X-Sentry-ID header, which is not populated in the logging handlers.
    level = getattr(logging, config.SENTRY_LOGGING_LEVEL)
    handler = SentryHandler(sentry.client, level=level)
    handler.addFilter(BlacklistFilter({'indico.flask'}))
    setup_logging(handler)
    # Connect to the celery logger.
    register_logger_signal(sentry.client)
    register_signal(sentry.client)
def init_app(self, app, sentry):
    """Bind this Celery wrapper to *app*: copy state from a freshly built
    Celery instance, wire worker/task lifecycle signals and, when given,
    the Sentry client."""
    self.app = app
    new_celery = celery.Celery(
        app.import_name,
        broker=app.config["CELERY_BROKER_URL"],
        backend=app.config["CELERY_RESULT_BACKEND"],
    )
    # XXX(dcramer): why the hell am I wasting time trying to make Celery work?
    # Adopt the new instance's state in-place so existing references to
    # self.celery keep working.
    self.celery.__dict__.update(vars(new_celery))
    self.celery.conf.update(app.config)

    worker_process_init.connect(self._worker_process_init)
    task_postrun.connect(self._task_postrun)
    task_prerun.connect(self._task_prerun)

    if sentry:
        register_signal(sentry.client)
        register_logger_signal(sentry.client)
def init_task_config(celery_app):
    """One-time Celery task configuration for assembl: read the ini file,
    optionally wire raven/Sentry, configure the SQLAlchemy engine and the
    model-watcher thread, then configure the global celery apps."""
    global _inited
    if _inited:
        return
    # Prefer local.ini at the repo root, falling back to development.ini.
    rootdir = dirname(dirname(dirname(realpath(__file__))))
    settings_file = join(rootdir, 'local.ini')
    if not exists(settings_file):
        settings_file = join(rootdir, 'development.ini')
    settings = get_appsettings(settings_file, 'assembl')
    config = ConfigParser.SafeConfigParser()
    config.read(settings_file)
    try:
        # If raven is part of the WSGI pipeline, hook its client into Celery.
        pipeline = config.get('pipeline:main', 'pipeline').split()
        if 'raven' in pipeline:
            global raven_client
            raven_dsn = config.get('filter:raven', 'dsn')
            from raven import Client
            from raven.contrib.celery import (register_signal,
                                              register_logger_signal)
            raven_client = Client(raven_dsn)
            register_logger_signal(raven_client)
            register_signal(raven_client)
    except ConfigParser.Error:
        # No pipeline/raven sections configured: run without Sentry.
        pass
    registry = getGlobalSiteManager()
    registry.settings = settings
    set_config(settings)
    configure_engine(settings, False)
    configure(registry, celery_app.main)
    config_celery_app(celery_app, settings)
    from threaded_model_watcher import ThreadDispatcher
    threaded_watcher_class_name = settings.get(
        '%s.threadedmodelwatcher' % (celery_app.main, ),
        "assembl.lib.model_watcher.ModelEventWatcherPrinter")
    ThreadDispatcher.mw_class = resolver.resolve(threaded_watcher_class_name)
    # Global celery apps
    from notify import notify_celery_app
    if notify_celery_app.main != celery_app.main:
        config_celery_app(notify_celery_app, settings)
    from notification_dispatch import notif_dispatch_celery_app
    if notif_dispatch_celery_app.main != celery_app.main:
        config_celery_app(notif_dispatch_celery_app, settings)
    _inited = True
def install_handler(self, app):
    """Install log handler."""
    from raven.contrib.celery import register_logger_signal, \
        register_signal
    from raven.contrib.flask import Sentry
    from raven.handlers.logging import SentryHandler

    # Installs sentry in app.extensions['sentry']
    level = getattr(logging, app.config['LOGGING_SENTRY_LEVEL'])

    # Get the Sentry class.
    cls = app.config['LOGGING_SENTRY_CLASS']
    if cls:
        if isinstance(cls, six.string_types):
            cls = import_string(cls)
    else:
        cls = Sentry
    sentry = cls(app, logging=True, level=level)
    app.logger.addHandler(SentryHandler(client=sentry.client, level=level))

    # Capture warnings from warnings module
    if app.config['LOGGING_SENTRY_PYWARNINGS']:
        self.capture_pywarnings(SentryHandler(sentry.client))

    # Setup Celery logging to Sentry
    if app.config['LOGGING_SENTRY_CELERY']:
        try:
            register_logger_signal(sentry.client, loglevel=level)
        except TypeError:
            # Compatibility mode for Raven<=5.1.0 (no loglevel kwarg).
            register_logger_signal(sentry.client)
        register_signal(sentry.client)

    # Werkzeug only adds a stream handler if there's no other handlers
    # defined, so when Sentry adds a log handler no output is
    # received from Werkzeug unless we install a console handler
    # here on the werkzeug logger.
    if app.debug:
        logger = logging.getLogger('werkzeug')
        logger.setLevel(logging.INFO)
        logger.addHandler(logging.StreamHandler())
def flask_app(self) -> Flask:
    """Return the Flask app, creating it via the factory on first use."""
    if has_app_context():
        return unwrap(flask_current_app)

    self.flask_app_factory = symbol_by_name(self.flask_app_factory)
    app = self.flask_app_factory()

    if "sentry" in app.extensions:
        # pyre-fixme[21]: Could not find `raven`.
        from raven.contrib.celery import register_logger_signal, register_signal

        sentry_client = app.extensions["sentry"].client
        sentry_client.tags["process_type"] = "celery task"
        register_signal(sentry_client)
        register_logger_signal(sentry_client)

    # pyre-fixme[16]: Module `multiprocessing` has no attribute `util`.
    register_after_fork(app, self._setup_after_fork)
    return app
def create_celery(app=None):
    """Build the notifications Celery app bound to the Flask application."""
    application = app or create_app()
    broker = application.config.get("CELERY_BROKER_URL_NOTIFICATIONS")
    celery = Celery(application.import_name, broker=broker)
    celery.conf.update(application.config)
    base_task = celery.Task

    class ContextTask(base_task):
        abstract = True

        def __call__(self, *args, **kwargs):
            with application.app_context():
                return base_task.__call__(self, *args, **kwargs)

    celery.Task = ContextTask

    # logging: report failures and ERROR-level records to Sentry
    sentry_client = Client(dsn=application.config.get('SENTRY_DSN'))
    register_logger_signal(sentry_client, loglevel='ERROR')
    register_signal(sentry_client)
    return celery
def get_sentry_client(app_conf):
    """Create a raven client for *app_conf* and hook it into Celery.

    BUG FIX: the original registered each signal twice (once with defaults,
    once with options), attaching duplicate log handlers and connecting the
    failure receiver twice, so every error was reported twice. Each signal
    is now registered exactly once with the intended options.
    """
    client = Client(app_conf.SENTRY_DSN)
    # Register a custom filter to filter out duplicate logs; `loglevel` is
    # the level used for the handler created (defaults to logging.ERROR).
    register_logger_signal(client, loglevel=logging.INFO)
    # Hook into the Celery error handler; `ignore_expected` causes exception
    # classes specified in Task.throws to be ignored.
    register_signal(client, ignore_expected=True)
    return client
def register_handlers(): from django.core.signals import got_request_exception # HACK: support Sentry's internal communication if 'sentry' in django_settings.INSTALLED_APPS: from django.db import transaction # Django 1.6 if hasattr(transaction, 'atomic'): commit_on_success = transaction.atomic else: commit_on_success = transaction.commit_on_success @commit_on_success def wrap_sentry(request, **kwargs): if transaction.is_dirty(): transaction.rollback() return sentry_exception_handler(request, **kwargs) exception_handler = wrap_sentry else: exception_handler = sentry_exception_handler # Connect to Django's internal signal handler got_request_exception.connect(exception_handler, weak=False) # If Celery is installed, register a signal handler if 'djcelery' in django_settings.INSTALLED_APPS: try: # Celery < 2.5? is not supported from raven.contrib.celery import (register_signal, register_logger_signal) except ImportError: logger.exception('Failed to install Celery error handler') else: try: register_signal(client) except Exception: logger.exception('Failed to install Celery error handler') try: register_logger_signal(client) except Exception: logger.exception('Failed to install Celery error handler')
def init_app(app):
    """Set up Sentry for the app when ``SENTRY_DSN`` is configured."""
    if app.config['SENTRY_DSN']:
        try:
            from raven.contrib.celery import (
                register_signal, register_logger_signal
            )
            from raven.contrib.flask import Sentry
        except ImportError:
            log.error('raven is required to use Sentry')
            return

        sentry = Sentry()
        tags = app.config['SENTRY_TAGS']

        # Map the configured level name onto a logging level.
        log_level_name = app.config['SENTRY_LOGGING']
        if log_level_name:
            log_level = getattr(logging, log_level_name.upper())
            if log_level:
                sentry.logging = True
                sentry.level = log_level

        # Do not send HTTPExceptions
        exceptions = set(app.config['SENTRY_IGNORE_EXCEPTIONS'])
        for exception in IGNORED_EXCEPTIONS:
            exceptions.add(exception)
        app.config['SENTRY_IGNORE_EXCEPTIONS'] = list(exceptions)

        app.config['SENTRY_PUBLIC_DSN'] = public_dsn(app.config['SENTRY_DSN'])

        # Versions Management: uData and plugins versions as tags.
        for dist in entrypoints.get_plugins_dists(app):
            if dist.version:
                tags[dist.project_name] = dist.version
        # Do not forget udata itself
        tags['udata'] = pkg_resources.get_distribution('udata').version

        sentry.init_app(app)
        # register a custom filter to filter out duplicate logs
        register_logger_signal(sentry.client, loglevel=sentry.level)
        # hook into the Celery error handler
        register_signal(sentry.client)
def register_handlers():
    """Wire the raven client into Django's request signals and, when
    django-celery is installed, into Celery's failure/logging signals."""
    from django.core.signals import got_request_exception, request_started

    def before_request(*args, **kwargs):
        # Start a fresh raven context for each incoming request.
        client.context.activate()

    request_started.connect(before_request, weak=False)

    if u'sentry' in settings.INSTALLED_APPS:
        from django.db import transaction

        # Django >= 1.6 renamed commit_on_success to atomic.
        if hasattr(transaction, u'atomic'):
            commit_on_success = transaction.atomic
        else:
            commit_on_success = transaction.commit_on_success

        @commit_on_success
        def wrap_sentry(request, **kwargs):
            # Roll back any half-finished transaction before reporting.
            if transaction.is_dirty():
                transaction.rollback()
            return sentry_exception_handler(request, **kwargs)

        exception_handler = wrap_sentry
    else:
        exception_handler = sentry_exception_handler

    got_request_exception.connect(exception_handler, weak=False)

    if u'djcelery' in settings.INSTALLED_APPS:
        try:
            from raven.contrib.celery import register_signal, register_logger_signal
        except ImportError:
            logger.exception(u'Failed to install Celery error handler')
        else:
            try:
                register_signal(client)
            except Exception:
                logger.exception(u'Failed to install Celery error handler')

            try:
                # Log level resolution: RAVEN_CONFIG['celery_loglevel'],
                # then settings.SENTRY_CELERY_LOGLEVEL, then logging.ERROR.
                ga = lambda x, d = None: getattr(settings, u'SENTRY_%s' % x, d)
                options = getattr(settings, u'RAVEN_CONFIG', {})
                loglevel = options.get(u'celery_loglevel',
                                       ga(u'CELERY_LOGLEVEL', logging.ERROR))
                register_logger_signal(client, loglevel=loglevel)
            except Exception:
                logger.exception(u'Failed to install Celery error handler')
def celery_inject_sentry(celery):
    """Inject Sentry into a celery app. Requires ``raven``.

    If ``SENTRY_DSN`` is specified in config, a sentry client is created and
    attached. Additionally uses ``celery.version`` and ``SENTRY_SITE`` to
    provide extra context to sentry events.

    :param celery: The celery instance to attach raven to.
    """
    if not celery.conf.get('SENTRY_DSN'):
        return None
    app_version = getattr(celery, 'version', 'UNKNOWN')
    client = create_client(celery.conf,
                           app_version=app_version,
                           ignore_common_http=False)
    register_logger_signal(client)
    register_signal(client)
    return client
def bootstrap_worker(sender, **kwargs):
    """Bootstrap the Pyramid application for a worker process and configure
    Sentry reporting on task failure."""
    base_url = os.environ.get('APP_URL')
    config_uri = os.environ.get('CONFIG_URI', 'conf/app.ini')
    paster.setup_logging(config_uri)

    if base_url is None:
        base_url = 'http://localhost'
        # FIX: Logger.warn() is a deprecated alias; use warning().
        log.warning('APP_URL not found in environment, using default: %s',
                    base_url)

    request = Request.blank('/', base_url=base_url)
    env = paster.bootstrap(config_uri, request=request)
    request.root = env['root']
    sender.app.request = request

    # Configure Sentry reporting on task failure
    register_signal(request.sentry)
    register_logger_signal(request.sentry, loglevel=logging.ERROR)
def create_celery_app(app=None):
    """Create the apollo Celery app with Sentry exception logging."""
    flask_app = app or create_app('apollo', '', register_all_blueprints=False)
    celery = Celery(__name__, broker=flask_app.config['CELERY_BROKER_URL'])

    # configure exception logging
    sentry_client = Client(flask_app.config.get('SENTRY_DSN', ''))
    register_logger_signal(sentry_client)
    register_signal(sentry_client)

    celery.conf.update(flask_app.config)
    base_task = celery.Task

    class ContextTask(base_task):
        abstract = True

        def __call__(self, *args, **kwargs):
            with flask_app.app_context():
                return base_task.__call__(self, *args, **kwargs)

    celery.Task = ContextTask
    return celery
def init_app(app):
    """Configure Sentry for *app* when ``SENTRY_DSN`` is present."""
    if 'SENTRY_DSN' in app.config:
        try:
            from raven.contrib.celery import (register_signal,
                                              register_logger_signal)
            from raven.contrib.flask import Sentry
        except ImportError:
            # FIX: was a bare ``except:``, which also swallowed unrelated
            # errors (even SystemExit/KeyboardInterrupt); only a missing
            # dependency should be tolerated here.
            log.error('raven[flask] is required to use sentry')
            return

        sentry = Sentry()

        app.config.setdefault('SENTRY_USER_ATTRS',
                              ['slug', 'email', 'fullname'])
        app.config.setdefault('SENTRY_LOGGING', 'WARNING')

        # Map the configured level name onto a logging level.
        log_level_name = app.config.get('SENTRY_LOGGING')
        if log_level_name:
            log_level = getattr(logging, log_level_name.upper())
            if log_level:
                sentry.logging = True
                sentry.level = log_level

        # Do not send HTTPExceptions
        exceptions = app.config.get('RAVEN_IGNORE_EXCEPTIONS', [])
        if HTTPException not in exceptions:
            exceptions.append(HTTPException)
        if PermissionDenied not in exceptions:
            exceptions.append(PermissionDenied)
        app.config['RAVEN_IGNORE_EXCEPTIONS'] = exceptions

        app.config['SENTRY_PUBLIC_DSN'] = public_dsn(app.config['SENTRY_DSN'])

        sentry.init_app(app)

        # register a custom filter to filter out duplicate logs
        register_logger_signal(sentry.client, loglevel=sentry.level)
        # hook into the Celery error handler
        register_signal(sentry.client)
def init_celery(app, celery):
    """
    Configure *celery* from the Flask app config, attach Sentry error
    reporting, and make every task run inside an application context.
    """
    sentry_client = Client(app.config['SENTRY_DSN'])
    register_logger_signal(sentry_client)
    register_signal(sentry_client)

    celery.conf.update(app.config)
    base_task = celery.Task

    class ContextTask(base_task):
        abstract = True

        def __call__(self, *args, **kwargs):
            with app.app_context():
                return base_task.__call__(self, *args, **kwargs)

    celery.Task = ContextTask
def ready(self):
    """Configure Celery once Django is ready."""
    # Using a string here means the worker don't have to serialize
    # the configuration object to child processes.
    # - namespace='CELERY' means all celery-related configuration keys
    #   should have a `CELERY_` prefix.
    app.config_from_object('django.conf:settings', namespace='CELERY')
    app.autodiscover_tasks(lambda: settings.INSTALLED_APPS, force=True)
    app.conf.update(result_backend='rpc://', result_expires=3600)

    if hasattr(settings, 'RAVEN_CONFIG'):
        # Celery signal registration
        from raven import Client as RavenClient
        from raven.contrib.celery import register_logger_signal
        from raven.contrib.celery import register_signal

        raven_client = RavenClient(dsn=settings.RAVEN_CONFIG['dsn'])
        register_logger_signal(raven_client)
        register_signal(raven_client)
# Module-level Celery bootstrap for mysite.
current_settings_module = os.environ.setdefault("DJANGO_SETTINGS_MODULE",
                                                "mysite.settings.base")
print('(Celery App) DJANGO_SETTINGS_MODULE env variable detected: %s Unless overriden, this will '
      'be used.' % current_settings_module)

logger = getLogger('mysite.celery_app')

# Setup Sentry error logging.
client = Client(settings.RAVEN_CONFIG['dsn'])

# Register a custom filter to filter out duplicate logs.
# BUG FIX: register_logger_signal and register_signal were each called
# twice, attaching duplicate handlers/receivers and reporting every failure
# twice. Optional arguments: `loglevel` sets the created handler's level
# (defaults to logging.ERROR); `ignore_expected` skips exception classes
# listed in Task.throws.
register_logger_signal(client)

# Hook into the Celery error handler.
register_signal(client)

# setdefault ONLY sets the default value if the key (ie: DJANGO_SETTINGS_MODULE)
# does not already exist. for dev machines, you should set it to
# mysite.settings.local in /etc/profile on linux (see comments in
# manage.py for more explanation)
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'mysite.settings.production')

app = Celery('mysite')
# Using a string here means the worker don't have to serialize
def _configure_celery(*args, **kwargs):
    """Wire the configured raven client into Celery's signals."""
    config = configure()
    raven_client = config.registry["raven.client"]
    register_logger_signal(raven_client)
    register_signal(raven_client)
def create_app(_read_config=True, **config):
    """Application factory for changes: builds the Flask app with default
    configuration, optionally overlays the on-disk config, then initializes
    Sentry, extensions, Celery signal hooks and routes."""
    app = flask.Flask(__name__,
                      static_folder=None,
                      template_folder=os.path.join(PROJECT_ROOT, 'templates'))
    app.wsgi_app = ProxyFix(app.wsgi_app)
    # app.wsgi_app = TracerMiddleware(app.wsgi_app, app)

    # This key is insecure and you should override it on the server
    app.config['SECRET_KEY'] = 't\xad\xe7\xff%\xd2.\xfe\x03\x02=\xec\xaf\\2+\xb8=\xf7\x8a\x9aLD\xb1'

    # Database configuration.
    app.config['SQLALCHEMY_COMMIT_ON_TEARDOWN'] = True
    app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql:///changes'
    app.config['SQLALCHEMY_POOL_SIZE'] = 60
    app.config['SQLALCHEMY_MAX_OVERFLOW'] = 20
    # required for flask-debugtoolbar
    app.config['SQLALCHEMY_RECORD_QUERIES'] = True

    app.config['REDIS_URL'] = 'redis://localhost/0'
    app.config['DEBUG'] = True
    app.config['HTTP_PORT'] = 5000
    app.config['SEND_FILE_MAX_AGE_DEFAULT'] = 0
    app.config['API_TRACEBACKS'] = True

    # default snapshot ID to use when no project-specific active image available
    app.config['DEFAULT_SNAPSHOT'] = None
    app.config['SNAPSHOT_S3_BUCKET'] = None
    app.config['LXC_PRE_LAUNCH'] = None
    app.config['LXC_POST_LAUNCH'] = None

    # Celery configuration.
    app.config['CELERY_ACCEPT_CONTENT'] = ['changes_json']
    app.config['CELERY_ACKS_LATE'] = True
    app.config['CELERY_BROKER_URL'] = 'redis://localhost/0'
    app.config['CELERY_DEFAULT_QUEUE'] = "default"
    app.config['CELERY_DEFAULT_EXCHANGE'] = "default"
    app.config['CELERY_DEFAULT_EXCHANGE_TYPE'] = "direct"
    app.config['CELERY_DEFAULT_ROUTING_KEY'] = "default"
    app.config['CELERY_DISABLE_RATE_LIMITS'] = True
    app.config['CELERY_IGNORE_RESULT'] = True
    app.config['CELERY_RESULT_BACKEND'] = None
    app.config['CELERY_RESULT_SERIALIZER'] = 'changes_json'
    app.config['CELERY_SEND_EVENTS'] = False
    app.config['CELERY_TASK_RESULT_EXPIRES'] = 1
    app.config['CELERY_TASK_SERIALIZER'] = 'changes_json'
    app.config['CELERYD_PREFETCH_MULTIPLIER'] = 1
    app.config['CELERYD_MAX_TASKS_PER_CHILD'] = 10000
    app.config['CELERY_QUEUES'] = (
        Queue('job.sync', routing_key='job.sync'),
        Queue('job.create', routing_key='job.create'),
        Queue('celery', routing_key='celery'),
        Queue('events', routing_key='events'),
        Queue('default', routing_key='default'),
        Queue('repo.sync', Exchange('fanout', 'fanout'), routing_key='repo.sync'),
    )
    app.config['CELERY_ROUTES'] = {
        'create_job': {
            'queue': 'job.create',
            'routing_key': 'job.create',
        },
        'sync_job': {
            'queue': 'job.sync',
            'routing_key': 'job.sync',
        },
        'sync_job_step': {
            'queue': 'job.sync',
            'routing_key': 'job.sync',
        },
        'sync_build': {
            'queue': 'job.sync',
            'routing_key': 'job.sync',
        },
        'check_repos': {
            'queue': 'repo.sync',
            'routing_key': 'repo.sync',
        },
        'sync_repo': {
            'queue': 'repo.sync',
            'routing_key': 'repo.sync',
        },
        'run_event_listener': {
            'queue': 'events',
            'routing_key': 'events',
        },
        'fire_signal': {
            'queue': 'events',
            'routing_key': 'events',
        },
    }

    app.config['EVENT_LISTENERS'] = (
        ('changes.listeners.mail.job_finished_handler', 'job.finished'),
        ('changes.listeners.green_build.build_finished_handler', 'build.finished'),
        ('changes.listeners.hipchat.build_finished_handler', 'build.finished'),
        ('changes.listeners.build_revision.revision_created_handler', 'revision.created'),
        ('changes.listeners.phabricator_listener.build_finished_handler', 'build.finished'),
    )

    # restrict outbound notifications to the given domains
    app.config['MAIL_DOMAIN_WHITELIST'] = ()

    app.config['DEBUG_TB_ENABLED'] = True

    # celerybeat must be running for our cleanup tasks to execute
    # e.g. celery worker -B
    app.config['CELERYBEAT_SCHEDULE'] = {
        'cleanup-tasks': {
            'task': 'cleanup_tasks',
            'schedule': timedelta(minutes=1),
        },
        'check-repos': {
            'task': 'check_repos',
            'schedule': timedelta(minutes=5),
        },
    }
    app.config['CELERY_TIMEZONE'] = 'UTC'

    app.config['SENTRY_DSN'] = None
    app.config['SENTRY_INCLUDE_PATHS'] = [
        'changes',
    ]

    # External service configuration.
    app.config['JENKINS_AUTH'] = None
    app.config['JENKINS_URL'] = None
    app.config['JENKINS_TOKEN'] = None
    app.config['JENKINS_CLUSTERS'] = {}
    app.config['KOALITY_URL'] = None
    app.config['KOALITY_API_KEY'] = None

    app.config['GOOGLE_CLIENT_ID'] = None
    app.config['GOOGLE_CLIENT_SECRET'] = None
    app.config['GOOGLE_DOMAIN'] = None

    app.config['REPO_ROOT'] = None

    app.config['DEFAULT_FILE_STORAGE'] = 'changes.storage.s3.S3FileStorage'
    app.config['S3_ACCESS_KEY'] = None
    app.config['S3_SECRET_KEY'] = None
    app.config['S3_BUCKET'] = None

    app.config['PHABRICATOR_HOST'] = None
    app.config['PHABRICATOR_USERNAME'] = None
    app.config['PHABRICATOR_CERT'] = None

    app.config['MAIL_DEFAULT_SENDER'] = 'changes@localhost'
    app.config['BASE_URI'] = 'http://localhost:5000'

    # Keyword-argument overrides take precedence over the defaults above.
    app.config.update(config)

    if _read_config:
        if os.environ.get('CHANGES_CONF'):
            # CHANGES_CONF=/etc/changes.conf.py
            app.config.from_envvar('CHANGES_CONF')
        else:
            # Look for ~/.changes/changes.conf.py
            path = os.path.normpath(
                os.path.expanduser('~/.changes/changes.conf.py'))
            app.config.from_pyfile(path, silent=True)

    # default the DSN for changes-client to the server's DSN
    app.config.setdefault('CLIENT_SENTRY_DSN', app.config['SENTRY_DSN'])

    if not app.config['BASE_URI']:
        raise ValueError('You must set ``BASE_URI`` in your configuration.')

    parsed_url = urlparse(app.config['BASE_URI'])
    app.config.setdefault('SERVER_NAME', parsed_url.netloc)
    app.config.setdefault('PREFERRED_URL_SCHEME', parsed_url.scheme)

    if app.debug:
        app.config['SEND_FILE_MAX_AGE_DEFAULT'] = 0
    else:
        app.config['SEND_FILE_MAX_AGE_DEFAULT'] = 30

    app.url_map.converters['uuid'] = UUIDConverter

    # init sentry first
    sentry.init_app(app)

    @app.before_request
    def capture_user(*args, **kwargs):
        # Attach the logged-in user (if any) to Sentry events.
        if 'uid' in session:
            sentry.client.user_context({
                'id': session['uid'],
                'email': session['email'],
            })

    api.init_app(app)
    db.init_app(app)
    mail.init_app(app)
    queue.init_app(app)
    redis.init_app(app)
    statsreporter.init_app(app)

    configure_debug_toolbar(app)

    from raven.contrib.celery import register_signal, register_logger_signal
    register_signal(sentry.client)
    register_logger_signal(sentry.client)

    # configure debug routes first
    if app.debug:
        configure_debug_routes(app)

    configure_templates(app)

    # TODO: these can be moved to wsgi app entrypoints
    configure_api_routes(app)
    configure_web_routes(app)

    configure_jobs(app)

    return app
def on_configure(self):
    """Attach the shared Raven client to Celery's error-reporting hooks."""
    # Both hooks target the same client: the first installs the
    # duplicate-log filter, the second reports task failures to Sentry.
    for attach in (register_logger_signal, register_signal):
        attach(raven_client)
from django.conf import settings
from raven import Client
from raven.contrib.celery import register_signal

# Shared Raven client for the Celery error-handling hooks below.
client = Client(settings.RAVEN_CONFIG['dsn'])

# Hook into the Celery error handler.  ``ignore_expected=True`` causes
# exception classes listed in ``Task.throws`` to be ignored.
#
# BUG FIX: ``register_signal`` used to be called twice (once plain, once
# with ``ignore_expected=True``).  Each call creates and connects a new
# receiver, so failures were reported to Sentry twice -- and "expected"
# exceptions were still reported by the first, unfiltered registration.
# Register the signal exactly once with the intended options.
register_signal(client, ignore_expected=True)
def on_configure(self):
    """Create a Raven client and wire it into Celery's error handling."""
    dsn = settings.RAVEN_CONFIG['dsn']
    sentry_client = raven.Client(dsn)
    # Filter duplicate log records, then hook the Celery failure signal.
    register_logger_signal(sentry_client)
    register_signal(sentry_client)
def create_app(config=None):
    """Create and configure the aleph Flask application.

    Builds the Flask config, the Celery queue and beat schedule, all Flask
    extensions, optional Sentry error reporting, and runs every registered
    ``aleph.init`` plugin.

    :param config: optional mapping of settings that override the values
        loaded from ``aleph.settings``.
    :returns: the configured :class:`flask.Flask` application.
    :raises RuntimeError: if ``DATABASE_URI`` is not a PostgreSQL database.
    """
    # BUG FIX: the default used to be a shared mutable dict (config={});
    # use None as the sentinel so each call gets a fresh dict.
    app = Flask('aleph')
    app.config.from_object(settings)
    app.config.update(config or {})

    if 'postgres' not in settings.DATABASE_URI:
        raise RuntimeError("aleph database must be PostgreSQL!")

    app.config.update({
        'SQLALCHEMY_DATABASE_URI': settings.DATABASE_URI,
        'BABEL_DOMAIN': 'aleph'
    })

    queue = Queue(settings.QUEUE_NAME,
                  routing_key=settings.QUEUE_ROUTING_KEY,
                  queue_arguments={'x-max-priority': 9})
    celery.conf.update(
        # BUG FIX: ('aleph.queues') is a plain string, not a tuple -- the
        # trailing comma makes this the intended one-element tuple.
        imports=('aleph.queues',),
        broker_url=settings.BROKER_URI,
        # broker_connection_retry=False,
        broker_connection_max_retries=3,
        broker_pool_limit=None,
        task_always_eager=settings.EAGER,
        task_eager_propagates=True,
        task_ignore_result=True,
        task_acks_late=True,
        task_queues=(queue, ),
        task_default_queue=settings.QUEUE_NAME,
        task_default_routing_key=settings.QUEUE_ROUTING_KEY,
        worker_max_tasks_per_child=500,
        worker_disable_rate_limits=True,
        result_persistent=False,
        beat_schedule={
            'hourly': {
                'task': 'aleph.logic.scheduled.hourly',
                'schedule': crontab(hour='*', minute=0)
            },
            'daily': {
                'task': 'aleph.logic.scheduled.daily',
                'schedule': crontab(hour=5, minute=0)
            }
        },
    )

    migrate.init_app(app, db, directory=settings.ALEMBIC_DIR)
    configure_oauth(app)
    mail.init_app(app)
    db.init_app(app)
    babel.init_app(app)
    cache.init_app(app, config={'CACHE_TYPE': 'simple'})
    CORS(app, origins=settings.CORS_ORIGINS)

    # Enable raven to submit issues to sentry if a DSN is defined. This will
    # report errors from Flask and Celery operation modes to Sentry.
    if settings.SENTRY_DSN:
        sentry.init_app(app,
                        dsn=settings.SENTRY_DSN,
                        logging=True,
                        level=logging.ERROR)
        register_logger_signal(sentry.client)
        register_signal(sentry.client, ignore_expected=True)

    # This executes all registered init-time plugins so that other
    # applications can register their behaviour.
    for plugin in get_extensions('aleph.init'):
        plugin(app=app)

    return app
def create_app(config=None):
    """Create and configure the aleph Flask application.

    Builds the Flask config, the Celery queue and beat schedule, all Flask
    extensions, optional Sentry error reporting, init-time plugins, and --
    when configured -- opencensus tracing exported to Stackdriver.

    :param config: optional mapping of settings that override the values
        loaded from ``aleph.settings``.
    :returns: the configured :class:`flask.Flask` application.
    :raises RuntimeError: if ``DATABASE_URI`` is not a PostgreSQL database.
    """
    # BUG FIX: the default used to be a shared mutable dict (config={});
    # use None as the sentinel so each call gets a fresh dict.
    app = Flask('aleph')
    app.config.from_object(settings)
    app.config.update(config or {})

    if 'postgres' not in settings.DATABASE_URI:
        raise RuntimeError("aleph database must be PostgreSQL!")

    app.config.update({
        'SQLALCHEMY_DATABASE_URI': settings.DATABASE_URI,
        'BABEL_DOMAIN': 'aleph'
    })

    queue = Queue(settings.QUEUE_NAME,
                  routing_key=settings.QUEUE_ROUTING_KEY,
                  queue_arguments={'x-max-priority': 9})
    celery.conf.update(
        # BUG FIX: ('aleph.queues') is a plain string, not a tuple -- the
        # trailing comma makes this the intended one-element tuple.
        imports=('aleph.queues',),
        broker_url=settings.BROKER_URI,
        task_always_eager=settings.EAGER,
        task_eager_propagates=True,
        task_ignore_result=True,
        task_acks_late=False,
        task_queues=(queue,),
        task_default_queue=settings.QUEUE_NAME,
        task_default_routing_key=settings.QUEUE_ROUTING_KEY,
        worker_max_tasks_per_child=1000,
        result_persistent=False,
        beat_schedule={
            'hourly': {
                'task': 'aleph.logic.scheduled.hourly',
                'schedule': crontab(hour='*', minute=0)
            },
            'daily': {
                'task': 'aleph.logic.scheduled.daily',
                'schedule': crontab(hour=5, minute=0)
            }
        },
    )

    migrate.init_app(app, db, directory=settings.ALEMBIC_DIR)
    configure_oauth(app)
    mail.init_app(app)
    db.init_app(app)
    babel.init_app(app)
    CORS(app, origins=settings.CORS_ORIGINS)

    # Enable raven to submit issues to sentry if a DSN is defined. This will
    # report errors from Flask and Celery operation modes to Sentry.
    if settings.SENTRY_DSN:
        sentry.init_app(app,
                        dsn=settings.SENTRY_DSN,
                        logging=True,
                        level=logging.ERROR)
        register_logger_signal(sentry.client)
        register_signal(sentry.client, ignore_expected=True)

    # This executes all registered init-time plugins so that other
    # applications can register their behaviour.
    for plugin in get_extensions('aleph.init'):
        plugin(app=app)

    # Set up opencensus tracing and its integrations. Export collected traces
    # to Stackdriver Trace on a background thread.
    if settings.STACKDRIVER_TRACE_PROJECT_ID:
        exporter = stackdriver_exporter.StackdriverExporter(
            project_id=settings.STACKDRIVER_TRACE_PROJECT_ID,
            transport=BackgroundThreadTransport
        )
        sampler = probability.ProbabilitySampler(
            rate=settings.TRACE_SAMPLING_RATE
        )
        blacklist_paths = ['/healthz', ]
        FlaskMiddleware(app,
                        exporter=exporter,
                        sampler=sampler,
                        blacklist_paths=blacklist_paths)
        integrations = ['postgresql', 'sqlalchemy', 'httplib']
        config_integration.trace_integrations(integrations)
        # Set up logging
        setup_stackdriver_logging()

    return app
def on_configure(self):
    """Build a Raven client from the SENTRY_DSN env var and register hooks."""
    # NOTE(review): a missing SENTRY_DSN yields Client(None) -- presumably
    # raven degrades to a no-op client in that case; confirm.
    sentry_client = raven.Client(os.environ.get("SENTRY_DSN"))
    for hook in (register_logger_signal, register_signal):
        hook(sentry_client)
log = commonware.log.getLogger('z.task') app = Celery('olympia') task = app.task app.config_from_object('django.conf:settings') app.autodiscover_tasks(settings.INSTALLED_APPS) # Hook up Sentry in celery. raven_client = Client(settings.SENTRY_DSN) # register a custom filter to filter out duplicate logs register_logger_signal(raven_client) # hook into the Celery error handler register_signal(raven_client) # After upgrading raven we can specify loglevel=logging.INFO to override # the default (which is ERROR). register_logger_signal(raven_client) @task_failure.connect def process_failure_signal(exception, traceback, sender, task_id, signal, args, kwargs, einfo, **kw): """Catch any task failure signals from within our worker processes and log them as exceptions, so they appear in Sentry and ordinary logging output.""" exc_info = (type(exception), exception, traceback) log.error(u'Celery TASK exception: {0.__name__}: {1}'.format(*exc_info),
def on_configure(self):
    """Hook Raven into Celery's error handling when a Sentry DSN is set."""
    # Guard clause: without a DSN there is nothing to register.
    if not settings.SENTRY_DSN:
        return
    sentry_client = raven.Client(settings.SENTRY_DSN)
    register_logger_signal(sentry_client)  # defaults to logging.ERROR
    register_signal(sentry_client)
def create_app(conf_class=None):
    """Application factory: create and configure the Flask app at runtime.

    :param conf_class: name of a config class inside ``CONFIG_MODULE``; when
        None, the class named by the ``CONFIG_CLASS`` environment variable
        is used instead.
    :returns: the configured Flask application.
    """
    app = Flask('app.application')
    if conf_class is None:
        app.config.from_object(f'{CONFIG_MODULE}.{os.environ["CONFIG_CLASS"]}')
    else:
        app.config.from_object(f'{CONFIG_MODULE}.{conf_class}')
    # maybe there is a better way to pass this variable
    app.jinja_env.undefined = app.config['JINJA_UNDEFINED']

    # init extensions
    ext.db.init_app(app)
    ext.cache.init_app(app)
    ext.migrate.init_app(app, ext.db)
    ext.compress.init_app(app)
    ext.assets.init_app(app)
    ext.sentry.init_app(app)
    ext.html_min.init_app(app)
    ext.login_manager.init_app(app)
    create_celery(app, ext.celery)

    # logging
    for handler in app.config['LOGGING_HANDLERS']:
        handler.setLevel(app.config['LOGGING_LEVEL'])
        app.logger.addHandler(handler)

    # send celery errors to Sentry.
    # BUG FIX: register_logger_signal used to be called twice (once at the
    # default ERROR level and once with the configured level), which
    # installed a duplicate logging handler and double-reported errors.
    # Register each raven hook exactly once.
    ext.client.set_dsn(app.config['SENTRY_CONFIG']['dsn'])
    register_logger_signal(ext.client, loglevel=app.config['LOGGING_LEVEL'])
    register_signal(ext.client)

    # static slim
    js = Bundle(
        os.path.join('js', 'main.js'),
        filters='jsmin',
        output=os.path.join(app.config['COMPRESS_STATIC_DIR'], 'packed.js'),
    )
    css = Bundle(os.path.join('css', 'main.css'),
                 filters='cssmin',
                 output=os.path.join(app.config['COMPRESS_STATIC_DIR'],
                                     'packed.css'))
    ext.assets.register('js_all', js)
    ext.assets.register('css_all', css)

    # urls
    from app.account.views import blueprint as account_bp
    app.register_blueprint(account_bp)

    # register error handlers
    app.register_error_handler(400, error_handlers.bad_request)
    app.register_error_handler(401, error_handlers.unauthorized)
    app.register_error_handler(403, error_handlers.forbidden)
    app.register_error_handler(404, error_handlers.not_found)
    app.register_error_handler(410, error_handlers.gone)
    app.register_error_handler(500, error_handlers.internal_server)

    # debug toolbar
    if app.config['DEBUG']:
        from flask_debugtoolbar import DebugToolbarExtension
        DebugToolbarExtension(app)

    return app
def on_configure(self):
    """Wire the shared Django raven client into Celery's error handlers."""
    # Imports are local so raven is only required once Celery configures.
    from raven.contrib.celery import register_logger_signal, register_signal
    from raven.contrib.django.raven_compat.models import client

    register_logger_signal(client)
    register_signal(client)