Example 1
File: utils.py Project: 2ps/djenga
def json_logging():
    """
    Initializes the celery logging so that it uses
    our custom json formatter.
    """
    from celery.signals import after_setup_logger
    from celery.signals import after_setup_task_logger

    after_setup_logger.connect(json_formatter)
    after_setup_task_logger.connect(json_task_formatter)
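
The json_formatter and json_task_formatter receivers referenced above are not shown in this snippet. A minimal sketch of what they might look like, assuming a hypothetical JsonFormatter class (the field names are illustrative):

import json
import logging


class JsonFormatter(logging.Formatter):
    """Hypothetical formatter that serializes each record as a JSON object."""

    def format(self, record):
        return json.dumps({
            'level': record.levelname,
            'logger': record.name,
            'message': record.getMessage(),
        })


def json_formatter(logger=None, **kwargs):
    # Swap the formatter on every handler attached to the worker logger
    # that Celery passes to the after_setup_logger signal.
    for handler in logger.handlers:
        handler.setFormatter(JsonFormatter())


def json_task_formatter(logger=None, **kwargs):
    # Same idea for the task logger; a real implementation might also attach
    # task metadata (task id, task name) via a logging.Filter.
    for handler in logger.handlers:
        handler.setFormatter(JsonFormatter())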
Example 2
def add_sentry_handler_to_celery_task_logger(client, sentry_handler_log_level):
    handler = SentryHandler(client)
    handler.setLevel(sentry_handler_log_level)

    def process_task_logger_event(sender, logger, loglevel, logfile, format,
                                  colorize, **kw):
        # Avoid attaching a second SentryHandler if one is already present.
        for h in logger.handlers:
            if isinstance(h, SentryHandler):
                return
        logger.addHandler(handler)

    after_setup_task_logger.connect(process_task_logger_event, weak=False)
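
A hypothetical call site for the helper above, assuming a raven Client built from a DSN (the DSN and level are illustrative):

import logging

from raven import Client

client = Client('https://public:secret@sentry.example.com/1')
add_sentry_handler_to_celery_task_logger(client, logging.ERROR)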
Example 3
def create_celery_app(app):
    celery = Celery(app.import_name, broker=app.config['BROKER_URL'])
    celery.conf.update(app.config)
    TaskBase = celery.Task

    class ContextTask(TaskBase):
        abstract = True

        def __call__(self, *args, **kwargs):
            with app.app_context():
                return TaskBase.__call__(self, *args, **kwargs)

    celery.Task = ContextTask

    def setup_tasks_logging(**kwargs):
        logging.config.dictConfig(app.config.get('LOGGING', {}))

    after_setup_task_logger.connect(setup_tasks_logging)

    return celery
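
A hypothetical wiring of the factory above, assuming a Flask application whose config carries BROKER_URL and an optional dictConfig-style LOGGING mapping (all values below are illustrative):

from flask import Flask

flask_app = Flask(__name__)
flask_app.config['BROKER_URL'] = 'redis://localhost:6379/0'
flask_app.config['LOGGING'] = {'version': 1}  # minimal dictConfig schema
celery = create_celery_app(flask_app)


@celery.task
def ping():
    # Runs inside flask_app.app_context() thanks to ContextTask.
    return 'pong'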
Example 4
            stop_dynamic_analysis(analyzer=analyzer,
                                  pkgname=pkgname,
                                  language=language,
                                  pkgversion=pkgversion)
        except Exception as e:
            logger.error(
                "fail to stop dynamic_worker for pkg %s language %s version %s: %s",
                pkgname, language, pkgversion, str(e))
        if analyzer.FAILURE_FILE and exists(dirname(analyzer.FAILURE_FILE)):
            open(analyzer.FAILURE_FILE, 'a').write(pkgname + '\n')
        return 0
    except Exception as e:
        logger.error("dynamic_worker: %s (type: %s)", str(e), type(e))
        try:
            stop_dynamic_analysis(analyzer=analyzer,
                                  pkgname=pkgname,
                                  language=language,
                                  pkgversion=pkgversion)
        except Exception as e:
            logger.error(
                "fail to stop dynamic_worker for pkg %s language %s version %s: %s",
                pkgname, language, pkgversion, str(e))
        if analyzer.FAILURE_FILE and exists(dirname(analyzer.FAILURE_FILE)):
            open(analyzer.FAILURE_FILE, 'a').write(pkgname + '\n')
        return 0


# Need to use a registered instance for the sender argument.
task_prerun.connect(init_task)
after_setup_task_logger.connect(setup_logging)
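
The init_task and setup_logging receivers connected above are not shown in this excerpt. A minimal sketch of what setup_logging could look like, assuming it only attaches a stream handler to the logger Celery passes in:

import logging


def setup_logging(logger=None, loglevel=logging.INFO, **kwargs):
    handler = logging.StreamHandler()
    handler.setLevel(loglevel)
    logger.addHandler(handler)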
Example 5
def setup_app(app):
    """Setup Sentry extension."""
    app.config.setdefault('SENTRY_DSN', None)
    # Sanitize data more
    app.config.setdefault('SENTRY_PROCESSORS', (
        'raven.processors.SanitizePasswordsProcessor',
        'invenio.ext.logging.backends.sentry.InvenioSanitizeProcessor',
    ))
    # When a user is logged in, also include the user info in the log message.
    app.config.setdefault('SENTRY_USER_ATTRS', ['info', ])
    # Defaults to only reporting errors and warnings.
    app.config.setdefault('LOGGING_SENTRY_LEVEL', 'WARNING')
    # Send warnings to Sentry?
    app.config.setdefault('LOGGING_SENTRY_INCLUDE_WARNINGS', True)
    # Send Celery log messages to Sentry?
    app.config.setdefault('LOGGING_SENTRY_CELERY', True)
    # Transport mechanism for Celery. Defaults to synchronous transport.
    # See http://raven.readthedocs.org/en/latest/transports/index.html
    app.config.setdefault('LOGGING_SENTRY_CELERY_TRANSPORT', 'sync')

    if app.config['SENTRY_DSN']:
        # Detect Invenio requirements and add to Sentry include paths so
        # version information about them is added to the log message.
        app.config.setdefault('SENTRY_INCLUDE_PATHS', sentry_include_paths())

        # Fix up known problems with retrieving version information.
        # A patch has been submitted to raven-python; if it is accepted, the
        # following lines can be removed:
        # https://github.com/getsentry/raven-python/pull/452
        from raven.utils import _VERSION_CACHE
        import numpy
        import webassets
        import setuptools
        _VERSION_CACHE['invenio'] = invenio.__version__
        _VERSION_CACHE['numpy'] = numpy.__version__
        _VERSION_CACHE['webassets'] = webassets.__version__
        _VERSION_CACHE['setuptools'] = setuptools.__version__

        # Modify Sentry transport for Celery - must be called prior to client
        # creation.
        celery_dsn_fix(app)

        # Installs sentry in app.extensions['sentry']
        s = Sentry(
            app,
            logging=True,
            level=getattr(logging, app.config['LOGGING_SENTRY_LEVEL'])
        )

        # Replace method with more robust version
        s.add_sentry_id_header = add_sentry_id_header

        # Add extra tags information to sentry.
        s.client.extra_context({'version': invenio.__version__})

        # Capture warnings from warnings module
        if app.config['LOGGING_SENTRY_INCLUDE_WARNINGS']:
            setup_warnings(s)

        # Setup Celery logging to Sentry
        if app.config['LOGGING_SENTRY_CELERY']:
            # Setup Celery loggers
            after_setup_task_logger.connect(
                partial(celery_logger_setup, app=app),
                weak=False
            )
            after_setup_logger.connect(
                partial(celery_logger_setup, app=app),
                weak=False
            )

        # Werkzeug only adds a stream handler if there are no other handlers
        # defined, so when Sentry adds a log handler, no output is received
        # from Werkzeug unless we install a console handler on the werkzeug
        # logger here.
        if app.debug:
            logger = logging.getLogger('werkzeug')
            logger.setLevel(logging.INFO)
            handler = logging.StreamHandler()
            logger.addHandler(handler)
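
A hypothetical configuration that would activate the extension above, assuming app is the Flask application passed to setup_app (DSN and level are illustrative):

app.config['SENTRY_DSN'] = 'https://public:secret@sentry.example.com/2'
app.config['LOGGING_SENTRY_LEVEL'] = 'ERROR'
setup_app(app)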
Example 6
    CELERY_ACCEPT_CONTENT=["json"],  # Ignore other content
    CELERY_RESULT_SERIALIZER="json",
    CELERY_ENABLE_UTC=True,
)


def setup_log(**args):
    logbook.SyslogHandler().push_application()
    logbook.StreamHandler(sys.stderr, bubble=True).push_application()


APP = None


def needs_app_context(f):
    @functools.wraps(f)
    def wrapper(*args, **kwargs):
        global APP

        if APP is None:
            APP = create_app()

        with APP.app_context():
            return f(*args, **kwargs)

    return wrapper


after_setup_logger.connect(setup_log)
after_setup_task_logger.connect(setup_log)
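
A hypothetical use of the needs_app_context decorator defined above; the function name and body are illustrative:

@needs_app_context
def generate_report(report_id):
    # Everything here runs inside APP.app_context(), so extensions that
    # require the Flask application context behave as they do in a view.
    return report_id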
Example 7
LOGSTASH_PORT = getattr(settings, 'LOGSTASH_PORT', None)


def initialize_logstash(logger=None, loglevel=logging.INFO, **kwargs):
    handler = logstash.TCPLogstashHandler(LOGSTASH_HOST,
                                          LOGSTASH_PORT,
                                          tags=['celery'],
                                          message_type='celery',
                                          version=1)
    handler.setLevel(loglevel)
    logger.addHandler(handler)
    return logger


if LOGSTASH_ENABLED:
    after_setup_task_logger.connect(initialize_logstash)
    after_setup_logger.connect(initialize_logstash)

app = Celery('signals')

# Using a string here means the worker doesn't have to serialize
# the configuration object to child processes.
# - namespace='CELERY' means all celery-related configuration keys
#   should have a `CELERY_` prefix.
app.config_from_object('django.conf:settings', namespace='CELERY')

# Load task modules from all registered Django app configs.
app.autodiscover_tasks(['eip', 'influencer', 'stance', 'ethereum_client'])


@app.task(bind=True)
Example 8
def configure_syslog(app):
    if frontend_config.log.syslog:
        app.conf.update(CELERYD_LOG_COLOR=False)
        after_setup_logger.connect(setup_log)
        after_setup_task_logger.connect(setup_log)
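
The setup_log receiver connected here is not shown in this excerpt. A minimal sketch, assuming it forwards records to syslog in the spirit of the SysLogHandler examples later on:

import logging
import logging.handlers


def setup_log(logger=None, loglevel=logging.INFO, **kwargs):
    handler = logging.handlers.SysLogHandler(address='/dev/log')
    handler.setLevel(loglevel)
    logger.addHandler(handler)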
Example 9
import logging
import time

from celery import Celery
from celery.signals import after_setup_task_logger

MODULE='oii.ifcb.workflow.accession'

celery = Celery(MODULE)

logger = logging.getLogger(MODULE)

def celery_logging(**kw):
    logger = logging.getLogger(MODULE)
    logger.addHandler(logging.StreamHandler())
    logger.setLevel(logging.INFO)

after_setup_task_logger.connect(celery_logging)
   
# example config file
# resolver = oii/ifcb/mvco.xml
# [ditylum]
# psql_connect = user=foobar password=bazquux dbname=ditylum

def list_adcs(time_series,resolver,year_pattern='....'):
    r = parse_stream(resolver)
    for s in r['list_adcs'].resolve_all(time_series=time_series,year_pattern=year_pattern): # FIXME hardcoded
        date = time.strptime(s.date, s.date_format)
        yield s

def list_new_filesets(time_series,psql_connect,resolver,year_pattern='....'):
    feed = IfcbFeed(psql_connect)
    r = parse_stream(resolver)
Example 10
from celery import Celery

celery = Celery('Shekarchi',
                broker='redis://localhost:6379/0',
                backend='redis://localhost:6379/0',
                include=['app.api', 'controller'])

import logging

from celery.signals import after_setup_logger, after_setup_task_logger


def handle_logs(logger=None, loglevel=logging.DEBUG, **kwargs):
    from common import handler
    logger.addHandler(handler)
    return logger


after_setup_task_logger.connect(handle_logs)
after_setup_logger.connect(handle_logs)
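
The handler imported from common is not shown; a minimal sketch, assuming common.py simply exposes a pre-configured logging handler:

# common.py (hypothetical contents)
import logging

handler = logging.StreamHandler()
handler.setFormatter(
    logging.Formatter('%(asctime)s %(levelname)s %(name)s: %(message)s'))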
Example 11
#SYSLOG_ADDRESS = ('syslogserver', 514)
SYSLOG_ADDRESS = '/dev/log'

import logging
import logging.handlers
from celery.signals import after_setup_logger, after_setup_task_logger

def after_setup_logger_handler(sender=None, logger=None, loglevel=None,
                               logfile=None, format=None,
                               colorize=None, **kwds):
    handler = logging.handlers.SysLogHandler(address=SYSLOG_ADDRESS)
    handler.setFormatter(logging.Formatter(format))
    handler.setLevel(loglevel or logging.INFO)
    logger.addHandler(handler)
 
after_setup_logger.connect(after_setup_logger_handler)
after_setup_task_logger.connect(after_setup_logger_handler)
Example 12
def after_setup_logger_handler(sender=None,
                               logger=None,
                               loglevel=logging.DEBUG,
                               logfile=None,
                               format=None,
                               colorize=None,
                               **kwds):
    handler = logging.handlers.SysLogHandler(
        address='/dev/log', facility=logging.handlers.SysLogHandler.LOG_LOCAL7)
    handler.setFormatter(logging.Formatter(format))
    handler.setLevel(loglevel)
    logger.addHandler(handler)


after_setup_logger.connect(after_setup_logger_handler)
after_setup_task_logger.connect(after_setup_logger_handler)

app = Celery(config.get('APPLICATION_NAME'),
             broker=config.get('CELERY_BROKER'),
             backend=config.get('CELERY_BACKEND'),
             include=[
                 'tasks.slack',
             ])

app.conf.update(
    CELERY_DEFAULT_QUEUE=config.get('CELERY_DEFAULT_QUEUE'),
    CELERY_TASK_RESULT_EXPIRES=config.get('CELERY_TASK_RESULT_EXPIRES'),
    CELERY_TRACK_STARTED=True,
    CELERY_QUEUES=(Queue(config.get('CELERY_DEFAULT_QUEUE'),
                         Exchange(config.get('CELERY_DEFAULT_QUEUE')),
                         routing_key=config.get('CELERY_DEFAULT_QUEUE')), ))
Example 13
                   )

if app.config.get('LOGGING_ENABLED'):
    log_directory = app.config.get('LOGGING_DIRECTORY')
    loglevel = app.config.get('LOGGING_LEVEL')
    handler = _create_log_handler(log_directory)
    # Do not set logging level in the handler.
    # Otherwise, if Flask's DEBUG is set to False,
    # all logging will be disabled.
    # Instead, set the level in the logger object.
    app.logger.setLevel(loglevel)
    app.logger.addHandler(handler)
    # Celery uses two loggers: one global/worker logger and a second task
    # logger. Global/worker logs are handled by the celeryd process running
    # the VM; this configures a handler for the task logger:
    after_setup_task_logger.connect(_custom_celery_handler)
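
The _create_log_handler and _custom_celery_handler helpers are project-specific and not shown here. A minimal sketch of what the Celery receiver could look like, assuming it reuses the file handler created above for the task logger:

def _custom_celery_handler(logger=None, loglevel=None, **kwargs):
    # Hypothetical receiver: attach the app's log handler to the Celery
    # task logger so task logs end up in the same place.
    logger.addHandler(handler)
    if loglevel is not None:
        logger.setLevel(loglevel)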


@app.before_request
def log_before():
    url = request.path
    method = request.method
    app.logger.debug('Request of type %s made to %s', method, url)


@app.after_request
def log_after(response):
    resp_status = response.status
    resp_headers = response.headers
    app.logger.debug('Response: %s, %s', resp_status, resp_headers)
    return response
Example 14
                   )

if app.config.get('LOGGING_ENABLED'):
    log_directory = app.config.get('LOGGING_DIRECTORY')
    loglevel = app.config.get('LOGGING_LEVEL')
    handler = _create_log_handler(log_directory)
    # Do not set logging level in the handler.
    # Otherwise, if Flask's DEBUG is set to False,
    # all logging will be disabled.
    # Instead, set the level in the logger object.
    app.logger.setLevel(loglevel)
    app.logger.addHandler(handler)
    # Celery uses two loggers: one global/worker logger and a second task
    # logger. Global/worker logs are handled by the celeryd process running
    # the VM; this configures a handler for the task logger:
    after_setup_task_logger.connect(_custom_celery_handler)

csrf = CSRFProtect(app)


@app.before_request
def log_before():
    url = request.path
    method = request.method
    app.logger.debug('Request of type %s made to %s', method, url)


@app.after_request
def log_after(response):
    resp_status = response.status
    resp_headers = response.headers
Example 15
def setup_app(app):
    """Setup Sentry extension."""
    app.config.setdefault('SENTRY_DSN', None)
    # Sanitize data more
    app.config.setdefault('SENTRY_PROCESSORS', (
        'raven.processors.SanitizePasswordsProcessor',
        'invenio.ext.logging.backends.sentry.InvenioSanitizeProcessor',
    ))
    # When a user is logged in, also include the user info in the log message.
    app.config.setdefault('SENTRY_USER_ATTRS', [
        'info',
    ])
    # Defaults to only reporting errors and warnings.
    app.config.setdefault('LOGGING_SENTRY_LEVEL', 'WARNING')
    # Send warnings to Sentry?
    app.config.setdefault('LOGGING_SENTRY_INCLUDE_WARNINGS', True)
    # Send Celery log messages to Sentry?
    app.config.setdefault('LOGGING_SENTRY_CELERY', True)
    # Transport mechanism for Celery. Defaults to synchronous transport.
    # See http://raven.readthedocs.org/en/latest/transports/index.html
    app.config.setdefault('LOGGING_SENTRY_CELERY_TRANSPORT', 'sync')

    if app.config['SENTRY_DSN']:
        # Detect Invenio requirements and add to Sentry include paths so
        # version information about them is added to the log message.
        app.config.setdefault('SENTRY_INCLUDE_PATHS', sentry_include_paths())

        # Fix up known problems with retrieving version information.
        # A patch has been submitted to raven-python; if it is accepted, the
        # following lines can be removed:
        # https://github.com/getsentry/raven-python/pull/452
        from raven.utils import _VERSION_CACHE
        import numpy
        import webassets
        import setuptools
        _VERSION_CACHE['invenio'] = invenio.__version__
        _VERSION_CACHE['numpy'] = numpy.__version__
        _VERSION_CACHE['webassets'] = webassets.__version__
        _VERSION_CACHE['setuptools'] = setuptools.__version__

        # Modify Sentry transport for Celery - must be called prior to client
        # creation.
        celery_dsn_fix(app)

        # Installs sentry in app.extensions['sentry']
        s = Sentry(app,
                   logging=True,
                   level=getattr(logging, app.config['LOGGING_SENTRY_LEVEL']))

        # Replace method with more robust version
        s.add_sentry_id_header = add_sentry_id_header

        # Add extra tags information to sentry.
        s.client.extra_context({'version': invenio.__version__})

        # Capture warnings from warnings module
        if app.config['LOGGING_SENTRY_INCLUDE_WARNINGS']:
            setup_warnings(s)

        # Setup Celery logging to Sentry
        if app.config['LOGGING_SENTRY_CELERY']:
            # Setup Celery loggers
            after_setup_task_logger.connect(partial(celery_logger_setup,
                                                    app=app),
                                            weak=False)
            after_setup_logger.connect(partial(celery_logger_setup, app=app),
                                       weak=False)

        # Werkzeug only adds a stream handler if there are no other handlers
        # defined, so when Sentry adds a log handler, no output is received
        # from Werkzeug unless we install a console handler on the werkzeug
        # logger here.
        if app.debug:
            logger = logging.getLogger('werkzeug')
            logger.setLevel(logging.INFO)
            handler = logging.StreamHandler()
            logger.addHandler(handler)
Example 16
def configure_syslog(app):
    if probe_config.log.syslog:
        app.conf.update(CELERYD_LOG_COLOR=False)
        after_setup_logger.connect(setup_log)
        after_setup_task_logger.connect(setup_log)