    @classmethod
    def connect_if_not_connected(cls):
        """Connect to Celery if not already connected; guarded by a lock since the server handles requests concurrently."""
        if cls._connected:
            return

        with cls._setup_lock:
            if not cls._connected:
                # No result backend is needed on the server; we only push tasks to the message queue
                init_celery(result_backend=False)
                init_selinon()
                # Mark as connected only after initialization succeeded
                cls._connected = True

    def __init__(self):
        self.log = logging.getLogger(__name__)
        formatter = logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        logging_handler = logging.StreamHandler(sys.stdout)
        logging_handler.setFormatter(formatter)
        self.log.addHandler(logging_handler)
        self.log.setLevel(logging.DEBUG)

        if ENABLE_SCHEDULING:
            init_celery(result_backend=False)
            init_selinon()
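The double-checked locking in connect_if_not_connected above relies on class-level state that is not shown in the snippet; a minimal sketch, assuming a hypothetical dispatcher class, of what that state could look like:

import threading


class TaskDispatcher:
    """Holds the shared Celery/Selinon initialization state (hypothetical class name)."""

    _connected = False              # flipped to True once init_celery()/init_selinon() succeed
    _setup_lock = threading.Lock()  # guards the one-time initialization across threads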
Example #3
    def __init__(self):
        """Initialize Relase Monitor."""
        logger.info("Starting the monitor service")

        # Create PyPI monitor
        self.pypi_monitor = PypiMonitor()

        # Create NPM monitor
        self.npm_monitor = NPMMonitor()

        # Initialize Selinon if we want to run in production
        if ENABLE_SCHEDULING:
            init_celery(result_backend=False)
            init_selinon()
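ENABLE_SCHEDULING itself is not defined in these snippets; a minimal sketch of how such a flag is commonly read from the environment (the variable name and accepted truthy values are assumptions):

import os

ENABLE_SCHEDULING = os.environ.get("ENABLE_SCHEDULING", "0").lower() in ("1", "true", "yes")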
Example #4
    def __init__(self):
        """Constructor."""
        self.log = logging.getLogger(__name__)
        formatter = logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        logging_handler = logging.StreamHandler(sys.stdout)
        logging_handler.setFormatter(formatter)
        self.log.addHandler(logging_handler)
        self.log.setLevel(logging.DEBUG)
        self.old_npm_feed = None
        self.npm_feed = feedparser.parse(NPM_URL + "-/rss")
        self.old_pypi_feed = None
        self.pypi_feed = feedparser.parse(PYPI_URL + "rss/updates.xml")
        self.create_liveness_probe()

        if ENABLE_SCHEDULING:
            init_celery(result_backend=False)
            init_selinon()
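A small sketch, not part of the original class, of how the stored old and fresh feedparser results could be compared to detect newly published packages (the helper name is hypothetical):

def _new_entries(old_feed, new_feed):
    """Return entries that appear in the fresh feed but not in the previously seen one."""
    seen = {entry.get("id") or entry.get("link")
            for entry in (old_feed.entries if old_feed else [])}
    return [entry for entry in new_feed.entries
            if (entry.get("id") or entry.get("link")) not in seen]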
Example #5
#!/usr/bin/env python
"""Start the application."""

import os

import celery
import raven
from raven.contrib.celery import register_signal, register_logger_signal

from f8a_worker.setup_celery import init_celery, init_selinon


class SentryCelery(celery.Celery):
    """Celery class to configure sentry."""
    def on_configure(self):
        """Set up sentry client."""
        dsn = os.environ.get("SENTRY_DSN")
        client = raven.Client(dsn)
        register_logger_signal(client)
        register_signal(client)


app = SentryCelery('tasks')
init_celery(app)
init_selinon(app)
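For local experiments the worker for the app above can also be started from Python (a sketch only; deployments normally launch it through the celery command line, and SENTRY_DSN must be set in the environment for error reporting to work):

if __name__ == "__main__":
    app.worker_main(["worker", "--loglevel=info"])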
Example #6
        if bp:
            handler = getattr(bp, 'coreapi_http_error_handler', None)
            if handler:
                return handler(e)
        return Response(e.error, status=e.status_code)

    setup_logging(app)

    @app.before_request
    def set_current_user():
        g.current_user = None

    @app.after_request
    def access_control_allow_origin(response):
        response.headers["Access-Control-Allow-Origin"] = "*"
        response.headers[
            "Access-Control-Allow-Headers"] = "authorization, content-type"
        response.headers["Access-Control-Allow-Methods"] = "DELETE, GET, HEAD, OPTIONS,"\
            "PATCH, POST, PUT"
        response.headers["Allow"] = "GET, HEAD, OPTIONS, PATCH, POST, PUT"
        return response

    return app


init_selinon()

app = create_app()

app.logger.info('App initialized, ready to roll...')
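A quick sanity check of the CORS headers added by the after_request hook, using Flask's test client (the request path is illustrative only):

with app.test_client() as client:
    response = client.get("/some/path")
    assert response.headers["Access-Control-Allow-Origin"] == "*"
    assert "OPTIONS" in response.headers["Access-Control-Allow-Methods"]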
Example #7
    def __init__(self):
        """Create this backend."""
        super().__init__()
        init_celery(result_backend=False)
        init_selinon()


def run_server_flow(flow_name, flow_args):
    """Run the worker flow via Selinon."""
    init_celery(result_backend=False)
    init_selinon()
    return run_flow(flow_name, flow_args)
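Hypothetical usage of run_server_flow; the flow name and arguments are illustrative, and the return value is whatever Selinon's run_flow returns for the scheduled dispatcher:

if __name__ == "__main__":
    dispatcher_id = run_server_flow("exampleFlow", {"ecosystem": "npm", "name": "lodash"})
    print("Scheduled flow, dispatcher:", dispatcher_id)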