    @classmethod
    def connect_if_not_connected(cls):
        """Connect to Celery if not already connected; do it safely, as the server is concurrent."""
        if cls._connected:
            return

        with cls._setup_lock:
            if not cls._connected:
                # No result backend is needed on the server; we only push tasks to the message queue.
                init_celery(result_backend=False)
                init_selinon()
                cls._connected = True

    def __init__(self):
        self.log = logging.getLogger(__name__)
        formatter = logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        logging_handler = logging.StreamHandler(sys.stdout)
        logging_handler.setFormatter(formatter)
        self.log.addHandler(logging_handler)
        self.log.setLevel(logging.DEBUG)

        if ENABLE_SCHEDULING:
            init_celery(result_backend=False)
            init_selinon()
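
For the double-checked locking in connect_if_not_connected() above to work, the enclosing class needs a flag and a lock shared by all callers. A minimal sketch of those declarations, assuming only the attribute names that appear in the snippet (the class name is hypothetical):

import threading


class CeleryConnector:
    # Hypothetical host class; only the names _connected and _setup_lock
    # are taken from the snippet above.
    _connected = False                # flipped to True after the first init
    _setup_lock = threading.Lock()    # serializes concurrent first-time setup
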
Example #3
    def __init__(self):
        """Initialize Relase Monitor."""
        logger.info("Starting the monitor service")

        # Create PyPi monitor
        self.pypi_monitor = PypiMonitor()

        # Create NPM monitor
        self.npm_monitor = NPMMonitor()

        # Initialize Selinon if we want to run in production
        if ENABLE_SCHEDULING:
            init_celery(result_backend=False)
            init_selinon()
Example #4
def server_run_flow(flow_name, flow_args):
    """Run a flow.

    :param flow_name: name of flow to be run as stated in YAML config file
    :param flow_args: arguments for the flow
    :return: dispatcher ID handling flow
    """
    logger.info('Running flow {}'.format(flow_name))
    start = datetime.datetime.now()
    init_celery(result_backend=False)
    dispatcher_id = run_flow(flow_name, flow_args)
    elapsed_seconds = (datetime.datetime.now() - start).total_seconds()
    logger.info("It took {t} seconds to start {f} flow.".format(
        t=elapsed_seconds, f=flow_name))
    return dispatcher_id
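
A call might look like the following; the flow name and its arguments are illustrative placeholders, since the real names come from the YAML configuration:

# Hypothetical usage; 'exampleFlow' and its arguments are made up.
dispatcher_id = server_run_flow('exampleFlow', {'ecosystem': 'npm', 'name': 'lodash'})
logger.info('Flow is handled by dispatcher %s', dispatcher_id)
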
Example #5
def server_run_flow(flow_name, flow_args):
    """Run a flow.

    :param flow_name: name of flow to be run as stated in YAML config file
    :param flow_args: arguments for the flow
    :return: dispatcher ID handling flow
    """
    logger.debug('Running flow %s', flow_name)
    start = datetime.datetime.now()

    init_celery(result_backend=False)
    dispatcher_id = run_flow(flow_name, flow_args)

    logger.debug('It took %f seconds to start %s flow.',
                 (datetime.datetime.now() - start).total_seconds(), flow_name)
    return dispatcher_id
Example #6
def construct_analyses_report(ecosystem, from_date=None, to_date=None):
    """Construct analyses state report.

    :param ecosystem: name of the ecosystem
    :param from_date: datetime limitation
    :type from_date: datetime.datetime
    :param to_date: datetime limitation
    :type to_date: datetime.datetime
    :return: a dict describing the current system state
    :rtype: dict
    """
    report = {
        'report': {},
        'from_date': str(from_date) if from_date else None,
        'to_date': str(to_date) if to_date else None,
        'now': str(datetime.now())
    }

    # TODO: init only Selinon
    # Only the Selinon configuration is needed here; we don't have to connect
    # to queues, but let's stick with this for now.
    init_celery(result_backend=False)
    db = StoragePool.get_connected_storage('BayesianPostgres')

    finished_analyses = _get_finished_analyses_count(db, ecosystem, from_date,
                                                     to_date)
    unfinished_analyses = _get_unfinished_analyses_count(
        db, ecosystem, from_date, to_date)

    report['report']['ecosystem'] = ecosystem
    report['report']['analyses'] = finished_analyses + unfinished_analyses
    report['report']['analyses_finished'] = finished_analyses
    report['report']['analyses_unfinished'] = unfinished_analyses
    report['report'][
        'analyses_finished_unique'] = _get_unique_finished_analyses_count(
            db, ecosystem, from_date, to_date)
    report['report']['analyses_unique'] = _get_unique_analyses_count(
        db, ecosystem, from_date, to_date)
    report['report']['packages'] = _get_packages_count(db, ecosystem,
                                                       from_date, to_date)
    report['report']['packages_finished'] = _get_finished_packages_count(
        db, ecosystem, from_date, to_date)
    report['report']['versions'] = _get_versions_count(db, ecosystem,
                                                       from_date, to_date)

    return report
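
The returned dictionary then has the following shape; every value below is illustrative rather than taken from a real run:

# Illustrative report structure; all numbers and dates are made up.
{
    'report': {
        'ecosystem': 'npm',
        'analyses': 120,
        'analyses_finished': 100,
        'analyses_unfinished': 20,
        'analyses_finished_unique': 90,
        'analyses_unique': 110,
        'packages': 40,
        'packages_finished': 35,
        'versions': 80
    },
    'from_date': '2019-01-01 00:00:00',
    'to_date': None,
    'now': '2019-02-01 12:34:56.789012'
}
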
Example #7
    def __init__(self):
        """Constructor."""
        self.log = logging.getLogger(__name__)
        formatter = logging.Formatter('%(asctime)s - '
                                      '%(name)s - %(levelname)s'
                                      ' - %(message)s')
        logging_handler = logging.StreamHandler(sys.stdout)
        logging_handler.setFormatter(formatter)
        self.log.addHandler(logging_handler)
        self.log.setLevel(logging.DEBUG)
        self.old_npm_feed = None
        self.npm_feed = feedparser.parse(NPM_URL + "-/rss")
        self.old_pypi_feed = None
        self.pypi_feed = feedparser.parse(PYPI_URL + "rss/updates.xml")
        self.create_liveness_probe()

        if ENABLE_SCHEDULING:
            init_celery(result_backend=False)
            init_selinon()
Example #8
#!/usr/bin/env python
"""Start the application."""

import celery
import os
from f8a_worker.setup_celery import init_celery, init_selinon
import raven
from raven.contrib.celery import register_signal, register_logger_signal


class SentryCelery(celery.Celery):
    """Celery class to configure sentry."""
    def on_configure(self):
        """Set up sentry client."""
        dsn = os.environ.get("SENTRY_DSN")
        client = raven.Client(dsn)
        register_logger_signal(client)
        register_signal(client)


app = SentryCelery('tasks')
init_celery(app)
init_selinon(app)
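
Note that on_configure() is a hook Celery invokes when the application configuration is loaded, so the Sentry client is registered lazily rather than at import time. If SENTRY_DSN is unset, raven.Client(None) yields a disabled client and the worker still starts; it just reports nothing.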
Example #9
init_logging(logger)

app.add_api(defaults.SWAGGER_YAML_PATH)
app.add_api(defaults.SWAGGER_INGESTION_YAML_PATH)

# Expose for uWSGI
application.json_encoder = SafeJSONEncoder
manager = Manager(application)

# Needed for session
application.secret_key = defaults.APP_SECRET_KEY
oauth.init_app(application)

# Initializing Selinon and Celery while starting the application
logger.debug("Initializing Selinon")
init_celery(result_backend=False)
init_selinon()
logger.debug("Selinon initialized successfully")


@app.route('/')
def base_url():
    """Redirect client to the Swagger UI web page."""
    # Be nice to users and redirect them to the Swagger UI
    return redirect('api/v1/ui')


@app.route('/api/v1')
def api_v1():
    """Accept and respont to all REST API calls."""
    paths = []
Example #10
    def __init__(self):
        """Create this backend."""
        super().__init__()
        init_celery(result_backend=False)
        init_selinon()
Example #11
    def _init_celery(self):
        """Initialize Celery and connect to the broker."""
        if not self._initialized_celery:
            init_celery(result_backend=False)
            self._initialized_celery = True


def get_expiration_date():
    """Get expiration date after which GitHub data are considered outdated."""
    return datetime.utcnow() - timedelta(days=REFRESH_INTERVAL)


def get_limit(db):
    """Get limit how many GitHub statistics can be updated at once."""
    return math.ceil(_get_count(db) / REFRESH_INTERVAL / 2)
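
As a worked example: if _get_count(db) returns 1400 outdated records and REFRESH_INTERVAL is 7 (days), get_limit() returns ceil(1400 / 7 / 2) = 100, spreading the backlog over twice the refresh interval. Both numbers are illustrative; the real REFRESH_INTERVAL comes from configuration.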


def is_dry_run():
    """Return True if this is a dry run."""
    return os.environ.get('DRY_RUN', 'false').lower() in ('1', 'yes', 'true')
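
For instance, exporting DRY_RUN=YES, DRY_RUN=1 or DRY_RUN=true all make is_dry_run() return True; the check is case-insensitive and defaults to false when the variable is unset.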


def _get_count(db):
    """Count how many GitHub data are outdated."""
    return db.query(Upstream).filter(Upstream.updated_at < get_expiration_date())\
        .filter(Upstream.url.like('%github.com%'), Upstream.deactivated_at.is_(None)).count()


def _get_outdated(db, limit):
    """Get EPs with outdated GitHub data."""
    return db.query(Upstream).join(Package).join(Ecosystem).filter(
        Upstream.url.like('%github.com%'), Upstream.deactivated_at.is_(None)).limit(limit).all()


if __name__ == '__main__':
    init_celery()
    init_selinon()
    run(sessionmaker(bind=create_engine(CONN_STR))())
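
Note that, unlike the server-side snippets above, this entry point calls init_celery() with its default arguments, so the result backend stays enabled; presumably this standalone job reads task results rather than only dispatching flows.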


def run_server_flow(flow_name, flow_args):
    """Run the worker flow via Selinon."""
    init_celery(result_backend=False)
    init_selinon()
    return run_flow(flow_name, flow_args)
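
Unlike connect_if_not_connected() at the top of this page, run_server_flow() repeats the init_celery()/init_selinon() initialization on every call; the lock-guarded variant avoids that overhead on a concurrent server.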