def main(args):
    app = Celery('atmosphere')
    app.config_from_object('django.conf:settings')

    if args.logfile:
        handler = logging.FileHandler(args.logfile)
        handler.setFormatter(JSONFormatter())
        logger.addHandler(handler)

    handler = logstash.TCPLogstashHandler(args.host, args.port, version=1)
    logger.addHandler(handler)
    print("Monitoring started")

    while True:
        print("Sending new message")
        state = app.events.State()
        app_inspect = app.control.inspect()

        if app_inspect is not None:
            a = active_worker_and_task_count(app_inspect)
            r = reserve_count(app_inspect)
            t = total_connections_count()
            log_celery_info(active=a, reserved=r, connections=t)
        else:
            log_celery_info(error=0)

        # How often to monitor the machine
        time.sleep(args.rate)
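The helper functions used above (active_worker_and_task_count, reserve_count, total_connections_count, log_celery_info) are not part of this excerpt. A minimal sketch of what the first one might look like, assuming it only tallies what app.control.inspect() reports:

def active_worker_and_task_count(app_inspect):
    # Hypothetical helper, not from the original source: inspect.active()
    # returns a dict mapping each worker hostname to its list of
    # currently executing tasks (or None if no workers replied).
    active = app_inspect.active() or {}
    return {
        'workers': len(active),
        'tasks': sum(len(tasks) for tasks in active.values()),
    }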
Example #2
def make_celery(flask_app):

	"""

	While you can use Celery without any reconfiguration with Flask, 
	it becomes a bit nicer by subclassing tasks and adding support 
	for Flask's application contexts and hooking it up with the 
	Flask configuration.

	- http://flask.pocoo.org/docs/patterns/celery/

	"""

	celery = Celery()
	celery.config_from_object(settings.config)

	TaskBase = celery.Task
	class ContextTask(TaskBase):
	    abstract = True
	    def __call__(self, *args, **kwargs):
	        with flask_app.app_context():
	            return TaskBase.__call__(self, *args, **kwargs)
	celery.Task = ContextTask

	return celery
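A typical way to use this factory (the names below are illustrative, not from the original example) is to build the Flask app first, pass it in, and then register tasks on the returned Celery instance:

from flask import Flask

flask_app = Flask(__name__)
celery = make_celery(flask_app)

@celery.task
def add_together(a, b):
    # Runs inside flask_app.app_context(), so Flask extensions and
    # current_app are available to the task body.
    return a + b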
Example #3
def create_celery_app(app=None, settings_override=None):
    """Creates a celery app to perform asynchronous jobs.

    Params:
        app: A Flask app.
        settings_override: Override settings from config file.
    """
    app = app or create_app('silently_celery', os.path.dirname(__file__),
            settings_override)
    celery = Celery(__name__, broker=app.config['CELERY_BROKER_URL'])
    celery.conf.update(app.config)

    if app.debug:
        # Disable this while image data is being written to the log.
        celery.conf.CELERYD_LOG_LEVEL = LOG_LEVELS['DEBUG']

    class ContextTask(celery.Task):
        """Add Flask app context awareness."""
        abstract = True
        def __call__(self, *args, **kwargs):
            with app.app_context():
                return celery.Task.__call__(self, *args, **kwargs)

    celery.Task = ContextTask
    return celery
Example #4
def make_celery():
    app, conf = create_app()
    if app.config['REDIS_PASSWORD']:
        redis_broker = 'redis://:{}@{}:{}/0'.format(
            app.config['REDIS_PASSWORD'],
            app.config['REDIS_HOST'],
            app.config['REDIS_PORT'],
        )
    else:
        redis_broker = 'redis://{}:{}/0'.format(
            app.config['REDIS_HOST'],
            app.config['REDIS_PORT'],
        )
        app.logger.info('MIGRATE_MODE: {}, MIGRATE_ES: {}, Broker at {}'
                        .format(MIGRATE_MODE, MIGRATE_ES, app.config['REDIS_HOST']))
    celery = Celery(app.import_name, broker=redis_broker)
    celery.conf.update(app.config)
    celery.data_db = app.data_db
    # boilerplate to get our tasks running in the app context
    TaskBase = celery.Task
    class ContextTask(TaskBase):
        abstract = True
        def __call__(self, *args, **kwargs):
            with app.app_context():
                return TaskBase.__call__(self, *args, **kwargs)
    celery.Task = ContextTask
    return celery
Example #5
    def test_process_initializer(self, set_mp_process_title, _signals):
        from celery import Celery
        from celery import signals
        from celery.state import _tls
        from celery.concurrency.processes import process_initializer
        from celery.concurrency.processes import (WORKER_SIGRESET,
                                                  WORKER_SIGIGNORE)

        def on_worker_process_init(**kwargs):
            on_worker_process_init.called = True
        on_worker_process_init.called = False
        signals.worker_process_init.connect(on_worker_process_init)

        loader = Mock()
        loader.override_backends = {}
        app = Celery(loader=loader, set_as_current=False)
        app.loader = loader
        app.conf = AttributeDict(DEFAULTS)
        process_initializer(app, "awesome.worker.com")
        _signals.ignore.assert_any_call(*WORKER_SIGIGNORE)
        _signals.reset.assert_any_call(*WORKER_SIGRESET)
        self.assertTrue(app.loader.init_worker.call_count)
        self.assertTrue(on_worker_process_init.called)
        self.assertIs(_tls.current_app, app)
        set_mp_process_title.assert_called_with("celeryd",
                        hostname="awesome.worker.com")
    def test_install_worker_with_sudo_plugin(self):
        ctx = get_local_context()
        t.install(ctx)
        t.start(ctx)
        self.assert_installed_plugins(ctx)

        broker_url = 'amqp://*****:*****@localhost:5672//'
        c = Celery(broker=broker_url, backend=broker_url)
        kwargs = {'command': 'ls -l'}
        result = c.send_task(name='sudo_plugin.sudo.run',
                             kwargs=kwargs,
                             queue=ctx.properties['cloudify_agent']['name'])
        self.assertRaises(Exception, result.get, timeout=10)
        ctx = get_local_context()
        ctx.properties['cloudify_agent']['disable_requiretty'] = True
        t.install(ctx)
        t.start(ctx)
        self.assert_installed_plugins(ctx)

        broker_url = 'amqp://*****:*****@localhost:5672//'
        c = Celery(broker=broker_url, backend=broker_url)
        kwargs = {'command': 'ls -l'}
        result = c.send_task(name='sudo_plugin.sudo.run',
                             kwargs=kwargs,
                             queue=ctx.properties['cloudify_agent']['name'])
        result.get(timeout=10)
    def run(self):
        app = Celery(broker=self.options.broker)
        state = State()

        if self.options.factory is None:
            factory = CameraFactory(self.options.camera)
        else:
            CustomCameraFactory = import_class(self.options.factory)
            factory = CustomCameraFactory(self.options.camera)
        factory.frequency = self.options.frequency
        camera = factory.camera(state)

        with app.connection() as connection:
            camera.install()
            recv = app.events.Receiver(connection, handlers={
                'task-sent': state.task_sent,
                'task-started': state.task_started,
                'task-succeeded': state.task_succeeded,
                'task-failed': state.task_failed

            })
            try:
                recv.capture(limit=None, timeout=None, wakeup=False)
            except KeyboardInterrupt:
                raise SystemExit
            finally:
                camera.cancel()
Example #8
def create_application():
    """Create a Celery application using Django settings"""
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'readthedocs.settings.dev')
    application = Celery('readthedocs')
    application.config_from_object('django.conf:settings')
    application.autodiscover_tasks(None)
    return application
Example #9
    def test_process_initializer(self, set_mp_process_title, _signals):
        from celery import Celery
        from celery import signals
        from celery._state import _tls
        from celery.concurrency.processes import process_initializer
        from celery.concurrency.processes import (WORKER_SIGRESET,
                                                  WORKER_SIGIGNORE)

        def on_worker_process_init(**kwargs):
            on_worker_process_init.called = True
        on_worker_process_init.called = False
        signals.worker_process_init.connect(on_worker_process_init)

        loader = Mock()
        loader.override_backends = {}
        app = Celery(loader=loader, set_as_current=False)
        app.loader = loader
        app.conf = AttributeDict(DEFAULTS)
        process_initializer(app, 'awesome.worker.com')
        _signals.ignore.assert_any_call(*WORKER_SIGIGNORE)
        _signals.reset.assert_any_call(*WORKER_SIGRESET)
        self.assertTrue(app.loader.init_worker.call_count)
        self.assertTrue(on_worker_process_init.called)
        self.assertIs(_tls.current_app, app)
        set_mp_process_title.assert_called_with(
            'celeryd', hostname='awesome.worker.com',
        )

        with patch('celery.task.trace.setup_worker_optimizations') as swo:
            os.environ['FORKED_BY_MULTIPROCESSING'] = "1"
            try:
                process_initializer(app, 'luke.worker.com')
                swo.assert_called_with(app)
            finally:
                os.environ.pop('FORKED_BY_MULTIPROCESSING', None)
Example #10
class InspireCeleryPushPipeline(InspireAPIPushPipeline):
    """Push to INSPIRE API via Celery."""

    def __init__(self):
        from celery import Celery

        super(InspireCeleryPushPipeline, self).__init__()
        self.celery = Celery()

    def open_spider(self, spider):
        self.celery.conf.update(dict(
            BROKER_URL=spider.settings['BROKER_URL'],
            CELERY_RESULT_BACKEND=spider.settings['CELERY_RESULT_BACKEND'],
            CELERY_ACCEPT_CONTENT=spider.settings['CELERY_ACCEPT_CONTENT'],
            CELERY_TIMEZONE=spider.settings['CELERY_TIMEZONE'],
            CELERY_DISABLE_RATE_LIMITS=spider.settings['CELERY_DISABLE_RATE_LIMITS'],
            CELERY_TASK_SERIALIZER='json',
            CELERY_RESULT_SERIALIZER='json',
        ))

    def close_spider(self, spider):
        """Post results to BROKER API."""
        if 'SCRAPY_JOB' in os.environ and self.count > 0:
            task_endpoint = spider.settings['API_PIPELINE_TASK_ENDPOINT_MAPPING'].get(
                spider.name, spider.settings['API_PIPELINE_TASK_ENDPOINT_DEFAULT']
            )
            self.celery.send_task(
                task_endpoint,
                kwargs=self._prepare_payload(spider),
            )

        self._cleanup(spider)
def get_celery_app():
	conf = get_site_config()
	app = Celery('frappe',
			broker=conf.celery_broker or DEFAULT_CELERY_BROKER,
			backend=conf.async_redis_server or DEFAULT_CELERY_BACKEND)

	app.autodiscover_tasks(frappe.get_all_apps(with_frappe=True, with_internal_apps=False,
		sites_path=SITES_PATH))

	app.conf.CELERY_TASK_SERIALIZER = 'json'
	app.conf.CELERY_ACCEPT_CONTENT = ['json']
	app.conf.CELERY_TIMEZONE = 'UTC'
	app.conf.CELERY_RESULT_SERIALIZER = 'json'
	app.conf.CELERY_TASK_RESULT_EXPIRES = timedelta(0, 3600)

	if conf.monitory_celery:
		app.conf.CELERY_SEND_EVENTS = True
		app.conf.CELERY_SEND_TASK_SENT_EVENT = True

	app.conf.CELERY_ROUTES = (SiteRouter(), AsyncTaskRouter())

	app.conf.CELERYBEAT_SCHEDULE = get_beat_schedule(conf)

	if conf.celery_error_emails:
		app.conf.CELERY_SEND_TASK_ERROR_EMAILS = True
		for k, v in conf.celery_error_emails.iteritems():
			setattr(app.conf, k, v)

	return app
Example #12
def add_scan_for_project_with_repo(repo_url: str, branch: str = None):
    """
    If a project with repo_url exists in the database, adds a scan to it

    :param repo_url: (str) repo url for the project to launch the scan
    :param branch: (str, Optional) branch for the project to launch the scan
    :return:
    """
    assert type(repo_url) is str

    with db.session_scope() as session:

        project = get_project_by_repo(repo_url, session)

        allowed_scan = True
        if ALLOWED_SCANS_PER_PERIOD > 0:
            previous_scans = get_num_scans_in_last_minutes(project.id, ALLOWED_SCANS_CHECK_PERIOD, session)
            allowed_scan = previous_scans < ALLOWED_SCANS_PER_PERIOD

        if allowed_scan:
            scan = add_scan(project.id, session, branch=branch)
            session.commit()

            celery = Celery('deeptracy', broker=BROKER_URI)
            celery.send_task('prepare_scan', [scan.id])
        else:
            raise APIError("can't create more scans", status_code=503)
Example #13
def check_heartbeat():
    celery_monitoring = getattr(settings, 'CELERY_FLOWER_URL', None)
    if celery_monitoring:
        all_workers = requests.get(
            celery_monitoring + '/api/workers',
            params={'status': True},
            timeout=3,
        ).json()
        bad_workers = []
        expected_running, expected_stopped = parse_celery_workers(all_workers)

        celery = Celery()
        celery.config_from_object(settings)
        worker_responses = celery.control.ping(timeout=10)
        pings = parse_celery_pings(worker_responses)

        for hostname in expected_running:
            if hostname not in pings or not pings[hostname]:
                bad_workers.append('* {} celery worker down'.format(hostname))

        for hostname in expected_stopped:
            if hostname in pings:
                bad_workers.append(
                    '* {} celery worker is running when we expect it to be stopped.'.format(hostname)
                )

        if bad_workers:
            return ServiceStatus(False, '\n'.join(bad_workers))

    is_alive = heartbeat.is_alive()
    return ServiceStatus(is_alive, "OK" if is_alive else "DOWN")
Example #14
def make_celery(app):
    cel = Celery(app.import_name, broker=app.config['CELERY_BROKER_URL'])
    # celery.conf.update(
        # CELERY_IMPORTS=(
            # 'buyapi.tasks',   # we're not including our tasks here as
            # 'app.module_b.tasks',   # our tasks are in other files listed here
        # )
    # )
    cel.conf.update(CELERYBEAT_SCHEDULE={
        # Executes once every minute
        'every_minute': {
            'task': 'tasks.regular.run_every_minute',
            'schedule': timedelta(minutes=1),
        },
    }, CELERY_TIMEZONE='Europe/Moscow')

    cel.conf.update(app.config)
    # cel.conf.update(CELERY_TASK_RESULT_EXPIRES=10)
    TaskBase = cel.Task

    class ContextTask(TaskBase):
        abstract = True

        def __call__(self, *args, **kwargs):
            with app.app_context():
                return TaskBase.__call__(self, *args, **kwargs)
    cel.Task = ContextTask
    return cel
    def celeryClient(self, actorName, url="redis://localhost:9999/0", actorsPath="actors", local=False):
        if actorName in self.actors:
            return self.actors[actorName]

        if self.app is None:

            from celery import Celery

            app = Celery('tasks', broker=url)

            app.conf.update(
                CELERY_TASK_SERIALIZER='json',
                CELERY_ACCEPT_CONTENT=['json'],  # Ignore other content
                CELERY_RESULT_SERIALIZER='json',
                CELERY_TIMEZONE='Europe/Oslo',
                CELERY_ENABLE_UTC=True,
                CELERY_RESULT_BACKEND = 'rpc',
                CELERY_RESULT_PERSISTENT = True,
                # CELERY_RESULT_BACKEND = BROKER_URL,
            )

            if local:
                app.conf["CELERY_ALWAYS_EAGER"] = False

            self.app=app
        else:
            app=self.app

        code = self.getCodeClient(actorsPath, actorName=actorName)
        exec(code, locals(), globals())

        self.actors[actorName] = eval(actorName)

        return self.actors[actorName]
Example #16
def post_delta():
    """Add new vulnerabilities on the database

    Add new vulnerabilities on existing project

    Example:
        Body
        {
          "project_id": "00001",
          "lang": "javascript",
          "branch": "develop" //Optional
        }

    :return codes:  201 on success
                    400 on errors
    """
    with db.session_scope() as session:
        data = request.get_json()

        if not data:
            return api_error_response('invalid payload'), 400

        celery = Celery('deeptracy', broker=BROKER_URI)
        celery.send_task('notify_patton_deltas', [data])
        return "Se ha procesado correctamente", 201
Example #17
    def test_no_conn_driver_info(self):
        app = Celery(set_as_current=False)
        app.connection = Mock()
        conn = app.connection.return_value = Mock()
        conn.transport = None

        bugreport(app)
Example #18
    def test_add_defaults(self):
        app = Celery(set_as_current=False)

        self.assertFalse(app.configured)
        _conf = {'FOO': 300}
        conf = lambda: _conf
        app.add_defaults(conf)
        self.assertIn(conf, app._pending_defaults)
        self.assertFalse(app.configured)
        self.assertEqual(app.conf.FOO, 300)
        self.assertTrue(app.configured)
        self.assertFalse(app._pending_defaults)

        # defaults not pickled
        appr = loads(dumps(app))
        with self.assertRaises(AttributeError):
            appr.conf.FOO

        # add more defaults after configured
        conf2 = {'FOO': 'BAR'}
        app.add_defaults(conf2)
        self.assertEqual(app.conf.FOO, 'BAR')

        self.assertIn(_conf, app.conf.defaults)
        self.assertIn(conf2, app.conf.defaults)
Example #19
    def test_registers_to_all_apps(self):
        xproj = Celery('xproj')
        xproj.finalize()

        @shared_task
        def foo():
            return 42

        @shared_task()
        def bar():
            return 84

        self.assertIs(foo.app, xproj)
        self.assertIs(bar.app, xproj)
        self.assertTrue(foo._get_current_object())

        yproj = Celery('yproj')
        self.assertIs(foo.app, yproj)
        self.assertIs(bar.app, yproj)

        @shared_task()
        def baz():
            return 168

        self.assertIs(baz.app, yproj)
Example #20
def create_celery(app):
    global __celery
    if __celery:
        return __celery

    celery = Celery(
        app.import_name,
        backend=app.config['CELERY_RESULT_BACKEND'],
        broker=app.config['BROKER_URL'],
    )
    celery.conf.update(app.config)

    TaskBase = celery.Task

    class ContextTask(TaskBase):
        abstract = True

        def __call__(self, *args, **kwargs):
            with app.app_context():
                db.session = db.create_scoped_session()
                try:
                    response = TaskBase.__call__(self, *args, **kwargs)
                finally:
                    db.session.remove()
                return response

    celery.Task = ContextTask

    __celery = celery
    return __celery
Example #21
def create_celery_app(app=None):
    """Create a new Celery object and tie together the Celery config to the
    app's config. Wrap all tasks in the context of the application.

    Args:
        app (Flask)

    Returns:
        celery (Celery)

    """

    app = app or create_app()

    celery = Celery(app.import_name, broker=app.config['CELERY_BROKER_URL'],
                    include=CELERY_TASK_LIST)
    celery.conf.update(app.config)
    TaskBase = celery.Task

    class ContextTask(TaskBase):
        abstract = True

        def __call__(self, *args, **kwargs):
            with app.app_context():
                return TaskBase.__call__(self, *args, **kwargs)

    celery.Task = ContextTask
    return celery
Example #22
def setup():
    global app
    # pylint:disable=import-error
    from celery import Celery
    app = Celery()
    app.config_from_object('django.conf:settings')
    app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
Example #23
def make_celery(app):
    """
    Set up the Celery object as recommended by Flask's documentation.
    :param app:
    :return:
    """
    celery = Celery(app.import_name)

    # Determine which Celery configuration to load:
    # The order is:
    # 1. `SM_CELERY_CONFIG` Environment Variable
    # 2. The default "celeryconfig.py"
    celery.config_from_object(get_celery_config_file())
    celery.conf.update(app.config)

    TaskBase = celery.Task

    class ContextTask(TaskBase):
        abstract = True

        def __call__(self, *args, **kwargs):
            with app.app_context():
                return TaskBase.__call__(self, *args, **kwargs)

    celery.Task = ContextTask
    return celery
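get_celery_config_file() is not shown in this excerpt. Based on the comment above, a plausible sketch (an assumption, not the project's actual implementation) would simply prefer the SM_CELERY_CONFIG environment variable and fall back to the default celeryconfig module:

import os

def get_celery_config_file():
    # Hypothetical helper: return the module path handed to
    # celery.config_from_object(), preferring the environment override.
    return os.environ.get('SM_CELERY_CONFIG', 'celeryconfig')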
Example #24
class Broker(object):
    def __init__(self):
#        self.broker = Celery('demo',
#                            broker = 'amqp://mw01/',
#                            backend = 'redis://mw01/')
        self.broker = Celery('demo')
        self.broker.config_from_object('django.conf:settings')

    def execute(self, task_name, *args, **kwargs):
        logger.debug('Calling task ' + task_name + '...')
        result = self.broker.send_task(task_name, args=args, kwargs=kwargs)
        counter = 0
        times_to_try = 5
        time_interval = 1
        while counter < times_to_try:
            if result.ready():
                logger.debug('Got result!')
                return {'result': result.result}
            else:
                logger.debug('Waiting for %d second(s)...' % time_interval)
                time.sleep(time_interval)
                counter += 1
                time_interval += 1
        if counter >= times_to_try:
            return {'error': "I'm not that patient, haven't got the result"}

    def process_request(self, request):
        request.broker = Broker()
Example #25
class CeleryTasks:

    def __init__(self, key_id, key_secret, region, timeout=None, polling_interval=None):
        self.celery = Celery(broker='sqs://%s:%s@' % (key_id, key_secret))

        self.celery.conf.update(
            BROKER_TRANSPORT_OPTIONS = {
                'region': region,
                'visibility_timeout': timeout or 120,
                'polling_interval': polling_interval or 20,
                'queue_name_prefix': 'celery-remotecv-'
            }
        )

    def get_detect_task(self):
        @self.celery.task(ignore_result=True, acks_late=True)
        def detect_task(detection_type, image_path, key):
            DetectTask.perform(detection_type, image_path, key)

        return detect_task

    def run_commands(self, args, log_level=None):
        # We have to init the task so it can be found by the worker later
        self.get_detect_task()

        if log_level:
            self.celery.conf.update(CELERYD_LOG_LEVEL=log_level)
        self.celery.start(args)
Example #26
def make_celery():
    """
    Configures Celery task queue
    """
    celery = Celery("tasks", broker=SETTINGS['MESSAGE_BROKER_URL'],
                    backend=SETTINGS['REDIS_URL'])
    # Settings to adjust to the AMQP message quota
    celery.conf.update(
            CELERY_TASK_SERIALIZER='pickle',
            CELERY_ACCEPT_CONTENT=['pickle'],
            CELERY_RESULT_SERIALIZER='json',
            BROKER_POOL_LIMIT=1,
            CELERYD_CONCURRENCY=2,
            CELERYD_PROCESSES=1
    )
    TaskBase = celery.Task

    class ContextTask(TaskBase):
        abstract = True

        def __call__(self, *args, **kwargs):
            return TaskBase.__call__(self, *args, **kwargs)

    celery.Task = ContextTask
    return celery
Example #27
def main(argv):
    try:
        idx = argv.index("--")
    except ValueError:
        calc_opts = []
    else:
        calc_opts = argv[(idx + 1):]
        del argv[idx:]
    parser = make_parser()
    args = parser.parse_args(argv[1:])
    whitelist = get_whitelist(args.include) if args.include else None

    celery = Celery(broker=args.broker)

    with open(args.log, "w") as fo:
        for i, (subdir_bn, avro_bn) in enumerate(iter_input(args.input_dir)):
            if whitelist and (subdir_bn, avro_bn) not in whitelist:
                continue
            if args.limit and i >= args.limit:
                break

            celery_args = get_celery_args(args, calc_opts, subdir_bn, avro_bn)

            if args.dry_run:
                print(celery_args)
            else:
                r = celery.send_task('tasks.run_docker', kwargs=celery_args)
                fo.write(str(r) + "\n")
Example #28
def create_celery_app(flask_app=None):
    logger.debug("Creating celery app")
    if not flask_app:
        if has_app_context():
            logger.debug("Using current flask app")
            app = current_app
        else:
            logger.debug("No current flask app")
            app = create_app()
    else:
        app = flask_app
    celery = Celery(app.import_name,
                    broker=app.config['CELERY_BROKER_URL'],
                    backend=app.config['CELERY_RESULT_BACKEND'])
    celery.conf.update(app.config)
    TaskBase = celery.Task
    class ContextTask(TaskBase):
        abstract = True
        def __call__(self, *args, **kwargs):
            with app.app_context():
                return TaskBase.__call__(self, *args, **kwargs)
    celery.Task = ContextTask
    celery.logger = get_task_logger(__name__)
    app.celery = celery
    return app
Example #29
def run(config_path, args):
    args.append("-B")
    args.append("-l")
    args.append("INFO")
    core.load_config(config_path)
    core.load_storage()
    core.load_session()

    broker = core.config.get("worker", "broker")
    interval = core.config.get("worker", "interval")
    retention = core.config.get("worker", "retention")
    story_before_retention = core.config.get("worker", "story_before_retention")

    celery = Celery("tasks", broker=broker)

    @celery.task
    def refresh_all():
        helpers.refresh_all()

    @celery.task
    def run_retention(delta):
        helpers.run_retention(delta)

    celery.conf.CELERYBEAT_SCHEDULE = {
        "refresh-job": {"task": "feedcollector.worker.refresh_all", "schedule": timedelta(minutes=int(interval))},
        "retention-job": {
            "task": "feedcollector.worker.run_retention",
            "schedule": timedelta(days=1),
            "args": (int(retention), int(story_before_retention)),
        },
    }
    celery.conf.INSTALLED_APPS = ("tasks.refresh_all", "tasks.run_retention")

    print("Args: %s" % " ".join(args))
    celery.start(args)
Example #30
def make_celery(app):
    # default to using JSON, rather than Pickle (Celery's default, but Celery
    # warns about it)
    for var, dfl in [
            ('CELERY_ACCEPT_CONTENT', ['json']),
            ('CELERY_TASK_SERIALIZER', 'json'),
            ('CELERY_RESULT_SERIALIZER', 'json')]:
        app.config[var] = app.config.get(var, dfl)
    broker = app.config.get('CELERY_BROKER_URL', 'memory://')
    celery = Celery(app.import_name, broker=broker)
    celery.conf.update(app.config)
    TaskBase = celery.Task

    class ContextTask(TaskBase):
        abstract = True

        def __call__(self, *args, **kwargs):
            with app.app_context():
                try:
                    return TaskBase.__call__(self, *args, **kwargs)
                finally:
                    # flush any open DB sessions used by this task
                    current_app.db.flush_sessions()

    celery.Task = ContextTask
    app.celery_tasks = dict((fn, celery.task(**kwargs)(fn))
                            for (fn, kwargs) in _defined_tasks.iteritems())
    return celery
Example #31
from __future__ import absolute_import, unicode_literals

import os

from celery import Celery

# set the default Django settings module for the 'celery' program.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "gnkg.settings")

app = Celery("gnkg")

# Using a string here means the worker doesn't have to serialize
# the configuration object to child processes.
# - namespace='CELERY' means all celery-related configuration keys
#   should have a `CELERY_` prefix.
app.config_from_object("django.conf:settings", namespace="CELERY")

# Load task modules from all registered Django app configs.
app.autodiscover_tasks()


@app.task(bind=True)
def debug_task(self):
    print("Request: {0!r}".format(self.request))
Example #32
import os
from celery import Celery
from django.apps import apps, AppConfig
from django.conf import settings

if not settings.configured:
    # set the default Django settings module for the 'celery' program.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE',
                          'config.settings.local')  # pragma: no cover

app = Celery('bug_report_tool')


class CeleryConfig(AppConfig):
    name = 'bug_report_tool.taskapp'
    verbose_name = 'Celery Config'

    def ready(self):
        # Using a string here means the worker will not have to
        # pickle the object when using Windows.
        app.config_from_object('django.conf:settings')
        installed_apps = [
            app_config.name for app_config in apps.get_app_configs()
        ]
        app.autodiscover_tasks(lambda: installed_apps, force=True)

        if hasattr(settings, 'RAVEN_CONFIG'):
            # Celery signal registration

            from raven import Client as RavenClient
            from raven.contrib.celery import register_signal as raven_register_signal
Example #33
import os
from celery import Celery

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'django_traceability.settings')

app = Celery(
    'django_traceability',
    broker='amqp://*****:*****@localhost',
    #include=['django_traceability.forum.tasks']
)
app.config_from_object('django.conf:settings', namespace='CELERY')
app.autodiscover_tasks(['forum'])

if __name__ == '__main__':
    app.start()
Example #35
class CeleryExt(Connector):

    CELERYBEAT_SCHEDULER: Optional[str] = None
    celery_app: Celery = Celery("RAPyDo")

    def get_connection_exception(self):
        return None

    @staticmethod
    def get_rabbit_url(variables: Dict[str, str], protocol: str) -> str:
        host = variables.get("host")
        port = Env.to_int(variables.get("port"))
        vhost = variables.get("vhost", "")
        vhost = f"/{vhost}"

        user = variables.get("user", "")
        pwd = variables.get("password", "")
        creds = ""
        if user and pwd:
            creds = f"{user}:{pwd}@"

        return f"{protocol}://{creds}{host}:{port}{vhost}"

    @staticmethod
    def get_redis_url(variables: Dict[str, str], protocol: str) -> str:
        host = variables.get("host")
        port = Env.to_int(variables.get("port"))
        pwd = variables.get("password", "")
        creds = ""
        if pwd:
            creds = f":{pwd}@"

        return f"{protocol}://{creds}{host}:{port}/0"

    @staticmethod
    def get_mongodb_url(variables: Dict[str, str], protocol: str) -> str:
        host = variables.get("host")
        port = Env.to_int(variables.get("port"))
        user = variables.get("user", "")
        pwd = variables.get("password", "")

        creds = ""
        if user and pwd:
            creds = f"{user}:{pwd}@"

        return f"{protocol}://{creds}{host}:{port}"

    def connect(self, **kwargs):

        variables = self.variables.copy()
        variables.update(kwargs)
        broker = variables.get("broker")

        if broker is None:  # pragma: no cover
            print_and_exit("Unable to start Celery, missing broker service")

        if broker == "RABBIT":
            service_vars = Env.load_variables_group(prefix="rabbitmq")

            self.celery_app.conf.broker_use_ssl = Env.to_bool(
                service_vars.get("ssl_enabled"))

            self.celery_app.conf.broker_url = self.get_rabbit_url(
                service_vars, protocol="amqp")

        elif broker == "REDIS":
            service_vars = Env.load_variables_group(prefix="redis")

            self.celery_app.conf.broker_use_ssl = False

            self.celery_app.conf.broker_url = self.get_redis_url(
                service_vars, protocol="redis")

        else:  # pragma: no cover
            print_and_exit(
                "Unable to start Celery: unknown broker service: {}", broker)

        log.info(
            "Configured {} as broker {}",
            broker,
            obfuscate_url(self.celery_app.conf.broker_url),
        )
        # From the guide: "Default: Taken from broker_url."
        # But it is not true, connection fails if not explicitly set
        self.celery_app.conf.broker_read_url = self.celery_app.conf.broker_url
        self.celery_app.conf.broker_write_url = self.celery_app.conf.broker_url

        backend = variables.get("backend", broker)

        if backend == "RABBIT":
            service_vars = Env.load_variables_group(prefix="rabbitmq")

            log.warning(
                "RABBIT backend is quite limited and not fully supported. "
                "Consider enabling Redis or MongoDB as a backend instead")
            self.celery_app.conf.result_backend = self.get_rabbit_url(
                service_vars, protocol="rpc")

        elif backend == "REDIS":
            service_vars = Env.load_variables_group(prefix="redis")

            self.celery_app.conf.result_backend = self.get_redis_url(
                service_vars, protocol="redis")
            # set('redis_backend_use_ssl', kwargs.get('redis_backend_use_ssl'))

        elif backend == "MONGODB":
            service_vars = Env.load_variables_group(prefix="mongo")

            self.celery_app.conf.result_backend = self.get_mongodb_url(
                service_vars, protocol="mongodb")

        else:  # pragma: no cover
            print_and_exit(
                "Unable to start Celery: unknown backend service: {}", backend)

        log.info(
            "Configured {} as backend {}",
            backend,
            obfuscate_url(self.celery_app.conf.result_backend),
        )

        # Should be enabled?
        # Default: Disabled by default (transient messages).
        # If set to True, result messages will be persistent.
        # This means the messages won’t be lost after a broker restart.
        # self.celery_app.conf.result_persistent = True

        # Skip initial warnings, avoiding pickle format (deprecated)
        self.celery_app.conf.accept_content = ["json"]
        self.celery_app.conf.task_serializer = "json"
        self.celery_app.conf.result_serializer = "json"

        # Already enabled by default to use UTC
        # self.celery_app.conf.enable_utc
        # self.celery_app.conf.timezone

        # Not needed, because tasks are dynamically injected
        # self.celery_app.conf.imports
        # self.celery_app.conf.includes

        # Max priority default value for all queues
        # Required to be able to set priority parameter on task calls
        self.celery_app.conf.task_queue_max_priority = 10

        # Default priority for tasks (if not specified)
        self.celery_app.conf.task_default_priority = 5

        # If you want to apply a more strict priority to items
        # probably prefetching should also be disabled:

        # Late ack means the task messages will be acknowledged after the task
        # has been executed, not just before (the default behavior).
        # self.celery_app.conf.task_acks_late = True

        # How many messages to prefetch at a time multiplied by the number
        # of concurrent processes. The default is 4 (four messages for each process).
        # The default setting is usually a good choice, however – if you have very
        # long running tasks waiting in the queue and you have to start the workers,
        # note that the first worker to start will receive four times the number
        # of messages initially. Thus the tasks may not be fairly distributed to
        # the workers. To disable prefetching, set worker_prefetch_multiplier to 1.
        # Changing that setting to 0 will allow the worker to keep consuming as many
        # messages as it wants.
        self.celery_app.conf.worker_prefetch_multiplier = 1

        if Env.get_bool("CELERYBEAT_ENABLED"):

            CeleryExt.CELERYBEAT_SCHEDULER = backend

            if backend == "MONGODB":
                service_vars = Env.load_variables_group(prefix="mongo")
                url = self.get_mongodb_url(service_vars, protocol="mongodb")
                SCHEDULER_DB = "celery"
                self.celery_app.conf[
                    "CELERY_MONGODB_SCHEDULER_DB"] = SCHEDULER_DB
                self.celery_app.conf[
                    "CELERY_MONGODB_SCHEDULER_COLLECTION"] = "schedules"
                self.celery_app.conf["CELERY_MONGODB_SCHEDULER_URL"] = url

                import mongoengine

                m = mongoengine.connect(SCHEDULER_DB, host=url)
                log.info("Celery-beat connected to MongoDB: {}", m)
            elif backend == "REDIS":

                service_vars = Env.load_variables_group(prefix="redis")
                url = self.get_redis_url(service_vars, protocol="redis")

                self.celery_app.conf["REDBEAT_REDIS_URL"] = url
                self.celery_app.conf["REDBEAT_KEY_PREFIX"] = REDBEAT_KEY_PREFIX
                log.info("Celery-beat connected to Redis: {}",
                         obfuscate_url(url))
            else:  # pragma: no cover
                log.warning(
                    "Cannot configure celery beat scheduler with backend: {}",
                    backend)

        # self.disconnected = False

        conf = self.celery_app.conf
        # Replace the previous App with new settings
        self.celery_app = Celery("RAPyDo",
                                 broker=conf["broker_url"],
                                 backend=conf["result_backend"])
        self.celery_app.conf = conf

        for funct in Meta.get_celery_tasks(f"{CUSTOM_PACKAGE}.tasks"):
            # Weird errors due to celery-stubs?
            # "Callable[[], Any]" has no attribute "register"
            # The code is correct... let's ignore it
            self.celery_app.tasks.register(funct)  # type: ignore

        return self

    def disconnect(self) -> None:
        self.disconnected = True

    def is_connected(self) -> bool:

        log.warning("celery.is_connected method is not implemented")
        return not self.disconnected

    @classmethod
    def get_periodic_task(cls, name: str) -> Any:

        if cls.CELERYBEAT_SCHEDULER == "MONGODB":
            from celerybeatmongo.models import DoesNotExist, PeriodicTask

            try:
                return PeriodicTask.objects.get(name=name)
            except DoesNotExist:
                return None
        if cls.CELERYBEAT_SCHEDULER == "REDIS":
            from redbeat.schedulers import RedBeatSchedulerEntry

            try:
                task_key = f"{REDBEAT_KEY_PREFIX}{name}"
                return RedBeatSchedulerEntry.from_key(task_key,
                                                      app=CeleryExt.celery_app)
            except KeyError:
                return None
        raise AttributeError(
            f"Unsupported celery-beat scheduler: {cls.CELERYBEAT_SCHEDULER}")

    @classmethod
    def delete_periodic_task(cls, name: str) -> bool:
        t = cls.get_periodic_task(name)
        if t is None:
            return False
        t.delete()
        return True

    # period = ('days', 'hours', 'minutes', 'seconds', 'microseconds')
    @classmethod
    def create_periodic_task(
        cls,
        name: str,
        task: str,
        every: Union[str, int, timedelta],
        period: AllowedTimedeltaPeriods = "seconds",
        args: List[Any] = None,
        kwargs: Dict[str, Any] = None,
    ) -> None:
        if args is None:
            args = []
        if kwargs is None:
            kwargs = {}

        if cls.CELERYBEAT_SCHEDULER == "MONGODB":
            from celerybeatmongo.models import PeriodicTask

            PeriodicTask(
                name=name,
                task=task,
                enabled=True,
                args=args,
                kwargs=kwargs,
                interval=PeriodicTask.Interval(every=every, period=period),
            ).save()
        elif cls.CELERYBEAT_SCHEDULER == "REDIS":
            from celery.schedules import schedule
            from redbeat.schedulers import RedBeatSchedulerEntry

            # convert strings and integers to timedeltas
            if isinstance(every, str) and every.isdigit():
                every = get_timedelta(int(every), period)
            elif isinstance(every, int):
                every = get_timedelta(every, period)

            if not isinstance(every, timedelta):
                t = type(every).__name__
                raise AttributeError(
                    f"Invalid input parameter every = {every} (type {t})")
            interval = schedule(run_every=every)
            entry = RedBeatSchedulerEntry(name,
                                          task,
                                          interval,
                                          args=args,
                                          app=CeleryExt.celery_app)
            entry.save()

        else:
            raise AttributeError(
                f"Unsupported celery-beat scheduler: {cls.CELERYBEAT_SCHEDULER}"
            )

    @classmethod
    def create_crontab_task(
        cls,
        name: str,
        task: str,
        minute: str,
        hour: str,
        day_of_week: str = "*",
        day_of_month: str = "*",
        month_of_year: str = "*",
        args: List[Any] = None,
        kwargs: Dict[str, Any] = None,
    ) -> None:

        if args is None:
            args = []
        if kwargs is None:
            kwargs = {}

        if cls.CELERYBEAT_SCHEDULER == "MONGODB":
            from celerybeatmongo.models import PeriodicTask

            PeriodicTask(
                name=name,
                task=task,
                enabled=True,
                args=args,
                kwargs=kwargs,
                crontab=PeriodicTask.Crontab(
                    minute=minute,
                    hour=hour,
                    day_of_week=day_of_week,
                    day_of_month=day_of_month,
                    month_of_year=month_of_year,
                ),
            ).save()
        elif cls.CELERYBEAT_SCHEDULER == "REDIS":
            from celery.schedules import crontab
            from redbeat.schedulers import RedBeatSchedulerEntry

            interval = crontab(
                minute=minute,
                hour=hour,
                day_of_week=day_of_week,
                day_of_month=day_of_month,
                month_of_year=month_of_year,
            )

            entry = RedBeatSchedulerEntry(name,
                                          task,
                                          interval,
                                          args=args,
                                          app=CeleryExt.celery_app)
            entry.save()

        else:
            raise AttributeError(
                f"Unsupported celery-beat scheduler: {cls.CELERYBEAT_SCHEDULER}"
            )
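A rough usage sketch for the scheduling helpers above; the task path mypackage.tasks.cleanup is a made-up placeholder:

# Run a task every 30 minutes...
CeleryExt.create_periodic_task(
    name="cleanup-every-30-minutes",
    task="mypackage.tasks.cleanup",
    every=30,
    period="minutes",
)

# ...or every night at 02:15 via a crontab entry.
CeleryExt.create_crontab_task(
    name="nightly-cleanup",
    task="mypackage.tasks.cleanup",
    minute="15",
    hour="2",
)

# Remove it again when it is no longer needed.
CeleryExt.delete_periodic_task("nightly-cleanup")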
Example #36
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
from __future__ import absolute_import
import os
from celery import Celery

from django.conf import settings

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'hardware.settings')

app = Celery('hardware')
app.config_from_object('django.conf:settings')
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
Example #37
from celery import Celery
from kombu import Exchange, Queue

app = Celery('sim_worker',
            # broker='redis://redis:6379/0',
            # backend='redis://redis:6379/0',
             broker='redis://128.3.111.21:6379/0', # <= redis IP goes here
             backend='redis://128.3.111.21:6379/0', # <= redis IP goes here
             include=['sim_worker.tasks'])

# This configuration should be the same everywhere the sim_worker celery application is used
# /front-end/django-project/sim_worker/celery.py
# /front_end/worker/sim_worker/celery.py
# /front_end/dummy_worker/sim_worker/celery.py
# If you modify one, please copy/paste the modifications into the others
app.conf.update(
    CELERY_TASK_RESULT_EXPIRES=600,
    CELERY_TRACK_STARTED=True,
    CELERY_ACCEPT_CONTENT = ['json'],
    CELERY_RESULT_SERIALIZER = 'json',
    CELERY_TASK_SERIALIZER = 'json',
    CELERY_DEFAULT_QUEUE = 'sim_worker',
    CELERY_QUEUES = (
        Queue('sim_worker', Exchange('sim_worker'), routing_key='sim_worker'),
    )
    # The default queue is changed to sim_worker to make sure those tasks are executed by the simulation worker
    # (and not by the worker that runs celery_beat on the wsgi container, which uses the same redis DB)
)

if __name__ == '__main__':
    app.start()
import subprocess
import os

from celery import Celery

app = Celery('tasks', backend='rpc://', broker='pyamqp://[email protected]//')

WORKER_REFERENCE_DIR="/tank/genome_data/references/"  # This should be a directory on the worker's local drive (to avoid network I/O) where we keep the references

TMP_DIR="/tank/test/tmp/"
BIN_DIR="/tank/test/bin/" # contains aengine executable

AENGINE_EXECUTABLE = "TMPDIR=" + TMP_DIR + " " + BIN_DIR + "aengine"

FASTQ_DIR = "/tank/test/fastq/"
ALIGNED_BAMS_DIR = "/tank/test/aligned_bams/"
SORTED_BAMS_DIR = "/tank/test/sorted_bams/"
PP_BAMS_DIR = "/tank/test/pp_bams/"
VCF_DIR = "/tank/test/vcf/"

WORKER_REFERENCE_PATH = WORKER_REFERENCE_DIR + "ucsc.hg19/ucsc.hg19.fasta"

def run_command_on_worker(command):
    subp = subprocess.Popen(command, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
    subp.wait()

    return subp.returncode

@app.task
def align_paired_reads(fastq1, fastq2, out_aligned_bam):
    WORKER_fastq1_path = FASTQ_DIR + fastq1
Example #39
        "quantaxis_run"
    )

    beat_schedule = {
        'daily': {
            'task': 'quantaxis_run.monitor_daily',
            'schedule': crontab(minute='50', hour='8,12,15,20')
        },
        'trading': {
            'task': 'quantaxis_run.monitor_trading',
            'schedule': timedelta(seconds=10)
        },
    }


app = Celery('quantaxis_run', backend='rpc://', broker=qarun_amqp)
app.config_from_object(celeryconfig)

# A task being bound means the first argument to the task will always be the task instance (self), just like Python bound methods:


@app.task(bind=True)
def quantaxis_run(self, shell_cmd, program='python', taskid=True):
    client_joblist = pymongo.MongoClient(
        host=qarun_mongo_ip, connect=False).quantaxis.joblist
    client_qa = pymongo.MongoClient(
        host=qarun_mongo_ip, connect=False).quantaxis.joblog
    client_qa.create_index([('filename', pymongo.ASCENDING),
                            ('job_id', pymongo.ASCENDING), ('time', pymongo.ASCENDING)])
    filename = shell_cmd
    if taskid:
from __future__ import absolute_import, unicode_literals
from celery import Celery

import os
from project import settings

# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'project.settings')

app = Celery('project')

# Using a string here means the worker will not have to
# pickle the object when using Windows.
app.config_from_object('project.settings', namespace='CELERY')
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)


@app.task(bind=True)
def debug_task(self):
    print('Request: {0!r}'.format(self.request))
Example #41
from celery import Celery

app = Celery('proj',
             backend='redis://localhost',
             broker='redis://localhost',
             include=['proj.tasks'])

# optional configuration, see the application user guide.
app.conf.update(result_expires=3600, )

if __name__ == "__main__":
    app.start()
Example #42
from celery import Celery
from celery.exceptions import (SoftTimeLimitExceeded, TimeLimitExceeded,
                               WorkerLostError, WorkerShutdown,
                               WorkerTerminate)

from httpobs.conf import DEVELOPMENT_MODE
from httpobs.database import (insert_test_results, select_site_headers,
                              update_scan_state)
from httpobs.scanner import celeryconfig, STATE_ABORTED, STATE_FAILED, STATE_RUNNING
from httpobs.scanner.analyzer import tests
from httpobs.scanner.retriever import retrieve_all
from httpobs.scanner.utils import sanitize_headers

import sys

# Create the scanner task queue
scanner = Celery()
scanner.config_from_object(celeryconfig)


@scanner.task()
def scan(hostname: str, site_id: int, scan_id: int):
    try:
        # Once celery kicks off the task, let's update the scan state from PENDING to RUNNING
        update_scan_state(scan_id, STATE_RUNNING)

        print(
            'processing scan_id: {sc_id}, site_id: {st_id}, host: {hostname}'.
            format(sc_id=scan_id, st_id=site_id, hostname=hostname),
            file=sys.stderr)

        # Get the site's cookies and headers
Example #43
from celery import Celery


app = Celery('demo')
app.config_from_object('monitor.config')
Example #44
from flask import Flask, request, jsonify
from celery import Celery
import argparse
import json
import time
import pickle
import traceback

from dao.db import *
from service.task_manipulation_service import *
from sgn import core

app = Flask(__name__)
app.config['CELERY_BROKER_URL'] = 'redis://127.0.0.1:6379/0'
app.config['CELERY_RESULT_BACKEND'] = 'redis://127.0.0.1:6379/0'
celery = Celery(app.name, broker=app.config['CELERY_BROKER_URL'])
celery.conf.update(app.config)


def parse_arg():
    parser = argparse.ArgumentParser(description='nld-sgn-main')
    parser.add_argument('--host', dest='host', default='0.0.0.0')
    parser.add_argument('--port', dest='port', default='80')
    parser.add_argument('--db_host',
                        dest='db_host',
                        default='db.neurolearn.com')
    parser.add_argument('--db_name', dest='db_name', default='neurolearn')
    parser.add_argument('--db_user', dest='db_user', default='neurolearn')
    parser.add_argument('--db_pwd', dest='db_pwd', default='nl4444_')
    args = parser.parse_args()
    return args
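
No task is defined in this snippet; purely as an illustration (the task and route names are invented), a job could be wired to the `celery` instance above like so:

@celery.task
def long_running_job(payload):
    # Placeholder body standing in for the real work.
    return {'processed': payload}


@app.route('/jobs', methods=['POST'])
def submit_job():
    # Enqueue the job on the Redis broker and hand the task id back to the caller.
    result = long_running_job.delay(request.get_json())
    return jsonify({'task_id': result.id}), 202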
Example #45
0
from celery import Celery
from . import settings

app = Celery('pipeline', broker='redis://localhost')
app.config_from_object(settings)
Example #46
0
import os

from celery import Celery

# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'config.settings')

app = Celery('config')

# Using a string here means the worker doesn't have to serialize
# the configuration object to child processes.
# - namespace='CELERY' means all celery-related configuration keys
#   should have a `CELERY_` prefix.
app.config_from_object('django.conf:settings', namespace='CELERY')

# Load task modules from all registered Django app configs.
app.autodiscover_tasks()
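
Because of `namespace='CELERY'`, the worker looks for prefixed keys in `config.settings`; a hypothetical minimal set (the Redis URLs are placeholders) maps to `broker_url`, `result_backend` and `task_always_eager` once the prefix is stripped:

CELERY_BROKER_URL = 'redis://localhost:6379/0'
CELERY_RESULT_BACKEND = 'redis://localhost:6379/1'
CELERY_TASK_ALWAYS_EAGER = False  # flip to True in tests to run tasks inline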
Example #47
0
from __future__ import absolute_import
import os
from celery import Celery
from django.apps import AppConfig
from django.conf import settings


if not settings.configured:
    # set the default Django settings module for the 'celery' program.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'config.settings.local')  # pragma: no cover


app = Celery('medusa')


class CeleryConfig(AppConfig):
    name = 'medusa.taskapp'
    verbose_name = 'Celery Config'

    def ready(self):
        # Using a string here means the worker will not have to
        # pickle the object when using Windows.
        app.config_from_object('django.conf:settings')
        app.autodiscover_tasks(lambda: settings.INSTALLED_APPS, force=True)

        if hasattr(settings, 'RAVEN_CONFIG'):
            # Celery signal registration
            from raven import Client as RavenClient
            from raven.contrib.celery import register_signal as raven_register_signal
            from raven.contrib.celery import register_logger_signal as raven_register_logger_signal
Example #48
0
"""
For more information on this file, see
http://celery.readthedocs.org/en/latest/django/first-steps-with-django.html

Run your celery worker(s) as `djcelery`, which is an alias for
`celery -A website_wagtail worker --loglevel=info`.

A celerybeat scheduler can be started together with a worker by `djcelery -B`
or as a separate process:
`celery -A website_wagtail beat --loglevel=info -s /tmp/celerybeat-schedule`.
It needs to store the last run times of the tasks in a local database file:
if no -s option is provided it defaults to the cwd.
In production it shouldn't live in /tmp/.

"""

from __future__ import absolute_import

import os
from celery import Celery
from django.conf import settings

# set the default Django settings module for the 'celery' program.
os.environ.setdefault("DJANGO_SETTINGS_MODULE",
                      "jazminleon.settings.production")

app = Celery("jazminleon")

app.config_from_object('django.conf:settings', namespace='CELERY')
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
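
For the celerybeat scheduler described in the docstring, periodic tasks are declared in settings; a hypothetical entry (the task path and timing are invented) could be:

from celery.schedules import crontab

CELERY_BEAT_SCHEDULE = {
    'nightly-cleanup': {
        'task': 'website_wagtail.tasks.cleanup',  # hypothetical task path
        'schedule': crontab(minute=0, hour=3),    # every day at 03:00
    },
}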
Example #49
0
from __future__ import absolute_import, unicode_literals
import os
from celery import Celery

# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'instagramDownloader.settings')

app = Celery('instagramDownloader', broker='amqp://guest@localhost//')

# Using a string here means the worker doesn't have to serialize
# the configuration object to child processes.
# - namespace='CELERY' means all celery-related configuration keys
#   should have a `CELERY_` prefix.
app.config_from_object('django.conf:settings', namespace='CELERY')

# Load task modules from all registered Django app configs.
app.autodiscover_tasks()
Example #50
0
from __future__ import absolute_import

import os

from celery import Celery

# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'donation_backend.settings')

from django.conf import settings  # noqa

app = Celery('donation_backend')

# Using a string here means the worker will not have to
# pickle the object when using Windows.
app.config_from_object('django.conf:settings')
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)


@app.task(bind=True)
def debug_task(self):
    print('Request: {0!r}'.format(self.request))
Example #51
0
    def test_init_no_settings(self):
        celery = Celery(set_as_current=False)
        celery.conf.CELERY_MONGODB_BACKEND_SETTINGS = []
        with self.assertRaises(ImproperlyConfigured):
            MongoBackend(app=celery)
Example #52
0
from celery import Celery
import os

# Tell Celery which Django settings module it needs to use
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "meiduo_mall.settings.dev")

# 1. Create the Celery application instance
celery_app = Celery('meiduo')

# 2. Load the configuration and point at the broker
celery_app.config_from_object('celery_tasks.config')
# 3. Auto-register tasks -- this Celery app only processes the task modules
#    listed here (plural, so pass a list)
celery_app.autodiscover_tasks(['celery_tasks.sms',"celery_tasks.email"])
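
The `celery_tasks.config` module loaded above is not shown here; a minimal, hypothetical version only needs the broker (and, optionally, a result backend), with the Redis URLs below being placeholders:

broker_url = 'redis://127.0.0.1:6379/7'
result_backend = 'redis://127.0.0.1:6379/8'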
Example #53
0
import os
from celery import Celery

# Instantiate the Celery application
celery_app = Celery('dailyfresh')

# Wire Celery up with Django: Celery must be able to find and load Django's settings and app code
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'dailyfresh.settings')

# Initialize the Django framework
import django
django.setup()

# Load the configuration
celery_app.config_from_object('celery_tasks.config')

# Register the tasks
celery_app.autodiscover_tasks(
    ['celery_tasks.email', 'celery_tasks.index_static'])
Example #54
0
    def test_init_settings_is_None(self):
        celery = Celery(set_as_current=False)
        celery.conf.CELERY_MONGODB_BACKEND_SETTINGS = None
        MongoBackend(app=celery)
Example #55
0
import os

from django.conf import settings

from celery import Celery

# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'code_sponsor.settings')

app = Celery('code_sponsor')
app.config_from_object('django.conf:settings', namespace='CELERY')
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
Example #56
0
#!/usr/bin/python3
# coding: utf-8
from __future__ import absolute_import, unicode_literals
import os

from celery import Celery

os.environ.setdefault('DJANGO_SETTINGS_MODULE',
                      'QBuyPro.settings')

app = Celery('QBuyPro',
             broker='redis://119.3.170.97:6378/3',
             backend='redis://119.3.170.97:6378/4',
             namespace='Celery')

app.config_from_object('django.conf:settings')
app.autodiscover_tasks()
Example #57
0
from __future__ import absolute_import

import os
from celery import Celery

from django.conf import settings

# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'itaigi.settings')

# The three values unpacked here serve as the RabbitMQ user, password and vhost; tsuki (next line) is the host.
mia, bitbe, vhost = ('itaigi', ) * 3
tsuki = 'rabbitmq'
rabbitmq = 'amqp://{}:{}@{}:5672/{}'.format(mia, bitbe, tsuki, vhost)

app = Celery('itaigi', backend='amqp', broker=rabbitmq)

# Using a string here means the worker will not have to
# pickle the object when using Windows.
app.config_from_object('django.conf:settings')
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)


@app.task(bind=True)
def debug_task(self):
    print('Request: {0!r}'.format(self.request))
Example #58
0
# Celery startup entry point: create the Celery object, load its configuration and register the tasks
from celery import Celery

# Point Celery at the Django settings module
import os
if not os.getenv('DJANGO_SETTINGS_MODULE'):
    os.environ['DJANGO_SETTINGS_MODULE'] = 'meiduo.settings.dev'

celery_app = Celery('celery_meiduo')

# Load the Celery configuration
celery_app.config_from_object('celery_tasks.config')

# Register the Celery tasks
celery_app.autodiscover_tasks(
    ['celery_tasks.sms', 'celery_tasks.email', 'celery_tasks.html'])
Example #59
0
import os
import sys

from celery import Celery
import pathogenprofiler as pp  # assumed import for the `pp` helpers used below
try:
    sys.base_prefix
except AttributeError:  # Python 2 virtualenvs expose real_prefix instead of base_prefix
    sys.base_prefix = getattr(sys, 'base_prefix', getattr(sys, 'real_prefix', sys.prefix))

def get_conf_dict(library_prefix):
    files = {"gff":".gff","ref":".fasta","ann":".ann.txt","barcode":".barcode.bed","bed":".bed","json_db":".dr.json","version":".version.json"}
    conf = {}
    for key in files:
        sys.stderr.write("Using %s file: %s\n" % (key,library_prefix+files[key]))
        conf[key] = pp.filecheck(library_prefix+files[key])
    return conf

app = Celery('tasks', broker='pyamqp://guest@localhost//')


@app.task
def tbprofiler(fq1,fq2,uniq_id,db,storage_dir,platform):
    conf = get_conf_dict(sys.base_prefix+"/share/tbprofiler/tbdb")
    drug_order = ["isoniazid","rifampicin","ethambutol","pyrazinamide","streptomycin","ethionamide","fluoroquinolones","amikacin","capreomycin","kanamycin"]

    if fq1 and fq2:
        fastq_obj = pp.fastq(fq1,fq2)
    elif fq1 and fq2 is None:
        fastq_obj = pp.fastq(fq1)
    files_prefix = storage_dir+"/"+uniq_id
    bam_obj = fastq_obj.map_to_ref(
        ref_file=conf["ref"], prefix=files_prefix,sample_name=uniq_id,
        aligner="bwa", platform=platform, threads=4
Example #60
0
import os
from celery import Celery
from django.conf import settings

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'settings')

app = Celery(broker=settings.CELERY_BROKER_URL)
if settings.DEBUG:
    app.conf.task_default_queue = 'celery_dev'
else:
    app.conf.task_default_queue = os.environ.get('CELERY_QUEUE_NAME')

app.config_from_object('django.conf:settings')
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)

if __name__ == '__main__':
    app.start()