Example #1
import os
import celery

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'file_storage.settings')

app = celery.Celery('file_storage')
app.config_from_object('django.conf:settings', namespace='CELERY')
app.autodiscover_tasks()
Example #2
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from django.conf import settings

import celery
import os

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'AIC_runner.settings')

app = celery.Celery('AIC')

app.config_from_object('django.conf:settings')
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
Example #3
import logging
import pandas as pd
import sqlalchemy
import uuid
import zlib

from sqlalchemy.pool import NullPool
from sqlalchemy.orm import sessionmaker

from caravel import (app, db, models, utils, dataframe, results_backend)
from caravel.db_engine_specs import LimitMethod
from caravel.jinja_context import get_template_processor

QueryStatus = models.QueryStatus

celery_app = celery.Celery(config_source=app.config.get('CELERY_CONFIG'))


def is_query_select(sql):
    return sql.upper().startswith('SELECT')


def create_table_as(sql, table_name, schema=None, override=False):
    """Reformats the query into the create table as query.

    Works only for the single select SQL statements, in all other cases
    the sql query is not modified.
    :param sql: string, sql query that will be executed
    :param table_name: string, will contain the results of the query execution
    :param override: boolean, the table table_name will be dropped if true
    :return: string, create table as query
    """
    # The original snippet is truncated here; a minimal body reconstructed
    # from the docstring follows.
    if not is_query_select(sql):
        return sql
    if schema:
        table_name = '{}.{}'.format(schema, table_name)
    exec_sql = ''
    if override:
        exec_sql = 'DROP TABLE IF EXISTS {};\n'.format(table_name)
    return exec_sql + 'CREATE TABLE {} AS\n{}'.format(table_name, sql)
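

# A quick illustration of the rewrite performed by the reconstructed body
# above; 'tmp_logs' is a hypothetical table name.
if __name__ == '__main__':
    print(create_table_as('SELECT * FROM logs', 'tmp_logs', override=True))
    # DROP TABLE IF EXISTS tmp_logs;
    # CREATE TABLE tmp_logs AS
    # SELECT * FROM logs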
Example #4
import argparse
import json
import logging
import os
import signal
import sys
import time

import celery

# DEFAULT_BROKER, DEFAULT_ADDR, LOG_FORMAT, __VERSION__, shutdown,
# setup_metrics, MonitorThread, WorkerMonitoringThread and start_httpd are
# defined elsewhere in the module this snippet was taken from.


def main():  # pragma: no cover
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--broker',
        dest='broker',
        default=DEFAULT_BROKER,
        help="URL to the Celery broker. Defaults to {}".format(DEFAULT_BROKER))
    parser.add_argument(
        '--transport-options',
        dest='transport_options',
        help=("JSON object with additional options passed to the underlying "
              "transport."))
    parser.add_argument(
        '--addr',
        dest='addr',
        default=DEFAULT_ADDR,
        help="Address the HTTPD should listen on. Defaults to {}".format(
            DEFAULT_ADDR))
    parser.add_argument('--tz',
                        dest='tz',
                        help="Timezone used by the celery app.")
    parser.add_argument('--verbose',
                        action='store_true',
                        default=False,
                        help="Enable verbose logging")
    parser.add_argument('--version',
                        action='version',
                        version='.'.join([str(x) for x in __VERSION__]))
    opts = parser.parse_args()

    if opts.verbose:
        logging.basicConfig(level=logging.DEBUG, format=LOG_FORMAT)
    else:
        logging.basicConfig(level=logging.INFO, format=LOG_FORMAT)

    signal.signal(signal.SIGINT, shutdown)
    signal.signal(signal.SIGTERM, shutdown)

    if opts.tz:
        os.environ['TZ'] = opts.tz
        time.tzset()

    app = celery.Celery(broker=opts.broker)

    if opts.transport_options:
        try:
            transport_options = json.loads(opts.transport_options)
        except ValueError:
            print(
                "Error parsing broker transport options from JSON '{}'".format(
                    opts.transport_options),
                file=sys.stderr)
            sys.exit(1)
        else:
            app.conf.broker_transport_options = transport_options

    setup_metrics(app)

    t = MonitorThread(app=app)
    t.daemon = True
    t.start()
    w = WorkerMonitoringThread(app=app)
    w.daemon = True
    w.start()
    start_httpd(opts.addr)
    t.join()
    w.join()
Example #5
#!/usr/bin/env python

import celery

from datetime import datetime, timedelta

localhost = 'localhost'  # placeholder in the source; set to this worker's hostname

app = celery.Celery('tasks', backend='amqp', broker='amqp://')


def main():

    print(str(datetime.utcnow()) + ' (CURRENT UTC TIME)\n')

    inspector = app.control.inspect()
    scheduled = inspector.scheduled()["celery@" + localhost]
    for task in scheduled:
        timecode = task['eta'].replace('T', ' ')
        print(timecode + '  ' + str(task['request']['args']))

    return 0


if __name__ == '__main__':
    main()
Example #6
import logging
import logging.config
import sys

import celery
import structlog
from celery.signals import setup_logging, worker_process_init
from django_structlog.celery.steps import DjangoStructLogInitStep

logger = structlog.get_logger(__name__)

app = celery.Celery("secateur")
app.config_from_object("django.conf:settings", namespace="CELERY")
app.steps["worker"].add(DjangoStructLogInitStep)


@worker_process_init.connect(weak=False)
def init_celery_tracing(*args, **kwargs):
    import secateur.otel


@setup_logging.connect
def receiver_setup_logging(  # type: ignore
        loglevel, logfile, format, colorize, **kwargs):  # pragma: no cover
    logging.config.dictConfig({
        "version": 1,
        "disable_existing_loggers": False,
        "formatters": {
            "plain_console": {
                "()": structlog.stdlib.ProcessorFormatter,
                "processor": structlog.dev.ConsoleRenderer(colors=False),
            },
        },
        # minimal handlers/root wiring; the original snippet is truncated here
        "handlers": {
            "console": {
                "class": "logging.StreamHandler",
                "formatter": "plain_console",
            },
        },
        "root": {"handlers": ["console"], "level": loglevel or "INFO"},
    })
Example #7
import celery

app = celery.Celery('mergesort', broker='amqp://', backend='redis://')


@app.task
def sort(xs):
    lenxs = len(xs)
    if lenxs <= 1:
        return xs

    half_lenxs = lenxs // 2
    left = xs[:half_lenxs]
    right = xs[half_lenxs:]
    return merge(sort(left), sort(right))


def merge(left, right):
    nleft = len(left)
    nright = len(right)

    merged = []
    i = 0
    j = 0
    while i < nleft and j < nright:
        if left[i] < right[j]:
            merged.append(left[i])
            i += 1
        else:
            merged.append(right[j])
            j += 1
    # the original snippet is truncated here; append whatever remains once
    # one side is exhausted and return the merged result
    merged.extend(left[i:])
    merged.extend(right[j:])
    return merged
Example #8
import celery
import pytest
import time
import os
import logging

from src.config import Config
from src.utils.running_reports_repository import RunningReportsRepository
from src.orderlyweb_client_wrapper import OrderlyWebClientWrapper
from test.integration.yt_utils import YouTrackUtils
from test.integration.file_utils import write_text_file

app = celery.Celery(broker="redis://guest@localhost//", backend="redis://")
reports_sig = "run-diagnostic-reports"
archive_folder_sig = "archive_folder_contents"
yt = YouTrackUtils()


@pytest.fixture(autouse=True)
def cleanup_tickets(request):
    request.addfinalizer(yt.cleanup)


@pytest.fixture(scope="session")
def docker(pytestconfig):
    return pytestconfig.getoption("docker")


def test_run_diagnostic_reports():
    versions = app.signature(reports_sig, [
        "testGroup", "testDisease", yt.test_touchstone, "2020-11-04T12:21:15",
Example #9
import celery
from celery.schedules import crontab

# 'env' is the environment helper from the surrounding project; the opening
# of the schedule dict is reconstructed from the truncated source.
from webservices.env import env

schedule = {
        'refresh_legal_docs': {
            'task': 'webservices.tasks.legal_docs.refresh',
            'schedule': crontab(minute=[5, 20, 35, 50]),
        },
    }


def redis_url():
    redis = env.get_service(label='redis32')
    if redis:
        url = redis.get_url(host='hostname', password='******', port='port')
        return 'redis://{}'.format(url)
    return env.get_credential('FEC_REDIS_URL', 'redis://localhost:6379/0')


app = celery.Celery('openfec')
app.conf.update(broker_url=redis_url(),
                imports=(
                    'webservices.tasks.refresh',
                    'webservices.tasks.download',
                    'webservices.tasks.legal_docs',
                ),
                beat_schedule=schedule,
                task_acks_late=False)

app.conf.ONCE = {
    'backend': 'celery_once.backends.Redis',
    'settings': {
        'url': redis_url(),
        'default_timeout': 60 * 60
    }
}
Example #10
# coding=utf-8
import time
import celery
from kombu import Queue, Exchange

app = celery.Celery('tasks', broker='redis://')
app.conf.update(CELERY_DEFAULT_QUEUE='queue_c4')


@app.task(name='hello')
def s(msg):
    print(msg)
Example #11
import json
import os

import celery
import requests

_post_msg_url = 'https://graph.facebook.com/v2.6/me/messages?access_token=' + os.environ[
    'FBOT_ACCESS_TOKEN']

app = celery.Celery('demo')
app.conf.update(BROKER_URL=os.environ['CLOUDAMQP_URL'], BROKER_POOL_LIMIT=20)

##########
# Celery #
##########


@app.task
def add(x, y):
    print('testing add')
    return x + y


@app.task
def process(data):
    # Only the first event in the 'messaging' array is handled here, although
    # it may contain several; see the sketch below for the intended fix.
    if 'message' in data['entry'][0]['messaging'][0]:
        sender_id = data['entry'][0]['messaging'][0]['sender']['id']
        message = data['entry'][0]['messaging'][0]['message']['text']
        # sending messages will be moved out of this module.
        resp_data = {
            "recipient": {
                "id": sender_id
            },
            "message": {
Example #12
import os

import celery

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'jobs.settings')

app = celery.Celery('jobs')
app.config_from_object('django.conf:settings', namespace='CELERY')

app.autodiscover_tasks()
Example #13
#  tasks.py
import celery
from sqlalchemy import create_engine, text
from sqlalchemy.exc import SQLAlchemyError, IntegrityError
from sqlalchemy.orm import sessionmaker, scoped_session
from app.models.data import Extrinsic, Block, Log, Event
from app.processors.converters import PolkascanHarvesterService, HarvesterCouldNotAddBlock, \
    HarvesterNotshardParamsError, BlockAlreadyAdded
from substrateinterface import SubstrateInterface

from app.settings import DB_CONNECTION, DEBUG, TYPE_REGISTRY, SHARDS_TABLE, NUM

CELERY_BROKER = 'redis://redis:6379/0'
CELERY_BACKEND = 'redis://redis:6379/0'

app = celery.Celery('tasks', broker=CELERY_BROKER, backend=CELERY_BACKEND)

app.conf.beat_schedule = {
    'shard0-check-head-10-seconds': {
        'task': 'app.tasks.start_harvester',
        'schedule': 13.0,
        'args': ("shard.0", )
    },
    'shard1-check-head-10-seconds': {
        'task': 'app.tasks.start_harvester',
        'schedule': 13.0,
        'args': ("shard.1", )
    },
    'shard2-check-head-10-seconds': {
        'task': 'app.tasks.start_harvester',
        'schedule': 13.0,
        # the original snippet is truncated here; the entry is completed
        # following the shard0/shard1 pattern above
        'args': ("shard.2", )
    },
}
Example #14
import logging
import os

import celery

# 'log' comes from the surrounding module in the original source; a stdlib
# logger stands in so the snippet runs on its own.
log = logging.getLogger(__name__)


def get_celery_app(name=os.getenv('APP_NAME', 'worker'),
                   auth_url=os.getenv('WORKER_BROKER_URL',
                                      'redis://localhost:6379/11'),
                   backend_url=os.getenv('WORKER_BACKEND_URL',
                                         'redis://localhost:6379/12'),
                   include_tasks=[],
                   ssl_options=None,
                   transport_options=None,
                   path_to_config_module=os.getenv(
                       'WORKER_CELERY_CONFIG_MODULE',
                       'analysis_engine.work_tasks.celery_config'),
                   worker_log_format=os.getenv(
                       'WORKER_LOG_FORMAT',
                       '%(asctime)s: %(levelname)s %(message)s'),
                   **kwargs):
    """get_celery_app

    Build a Celery app with support for environment variables
    to set endpoints locations.

    - export WORKER_BROKER_URL=redis://localhost:6379/11
    - export WORKER_BACKEND_URL=redis://localhost:6379/12
    - export WORKER_CELERY_CONFIG_MODULE=analysis_engine.work_tasks.celery_config

    .. note:: Jupyter notebooks need to use the
        ``WORKER_CELERY_CONFIG_MODULE=analysis_engine.work_tasks.celery_service_config``
        value which uses resolvable hostnames with
        docker compose:

        - export WORKER_BROKER_URL=redis://redis:6379/11
        - export WORKER_BACKEND_URL=redis://redis:6379/12

    :param name: name for this app
    :param auth_url: Celery broker address
        (default is ``redis://localhost:6379/11``
        or ``analysis_engine.consts.WORKER_BROKER_URL``
        environment variable)
        this is required for distributing algorithms
    :param backend_url: Celery backend address
        (default is ``redis://localhost:6379/12``
        or ``analysis_engine.consts.WORKER_BACKEND_URL``
        environment variable)
        this is required for distributing algorithms
    :param include_tasks: list of modules containing tasks to add
    :param ssl_options: security options dictionary
        (default is ``analysis_engine.consts.SSL_OPTIONS``)
    :param transport_options: transport options dictionary
        (default is ``analysis_engine.consts.TRANSPORT_OPTIONS``)
    :param path_to_config_module: config module for advanced
        Celery worker connectivity requirements
        (default is ``analysis_engine.work_tasks.celery_config``
        or ``analysis_engine.consts.WORKER_CELERY_CONFIG_MODULE``)
    :param worker_log_format: format for logs
    """

    if len(include_tasks) == 0:
        log.error('creating celery app={} MISSING tasks={}'.format(
            name, include_tasks))
    else:
        log.info('creating celery app={} tasks={}'.format(name, include_tasks))

    # get the Celery application
    app = celery.Celery(name,
                        broker_url=auth_url,
                        result_backend=backend_url,
                        include=include_tasks)

    app.config_from_object(path_to_config_module, namespace='CELERY')

    app.conf.update(kwargs)

    if transport_options:
        log.info('loading transport_options={}'.format(transport_options))
        app.conf.update(**transport_options)
    # custom transport options

    if ssl_options:
        log.info('loading ssl_options={}'.format(ssl_options))
        app.conf.update(**ssl_options)
    # custom ssl options

    if len(include_tasks) > 0:
        app.autodiscover_tasks(include_tasks)

    return app
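

# A minimal usage sketch; the task-module path passed to include_tasks is
# hypothetical and should point at a real module of Celery tasks.
if __name__ == '__main__':
    worker_app = get_celery_app(
        name='my-worker',
        include_tasks=['analysis_engine.work_tasks.publish_pricing_update'])
    worker_app.worker_main(['worker', '--loglevel=INFO'])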
Example #15
from __future__ import absolute_import, unicode_literals

from django.conf import settings

import celery
import os

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'participa.settings')

app = celery.Celery('participa')
app.config_from_object('django.conf:settings')
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
Example #16
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals

import celery
import os
import raven
import sys

from django.conf import settings
from raven.contrib.celery import register_signal, register_logger_signal

# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'temba.settings')

app = celery.Celery('temba')

app.config_from_object('django.conf:settings')
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)

# register raven if configured
raven_config = getattr(settings, 'RAVEN_CONFIG', None)
if raven_config:  # pragma: no cover
    client = raven.Client(settings.RAVEN_CONFIG['dsn'])
    register_logger_signal(client)
    register_signal(client)


@app.task(bind=True)
def debug_task(self):  # pragma: needs cover
    print('Request: {0!r}'.format(self.request))
Example #17
{% if cookiecutter.use_celery == 'y' %}
import os
{%- if cookiecutter.use_sentry == 'y' %}
import raven
from raven.contrib.celery import register_signal, register_logger_signal
{%- endif %}
import celery
from django.conf import settings

os.environ.setdefault('DJANGO_SETTINGS_MODULE', '{{cookiecutter.project_slug}}.settings')

{%- if cookiecutter.use_sentry == 'y' %}
class Celery(celery.Celery):

    def on_configure(self):
        if settings.DSN:
            client = raven.Client(settings.DSN)
            register_logger_signal(client)
            register_signal(client)

app = Celery(__name__)
{% else %}
app = celery.Celery(__name__)
{%- endif %}
app.config_from_object('django.conf:settings')
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
{%- endif %}
Example #18
from __future__ import absolute_import

import celery

app = celery.Celery("quickstats")

# Using a string here means the worker will not have to
# pickle the object when using Windows.
app.config_from_object("django.conf:settings", namespace="CELERY")
app.autodiscover_tasks()
Example #19
from os import environ

import celery
import toml
from celery.utils.log import get_task_logger

from orm import Facade as orm_facade
from usecase import recommend as recommend_usecase
from task import oss, sms

logger = get_task_logger(__name__)

# Configs
config_filepath = environ.get("MORER_CONFIG")
if not config_filepath:
    config_filepath = "config/default.toml"
config = toml.load(config_filepath)
mysql_config = config["mysql"]
redis_config = config["redis"]

app = celery.Celery('task')
app.conf.update(**config["celery"])

app.conf.beat_schedule = {
    'update_recommend_per_hour': {  # refresh the recommended carers list every hour
        'task': 'task.update_recommend_carers',
        'schedule': 300
    }
}


class MyTask(celery.Task):
    def on_success(self, retval, task_id, args, kwargs):
        print('task done: {0}'.format(retval))
        return super(MyTask, self).on_success(retval, task_id, args, kwargs)
Example #20
import celery
from cell.actors import Actor
from cell.agents import dAgent
from kombu.utils import uuid
from examples.workflow import forward

my_app = celery.Celery(broker='pyamqp://guest@localhost//')
agent = dAgent(connection=my_app.broker_connection())


class Adder(Actor):
    def __init__(self, connection=None, *args, **kwargs):
        super(Adder, self).__init__(connection or my_app.broker_connection(),
                                    *args, **kwargs)

    class state():
        def add_one(self, i, token=None):
            print('Increasing %s with one' % i)
            res = i + 1
            self.actor.emit('count', {'res': res, 'token': token})
            return res


class Counter(Actor):
    def __init__(self, connection=None, *args, **kwargs):
        super(Counter, self).__init__(connection or my_app.broker_connection(),
                                      *args, **kwargs)

    class state():
        def __init__(self):
            self.targets = {}
Example #21
import celery

app = celery.Celery(
    'mergesort',
    broker=
    'amqp://*****:*****@PROD-JOB-844fd7d2202ac4da.elb.us-east-2.amazonaws.com',
    backend=
    'amqp://*****:*****@PROD-JOB-844fd7d2202ac4da.elb.us-east-2.amazonaws.com'
)

# Run this module with "celery -A mergesort worker --loglevel=info".
# The machine then becomes a worker that can execute the app's task
# (the sort function) whenever the broker dispatches one.


@app.task
def sort(xs):
    lenxs = len(xs)
    if lenxs <= 1:
        return xs

    half_lenxs = lenxs // 2
    left = xs[:half_lenxs]
    right = xs[half_lenxs:]
    # call the sort function recursively and merge the results with the merge function
    return merge(sort(left), sort(right))


def merge(left, right):
    nleft = len(left)
    nright = len(right)

    # the original snippet is truncated here; the body is completed to match
    # the identical merge function in Example #7
    merged = []
    i = 0
    j = 0
    while i < nleft and j < nright:
        if left[i] < right[j]:
            merged.append(left[i])
            i += 1
        else:
            merged.append(right[j])
            j += 1
    merged.extend(left[i:])
    merged.extend(right[j:])
    return merged
Example #22
import os
from datetime import datetime

import celery
from django.conf import settings
from django.core.mail import EmailMessage
from fcm_django.fcm import fcm_send_message

app = celery.Celery('school_1329_server')

app.conf.update(BROKER_URL=os.getenv('REDIS_URL'),
                CELERY_RESULT_BACKEND=os.getenv('REDIS_URL'),
                CELERY_ALWAYS_EAGER=settings.DEBUG)


@app.task()
def send_email(subject, body, to):
    if not isinstance(to, list):
        to = [to]

    email = EmailMessage(subject, body, to=to)
    return email.send()


@app.task()
def send_push(fcm_token, title, body):
    return fcm_send_message(fcm_token, title, body)


@app.task()
def send_notifications(notification_pk):
Example #23
import celery

# module-level cache implied by the 'global' statement below
_celery_app = None


def get_celery_app(config):
    global _celery_app
    if _celery_app:
        return _celery_app
    _celery_app = celery.Celery(config_source=config.get('CELERY_CONFIG'))
    return _celery_app
Example #24
from __future__ import absolute_import, unicode_literals
import celery
import os

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'community.settings')

# configure Celery from the Django settings module
app = celery.Celery('community')

app.config_from_object('django.conf:settings', namespace='CELERY')

# autodiscover tasks in installed apps
app.autodiscover_tasks()


@app.task(bind=True)
def debug_task(self):
    print('Request: {0!r}'.format(self.request))
Example #25
"""
celery-task part

please type following command in the same directory with current file:

celery -A celery_task worker -l info

to start listening on the rabbitMQ to wait for tasks
"""

import celery
import scanner

broker_url = "amqp://192.168.2.12"
backend_url = "redis://192.168.2.12"

app = celery.Celery("scanner", broker=broker_url, backend=backend_url)
app.conf.task_acks_late = True
app.conf.worker_prefetch_multiplier = 1

scanner = scanner.Scanner()


@app.task
def submit_scan(name, target, policy, description=None):
    data = scanner.scan_task(name, target, policy, description)
    return data  # the result data is stored in Redis by the backend
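

# A minimal client-side sketch with hypothetical scan arguments; Example #28
# further below shows a fuller client driving this task via send_task.
if __name__ == '__main__':
    result = submit_scan.delay('nightly-scan', '192.168.2.0/24', 'default-policy')
    print(result.get())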
Example #26
import cPickle as pickle
import functools
import itertools
import logging
import time

l = logging.getLogger('claripy.backends.remotetasks')

from . import celeryconfig

import celery
from celery.contrib import rdb
import pymongo
import bson

import claripy  # referenced below; resolved via the surrounding package in the original

app = celery.Celery('tasks', broker='amqp://guest@localhost//', backend='mongodb://localhost/')
app.config_from_object(celeryconfig)
z3 = None
mongo = None

@celery.signals.celeryd_after_setup.connect
def init_z3_and_conns(sender, instance, **kwargs):
    global z3, mongo
    z3 = claripy.backends.BackendZ3()
    z3.set_claripy_object(claripy.Claripies['SerialZ3'])
    mongo = pymongo.MongoClient()['lemma_cache']

def canonicalize_all(exprs):
    known_vars = {}
    counter = itertools.count()
    return known_vars, [expr.canonicalized(existing_vars=known_vars, counter=counter)[1] for expr in exprs]
Example #27
import logging
import os
import random

import celery
import pylibmc

import remoulade
from remoulade.brokers.rabbitmq import RabbitmqBroker
from remoulade.brokers.redis import RedisBroker

logger = logging.getLogger("example")
counter_key = "latench-bench-counter"
memcache_client = pylibmc.Client(["localhost"], binary=True)
memcache_pool = pylibmc.ClientPool(memcache_client, 8)
random.seed(1337)

if os.getenv("REDIS") == "1":
    broker = RedisBroker()
    remoulade.set_broker(broker)
    celery_app = celery.Celery(broker="redis:///")

else:
    broker = RabbitmqBroker(host="127.0.0.1")
    remoulade.set_broker(broker)
    celery_app = celery.Celery(broker="amqp:///")


def fib_bench(n):
    p, q = 0, 1
    while n > 0:
        p, q = q, p + q
        n -= 1

    with memcache_pool.reserve() as client:
        client.incr(counter_key)
Example #28
import celery
import time

broker_url = "amqp://192.168.2.12"
backend_url = "redis://192.168.2.12"

app = celery.Celery(broker=broker_url, backend=backend_url)
ip_list = ["192.168.2.11", "192.168.2.12", "192.168.2.15", "192.168.2.12", "192.168.2.10"]
task_list = []

for ip in ip_list:
    task = app.send_task("celery_task.submit_scan", ["celery_launched", ip, "ubuntu"])
    task_list.append(task)

# wait and show the result
while task_list:
    for task in reversed(task_list):
        if not task.ready():
            print("working...")
        else:
            task_list.remove(task)
            # print(task.get())
            # data = task.get()
            # 1. save to mysql: data['details'], data['vulns']
            # 2. email sending: data['report']
            print("[{0}] done".format(task.get()['details']['target']))

        time.sleep(10)

Example #29
# SPDX-License-Identifier: GPL-3.0-or-later
import sys

import celery
from celery.signals import celeryd_init, task_postrun, task_prerun

from cachito.workers.celery_logging import (
    cleanup_task_logging,
    cleanup_task_logging_customization,
    setup_task_logging,
    setup_task_logging_customization,
)
from cachito.workers.config import configure_celery, validate_celery_config

# Workaround https://github.com/celery/celery/issues/5416
if celery.version_info < (4, 3) and sys.version_info >= (
        3, 7):  # pragma: no cover
    from re import Pattern
    from celery.app.routes import re as routes_re

    routes_re._pattern_type = Pattern

app = celery.Celery()
configure_celery(app)
celeryd_init.connect(validate_celery_config)
task_prerun.connect(setup_task_logging_customization)
task_prerun.connect(setup_task_logging)
task_postrun.connect(cleanup_task_logging_customization)
task_postrun.connect(cleanup_task_logging)
Example #30
from __future__ import absolute_import

import os

import celery

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'cbug.settings')

app = celery.Celery('cbug-cellar')

app.config_from_object('django.conf:settings', namespace="CELERY")
app.autodiscover_tasks()