Example #1
def register_scheduled_tasks(sender: Celery, **kwargs):
    """Register tasks with a celery beat schedule."""
    # Check for any draft versions that need validation every minute
    sender.add_periodic_task(
        timedelta(seconds=settings.DANDI_VALIDATION_JOB_INTERVAL),
        validate_draft_version_metadata.s(),
    )

    # Send daily email to admins containing a list of users awaiting approval
    sender.add_periodic_task(crontab(hour=0, minute=0),
                             send_pending_users_email.s())
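Example #1 is a signal handler rather than a standalone script; Celery's documented way to wire such a handler is the on_after_configure signal. A minimal sketch of that wiring, where the app, task, and interval below are illustrative placeholders rather than part of the example above:

from datetime import timedelta

from celery import Celery
from celery.schedules import crontab

app = Celery('demo', broker='redis://localhost:6379/0')  # hypothetical app


@app.task
def some_task():  # hypothetical task
    pass


@app.on_after_configure.connect
def register_scheduled_tasks(sender, **kwargs):
    # sender is the configured Celery app
    sender.add_periodic_task(timedelta(seconds=60), some_task.s())
    sender.add_periodic_task(crontab(hour=0, minute=0), some_task.s())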
Example #2
def _get_current_app():
    if default_app is None:
        #: creates the global fallback app instance.
        from celery.app.base import Celery
        set_default_app(Celery(
            'default', fixups=[], set_as_current=False,
            loader=os.environ.get('CELERY_LOADER') or 'default',
        ))
    return _tls.current_app or default_app
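Example #2 is Celery's internal fallback for resolving the current app when none has been set explicitly. The externally visible behavior, as a hedged sketch: a newly created app registers itself as current by default, and the module-level current_app proxy resolves to it.

from celery import Celery, current_app

app = Celery('demo')  # set_as_current defaults to True
# current_app is a proxy; unwrap it before comparing identities
assert current_app._get_current_object() is app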
Example #3
def init_celery():
    
    from reliam.common.app import startup_app
    
#     os.environ.setdefault(ResourceLoader.ENV_VAR_NAME,
#                           '/home/www-data/reliam2/resources/prod')
    
    app = startup_app()
    celery = Celery(app.import_name)
    celery.conf.add_defaults(app.config)
    # celery.log.setup(loglevel=logging.INFO, logfile=app.config.get('CELERYD_LOG_FILE'),
    #                  redirect_stdouts=True, redirect_level='INFO')
    celery.app = app
    
    TaskBase = celery.Task
    class ContextTask(TaskBase):
        abstract = True
        def __call__(self, *args, **kwargs):
            with app.app_context():
                return TaskBase.__call__(self, *args, **kwargs)
            
    celery.Task = ContextTask
    return celery
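With the ContextTask subclass above, every task body executes inside app.app_context(), so Flask's current_app and any app-bound extensions are available inside tasks. A hedged usage sketch; my_task is a hypothetical task, not part of the snippet:

celery = init_celery()


@celery.task
def my_task():
    # runs inside app.app_context() courtesy of ContextTask
    from flask import current_app
    return current_app.config.get('CELERY_BROKER_URL')


# enqueue as usual; the context is entered on the worker side
my_task.delay()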
Example #4
# -*- encoding: utf-8 -*-
'''
Created on 2016-08-01

@author: hua
'''
from celery.app.base import Celery
# from cabbage.cabbage_celery.cabbage_holder import CabbageHolder
# print CabbageHolder.getCabbage().ping("ubuntu")


# test ="amqp://*****:*****@172.16.4.134:5672/cabbage_vhost"
# app = Celery('cabbage',backend="rpc://",broker=test)

server="amqp://*****:*****@10.0.137.88:5672/cabbage_vhost"

app = Celery('*****@*****.**',broker=server)
        
print app.control.ping(timeout=5,destination=["*****@*****.**"])
Example #5
from celery import shared_task
from celery.app.base import Celery
from customcrawler import celeryconfig
from customcrawler.retry_mechanism import retry_session
from customcrawler.models import URL_details
from sqlalchemy.orm import sessionmaker
from customcrawler.models import db_connect
from customcrawler.settings import AXE_CHECKER_URL, CELERY_BROKER_URL

app = Celery('customcrawler', broker=CELERY_BROKER_URL)
# app = Celery('customcrawler',broker='amqp://*****:*****@rabbitmq:5672',backend='rpc://')
app.config_from_object(celeryconfig)

session_retry = retry_session(retries=5)
headers = {'User-Agent': 'Mozilla/5.0'}


class ProcessTask(object):
    def __init__(self):

        engine = db_connect()
        self.Session = sessionmaker(bind=engine)

    def run(self, base_url, job_data_id):

        url = AXE_CHECKER_URL + base_url

        r = session_retry.get(url=url, headers=headers)

        data = r.json()
Example #6
# -*- encoding: utf-8 -*-
'''
Created on 2016-08-10

@author: hua
'''
from celery.app.base import Celery

app = Celery(
    'cabbage',
    backend="rpc://",
    broker='amqp://*****:*****@10.0.137.88:5672/cabbage_vhost'
)

for i in range(1):
    result = app.send_task("product_list_crawler.ProductListCrawlerTask",
                           kwargs={'lid': 1})
    print result

# print "start job"
# i = app.control.inspect()
# result = i.stats()
# print result
# print i.registered_tasks()
Example #7
# -*- encoding: utf-8 -*-
'''
Created on 2016-09-06

@author: huawei
'''
from celery.app.base import Celery
from cabbage.utils.host_name import getHostName
test ="amqp://*****:*****@172.16.4.134:5672/cabbage_vhost"
#server="amqp://*****:*****@10.0.137.88:5672/cabbage_vhost"

app = Celery('cabbage',backend="rpc://",broker=test)
# celeryconfig.CELERY_QUEUES =  ( Queue('default', Exchange('default'), routing_key='default'),
#                    Queue('cabbage', Exchange('cabbage'), routing_key='cabbage'))
# app.config_from_object('cabbage.test.celery_test.celeryconfig')
# app.worker_main()
ubuntu="ubuntu"
print app.control.ping(timeout=2,destination=["celery@%s"%ubuntu])
print app.control.broadcast("shutdown", destination=["celery@%s"%ubuntu])
Example #8
# -*- encoding: utf-8 -*-
'''
Created on 2016-09-05

@author: huawei
'''
from celery.app.base import Celery
# from cabbage.test.celery_test import celeryconfig
from kombu.entity import Queue, Exchange


test ="amqp://*****:*****@172.16.4.134:5672/cabbage_vhost"
#server="amqp://*****:*****@10.0.137.88:5672/cabbage_vhost"

app = Celery('aaa',backend="rpc://",broker=test)
# celeryconfig.CELERY_QUEUES =  ( Queue('default', Exchange('default'), routing_key='default'),
#                    Queue('cabbage', Exchange('cabbage'), routing_key='cabbage'))
app.config_from_object('cabbage.test.celery_test.celeryconfig')
app.worker_main()
Example #9
# import os
# import django
# # set the Django settings module
# os.environ.setdefault("DJANGO_SETTINGS_MODULE", "dailyfresh.settings")
# # initialize the Django environment
# django.setup()

from celery.app.base import Celery
from django.core.mail import send_mail

from dailyfresh import settings

# Create the celery client
# arg 1: custom name
# arg 2: broker -- uses Redis database 1
app = Celery('', broker='redis://127.0.0.1:6379/1')


@app.task
def send_active_mail(username, email, token):
    subject = '天天生鲜注册激活'  # email subject
    message = ''  # plain-text body (no styling)
    from_email = settings.EMAIL_FROM  # sender
    recipient_list = [email]  # recipients; note: must be a list
    # HTML body of the email
    html_message = '<h3>尊敬的%s:</h3>  欢迎注册天天生鲜' \
                   '请点击以下链接激活您的账号:<br/>' \
                   '<a href="http://127.0.0.1:8000/users/active/%s">' \
                   'http://127.0.0.1:8000/users/active/%s</a>' % \
                   (username, token, token)
    # deliver the email; completed here following the identical call in Example #31
    send_mail(subject, message, from_email, recipient_list,
              html_message=html_message)
Example #10
import os
from celery.app.base import Celery

# Point at the Django project's settings module
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "meiduo_mall.settings.dev")

# Create a celery application (object); one per project is usually enough
# arg 1: custom name
# arg 2: async tasks are stored in redis
celery_app = Celery(
    'meiduo',
    broker='redis://127.0.0.1:6379/15',
    # backend: stores the return values of task runs
    backend='redis://127.0.0.1:6379/14')

# celery_app = Celery('meiduo')
# # load the config file
# celery_app.config_from_object('celery_tasks.config')

# Scan the listed packages for task functions
celery_app.autodiscover_tasks(
    ['celery_tasks.sms', 'celery_tasks.email', 'celery_tasks.html'])
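autodiscover_tasks() looks for a tasks module inside each listed package. A hedged sketch of what celery_tasks/sms/tasks.py might contain; the import path and task name are assumptions, not taken from the example:

# celery_tasks/sms/tasks.py (hypothetical module)
from celery_tasks.main import celery_app  # wherever the app above is defined


@celery_app.task(name='send_sms_code')
def send_sms_code(mobile, code):
    # invoked asynchronously via send_sms_code.delay(mobile, code)
    print('sending code %s to %s' % (code, mobile))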
Example #11
# -*- encoding: utf-8 -*-
'''
Created on 2016-06-17

@author: hua
'''
from celery import events
from celery.app.base import Celery
from celery.events.state import Task



app = Celery('cabbage',backend="rpc://",broker='amqp://172.16.4.134')

if __name__=="__main__":
    print app.events.State().tasks_by_timestamp()
    tasks =  app.events.State().tasks_by_timestamp()
    for uuid, task in tasks:
        print uuid,task 
    print app.events.State().alive_workers()
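Note that app.events.State() starts empty; it only accumulates tasks and workers if an event receiver feeds events into it, and each State() call above builds a fresh, empty object. A hedged sketch of the standard capture loop from Celery's monitoring API:

state = app.events.State()

with app.connection() as connection:
    receiver = app.events.Receiver(connection, handlers={'*': state.event})
    # workers must be started with -E so they emit events
    receiver.capture(limit=10, timeout=5, wakeup=True)

print state.tasks_by_timestamp()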
Example #12
# -*- encoding: utf-8 -*-
'''
Created on 2016-09-07

@author: huawei
'''
from celery.app.base import Celery
from kombu.entity import Queue, Exchange
import time

test = "amqp://*****:*****@172.16.4.134:5672/cabbage_vhost"
app = Celery('cabbage', broker=test)
app.config_from_object("cabbage.cabbage_celery.celeryconfig")
# import celeryconfig
# app.config_from_object(celeryconfig)

# for k,v in app.conf.items():
#     print k,v
app.conf.update(
    CELERY_ROUTES={
        'test_nfs_task.TestNfsTask': {
            'queue': 'test',
            'routing_key': 'test'
        },
        #                  'product_list_crawler.ProductListCrawlerTask': {'queue': 'celery', 'routing_key': 'celery'}
    })
# app.conf.update(CELERY_QUEUES=( Queue('hdfs', Exchange('hdfs'), routing_key='hdfs'),))
# print app.conf["CELERY_RESULT_BACKEND"]
results = []
for i in range(10000):
    print i
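With the CELERY_ROUTES entry above, sending the task by name is enough for it to land on the 'test' queue, which is presumably what the truncated loop goes on to do in bulk. A hedged sketch; the kwargs are illustrative:

result = app.send_task('test_nfs_task.TestNfsTask', kwargs={'no': 1})
# consumed by a worker subscribed to that queue, e.g.:
#   celery worker -A <module> -Q test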
Example #13
# -*- encoding: utf-8 -*-
'''
Created on 2016-08-01

@author: hua
'''
from celery.app.base import Celery
# from cabbage.cabbage_celery.cabbage_holder import CabbageHolder
# print CabbageHolder.getCabbage().ping("ubuntu")

test = "amqp://*****:*****@172.16.4.134:5672/cabbage_vhost"
# app = Celery('cabbage',backend="rpc://",broker=test)

# server="amqp://*****:*****@10.0.137.88:5672/cabbage_vhost"

app = Celery('celery@huamac', broker=test)

print app.control.ping(timeout=5, destination=["celery@huamac"])
Example #14
# -*- encoding: utf-8 -*-
'''
Created on 2016-08-10

@author: hua
'''
from celery.app.base import Celery
from celery.contrib.methods import task_method
from cabbage.job.task import ITask
from zope.interface.declarations import implementer
import os
import threading
import time

app = Celery('cabbage', backend="rpc://", broker='amqp://172.16.4.134')
#
# @implementer(ITask)
# class T:
#     def __init__(self):
#         pass
#
#     @app.task(bind=True, filter=task_method,name="cabbage.test.test_celery.T.run")
#     def run(self):
#         print "121212"
#
#     @app.task(bind=True,filter=task_method,name="cabbage.test.test_celery.T.run2")
#     def run2(self,a,b,no):
#         print "NO:%s"%no
#         time.sleep(5)
#         return a*b
Example #15
from __future__ import absolute_import

import os

from django.conf import settings
from celery.app.base import Celery
import configurations

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'app.settings.settings')
os.environ.setdefault('DJANGO_CONFIGURATION', 'Production')
configurations.setup()

app = Celery('app')
app.config_from_object('django.conf:settings')

app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)


@app.task(bind=True)
def debug_task(self):
    print('Request: {0!r}'.format(self.request))
Example #16
from celery.app.base import Celery
from django.core.mail import send_mail

from fresh01 import settings
"""
1.创建celery应用对象
2.send_active_email():内部封装激活邮件内容,并用装饰器@app.task注册
3.调用python的send_mail()将激活邮件发送出去
"""

# 1. Create the celery application object
# arg 1 is the path of the async-task module
# arg 2 is the broker
# redis://password@redis-host:port/db
app = Celery('celery_tasks.tasks', broker='redis://127.0.0.1:6379/1')


# 2. send_active_mail(): builds the activation email; registered via the @app.task decorator
@app.task
def send_active_mail(username, email, token):
    subject = '天天生鲜注册激活'  # email subject
    message = ''  # plain-text body (no styling)
    from_email = settings.EMAIL_FROM  # sender
    recipient_list = [email]  # recipients; note: must be a list
    # HTML body of the email
    html_message = '<h3>尊敬的%s:</h3>  欢迎注册天天生鲜' \
                   '请点击以下链接激活您的账号:<br/>' \
                   '<a href="http://127.0.0.1:8000/users/active/%s">' \
                   'http://127.0.0.1:8000/users/active/%s</a>' % \
                   (username, token, token)
    # 3. deliver the email; completed here following the identical call in Example #31
    send_mail(subject, message, from_email, recipient_list,
              html_message=html_message)
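The task is never called directly; registering it with @app.task means the Django view enqueues it through the broker. A one-line usage sketch:

# in the registration view, after generating the activation token:
send_active_mail.delay(username, email, token)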
Example #17
# http://stackoverflow.com/questions/24232744/scrapy-spider-not-following-links-when-using-celery

from celery.app import shared_task
from celery.app.base import Celery
from scrapy.crawler import Crawler
from scrapy.conf import settings
from scrapy import log, project, signals
from twisted.internet import reactor
from billiard import Process
from scrapy.utils.project import get_project_settings
from craigslist_sample.spiders.test2 import MySpider

from celery.utils.log import get_task_logger

app = Celery('tasks', broker='amqp://guest@localhost//')
app.config_from_object('celeryconfig')

logger = get_task_logger(__name__)

class UrlCrawlerScript(Process):
    def __init__(self, spider):
        Process.__init__(self)
        settings = get_project_settings()
        self.crawler = Crawler(settings)
        self.crawler.configure()
        # self.crawler.signals.connect(reactor.stop, signal=signals.spider_closed)
        self.spider = spider

    def run(self):
        self.crawler.crawl(self.spider)
        self.crawler.start()
Example #18
from celery.utils.log import get_task_logger
from domainmodeller.termextraction.JavaServicesTermExtractor import JavaServicesTermExtractor
from celery.app.base import Celery

from domainmodeller import settings
import domainmodeller.celeryconfig

logger = get_task_logger(__name__)

celery = Celery()
celery.config_from_object(domainmodeller.celeryconfig)


term_extractor = JavaServicesTermExtractor(settings.JAVA_SERVICES)
# Need a timeout because GATE can take a long time if given garbage text
@celery.task(time_limit=60)    
def extract_terms(raw_text, domain_model):
    terms = term_extractor.extract_terms(raw_text, domain_model)
    return terms
Example #19
# -*- encoding: utf-8 -*-
'''
Created on 2016-10-13

@author: huawei
'''

from celery.app.base import Celery
from celery.result import AsyncResult
from kombu.entity import Queue, Exchange
import time

# from test_nfs_task import TestNfsTask

test ="amqp://*****:*****@172.16.4.134:5672/cabbage_vhost"
app = Celery('cabbage',broker=test)
app.config_from_object("cabbage.cabbage_celery.celeryconfig")
# import celeryconfig
# app.config_from_object(celeryconfig)


for k,v in app.conf.items():
    print k,v
app.conf.update(
    CELERY_ROUTES={
        'test_ic_task.TestIcTask': {
            'queue': 'test2',
            'routing_key': 'test2'
        },
        # 'product_list_crawler.ProductListCrawlerTask': {'queue': 'celery', 'routing_key': 'celery'}
    })
# taskId = "de1d0b16-57b1-4128-87bc-3697f78ab6dc"

state = app.events.State()
print app.tasks
Example #20
def celery_app_from_logs_dir(logs_dir):
    return Celery(broker=RedisManager.get_broker_url_from_logs_dir(logs_dir),
                  accept_content=['pickle', 'json'])
Example #21
# http://stackoverflow.com/questions/24232744/scrapy-spider-not-following-links-when-using-celery

from celery.app import shared_task
from celery.app.base import Celery
from scrapy.crawler import Crawler
from scrapy.conf import settings
from scrapy import log, project, signals
from twisted.internet import reactor
from billiard import Process
from scrapy.utils.project import get_project_settings
from craigslist_sample.spiders.test2 import MySpider

from celery.utils.log import get_task_logger

app = Celery('tasks', broker='amqp://guest@localhost//')
app.config_from_object('celeryconfig')

logger = get_task_logger(__name__)


class UrlCrawlerScript(Process):
    def __init__(self, spider):
        Process.__init__(self)
        settings = get_project_settings()
        self.crawler = Crawler(settings)
        self.crawler.configure()
        # self.crawler.signals.connect(reactor.stop, signal=signals.spider_closed)
        self.spider = spider

    def run(self):
        self.crawler.crawl(self.spider)
Example #22
from __future__ import absolute_import, unicode_literals

import os
from celery.app.base import Celery

# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'project.settings')

app = Celery('project', backend='redis', broker='redis://localhost:6379')

# Using a string here means the worker doesn't have to serialize
# the configuration object to child processes.
# - namespace='CELERY' means all celery-related configuration keys
#   should have a `CELERY_` prefix.
app.config_from_object('django.conf:settings', namespace='CELERY')

# Load task modules from all registered Django app configs.
app.autodiscover_tasks()


@app.task(bind=True)
def debug_task(self):
    print('Request: {0!r}'.format(self.request))
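With autodiscover_tasks(), any installed Django app can contribute tasks from its own tasks.py using shared_task. A hedged sketch; the app and function names are hypothetical:

# someapp/tasks.py (hypothetical module)
from celery import shared_task


@shared_task
def add(x, y):
    # binds to the current Celery app, i.e. the one configured above
    return x + y

# usage from Django code: add.delay(2, 3)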
Example #23
from celery.app.base import Celery
# from celery.contrib import rdb
from billiard.context import Process
from scrapy.utils.project import get_project_settings
from celery.utils.log import get_task_logger
from scrapy.crawler import CrawlerProcess
# import user spiders
#from Crawlers.spiders import


# Create celery app
celery_app = Celery('tasks',
                    broker='redis://localhost:6379/0',
                    backend='redis://localhost:6379/0')
celery_app.config_from_object('celeryconfig')


class UrlCrawlerScript(Process):
    def __init__(self, spider):
        Process.__init__(self)
        settings = get_project_settings()
        self.crawler = CrawlerProcess(settings)
        # self.crawler.configure()
        # self.crawler.signals.connect(reactor.stop, signal=signals.spider_closed)
        self.spider = spider

    def run(self):
        self.crawler.crawl(self.spider)
        self.crawler.start()
        # reactor.run()
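UrlCrawlerScript is typically driven from a celery task so that each crawl gets its own process (Twisted's reactor cannot be restarted in-process). A hedged sketch of such a task, following the Stack Overflow pattern cited in the similar examples above:

@celery_app.task
def run_spider(spider):
    # run the crawl in a child process and block until it finishes
    crawler = UrlCrawlerScript(spider)
    crawler.start()
    crawler.join()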
Example #24
# -*- encoding: utf-8 -*-
'''
Created on 2016-08-10

@author: hua
'''
from celery.app.base import Celery

# server ="amqp://*****:*****@123.59.211.146:5672/cabbage_vhost"
# server ="amqp://*****:*****@123.59.211.146:5672/cabbage_vhost_url"
# server="amqp://*****:*****@10.0.137.88:5672/cabbage_vhost"
# server="amqp://*****:*****@192.168.109.38:5672/cabbage_vhost"

server = "amqp://*****:*****@172.16.4.134:5672/cabbage_vhost"

app = Celery('cabbage@huamac', backend="rpc://", broker=server)

# print "start job"
inspect = app.control.inspect()

av = inspect.active()
print av
print "---------------------------------------------"
result = inspect.stats()
print inspect.conf()
print "---------------------------------------------"
for key, v in inspect.active_queues().items():
    queue_names = [i['name'] for i in v]
    print key, queue_names
#
print "---------------------------------------------"
Example #25
# -*- encoding: utf-8 -*-
'''
Created on 2016-09-07

@author: huawei
'''
from celery.app.base import Celery
from kombu.entity import Queue, Exchange
import time
from test_hdfs_task import TestHdfsTask

test = "amqp://*****:*****@172.16.4.134:5672/cabbage_vhost"
app = Celery('cabbage', broker=test)
app.config_from_object("cabbage.cabbage_celery.celeryconfig")
# import celeryconfig
# app.config_from_object(celeryconfig)

# for k,v in app.conf.items():
#     print k,v
app.conf.update(
    CELERY_ROUTES={
        'test_hdfs_task.TestHdfsTask': {
            'queue': 'hdfs',
            'routing_key': 'hdfs'
        },
        #                  'product_list_crawler.ProductListCrawlerTask': {'queue': 'celery', 'routing_key': 'celery'}
    })
app.conf.update(
    CELERY_QUEUES=(Queue('hdfs', Exchange('hdfs'), routing_key='hdfs'), ))
print app.conf["CELERY_ACCEPT_CONTENT"]
print app.conf["CELERY_RESULT_BACKEND"]
Example #26
# -*- encoding: utf-8 -*-
'''
Created on 2016-09-09

@author: huawei
'''
from celery.app.base import Celery
# from kombu.entity import Exchange,Queue

app = Celery(
    'cabbage',
    backend="rpc://",
    broker=
    'amqp://*****:*****@172.16.4.134:5672/cabbage_vhost')
# app.config_from_object("cabbage.cabbage_celery.celeryconfig")
# app.conf.update(CELERY_QUEUES = ( Queue('test', Exchange('test'), routing_key='test'), Queue('test2', Exchange('test2'), routing_key='test2')),
#                 {u'test_both_task.TestBothTask': {'queue': 'both', 'routing_key': 'both'}, u'test_both_task.TestBoth': {'queue': 'both', 'routing_key': 'both'}, u'test_mac_task.TestMacTask': {'queue': 'mac', 'routing_key': 'mac'}, u'test_ubuntu_task.TestUbuntuTask': {'queue': 'ubuntu', 'routing_key': 'ubuntu'}}
#                 )
#
#
# CELERY_QUEUES = ( Queue('celery', Exchange('celery'), routing_key='celery'),)
#
#
# CELERY_ROUTES ={u'test_both_task.TestBothTask': {'queue': 'both', 'routing_key': 'both'}, u'test_both_task.TestBoth': {'queue': 'both', 'routing_key': 'both'}, u'test_mac_task.TestMacTask': {'queue': 'mac', 'routing_key': 'mac'}, u'test_ubuntu_task.TestUbuntuTask': {'queue': 'ubuntu', 'routing_key': 'ubuntu'}}
#
# app.conf.update(CELERY_ROUTES=CELERY_ROUTES)
#
# CELERY_QUEUES = ( Queue('default', Exchange('default'), routing_key='default'),
# )

Example #27
from celery import shared_task
from celery.app.base import Celery
from customcrawler import celeryconfig
from customcrawler.retry_mechanism import retry_session
from customcrawler.models import URL_details
from sqlalchemy.orm import sessionmaker
from customcrawler.models import db_connect

app = Celery('customcrawler', broker='amqp://')
app.config_from_object(celeryconfig)

session_retry = retry_session(retries=5)
headers = {'User-Agent': 'Mozilla/5.0'}


class ProcessTask(object):
    def __init__(self):

        engine = db_connect()
        self.Session = sessionmaker(bind=engine)

    def run(self, base_url, job_data_id):

        url = "http://axe.checkers.eiii.eu/export-jsonld/pagecheck2.0/?url=" + base_url

        r = session_retry.get(url=url, headers=headers)

        data = r.json()

        total_violations = 0
        total_verify = 0
Example #28
import os
from datetime import datetime
from celery.app.base import Celery
from celery.schedules import crontab
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session
from sqlalchemy.orm import sessionmaker

celery = Celery('tasks')

default_config = dict(task_ignore_result=False,
                      task_store_errors_even_if_ignored=True,
                      task_track_started=True,
                      timezone='Asia/Singapore',
                      max_retries=3)

try:
    from app import config

    config_local_dict = {k: v for k, v in config.__dict__.items()}
    default_config.update(config_local_dict)
except Exception as e:
    print("No config for local machine")

celery.conf.update(default_config)

celery.conf.beat_schedule = {
    'run_get_scheduled_results': {
        'task': 'get_scheduled_results',
        'schedule':
Example #29
routes = {
    u'test_ubuntu_task.TestUbuntuTask': {
        'queue': 'ubuntu',
        'routing_key': 'ubuntu'
    }
}

mac_routes = {
    u'test_mac_task.TestMacTask': {
        'queue': 'huawei',
        'routing_key': 'huawei'
    }
}

app = Celery(
    'cabbage',
    broker='amqp://*****:*****@172.16.4.134:5672/cabbage_vhost')
app.config_from_object("cabbage.cabbage_celery.celeryconfig")
app.conf.update(CELERY_ROUTES=routes)

app.send_task("test_ubuntu_task.TestUbuntuTask",
              kwargs={
                  "jobId": "job-a986d7d9-4950-4c45-a3c5-6553d81d5a36",
                  "no": 2
              })

origRoutes = app.conf["CELERY_ROUTES"]
print app.tasks
print "CELERY_ROUTES:[%s]" % app.conf["CELERY_ROUTES"]

origRoutes.update(mac_routes)
Example #30
import os

from celery import platforms
# Prevent main celery proc from killing pre-forked procs,
# otherwise killing celery main proc causes sync main firex proc
# to hang since broker will remain up.
platforms.set_pdeathsig = lambda n: None

from celery.app.base import Celery
from firexkit.task import FireXTask
from celery.signals import celeryd_init
from firexapp.submit.install_configs import install_config_path_from_logs_dir, load_existing_install_configs

firex_app_name = '.'.join(__name__.split(".")[:-1])
app = Celery(strict_typing=False,
             task_cls='%s:%s' % (FireXTask.__module__, FireXTask.__name__))
app.config_from_object(firex_app_name + '.default_celery_config')


@celeryd_init.connect
def add_items_to_conf(conf=None, **_kwargs):
    conf.uid = app.backend.get('uid').decode()
    conf.logs_dir = app.backend.get('logs_dir').decode()
    conf.link_for_logo = conf.logs_dir
    conf.logs_url = None
    conf.resources_dir = app.backend.get('resources_dir').decode()

    install_config = install_config_path_from_logs_dir(conf.logs_dir)
    assert os.path.isfile(install_config), \
        f"Install config missing from run, firexapp submit is expected to have populated it: {install_config}"
Example #31
# In the project that runs the celery worker,
# the code below must be added manually to initialize the Django environment
# import os
# import django
# os.environ.setdefault("DJANGO_SETTINGS_MODULE", "Dailyfresh.settings")
# django.setup()

from celery.app.base import Celery
from django.core.mail import send_mail
from Dailyfresh import settings

app = Celery('Dailyfresh', broker='redis://127.0.0.1:6379/1')


@app.task
def send_active_mail(username, email, token):
    """Send the activation email."""
    subject = "天天生鲜用户激活"  # subject; must not be empty, or an error is raised
    message = ""  # plain-text body
    from_email = settings.EMAIL_FROM  # sender
    recipient_list = [email]  # recipients; must be a list
    # HTML body of the email
    html_message = ('<h3>尊敬的%s:感谢注册天天生鲜</h3>'
                    '请点击以下链接激活您的帐号:<br/>'
                    '<a href="http://127.0.0.1:8000/users/active/%s">'
                    'http://127.0.0.1:8000/users/active/%s</a>') % (
                        username, token, token)
    send_mail(subject,
              message,
              from_email,
              recipient_list,
              html_message=html_message)
Example #32
# -*- encoding: utf-8 -*-
'''
Created on 2016-08-26

@author: huawei
'''
from celery.app.base import Celery
from concurrent.futures.thread import ThreadPoolExecutor
from functools import partial
import collections
import time

appCelery = Celery(
    'cabbage',
    backend="rpc://",
    broker='amqp://*****:*****@10.0.137.88:5672/cabbage_vhost'
)

INSPECT_METHODS = ('stats', 'active_queues', 'registered', 'scheduled',
                   'active', 'reserved', 'revoked', 'conf')
worker_cache = collections.defaultdict(dict)
pool = ThreadPoolExecutor(4)


def update_workers(app, workername=None):
    futures = []
    destination = None
    #     timeout = app.inspect_timeout / 1000.0
    inspect = app.control.inspect()
    for method in INSPECT_METHODS:
        print getattr(inspect, method)()
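The snippet allocates a thread pool and a futures list but, as truncated, calls each inspect method synchronously. A hedged sketch of the parallel variant the setup suggests:

def update_workers_async(app):
    inspect = app.control.inspect()
    # fan the blocking inspect calls out to the thread pool
    futures = [pool.submit(getattr(inspect, method))
               for method in INSPECT_METHODS]
    for future in futures:
        print future.result()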
Example #33
import ConfigParser
import os
import sys
import threading
import time

# imports reconstructed from the matching cabbage example (#14 above)
from celery.app.base import Celery
from celery.contrib.methods import task_method
from cabbage.job.task import ITask
from zope.interface.declarations import implementer

# if "/Users/hua/workspace/python/cabbage" not in sys.path:
#         sys.path.append("/Users/hua/workspace/python/cabbage")
#         sys.path.append("/Users/hua/workspace/python/cabbage/src")

cfg = ConfigParser.ConfigParser()
cfg.read(os.getcwd().split("cabbage")[0] + 'cabbage/cabbage.cfg')



app = Celery('cabbage',backend="amqp",broker='amqp://172.16.4.134')
app.config_from_object("cabbage.test.celery_test.celeryconfig")

@implementer(ITask)  
class T:
    def __init__(self):
        pass
    
    @app.task(bind=True, filter=task_method,name="cabbage.test.test_celery.T.run")     
    def run(self):
        print "121212"
    
    @app.task(bind=True,filter=task_method,name="cabbage.test.test_celery.T.run2")     
    def run2(self,a,b):
        print "121212"
        return a*b