def hello_world(x=16, y=16):
    """Kick off an async add(x, y) task and return its id/description as JSON.

    Query-string parameters ``x`` and ``y`` override the defaults.
    """
    x = int(request.args.get("x", x))
    y = int(request.args.get("y", y))
    async_res = add.apply_async((x, y))
    ctx = {"id": async_res.task_id, "x": x, "y": y}
    description = "add((x){}, (y){})".format(ctx['x'], ctx['y'])
    task_id = "{}".format(ctx['id'])
    return jsonify(result=description, goto=task_id)
def hello_world(x=16, y=16):
    """Queue an add(x, y) task and render a page describing where to poll it."""
    x = int(request.args.get("x", x))
    y = int(request.args.get("y", y))
    async_res = add.apply_async((x, y))
    ctx = {"id": async_res.task_id, "x": x, "y": y}
    task = "wait for %d seconds and then return %d" % (x, y)
    # NOTE: computed but unused below; the template gets the literal URL.
    goto = "{}".format(ctx['id'])
    return render_template("task.html", task=task,
                           goto="http://127.0.0.1:5000/result/",
                           task_id=str(ctx['id']))
def hello_world(x=16, y=16):
    """Enqueue add(x, y) asynchronously and reply with JSON metadata.

    ``x``/``y`` may be overridden via the query string.
    """
    x = int(request.args.get("x", x))
    y = int(request.args.get("y", y))
    job = add.apply_async((x, y))
    info = {"id": job.task_id, "x": x, "y": y}
    return jsonify(
        result="add((x){}, (y){})".format(info['x'], info['y']),
        goto="{}".format(info['id']),
    )
def new_task():
    """Enqueue add(10, 20), remember its AsyncResult, and return the task id.

    Bug fix: the original passed an undefined name ``id_result`` to
    ``url_for``, raising a NameError that the broad except silently
    printed; the task's real id is what the route expects.
    """
    try:
        result = add.apply_async(args=[10, 20])
        list_result.append(result)
        url_for('result_task', id_result=result.id)
    except Exception as e:
        print(str(e))
    return result.id
def do(n=10000):
    """Asynchronously enqueue n add tasks, printing progress on one line."""
    for idx in xrange(0, n):
        async_res = add.apply_async([idx, 0])
        sys.stdout.write('\r{}:{}%,{},{}'.format(n, 100 * idx/n, idx,
                                                 async_res.id))
        sys.stdout.flush()
    sys.stdout.write('\n done!')
def bulk_add(num=1000):
    """Send `num` add tasks through one shared publisher.

    Returns a (t_in, t_out) pair: seconds spent enqueueing versus seconds
    spent waiting for every result.
    """
    publisher = add.get_publisher()
    try:
        started = time()
        results = [add.apply_async(args=(i, i + 1), publisher=publisher)
                   for i in range(num)]
        t_in = time() - started
    finally:
        # Always release the channel and its underlying connection.
        publisher.close()
        publisher.connection.close()
    started = time()
    for res in results:
        res.get()
    t_out = time() - started
    return t_in, t_out
def arrange_tasks():
    """Demonstrate the different ways of dispatching the add task.

    Doc fix: the original comments claimed "executes 10 seconds from now"
    for both delayed calls, but the code uses countdown=5 and eta=+6s.
    """
    # normal call
    # add(0, 10)
    # send task message
    add.delay(1, 10)
    add.apply_async((2, 10))
    # executes 5 seconds from now (relative countdown)
    add.apply_async((3, 10), countdown=5)
    # executes 6 seconds from now (absolute eta)
    add.apply_async((4, 10), eta=datetime.utcnow() + timedelta(seconds=6))
    # linking (callbacks / errbacks)
    add.apply_async((5, 10), link=add.s(7))
def bulk_add(num=1000):
    """Benchmark bulk publishing of add tasks over a reused publisher.

    Returns (t_in, t_out): publish duration and result-collection duration.
    """
    pub = add.get_publisher()
    try:
        t0 = time()
        pending = [add.apply_async(args=(n, n + 1), publisher=pub)
                   for n in range(num)]
        t_in = time() - t0
    finally:
        # Close the publisher and its connection even on failure.
        pub.close()
        pub.connection.close()
    t0 = time()
    [job.get() for job in pending]
    t_out = time() - t0
    return t_in, t_out
# vim: set fileencoding=utf-8
from tasks import add, multiply

# Attach a multiply(i) callback to each add(i, i + 1) result.
for i in range(10):
    add.apply_async((i, i + 1), link=multiply.s(i))
from tasks import add, generate_picture #result = generate_picture.apply_async((u'first', 100, 50, u'JPEG', 96.0, 7), queue='TEMPLATE.Q') #result = add.apply_async((7, 3), queue='hipri') result = add.apply_async((2, 2), queue='hipri') #print result.backend #while not result.ready(): # pass print result.status
def index(): result = add.apply_async(args=[40, 69]) print result return str(result.get())
def index():
    """Queue add(13, 14), fetch its value via AsyncResult, and render it."""
    pending = add.apply_async((13, 14))
    retval = add.AsyncResult(pending.id).get(timeout=1.0)
    return render_template('index.html', retval=retval)
from tasks import add, mul x = random.choice([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]) y = random.choice([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]) print "x={0}, {1}".format(x, y) result = mul.delay(x, y) print result.id # message id print "mul.delay(x, y)={0}".format(result.get()) result = mul.apply_async((x, y), countdown=5) # Same as add.delay(2, 2) print result.id # message id print "mul.delay(x, y)={0}".format(result.get()) result = add.apply_async((2, 2)) print result.get() print 'result.successful(): {0}'.format(result.successful()) print result.state # SUCCESS # Enhance above, create/send mesage to queue name 'lopri' # add.apply_async((2, 2), queue='lopri', countdown=5) # get propagated error if any try: result = add.delay(x) result.get() # propagate=True by default except Exception as e: print '=========' print e # disable propagated error if any
from tasks import add
from celery import group

# Launch 99 individual tasks, waiting for each result in turn.
for i in range(1, 100):
    single = add.apply_async((i, i), )
    print(i, single.get())

# Launch 4 tasks together as one group on the high-priority queue.
numbers = [(2, 2), (4, 4), (8, 8), (16, 16)]
res = group(add.s(i, j) for i, j in numbers).apply_async(
    queue='priority.high', serializer="json")

# Collect the grouped results here.
print(res.get())
import gevent
import redis_lock
from redis import Redis
from tasks import add
from celery.result import AsyncResult

ids = []
connection = Redis(host='localhost', port=6379, db=0)

# Enqueue up to 15 add tasks, each attempt guarded by a shared Redis lock.
for i in range(15):
    lock = redis_lock.Lock(connection, "name-of-the-lock")
    if lock.acquire(blocking=False):
        queued = add.apply_async([5, i])
        print(queued.task_id)
        ids.append(queued.task_id)
        lock.release()
    else:
        print("Someone else has the lock.")

# for i in range(15):
#     result = add.apply_async([5, i])
#     print(result.task_id)
#     ids.append(result.task_id)

# Block on and print every result that was actually queued.
for _id in ids:
    task = AsyncResult(_id)
    print(task.get())
def background_task():
    """Start add(10, 20) in the background and redirect to its status page."""
    job = add.apply_async(args=[10, 20])
    return redirect(url_for('taskstatus', task_id=job.id))
from tasks import add, substract, multiply
import time

# Fire one task per dedicated queue and poll its status before/after a wait.
result = add.apply_async((4, 4), queue='add')
print(f"Job_ID{result}")
print(f"Result status {result.status}")
time.sleep(15)
print(f"Result status {result.status}")

result = substract.apply_async((10, 4), queue='substract')
print(f"Job_ID{result}")
print(f"Result status {result.status}")
time.sleep(15)
print(f"Result status {result.status}")

# Bug fix: the multiply result was previously discarded, so the status
# lines below reported the stale substract result instead of multiply's.
result = multiply.apply_async((20, 30), queue='multiply')
print(f"Job_ID{result}")
print(f"Result status {result.status}")
time.sleep(15)
print(f"Result status {result.status}")
from datetime import datetime, timedelta
import time
from celery import states
from tasks import app, add

# NOTE: After run client, wait 15 seconds and run a worker
if __name__ == '__main__':
    pending = []
    pending.append(add.apply_async((2, 3), expires=10))

    # Be careful using a datetime as the expiry: Celery defaults to UTC,
    # so build the deadline in the app's configured timezone.
    deadline = datetime.now(app.timezone) + timedelta(seconds=10)
    pending.append(add.apply_async((2, 3), expires=deadline))

    time.sleep(30)
    for result in pending:
        print('{0}: {1}, {2}'.format(result.task_id, result.state,
                                     result.result))
        assert result.state == states.REVOKED
def test_async():
    """Run add(2, 2) asynchronously and log the value it returned."""
    value = add.apply_async((2, 2)).get()
    log.info('add.apply_async(2, 2) = %s' % value)
def test_linking():
    """Link mul(·, 5) as a callback to add(2, 2); print both results."""
    linked = add.apply_async((2, 2), link=mul.s(5))
    print(linked.get())
    print(list(linked.collect()))
from tasks import add

# delay() and apply_async() are two spellings of the same enqueue call.
first = add.delay(4, 4)
print(add.apply_async((2, 3)))
print(first.get(timeout=1))
from tasks import app, add

if __name__ == '__main__':
    # Retry *publishing* the message on broker errors: up to 3 retries,
    # starting immediately and then 0.2s apart (capped at 0.2s).
    add.apply_async(
        (2, 3),
        retry=True,
        retry_policy={
            'max_retries': 3,
            'interval_start': 0,
            'interval_step': 0.2,
            'interval_max': 0.2,
        },
    )
from tasks import add, echo, hello, multiply

if __name__ == "__main__":
    # Route each task to its dedicated queue, all delayed by 10 seconds.
    add.apply_async((2, 2), queue='fast', countdown=10)
    multiply.apply_async((5, 5), queue='fast', countdown=10)
    echo.apply_async(("This is the message of echo task", False),
                     queue='default', countdown=10)
    hello.apply_async(("Hello", "Fran"), queue='processing', countdown=10)
from celery import signature
from tasks import add

if __name__ == '__main__':
    # shortcut: add.s(2, 2)
    # or: signature('tasks.add', args=(2, 2), countdown=10)
    sig = add.signature((2, 2), countdown=3)
    print(sig, json.dumps(sig))

    # Invoke locally, in this process.
    local_val = sig()
    print('Call local: %s' % local_val)

    # Invoke remotely through the broker.
    # NOTE: ~sig is equal to sig.delay().get()
    remote_val = sig.delay().get()
    print('Call remote: %s' % remote_val)

    # Partially bind the first argument; supply the rest at call time.
    partial = add.s(2)
    print('Partial: %s' % partial.delay(3).get())

    # An immutable signature ignores the parent task's result when linked.
    linked = add.apply_async((2, 2),
                             link=add.signature((3, 3), immutable=True))
    print('Use signature in task link: %s' % linked.get())
async def events_ws_sender(websocket):
    """Forward each broadcast 'task' event through the add celery task.

    Each message is dispatched to the worker and its (blocking) result is
    sent back down the websocket as text.
    """
    async with broadcast.subscribe(channel='task') as subscriber:
        async for event in subscriber:
            outcome = add.apply_async(([event.message]), serializer="json")  # celery task
            await websocket.send_text(outcome.get())
from tasks import add, add_kwargs

# Positional arguments: delay() vs. apply_async().
print(add.delay(1, 2).get())
print(add.apply_async([1, 2]).get())

# Mixed positional and keyword arguments.
print(add_kwargs.delay(1, 2, c=3, d=4).get())
print(add_kwargs.apply_async([1, 2], {'c': 3, 'd': 4}).get())
import time
from celery import group, chain, chord
from tasks import sendmail, add, take, no_argument, xsum

# With a single celery queue, tasks execute one after another.
result = sendmail.delay(dict(to='www.com'))
# result1 = sendmail.apply_async(args=(dict(to='*****@*****.**'), ))
# result2 = add.apply_async(args=(2, 3))

# Multiple queues run concurrently; pass queue= to pick the route.
result3 = take.apply_async(args=(10, 1), queue='web_tasks')

# Linked task: the previous result is passed into the next task.
result4 = add.apply_async(args=(2, 2), link=add.s(3), queue='default')
# # Linked task: the previous result is passed into the next task.
# result5 = add.apply_async(args=(2, 2), link=add.s(4), queue='default')

# Linked tasks whose callback ignores the parent result (immutable sig).
result6 = add.apply_async(args=(2, 2), link=no_argument.si(),
                          queue='default')
result7 = add.apply_async(args=(2, 2),
                          link=no_argument.signature(immutable=True),
                          queue='default')

# Expiry: drop the task if it has not started within 10 seconds.
result8 = add.apply_async(args=(2, 3), expires=10, queue='default')

# Parallel scheduling; the result is a list.
result9 = group(add.s(i, i) for i in range(10))(queue='default')

# Serial scheduling; the chained result is 16.
result10 = chain(add.s(2, 2), add.s(4), add.s(8))()

# chord - a group with a callback
# result11 = chord((add.s(i, i) for i in range(10)), xsum.s())(queue='default')
from tasks import add, add_in_priority
import time

# Queue routing combined with per-message priority values.
add.apply_async((1, 1), queue='q1')
add.apply_async((1, 1), queue='q2')
add.apply_async((1, 1), priority=10, queue='q1')
time.sleep(1)
add_in_priority.apply_async((1, 1), priority=0, queue='q1')
def get(self):
    """Tornado handler: run add(6, 4) through celery and write the result.

    The yield hands the pending result to the coroutine machinery before
    the (already-resolved) value is fetched and written out.
    """
    pending = add.apply_async(args=[6, 4], serializer='pickle')
    yield pending
    # result = add.delay(4, 4)
    self.write(str(pending.get()))
    self.finish()
from tasks import add, xsum

# Basic example
res = add.delay(5, 5)
print(res.get())

# Same call through apply_async
res = add.apply_async((5, 5))
print(res.get())

# Via a subtask signature
sig = add.s(5, 5)
res = sig.apply_async()
# res = sig.delay()
print(res.get())

# Callback Example: add 8 to the result of 2 + 2
res = add.apply_async((2, 2), link=add.s(8))
print(res.get())

# Chaining
from celery import chain
pipeline = chain(add.s(2, 2), add.s(4), add.s(8))  # 2 + 2 + 4 + 8
res = pipeline.delay()
print(res.get())

pipeline = add.s(2, 2) | add.s(4) | add.s(8)
res = pipeline.delay()
print(res.get())

# Grouping (Parallel)
from tasks import add, mul, pdf x = random.choice([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]) y = random.choice([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]) print "x={0}, {1}".format(x, y) result = mul.delay(x, y) print result.id # message id print "mul.delay(x, y)={0}".format(result.get()) result = mul.apply_async((x, y), countdown=5) # Same as add.delay(2, 2) print result.id # message id print "mul.delay(x, y)={0}".format(result.get()) result = add.apply_async((2, 2)) print result.get() print 'result.successful(): {0}'.format(result.successful()) print result.state # SUCCESS # Enhance above, create/send mesage to queue name 'lopri' # add.apply_async((2, 2), queue='lopri', countdown=5) # get propagated error if any try: result = add.delay(x) result.get() # propagate=True by default except Exception as e: print '=========' print e # disable propagated error if any
#!/usr/bin/env python # encoding: utf-8 # from __future__ import absolute_import from tasks import add # add.apply_async((1, ), priority=1) # add.apply_async((1, ), priority=7) # add.apply_async((2, ), priority=6) # add.apply_async((3, ), priority=5) # add.apply_async((4, ), priority=8) # add.apply_async((5, ), priority=9) # after 5 seconds, execute task # add.apply_async(("after 5 seconds, execute task", ), countdown=5) # At 2018-02-07 08:41:00, execute task # add.apply_async(args=("At xxxx-xx-xx xx:xx:xx, execute task", ), eta=datetime(2018,2,7,8,57,0)) # At 2018-02-07 08:41:00, execute task # add.apply_async((1,2),eta=datetime(2018,2,7,9,28,0)) # add.apply_async((2,6), countdown=5) result = add.apply_async((88888, 6), countdown=5) print 'task_status----->', result.status print 'task_id----->', result.id print 'task_result----->', result.get()
import random
from tasks import add
from datetime import datetime
from datetime import timedelta
from decouple import config

QNT_TASKS = config('QNT_TASKS', default=1000, cast=int)
NOW = datetime.utcnow()
print(NOW.isoformat())

# Schedule QNT_TASKS add calls with random operands; since NOW is fixed,
# every task shares the same eta (10 seconds after script start).
for num, _ in enumerate(range(QNT_TASKS)):
    print(num)
    a = random.randint(1, 100)
    b = random.randint(-100, 1)
    LATER = NOW + timedelta(seconds=10)
    add.apply_async((a, b), eta=LATER)
from tasks import download_img, add

if __name__ == '__main__':
    url = ''
    # Fetch the image on the default queue; do the math on the 'add' queue.
    download_img.apply_async(args=(url, ))
    add.apply_async(args=(3, 4), queue='add')
from tasks import add

# Enqueue add(4, 4) to run 10 seconds from now, then block for the value.
delayed = add.apply_async((4, 4), countdown=10)
print(delayed.get())
# run_basic.py
from tasks import add

# Three equivalent ways of enqueueing the add task.
task1 = add.delay(2, 5)
task2 = add.apply_async(args=[4, 2])
task3 = add.apply_async(kwargs={"x": 3, "y": 6})
jobs = [task1, task2, task3]

print(" 작업 ID 조회 ".center(50, "="))  # header: look up task ids
for idx, job in enumerate(jobs, start=1):
    print(f" task{idx}: {job.id}")

print(" 작업 완료여부 조회 ".center(50, "="))  # header: done yet?
for idx, job in enumerate(jobs, start=1):
    print(f" task{idx}: {job.ready()}")

# header: fetch the results (blocks until every task completes)
print(" 결과 데이터 조회 (완료될때까지 Pause) ".center(50, "="))
for idx, job in enumerate(jobs, start=1):
    print(f" task{idx}: {job.get()}")

print(" 작업 완료여부 조회 ".center(50, "="))  # header: done yet?
for idx, job in enumerate(jobs, start=1):
    print(f" task{idx}: {job.ready()}")
from tasks import app, add

if __name__ == '__main__':
    # Send add(2, 3) to the high-priority queue and wait for the sum.
    pending = add.apply_async((2, 3), queue='priority.high')
    answer = pending.get()
    print('Result: %s' % answer)