import celery
import logging
import thread
import threading
import time

from celery import events

import taskflow.engines.dist_engine.celeryconfig as CELERY_CONFIG
from taskflow.engines.dist_engine import dist_translator
from taskflow.engines.dist_engine import dtclient
from taskflow import states

LOG = logging.getLogger(__name__)

# Module-level Celery app configured from the taskflow celeryconfig module.
celery = celery.Celery()
celery.config_from_object(CELERY_CONFIG)


class Events(threading.Thread):
    """Class to capture taskflow events"""
    # Daemon thread intended to watch Celery events on behalf of an engine.
    # NOTE(review): the class body appears truncated in this view; more
    # methods presumably follow after start().

    def __init__(self, app, engine):
        # app: the Celery application whose event stream is captured.
        # engine: the engine this capture thread serves.
        threading.Thread.__init__(self)
        self.daemon = True  # don't block interpreter shutdown
        self._celery_app = app
        self._engine = engine
        self._running = False  # run-state flag (consumer not visible in this chunk)
        self.stop_event = threading.Event()  # set to request shutdown

    def start(self):
        # Explicit delegation to Thread.start() rather than plain inheritance.
        threading.Thread.start(self)
from dalalbull.celery import app
import urllib2
import json
import datetime
from .models import User,Portfolio,Stock_data,Transaction,History,Pending,Old_Stock_data
from login.consumers import niftyChannelDataPush,leaderboardChannelDataPush,graphDataPush,portfolioDataPush
###-----------
import celery
from celery import Celery
from datetime import timedelta

# Module-level Celery app configured from this module's own attributes.
celery = Celery(__name__)
celery.config_from_object(__name__)
#-------------
#Deletes all old stock data
#======Tasks======#
#-------------
# NOTE(review): definition truncated in this view -- the task body is not
# visible past the def line.
@celery.task
def say_hello():
#!/usr/bin/env python
"""UDP server handler that forwards captured DNS packets to a Celery worker."""
import SocketServer
import ConfigParser as CFG
import logging
import celery
import indexdns
import base64
import celeryconfig
import argparse

# Celery app used to dispatch indexing tasks; broker/backend come from
# the celeryconfig module.
celery = celery.Celery('indexdns')
celery.config_from_object('celeryconfig')


class DNSReceiver(SocketServer.BaseRequestHandler):
    """Handle one incoming UDP datagram and enqueue it for indexing."""

    def __init__(self, request, client_address, server):
        logger = logging.getLogger()
        logger.info("Server started")
        SocketServer.BaseRequestHandler.__init__(self, request, client_address, server)

    def handle(self):
        """Log the sender address and hand the raw packet to the indexdns task."""
        logger = logging.getLogger()
        # For UDP servers, self.request is a (data, socket) pair.
        data = self.request[0]
        logger.info("%s" % self.client_address[0])
        # Removed leftover debug `print "got packet"`; the sender is already
        # logged above (matches the later revision of this file).
        # base64-encode so the binary payload serializes safely in the
        # Celery message.
        indexdns.indexp.delay(base64.b64encode(data))
# coding: utf-8 import os import sys sys.path.append('example_project') os.environ['DJANGO_SETTINGS_MODULE'] = 'settings_server' import django django.setup() from .conf import config import celery celery = celery.Celery(name="django_rpc.server") celery.config_from_object(config) celery.autodiscover_tasks(['django_rpc.celery'])
"""Celery worker entry point for the fitbit subscriber service."""
import os
from celery import Celery
import celery
from django.conf import settings

# Django settings must be resolvable before the Celery app reads them.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'subscriber.settings')

# This is used when the service is run by Celery, as a worker.
# NOTE(review): this app instance shadows the `import celery` module
# reference above, which appears unused in this chunk.
celery = Celery('fitbit_subscriber')
celery.config_from_object(settings)
# Lambda defers INSTALLED_APPS evaluation until task autodiscovery runs.
celery.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
"""
Background task handler combining Celery and Flask.

:copyright: (c) 2016 Pinn Technologies, Inc.
:license: All rights reserved
"""
import celery
from dock import worker

# If you have application modules, say in a common folder,
# you can now import and use within containers

# Celery app wired to the broker configured on the shared worker object;
# the same object also supplies the rest of the Celery configuration.
celery = celery.Celery(broker=worker.default.broker_url)
celery.config_from_object(worker.default)


@celery.task()
def sample(x):
    """Sample task: print *x* (Python 2 print) and return it unchanged."""
    print x
    return x
# -*- coding:utf-8 -*- __author__ = "aleimu" __date__ = "2018-09-29" __doc__ = """使用celery和redis做异步服务,如向手机发送短信提醒,发邮件等""" import celery import traceback from tools.utils.logger import logger celery = celery.Celery() celery.config_from_object('cache/celery_config') # celery define sms_queue = 'sub_push' sms_template = 'push_srv.common_use' # 使用短信通用模板,短信全部内容由自己定 SMS_FLAG = True # 发送短信 def send_celery(telephones, msg, celery_route=sms_template, queue=sms_queue): if not SMS_FLAG: return False logger.info('start send sms,telephone:%s,msg:%s' % (telephones, msg)) try: if not telephones: logger.info('telephones is null') return False if not isinstance(telephones, list): telephones = [telephones] for telephone in telephones: result = celery.send_task(celery_route, args=[telephone, msg],
"""Celery application object for celery_module."""
import celery

# `include` pre-imports celery_module.tasks on worker startup so its tasks
# register; the rest of the configuration comes from celery_module.config.
celery = celery.Celery('celery_module', include=['celery_module.tasks'])
celery.config_from_object('celery_module.config')

if __name__ == '__main__':
    # No standalone CLI behaviour; the module is only imported by a worker.
    pass
#!/usr/bin/env python import SocketServer import ConfigParser as CFG import logging import celery import indexdns import base64 import celeryconfig import argparse celery = celery.Celery('indexdns') celery.config_from_object('celeryconfig') class DNSReceiver(SocketServer.BaseRequestHandler): def __init__(self, request, client_address, server): logger = logging.getLogger() logger.info("Server started") SocketServer.BaseRequestHandler.__init__(self, request, client_address, server) return def handle(self): logger = logging.getLogger() data = self.request[0] logger.info("%s" % self.client_address[0]) print "got packet" indexdns.indexp.delay(base64.b64encode(data)) if __name__ == "__main__":
        # (continuation of a patched chord-result unpacker; the enclosing
        # function headers are outside this view -- indentation inferred)
        if state in PROPAGATE_STATES:
            # retval is an Exception
            # return '{}: {}'.format(retval.__class__.__name__, str(retval))
            # return an empty list in case a task raises an exception
            return []
        return retval

    # Monkey-patch the redis backend so chord results tolerate failed tasks.
    celery.backends.redis.RedisBackend._unpack_chord_result = _unpack_chord_result
    return celery


# patch_celery() returns the (patched) celery module, from which the app
# instance is then constructed.
celery = patch_celery().Celery(__name__)
celery.config_from_object(CeleryConfig)


def one_sentence_per_doc(doc):
    """Enforce one sentence per doc to help with dependency parsing."""
    # Mark only the first token as a sentence start so the parser treats
    # the whole doc as a single sentence.
    doc[0].sent_start = True
    for i in range(1, len(doc)):
        doc[i].sent_start = False
    return doc


# load spaCy model and set up pipeline
nlp = en_core_web_sm.load()
nlp.add_pipe(one_sentence_per_doc, before='parser')

# load the opinion lexicon to be used for sentiment analysis
"""Celery setup - raven hook and configuration."""
import celery

from atenvironment import environment


@environment('DSN')
def init_sentry(dsn_str):
    """Initialise Sentry error reporting with the Celery integration.

    The sentry imports are done lazily so this module still imports when
    sentry_sdk is not installed.  Previously the imports were commented
    out at module level, so calling this function raised NameError.
    """
    import sentry_sdk
    from sentry_sdk.integrations.celery import CeleryIntegration

    sentry_sdk.init(dsn_str, integrations=[CeleryIntegration()])

# Sentry reporting is disabled by default; uncomment to enable.
#init_sentry()

# Module-level Celery app configured from the tsa celeryconfig module.
celery = celery.Celery()
celery.config_from_object('tsa.celeryconfig')
#!/usr/bin/env python import SocketServer import ConfigParser as CFG import logging import celery import indexdns import base64 import celeryconfig import argparse celery = celery.Celery("indexdns") celery.config_from_object("celeryconfig") class DNSReceiver(SocketServer.BaseRequestHandler): def __init__(self, request, client_address, server): logger = logging.getLogger() logger.info("Server started") SocketServer.BaseRequestHandler.__init__(self, request, client_address, server) return def handle(self): logger = logging.getLogger() data = self.request[0] logger.info("%s" % self.client_address[0]) indexdns.indexp.delay(base64.b64encode(data)) if __name__ == "__main__": parser = argparse.ArgumentParser(description="Capture DNS forward packets")
"""Flask + Celery + peewee application wiring for a Twitter-backed app."""
import celery
import logging

from flask import Flask
from flask.ext.assets import Environment
from flask.ext.compress import Compress
from flask_peewee.db import Database
from tweepy import API, OAuthHandler

from . import settings

# The Flask app and the Celery app share the same settings object.
app = Flask(__name__)
app.config.from_object(settings)

celery = celery.Celery()
celery.config_from_object(settings)

# Flask extensions bound to the app.
assets = Environment(app)
compress = Compress(app)
db = Database(app)

# Twitter API client built from the TWITTER section of the app config.
auth_config = lambda k: app.config["TWITTER"][k]
auth = OAuthHandler(auth_config("consumer_key"), auth_config("consumer_secret"))
auth.set_access_token(auth_config("access_token"), auth_config("access_token_secret"))
api = API(auth)

# logging.Formatter-style template.
# NOTE(review): the string literal is truncated in this view; its internal
# line breaks are reconstructed here.
log_format = """
Message type: %(levelname)s
Location: %(pathname)s:%(lineno)d