Code example #1
File: schedulers.py Project: Arttii/celerybeat-mongo
    def __init__(self, *args, **kwargs):

        if hasattr(current_app.conf, "CELERY_MONGODB_SCHEDULER_DB"):
            db = current_app.conf.CELERY_MONGODB_SCHEDULER_DB
        else:
            db = "celery"
        if hasattr(current_app.conf, "CELERY_MONGODB_SCHEDULER_COLLECTION") \
            and current_app.conf.CELERY_MONGODB_SCHEDULER_COLLECTION:
            collection = current_app.conf.CELERY_MONGODB_SCHEDULER_COLLECTION
        else:
            collection = "schedules"

        if hasattr(current_app.conf, "CELERY_MONGODB_SCHEDULER_URL"):

            connection = Connection(
                current_app.conf.CELERY_MONGODB_SCHEDULER_URL)
            get_logger(__name__).info(
                "backend scheduler using %s/%s:%s",
                current_app.conf.CELERY_MONGODB_SCHEDULER_URL, db, collection)
        else:
            connection = Connection()

        self.db = connection[db][collection]

        self._schedule = {}
        self._last_updated = None
        Scheduler.__init__(self, *args, **kwargs)
        self.max_interval = (kwargs.get('max_interval')
                             or self.app.conf.CELERYBEAT_MAX_LOOP_INTERVAL or 300)
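Note: the example above (and examples #5 and #36 below) pulls its MongoDB connection details from the Celery configuration. A minimal sketch of how those settings might be supplied is shown here; the app name, broker URL, and the scheduler class path in the comment are assumptions, not part of the original code, so verify them against the celerybeat-mongo version you actually install.

from celery import Celery

# Placeholder app and broker URL, for illustration only.
app = Celery('proj', broker='redis://localhost:6379/0')
app.conf.update(
    CELERY_MONGODB_SCHEDULER_DB='celery',             # database name (the examples default to "celery")
    CELERY_MONGODB_SCHEDULER_COLLECTION='schedules',  # collection holding the schedule entries
    CELERY_MONGODB_SCHEDULER_URL='mongodb://localhost:27017',
)

# Beat would then be started with the custom scheduler class, e.g.:
#   celery -A proj beat -S celerybeatmongo.schedulers.MongoScheduler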
Code example #2
    def __init__(self, *args, **kwargs):
        if hasattr(current_app.conf, "mongodb_scheduler_db"):
            db = current_app.conf.get("mongodb_scheduler_db")
        elif hasattr(current_app.conf, "CELERY_MONGODB_SCHEDULER_DB"):
            db = current_app.conf.CELERY_MONGODB_SCHEDULER_DB
        else:
            db = "celery"

        if hasattr(current_app.conf, "mongodb_scheduler_connection_alias"):
            alias = current_app.conf.get('mongodb_scheduler_connection_alias')
        elif hasattr(current_app.conf, "CELERY_MONGODB_SCHEDULER_CONNECTION_ALIAS"):
            alias = current_app.conf.CELERY_MONGODB_SCHEDULER_CONNECTION_ALIAS
        else:
            alias = "default"

        if hasattr(current_app.conf, "mongodb_scheduler_url"):
            host = current_app.conf.get('mongodb_scheduler_url')
        elif hasattr(current_app.conf, "CELERY_MONGODB_SCHEDULER_URL"):
            host = current_app.conf.CELERY_MONGODB_SCHEDULER_URL
        else:
            host = None

        self._mongo = mongoengine.connect(db, host=host, alias=alias)

        if host:
            logger.info("backend scheduler using %s/%s:%s",
                        host, db, self.Model._get_collection().name)
        else:
            logger.info("backend scheduler using %s/%s:%s",
                        "mongodb://localhost", db, self.Model._get_collection().name)
        self._schedule = {}
        self._last_updated = None
        Scheduler.__init__(self, *args, **kwargs)
        self.max_interval = (kwargs.get('max_interval')
                             or self.app.conf.CELERYBEAT_MAX_LOOP_INTERVAL or 5)
Code example #3
File: schedulers.py Project: mfenniak/django-celery
 def __init__(self, *args, **kwargs):
     self._dirty = set()
     self._finalize = Finalize(self, self.sync, exitpriority=5)
     Scheduler.__init__(self, *args, **kwargs)
     self.max_interval = (kwargs.get('max_interval')
                          or self.app.conf.CELERYBEAT_MAX_LOOP_INTERVAL
                          or DEFAULT_MAX_INTERVAL)
Code example #4
 def __init__(self, *args, **kwargs):
     self._dirty = set()
     Scheduler.__init__(self, *args, **kwargs)
     self._finalize = Finalize(self, self.sync, exitpriority=5)
     self.max_interval = (kwargs.get('max_interval')
                          or self.app.conf.beat_max_loop_interval
                          or DEFAULT_MAX_INTERVAL)
Code example #5
File: schedulers.py Project: Arttii/celerybeat-mongo
    def __init__(self, *args, **kwargs):

        if hasattr(current_app.conf, "CELERY_MONGODB_SCHEDULER_DB"):
            db = current_app.conf.CELERY_MONGODB_SCHEDULER_DB
        else:
            db = "celery"
        if hasattr(current_app.conf, "CELERY_MONGODB_SCHEDULER_COLLECTION") \
                and current_app.conf.CELERY_MONGODB_SCHEDULER_COLLECTION:
            collection = current_app.conf.CELERY_MONGODB_SCHEDULER_COLLECTION
        else:
            collection = "schedules"

        if hasattr(current_app.conf, "CELERY_MONGODB_SCHEDULER_URL"):
            connection = Connection(current_app.conf.CELERY_MONGODB_SCHEDULER_URL)
            get_logger(__name__).info(
                "backend scheduler using %s/%s:%s",
                current_app.conf.CELERY_MONGODB_SCHEDULER_URL, db, collection)
        else:
            connection = Connection()

        self.db = connection[db][collection]

        self._schedule = {}
        self._last_updated = None
        Scheduler.__init__(self, *args, **kwargs)
        self.max_interval = (kwargs.get('max_interval')
                             or self.app.conf.CELERYBEAT_MAX_LOOP_INTERVAL or 300)
Code example #6
File: schedulers.py Project: AdamG/django-celery
 def __init__(self, *args, **kwargs):
     self._dirty = set()
     self._finalize = Finalize(self, self.sync, exitpriority=5)
     Scheduler.__init__(self, *args, **kwargs)
     self.max_interval = (kwargs.get("max_interval")
                        or self.app.conf.CELERYBEAT_MAX_LOOP_INTERVAL
                        or DEFAULT_MAX_INTERVAL)
Code example #7
    def __init__(self, *args, **kwargs):
        if hasattr(current_app.conf, 'CELERY_REDIS_SCHEDULER_URL'):
            logger.info('backend scheduler using %s',
                        current_app.conf.CELERY_REDIS_SCHEDULER_URL)
        else:
            logger.info(
                'CELERY_REDIS_SCHEDULER_URL not set; using default redis URL')

        # how long we should hold on to the redis lock in seconds
        if 'CELERY_REDIS_SCHEDULER_LOCK_TTL' in current_app.conf:
            lock_ttl = current_app.conf.CELERY_REDIS_SCHEDULER_LOCK_TTL
        else:
            lock_ttl = 30

        if lock_ttl < self.UPDATE_INTERVAL.seconds:
            lock_ttl = self.UPDATE_INTERVAL.seconds * 2
        self.lock_ttl = lock_ttl

        self._schedule = {}
        self._last_updated = None
        self._lock_acquired = False
        Scheduler.__init__(self, *args, **kwargs)
        self.max_interval = (kwargs.get('max_interval')
                             or self.app.conf.CELERYBEAT_MAX_LOOP_INTERVAL or 300)
        self._lock = rdb.lock('celery:beat:task_lock', timeout=self.lock_ttl)
        self._lock_acquired = self._lock.acquire(blocking=False)
        self.Entry.scheduler = self
Code example #8
File: scheduler.py Project: chenweihua/devops-1
 def __init__(self, *args, **kwargs):
     app = kwargs['app']
     self.schedule_url = app.conf.get(
         "CELERY_BEAT_REDIS_SCHEDULER_URL",
         DEFAULT_CELERY_BEAT_REDIS_SCHEDULER_URL)
     self.key = app.conf.get("CELERY_BEAT_REDIS_SCHEDULER_KEY",
                             DEFAULT_CELERY_BEAT_REDIS_SCHEDULER_KEY)
     # redis sentinel mode support
     if self.schedule_url.startswith('sentinel://'):
         self.broker_transport_options = app.conf.get(
             "CELERY_BROKER_TRANSPORT_OPTIONS",
             DEFAULT_CELERY_BROKER_TRANSPORT_OPTIONS)
         self.rdb = self.sentinel_connect(
             self.broker_transport_options['master_name'])
     else:
         self.rdb = Redis.from_url(self.schedule_url)
     Scheduler.__init__(self, *args, **kwargs)
     self.max_interval = app.conf.get(
         "CELERY_BEAT_MAX_LOOP_INTERVAL",
         DEFAULT_CELERY_BEAT_MAX_LOOP_INTERVAL)
     app.add_task = partial(self.add, self)
     # multi-node mode lock
     self.multi_mode = app.conf.get(
         "CELERY_BEAT_REDIS_MULTI_NODE_MODE",
         DEFAULT_CELERY_BEAT_REDIS_MULTI_NODE_MODE)
     if self.multi_mode:
         self.lock_key = app.conf.get("CELERY_BEAT_REDIS_LOCK_KEY",
                                      DEFAULT_CELERY_BEAT_REDIS_LOCK_KEY)
         self.lock_ttl = app.conf.get("CELERY_BEAT_REDIS_LOCK_TTL",
                                      DEFAULT_CELERY_BEAT_REDIS_LOCK_TTL)
         self.lock_sleep = app.conf.get(
             "CELERY_BEAT_REDIS_LOCK_SLEEP",
             DEFAULT_CELERY_BEAT_REDIS_LOCK_SLEEP)
         self.lock = self.rdb.lock(self.lock_key, timeout=self.lock_ttl)
Code example #9
File: schedulers.py Project: jfilak/faf
 def __init__(self, *args, **kwargs):
     self._schedule = {}
     self._last_updated = None
     Scheduler.__init__(self, *args, **kwargs)
     self.max_interval = (kwargs.get('max_interval')
                          or self.app.conf.CELERYBEAT_MAX_LOOP_INTERVAL or 300)
     self.db = db_factory.get_database()
Code example #10
File: scheduler.py Project: wangjianze/redisbeat
    def __init__(self, *args, **kwargs):
        app = kwargs['app']
        self.key = app.conf.get("CELERY_REDIS_SCHEDULER_KEY",
                                "celery:beat:order_tasks")
        self.schedule_url = app.conf.get("CELERY_REDIS_SCHEDULER_URL",
                                         "redis://localhost:6379")
        # using sentinels
        # supports 'sentinel://:pass@host:port/db' URLs
        if self.schedule_url.startswith('sentinel://'):
            self.broker_transport_options = app.conf.get(
                "CELERY_BROKER_TRANSPORT_OPTIONS", {"master_name": "mymaster"})
            self.rdb = self.sentinel_connect(
                self.broker_transport_options['master_name'])
        else:
            self.rdb = StrictRedis.from_url(self.schedule_url)
        Scheduler.__init__(self, *args, **kwargs)
        app.add_task = partial(self.add, self)

        self.multi_node = app.conf.get("CELERY_REDIS_MULTI_NODE_MODE", False)
        # how long we should hold on to the redis lock in seconds
        if self.multi_node:
            self.lock_ttl = current_app.conf.get(
                "CELERY_REDIS_SCHEDULER_LOCK_TTL", 30)
            self._lock_acquired = False
            self._lock = self.rdb.lock('celery:beat:task_lock',
                                       timeout=self.lock_ttl)
            self._lock_acquired = self._lock.acquire(blocking=False)
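Note: the sentinel branch above reads the master name from the broker transport options. A hedged sketch of the two settings it consumes; the host, password, and master name here are placeholders.

from celery import Celery

app = Celery('proj')  # placeholder app
app.conf.update(
    # 'sentinel://:pass@host:port/db' style URL, as described in the comment above
    CELERY_REDIS_SCHEDULER_URL='sentinel://:mypassword@localhost:26379/0',
    CELERY_BROKER_TRANSPORT_OPTIONS={'master_name': 'mymaster'},
)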
Code example #11
 def __init__(self, *args, **kwargs):
     self._schedule = {}
     self._last_updated = None
     Scheduler.__init__(self, *args, **kwargs)
     self.max_interval = (kwargs.get('max_interval')
                          or self.app.conf.CELERYBEAT_MAX_LOOP_INTERVAL or 300)
     self.db = db_factory.get_database()
Code example #12
File: schedulers.py Project: crankycoder/zamboni-lib
 def __init__(self, *args, **kwargs):
     self._dirty = set()
     self._last_flush = None
     self._flush_every = 3 * 60
     self._finalize = Finalize(self, self.flush, exitpriority=5)
     Scheduler.__init__(self, *args, **kwargs)
     self.max_interval = 5
Code example #13
 def __init__(self, *args, **kwargs):
     self._dirty = set()
     self._last_flush = None
     self._flush_every = 3 * 60
     self._finalize = Finalize(self, self.flush, exitpriority=5)
     Scheduler.__init__(self, *args, **kwargs)
     self.max_interval = 5
Code example #14
 def __init__(self, *args, **kwargs):
     """Initialize the database scheduler."""
     self._dirty = set()
     Scheduler.__init__(self, *args, **kwargs)
     self._finalize = Finalize(self, self.sync, exitpriority=5)
     self.max_interval = (kwargs.get("max_interval")
                          or self.app.conf.beat_max_loop_interval
                          or DEFAULT_MAX_INTERVAL)
Code example #15
File: scheduler.py Project: pixy25/examples
 def __init__(self, *args, **kwargs):
     self._storage = Storage()
     self._storage.release_glob()
     self.periodic_manager = PeriodicTaskManager(DbController.get_default_controller(current_app.conf['DB']))
     self._schedule = {}
     self._last_updated = None
     Scheduler.__init__(self, *args, **kwargs)
     self.max_interval = (kwargs.get('max_interval')
             or self.app.conf.CELERYBEAT_MAX_LOOP_INTERVAL or 5)
Code example #16
 def __init__(self, *args, **kwargs):
     app = kwargs['app']
     self.key = app.conf.get("CELERY_REDIS_SCHEDULER_KEY",
                             "celery:beat:order_tasks")
     self.schedule_url = app.conf.get("CELERY_REDIS_SCHEDULER_URL",
                                      "redis://localhost:6379")
     self.rdb = StrictRedis.from_url(self.schedule_url)
     Scheduler.__init__(self, *args, **kwargs)
     app.add_task = partial(self.add, self)
Code example #17
 def __init__(self, *args, **kwargs):
     if len(args) == 0:
         app = kwargs['app']
     else:
         assert len(args) == 1
         app = args[0]
     self.changes = self.changes_class(app.conf.get('database_uri'))
     self.session = self.changes.session
     Scheduler.__init__(self, *args, **kwargs)
Code example #18
    def __init__(self, *args, **kwargs):
        self._schedule = {}
        self._schedule_file = get_schedules_filepath()
        self._last_file_timestamp = os.path.getmtime(self._schedule_file)

        Scheduler.__init__(self, *args, **kwargs)
        self.max_interval = (kwargs.get('max_interval')
                             or self.app.conf.beat_max_loop_interval
                             or CELERYBEAT_MAX_LOOP_INTERVAL)
Code example #19
File: schedulers.py Project: cloudera/hue
 def __init__(self, *args, **kwargs):
     """Initialize the database scheduler."""
     self._dirty = set()
     Scheduler.__init__(self, *args, **kwargs)
     self._finalize = Finalize(self, self.sync, exitpriority=5)
     self.max_interval = (
         kwargs.get('max_interval')
         or self.app.conf.beat_max_loop_interval
         or DEFAULT_MAX_INTERVAL)
Code example #20
 def __init__(self, *args, **kwargs):
     self._schedule = {}
     self._last_updated = None
     Scheduler.__init__(self, *args, **kwargs)
     self.max_interval = (kwargs.get('max_interval')
                          or self.app.conf.CELERYBEAT_MAX_LOOP_INTERVAL
                          or 5)
     self.celery_rest = CeleryRestClient()
     self._schedule = self.get_from_api()
     self.print_schedule()
Code example #21
    def __init__(self, *args, **kwargs):

        log.info("SQLAlchemyScheduler.__init__ called")

        self._dirty = set()
        Scheduler.__init__(self, *args, **kwargs)
        self.max_interval = (
            kwargs.get('max_interval') or
            self.app.conf.CELERYBEAT_MAX_LOOP_INTERVAL or
            DEFAULT_MAX_INTERVAL)
Code example #22
 def __init__(self, *args, **kwargs):
     if hasattr(current_app.conf, "CELERY_REST_SCHEDULER_TASKS_API_HOST"):
         self.api_url = urllib.parse.urljoin(current_app.conf.CELERY_REST_SCHEDULER_TASKS_API_HOST,
                                             '/api/v1/worker_task/')
     else:
         raise Exception('CELERY_REST_SCHEDULER_TASKS_API_HOST does not exist')
     self._schedule = {}
     self._last_updated = None
     Scheduler.__init__(self, *args, **kwargs)
     self.max_interval = (kwargs.get('max_interval')
                          or self.app.conf.CELERYBEAT_MAX_LOOP_INTERVAL or 5)
Code example #23
    def __init__(self, *args, **kwargs):
        self._schedule = {}
        self._last_updated = None
        Scheduler.__init__(self, *args, **kwargs)
        self.max_interval = (kwargs.get('max_interval')
                             or self.app.conf.CELERYBEAT_MAX_LOOP_INTERVAL
                             or 5)

        redis_host = current_app.conf.REDIS_BEAT_HOST or "localhost"
        redis_port = current_app.conf.REDIS_BEAT_PORT or 6379
        redis_db = current_app.conf.REDIS_BEAT_DB or 0
        self.redis_cli = redis.StrictRedis(redis_host, redis_port, redis_db)
        self.uuid = uuid4().hex
Code example #24
    def __init__(self, *args, **kwargs):
        if hasattr(current_app.conf, 'CELERY_REDIS_SCHEDULER_URL'):
            get_logger(__name__).info('backend scheduler using %s',
                                      current_app.conf.CELERY_REDIS_SCHEDULER_URL)
        else:
            get_logger(__name__).info(
                'CELERY_REDIS_SCHEDULER_URL not set; using default redis URL')

        self._schedule = {}
        self._last_updated = None
        Scheduler.__init__(self, *args, **kwargs)
        self.max_interval = (kwargs.get('max_interval')
                             or self.app.conf.CELERYBEAT_MAX_LOOP_INTERVAL or 300)
Code example #25
File: redis_scheduler.py Project: fossabot/beehive
 def __init__(self, app, schedule=None, max_interval=None,
              Publisher=None, lazy=False, sync_every_tasks=None, **kwargs):
     #self.schedule_filename = kwargs.get('schedule_filename')
     redis_uri = app.conf.CELERY_SCHEDULE_BACKEND
     # set redis manager
     self.manager = RedisManager(redis_uri)
     #keys = self.manager.inspect(pattern='*', debug=False)
     
     self._prefix = app.conf.CELERY_REDIS_SCHEDULER_KEY_PREFIX
     
     self._schedule = redis_collections.Dict(key=self._prefix, redis=self.manager.conn)
     Scheduler.__init__(self, app, schedule=schedule, 
                        max_interval=max_interval, Publisher=Publisher, 
                        lazy=lazy, sync_every_tasks=sync_every_tasks, **kwargs)
Code example #26
File: leek.py Project: MouseMob/leek
    def __init__(self, *args, **kwargs):
        self.data = {}
        self.last_refresh = None

        if 'LEEK_REDIS_URL' not in current_app.conf:
            raise Exception('Missing LEEK_REDIS_URL celery config')
        self.hash_key = current_app.conf.get('LEEK_HASH_KEY', 'leek')
        self.redis = redis.from_url(current_app.conf.get('LEEK_REDIS_URL'))
        self.tz = timezone.get_timezone(current_app.conf.get('CELERY_TIMEZONE', 'UTC'))

        Scheduler.__init__(self, *args, **kwargs)

        self.max_interval = self.app.conf.get('LEEK_REFRESH_INTERVAL', 5)
        self.refresh_frequency = timedelta(seconds=self.max_interval)
Code example #27
    def __init__(self, *args, **kwargs):
        """Initialize the database scheduler."""
        self.app = kwargs['app']
        self.dburi = kwargs.get('dburi') or self.app.conf.get(
            'beat_dburi') or DEFAULT_BEAT_DBURI
        self.engine, self.Session = session_manager.create_session(self.dburi)
        session_manager.prepare_models(self.engine)

        self._dirty = set()
        Scheduler.__init__(self, *args, **kwargs)
        self._finalize = Finalize(self, self.sync, exitpriority=5)
        self.max_interval = (kwargs.get('max_interval')
                             or self.app.conf.beat_max_loop_interval
                             or DEFAULT_MAX_INTERVAL)
Code example #28
File: schedulers.py Project: xellos00/python-core
    def __init__(self, *args, **kwargs):
        self.transaction = Transaction()
        self.locator = Locator(self.transaction)
        if hasattr(current_app.conf, "spaceone_scheduler_service"):
            self.service_name = current_app.conf.get("spaceone_scheduler_service")
        else:
            raise SpaceOneSchedulerError("cannot find CELERY.spaceone_scheduler_service config")

        self.Service = self.locator.get_service(self.service_name, metadata=self.metadata)
        self._schedule = {}
        self._last_updated = None

        Scheduler.__init__(self, *args, **kwargs)
        self.max_interval = (kwargs.get('max_interval')
                             or self.app.conf.CELERYBEAT_MAX_LOOP_INTERVAL or 5)
Code example #29
 def __init__(self, *args, **kwargs):
     """Initialize the database scheduler."""
     database_proxy.initialize(kwargs['app'].database)
     self.database_proxy = database_proxy
     self._dirty = set()
     Scheduler.__init__(self, *args, **kwargs)
     self._finalize = Finalize(self, self.sync, exitpriority=5)
     self.max_interval = (kwargs.get('max_interval')
                          or self.app.conf.beat_max_loop_interval
                          or DEFAULT_MAX_INTERVAL)
     self.database_proxy.create_tables(
         [PeriodicTask, PeriodicTasks, CrontabSchedule, IntervalSchedule,
          SolarSchedule],
         safe=True)
Code example #30
File: schedulers.py Project: orgsea/celerybeatredis
    def __init__(self, *args, **kwargs):
        if hasattr(current_app.conf, 'CELERY_REDIS_SCHEDULER_URL'):
            logger.info('backend scheduler using %s',
                        current_app.conf.CELERY_REDIS_SCHEDULER_URL)
        else:
            logger.info(
                'CELERY_REDIS_SCHEDULER_URL not set; using default redis URL')

        self._schedule = {}
        self._last_updated = None
        Scheduler.__init__(self, *args, **kwargs)
        self.max_interval = (kwargs.get('max_interval')
                             or self.app.conf.CELERYBEAT_MAX_LOOP_INTERVAL or 300)
        self._lock = rdb.lock('celery:beat:task_lock')
        self._lock_acquired = self._lock.acquire(blocking=False)
        self.Entry.scheduler = self
Code example #31
    def __init__(self, *args, **kwargs):
        app = kwargs['app']
        self.key = app.conf.get("CELERY_REDIS_SCHEDULER_KEY",
                                "celery:beat:order_tasks")
        self.schedule_url = app.conf.get("CELERY_REDIS_SCHEDULER_URL",
                                         "redis://localhost:6379")
        self.rdb = StrictRedis.from_url(self.schedule_url)
        Scheduler.__init__(self, *args, **kwargs)
        app.add_task = partial(self.add, self)

        self.multi_node = app.conf.get("CELERY_REDIS_MULTI_NODE_MODE", False)
        # how long we should hold on to the redis lock in seconds
        if self.multi_node:
            self.lock_ttl = current_app.conf.get("CELERY_REDIS_SCHEDULER_LOCK_TTL", 30)
            self._lock_acquired = False
            self._lock = self.rdb.lock('celery:beat:task_lock', timeout=self.lock_ttl)
            self._lock_acquired = self._lock.acquire(blocking=False)
Code example #32
File: schedulers.py Project: crankycoder/zamboni-lib
 def reserve(self, entry):
     new_entry = Scheduler.reserve(self, entry)
     # Need to store entry by name, because the entry may change
     # in the mean time.
     self._dirty.add(new_entry.name)
     if self.should_flush():
         self.logger.debug("Celerybeat: Writing schedule changes...")
         self.flush()
     return new_entry
Code example #33
 def reserve(self, entry):
     new_entry = Scheduler.reserve(self, entry)
     # Need to store entry by name, because the entry may change
     # in the mean time.
     self._dirty.add(new_entry.name)
     if self.should_flush():
         self.logger.debug("Celerybeat: Writing schedule changes...")
         self.flush()
     return new_entry
Code example #34
 def reserve(self, entry):
     """
     This is called when a new instance of a task is scheduled to run. Hook
     in here so we can avoid saving updates to tasks that have none.
     """
     new_entry = Scheduler.reserve(self, entry)
     # Add to a list of what has changed. Store by name since the entry
     # itself may be a different instance by the time we get to it.
     self._dirty.add(new_entry.name)
     return new_entry
Code example #35
 def reserve(self, entry):
     """
     This is called when a new instance of a task is scheduled to run. Hook
     in here so we can avoid saving updates to tasks that have none.
     """
     new_entry = Scheduler.reserve(self, entry)
     # Add to a list of what has changed. Store by name since the entry
     # itself may be a different instance by the time we get to it.
     self._dirty.add(new_entry.name)
     return new_entry
Code example #36
    def __init__(self, *args, **kwargs):
        if hasattr(current_app.conf, "CELERY_MONGODB_SCHEDULER_DB"):
            db = current_app.conf.CELERY_MONGODB_SCHEDULER_DB
        else:
            db = "celery"
        if hasattr(current_app.conf, "CELERY_MONGODB_SCHEDULER_URL"):
            self._mongo = mongoengine.connect(db, host=current_app.conf.CELERY_MONGODB_SCHEDULER_URL)
            get_logger(__name__).info("backend scheduler using %s/%s:%s",
                    current_app.conf.CELERY_MONGODB_SCHEDULER_URL,
                    db, self.Model._get_collection().name)
        else:
            self._mongo = mongoengine.connect(db)
            get_logger(__name__).info("backend scheduler using %s/%s:%s",
                    "mongodb://localhost",
                    db, self.Model._get_collection().name)

        self._schedule = {}
        self._last_updated = None
        Scheduler.__init__(self, *args, **kwargs)
        self.max_interval = (kwargs.get('max_interval')
                or self.app.conf.CELERYBEAT_MAX_LOOP_INTERVAL or 5)
Code example #37
    def maybe_due(self, entry, publisher=None):
        is_due, next_time_to_run = entry.is_due()
        if not is_due:
            return next_time_to_run
        lock = self._lock(entry.name)
        if not lock:
            return next_time_to_run
        try:
            # Now that we have the lock, double-check the timestamps on the
            # entry before executing it.
            entry = self._schedule.sync(entry.name)
            if entry is None:
                return next_time_to_run
            is_due, next_time_to_run = entry.is_due()
            if not is_due:
                return next_time_to_run

            return Scheduler.maybe_due(self, entry, publisher)
        finally:
            self._unlock(lock)
Code example #38
    def maybe_due(self, entry, publisher=None):
        is_due, next_time_to_run = entry.is_due()
        if not is_due:
            return next_time_to_run
        lock = self._lock(entry.name)
        if not lock:
            return next_time_to_run
        try:
            # Now that we have the lock, double-check the timestamps on the
            # entry before executing it.
            entry = self._schedule.sync(entry.name)
            if entry is None:
                return next_time_to_run
            is_due, next_time_to_run = entry.is_due()
            if not is_due:
                return next_time_to_run

            return Scheduler.maybe_due(self, entry, publisher)
        finally:
            self._unlock(lock)
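Note: the two maybe_due() overrides above assume per-entry _lock()/_unlock() helpers (methods on the scheduler in the original code) plus a self._schedule.sync(entry.name) re-read, so that only one beat instance fires a due entry. A minimal sketch of what such locking helpers could look like with redis-py is given below; the function names, key prefix, and TTL are assumptions rather than code from the original project.

import redis

rdb = redis.Redis()  # assumed connection; a real scheduler would reuse its own client

def _lock(name, ttl=60):
    # Try to take a short-lived, per-entry lock; return the lock object or None.
    lock = rdb.lock('celery:beat:entry_lock:%s' % name, timeout=ttl)
    return lock if lock.acquire(blocking=False) else None

def _unlock(lock):
    # Release a lock taken by _lock(); ignore it if it has already expired.
    try:
        lock.release()
    except redis.exceptions.LockError:
        pass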
Code example #39
 def __init__(self, app, **kwargs):
     self._last_timestamp = self._get_latest_change()
     Scheduler.__init__(self, app, **kwargs)
Code example #40
 def tick(self):
     self.logger.debug('DatabaseScheduler: tick')
     Scheduler.tick(self)
     if self.should_sync():
         self.sync()
     return _sleep_interval  # sleep time until next tick
Code example #41
 def __init__(self, app, **kwargs):
     self._last_timestamp = self._get_latest_change()
     Scheduler.__init__(self, app, **kwargs)
Code example #42
File: schedulers.py Project: marusak/faf
 def reserve(self, entry):
     new_entry = Scheduler.reserve(self, entry)
     return new_entry
Code example #43
File: schedulers.py Project: alex-hh/django-celery
 def reserve(self, entry):
     new_entry = Scheduler.reserve(self, entry)
     # Need to store entry by name, because the entry may change
     # in the mean time.
     self._dirty.add(new_entry.name)
     return new_entry
Code example #44
        return cls(db_entry)


class DatabaseScheduler(Scheduler):
    Entry = Entry
    _last_timestamp = None
    _schedule = None
    _initial_read = False

    def __init__(self, app, **kwargs):
        print('DBScheduler initiated')
        self._last_timestamp = self._get_latest_change()
        Scheduler.__init__(self, app, **kwargs)

    def _get_latest_change(self):
        query = dbsession.query(DatabaseSchedulerEntry.date_changed)
        query = query.order_by(DatabaseSchedulerEntry.date_changed.desc())
        latest_entry_date = query.first()
        return latest_entry_date

    def setup_schedule(self):
        self.install_default_entries(self.schedule)
        self.update_from_dict(self.app.conf.CELERYBEAT_SCHEDULE)

    def _all_as_schedule(self):
        s = {}
        query = dbsession.query(DatabaseSchedulerEntry)
        query = query.filter_by(enabled=True)
Code example #45
 def tick(self):
     self.logger.debug('DatabaseScheduler: tick')
     Scheduler.tick(self)
     if self.should_sync():
         self.sync()
     return 5  # sleep time until next tick
Code example #46
File: schedulers.py Project: jfilak/faf
 def reserve(self, entry):
     new_entry = Scheduler.reserve(self, entry)
     return new_entry
Code example #47
 def __init__(self, *args, **kwargs):
     self._dirty = set()
     self._finalize = Finalize(self, self.sync, exitpriority=5)
     Scheduler.__init__(self, *args, **kwargs)
     self.max_interval = 5
Code example #48
# -*- coding: utf-8 -*-
"""Most of this code is lifted from the django-celery project and adapted to run on SQLAlchemy."""
from __future__ import absolute_import

import datetime
import time

from sqlalchemy.orm import sessionmaker

from celery.beat import Scheduler, ScheduleEntry
from celery import schedules, current_app
from celery.utils.timeutils import is_naive

from sqlalchemy_scheduler_models import DatabaseSchedulerEntry, CrontabSchedule, IntervalSchedule


# The schedule objects need to be handled within one scope
Session = sessionmaker(autocommit=False, autoflush=False)
dbsession = Session()


class Entry(ScheduleEntry):
    model_schedules = ((schedules.crontab, CrontabSchedule, 'crontab'),
                       (schedules.schedule, IntervalSchedule, 'interval'))

    def __init__(self, model):
        self.app = current_app._get_current_object()
        self.name = model.name
        self.task = model.task
        self.schedule = model.schedule
        self.args = model.args
        self.kwargs = model.kwargs
        self.options = dict(
            queue=model.queue,
            exchange=model.exchange,
            routing_key=model.routing_key,
            expires=model.expires,
        )
        self.total_run_count = model.total_run_count
        self.model = model
        if not model.last_run_at:
            model.last_run_at = self._default_now()
        orig = self.last_run_at = model.last_run_at
        if not is_naive(self.last_run_at):
            self.last_run_at = self.last_run_at.replace(tzinfo=None)
        assert orig.hour == self.last_run_at.hour  # timezone sanity

    def is_due(self):
        if not self.model.enabled:
            return False, 5.0  # 5 second delay for re-enable.
        return self.schedule.is_due(self.last_run_at)

    def _default_now(self):
        return datetime.datetime.utcnow()

    def __next__(self):
        self.model.last_run_at = self._default_now()
        self.model.total_run_count += 1
        dbsession.commit()
        return self.__class__(self.model)
    next = __next__  # for 2to3

    @classmethod
    def to_model_schedule(cls, schedule):
        for schedule_type, model_type, model_field in cls.model_schedules:
            schedule = schedules.maybe_schedule(schedule)
            if isinstance(schedule, schedule_type):
                model_schedule = model_type.from_schedule(dbsession, schedule)
                return model_schedule, model_field
        raise ValueError(
            'Cannot convert schedule type {0!r} to model'.format(schedule))

    @classmethod
    def from_entry(cls, name, skip_fields=('relative', 'options'), **entry):
        options = entry.get('options') or {}
        fields = dict(entry)
        for skip_field in skip_fields:
            fields.pop(skip_field, None)
        schedule = fields.pop('schedule')
        model_schedule, model_field = cls.to_model_schedule(schedule)
        fields[model_field] = model_schedule
        fields['args'] = fields.get('args') or []
        fields['kwargs'] = fields.get('kwargs') or {}
        fields['queue'] = options.get('queue')
        fields['exchange'] = options.get('exchange')
        fields['routing_key'] = options.get('routing_key')

        query = dbsession.query(DatabaseSchedulerEntry)
        query = query.filter_by(name=name)
        db_entry = query.first()
        if db_entry is None:
            new_entry = DatabaseSchedulerEntry(**fields)
            new_entry.name = name
            dbsession.add(new_entry)
            dbsession.commit()
            db_entry = new_entry
        return cls(db_entry)


class DatabaseScheduler(Scheduler):
    Entry = Entry
    _last_timestamp = None
    _schedule = None
    _initial_read = False

    def __init__(self, app, **kwargs):
        self._last_timestamp = self._get_latest_change()
        Scheduler.__init__(self, app, **kwargs)

    def _get_latest_change(self):
        query = dbsession.query(DatabaseSchedulerEntry.date_changed)
        query = query.order_by(DatabaseSchedulerEntry.date_changed.desc())
        latest_entry_date = query.first()
        return latest_entry_date

    def setup_schedule(self):
        self.install_default_entries(self.schedule)
        self.update_from_dict(self.app.conf.CELERYBEAT_SCHEDULE)

    def _all_as_schedule(self):
        s = {}
        query = dbsession.query(DatabaseSchedulerEntry)
        query = query.filter_by(enabled=True)
        for row in query:
            s[row.name] = Entry(row)
        return s

    def schedule_changed(self):
        ts = self._get_latest_change()
        if ts > self._last_timestamp:
            self._last_timestamp = ts
            return True

    def update_from_dict(self, dict_):
        s = {}
        for name, entry in dict_.items():
            try:
                s[name] = self.Entry.from_entry(name, **entry)
            except Exception as exc:
                self.logger.exception('update_from_dict')
        self.schedule.update(s)

    def tick(self):
        self.logger.debug('DatabaseScheduler: tick')
        Scheduler.tick(self)
        if self.should_sync():
            self.sync()
        return 5  # sleep time until next tick

    def should_sync(self):
        sync_reason_time = (time.time() - self._last_sync) > self.sync_every
        sync_reason_task_count = self.sync_every_tasks and self._tasks_since_sync >= self.sync_every_tasks
        bool_ = sync_reason_time or sync_reason_task_count
        self.logger.debug('DatabaseScheduler: should_sync: {0}'.format(bool_))
        return bool_

    def sync(self):
        self._last_sync = time.time()
        self.logger.debug('DatabaseScheduler: sync')
        self._schedule = self._all_as_schedule()

    @property
    def schedule(self):
        update = False
        if not self._initial_read:
            self.logger.debug('DatabaseScheduler: initial read')
            update = True
            self._initial_read = True
        elif self.schedule_changed():
            self.logger.info('DatabaseScheduler: Schedule changed.')
            update = True

        if update:
            self.sync()
        return self._schedule
Code example #49
 def reserve(self, entry):
     new_entry = Scheduler.reserve(self, entry)
     # Need to store entry by name, because the entry may change
     # in the mean time.
     self._dirty.add(new_entry.name)
     return new_entry
Code example #50
 def __init__(self, *args, **kwargs):
     Scheduler.__init__(self, *args, **kwargs)
     self.max_interval = 5
     self._dirty = set()
     self._last_flush = None
     self._flush_every = 3 * 60
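Note: whichever variant is used, the Scheduler subclass still has to be handed to celery beat, either through the beat_scheduler setting (CELERYBEAT_SCHEDULER in old-style configuration) or the -S/--scheduler command-line option. A minimal wiring sketch follows; 'myproject.schedulers:MyScheduler' is a placeholder for one of the classes shown above, not a real module path.

from celery import Celery

app = Celery('proj')
# Point beat at the custom scheduler class (placeholder path).
app.conf.beat_scheduler = 'myproject.schedulers:MyScheduler'

# Equivalent command-line form:
#   celery -A proj beat --scheduler myproject.schedulers:MyScheduler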