def test_retry_method_exponential(self):
    # exponential(delay, factor, max_retries) returns a (func, args) tuple;
    # func(retry_count, *args) yields delay * factor ** (retry_count - 1) and
    # raises StopRetry once retry_count exceeds max_retries.
    f = exponential(1, 2, 4)
    assert f[0](1, *f[1]) == 1
    assert f[0](2, *f[1]) == 2
    assert f[0](3, *f[1]) == 4
    assert f[0](4, *f[1]) == 8
    pytest.raises(StopRetry, f[0], 5, *f[1])
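
# A minimal sketch of a retry method consistent with the assertions above: a retry
# method is a (callable, args) tuple where callable(retry_count, *args) returns the
# delay in seconds before the next attempt, or raises StopRetry once retry_count
# exceeds max_retries. The helper name _exponential and the StopRetry stand-in are
# assumptions for illustration, not necessarily the library's own definitions.
class StopRetry(Exception):
    # Stand-in for the StopRetry exception the test expects.
    pass


def _exponential(retry, delay, factor, max_retries):
    if retry > max_retries:
        raise StopRetry()
    return delay * factor ** (retry - 1)


def exponential(delay, factor, max_retries):
    return _exponential, (delay, factor, max_retries)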
def task(queue=DEFAULT_QUEUE_NAME, hard_timeout=3 * 60, unique=True, lock=None, lock_key=None,
         retry=True, retry_on=(Exception,), retry_method=exponential(60, 2, 5),
         schedule=None, batch=False):
    # Always retry on the exceptions listed in ALWAYS_RETRY_ON, in addition to
    # whatever the caller passed in.
    retry_on = retry_on + ALWAYS_RETRY_ON

    def wrapper(func):
        job_queue = JobQueue.instance()
        record_dynamic_dependency_provider(get_loading_component_name(), 'job', queue)

        def _delay(f, when=None):
            # Build an enqueue helper; per-call overrides are popped off kwargs so
            # they are forwarded to the queue instead of to the task function itself.
            @functools.wraps(f)
            def _delay_inner(*args, **kwargs):
                _queue = kwargs.pop('queue', None)
                _hard_timeout = kwargs.pop('hard_timeout', None)
                _unique = kwargs.pop('unique', None)
                _lock = kwargs.pop('lock', None)
                _lock_key = kwargs.pop('lock_key', None)
                _when = kwargs.pop('when', None) or when
                _retry = kwargs.pop('retry', None)
                _retry_on = kwargs.pop('retry_on', None)
                _retry_method = kwargs.pop('retry_method', None)
                return job_queue.delay(f, args=args, kwargs=kwargs, queue=_queue,
                                       hard_timeout=_hard_timeout, unique=_unique, lock=_lock,
                                       lock_key=_lock_key, when=_when, retry=_retry,
                                       retry_on=_retry_on, retry_method=_retry_method)

            return _delay_inner

        def _wrap(f):
            @functools.wraps(f)
            def func_wrapper(*_args, **_kwargs):
                frm = inspect.stack()[1]
                mod = inspect.getmodule(frm[0])
                # Called from tasktiger.worker (async execution) or with ALWAYS_EAGER set
                # (the task body runs inline for both sync and async call sites).
                if mod.__name__ == 'tasktiger.worker' or job_queue.config['ALWAYS_EAGER']:
                    # A single dict payload of the form {'a': [...], 'k': {...}} carries
                    # JSON-encoded positional and keyword arguments.
                    if _args and isinstance(_args[0], dict) and 'a' in _args[0] and 'k' in _args[0]:
                        a = [from_json(a) for a in _args[0]['a']]
                        k = {k: from_json(v) for k, v in _args[0]['k'].items()}
                    else:
                        a = _args
                        k = _kwargs
                    # Silently drop the task if it carries an expiry time that has passed.
                    expired_at = k.pop('expired_at', None)
                    current_time = get_current_time()
                    if expired_at and expired_at <= current_time:
                        LOGGER.debug('ignore expired task: %(expired_at)s, %(current)s',
                                     {'expired_at': expired_at, 'current': current_time})
                        return
                else:
                    a = _args
                    k = _kwargs
                return f(*a, **k)

            _func_wrapper = job_queue.task(queue=queue, hard_timeout=hard_timeout, unique=unique,
                                           lock=lock, lock_key=lock_key, retry=retry,
                                           retry_on=retry_on, retry_method=retry_method,
                                           schedule=schedule, batch=batch)(func_wrapper)
            _func_wrapper.delay = _delay(_func_wrapper)
            _func_wrapper.delay_after = _delay(_func_wrapper, when=ENQUEUE_AFTER_TIMEDELTA)
            return _func_wrapper

        return _wrap(func)

    return wrapper
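
# Hedged usage sketch of the decorator above. The task name, queue name, and
# argument values are illustrative assumptions, not part of the original module.
@task(queue='notification', retry_method=exponential(30, 2, 3))
def send_welcome_notification(user_id):
    LOGGER.info('welcome notification sent: %(user_id)s', {'user_id': user_id})


# Enqueue for asynchronous execution; 'hard_timeout' is popped by _delay_inner
# and forwarded to job_queue.delay() rather than to the task function.
send_welcome_notification.delay(user_id=42, hard_timeout=120)

# Enqueue with when=ENQUEUE_AFTER_TIMEDELTA, i.e. run after a fixed delay.
send_welcome_notification.delay_after(user_id=42)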