def register(func):
    """Register *func* as the provider of the enclosing document statement.

    Records a dynamic dependency on the 'document-statement' provider for the
    loading component, registers *func* under the closure's ``statement_name``,
    and returns a transparent pass-through wrapper.
    """
    record_dynamic_dependency_provider(get_loading_component_name(), 'document-statement', statement_name)
    register_document_statement(statement_name, func)

    @wraps(func)
    def delegate(*args, **kwargs):
        # Pure pass-through: delegates every call unchanged to func.
        return func(*args, **kwargs)

    return delegate
def __init__(self, method, path_template, website, tags=(), delegates_to=None, **path_template_params):
    """Describe one routed endpoint bound to a website.

    Stores the HTTP method, path template and routing metadata, records the
    website as a dynamic dependency provider for the loading component, and
    announces the website via ``publish_new_website_event``.
    """
    assert website
    # Website names are compared case-insensitively; normalize once up front.
    normalized_website = website.lower()
    self.method = method
    self.path_template = path_template
    self.website = normalized_website
    self.tags = tags
    self.delegates_to = delegates_to
    self.path_template_params = path_template_params
    # Side effects intentionally happen last, after all attributes are set.
    veil_component.record_dynamic_dependency_provider(veil_component.get_loading_component_name(), 'website', self.website)
    publish_new_website_event(self.website)
def wrapper(func):
    # Decorator body for a tasktiger-backed job: registers the job queue
    # dependency, then wraps `func` so it can run either eagerly (sync) or
    # via the worker (async), with expiry checking and JSON argument decoding.
    # NOTE(review): relies on closure variables from the enclosing decorator
    # factory (queue, hard_timeout, unique, lock, lock_key, retry, retry_on,
    # retry_method, schedule, batch, ENQUEUE_AFTER_TIMEDELTA) — confirm against
    # the enclosing definition, which is outside this view.
    job_queue = JobQueue.instance()
    record_dynamic_dependency_provider(get_loading_component_name(), 'job', queue)

    def _delay(f, when=None):
        # Build a `.delay(...)` entry point that pops scheduling overrides out
        # of kwargs (so they are not forwarded to the task itself) and enqueues
        # f with the remaining positional/keyword arguments.
        @functools.wraps(f)
        def _delay_inner(*args, **kwargs):
            _queue = kwargs.pop('queue', None)
            _hard_timeout = kwargs.pop('hard_timeout', None)
            _unique = kwargs.pop('unique', None)
            _lock = kwargs.pop('lock', None)
            _lock_key = kwargs.pop('lock_key', None)
            # Caller-supplied 'when' wins; otherwise fall back to the default
            # bound at _delay creation time (used by delay_after below).
            _when = kwargs.pop('when', None) or when
            _retry = kwargs.pop('retry', None)
            _retry_on = kwargs.pop('retry_on', None)
            _retry_method = kwargs.pop('retry_method', None)
            return job_queue.delay(f, args=args, kwargs=kwargs, queue=_queue, hard_timeout=_hard_timeout, unique=_unique, lock=_lock, lock_key=_lock_key, when=_when, retry=_retry, retry_on=_retry_on, retry_method=_retry_method)
        return _delay_inner

    def _wrap(f):
        @functools.wraps(f)
        def func_wrapper(*_args, **_kwargs):
            # Inspect the immediate caller to decide whether this invocation
            # came from the tasktiger worker (async execution path).
            frm = inspect.stack()[1]
            mod = inspect.getmodule(frm[0])
            # NOTE(review): inspect.getmodule may return None, which would make
            # mod.__name__ raise AttributeError — confirm all call sites have a
            # resolvable module.
            if mod.__name__ == 'tasktiger.worker' or job_queue.config['ALWAYS_EAGER']:  # ALWAYS_EAGER means sync and async call, tasktiger.worker means async call
                # Worker payloads arrive as a single dict {'a': [...], 'k': {...}}
                # of JSON-encoded arguments; decode them before calling f.
                if _args and isinstance(_args[0], dict) and 'a' in _args[0] and 'k' in _args[0]:
                    a = [from_json(a) for a in _args[0]['a']]
                    k = {k: from_json(v) for k, v in _args[0]['k'].items()}
                else:
                    a = _args
                    k = _kwargs
                # Drop tasks whose expiry deadline has already passed.
                expired_at = k.pop('expired_at', None)
                current_time = get_current_time()
                if expired_at and expired_at <= current_time:
                    LOGGER.debug('ignore expired task: %(expired_at)s, %(current)s', {'expired_at': expired_at, 'current': current_time})
                    return
            else:
                # Plain synchronous call: forward arguments untouched
                # (expired_at, if present, is NOT stripped on this path).
                a = _args
                k = _kwargs
            return f(*a, **k)

        # Register the wrapper as a tasktiger task and attach the enqueue
        # helpers: .delay (immediate) and .delay_after (fixed delta).
        _func_wrapper = job_queue.task(queue=queue, hard_timeout=hard_timeout, unique=unique, lock=lock, lock_key=lock_key, retry=retry, retry_on=retry_on, retry_method=retry_method, schedule=schedule, batch=batch)(func_wrapper)
        _func_wrapper.delay = _delay(_func_wrapper)
        _func_wrapper.delay_after = _delay(_func_wrapper, when=ENQUEUE_AFTER_TIMEDELTA)
        return _func_wrapper

    return _wrap(func)
def __call__(self, func):
    """Register *func* as the handler for this route and return the wrapper.

    Records the website dependency for the loading component (when one is
    loading), wraps the handler so template directory and widget namespace are
    set for the duration of each call, and appends the resulting Route to the
    per-website routing table.
    """
    template_target = self.delegates_to or func
    component_name = veil_component.get_loading_component_name()
    if component_name:
        veil_component.record_dynamic_dependency_provider(component_name, "website", self.website)
    # Widget namespace mirrors the loading component (None outside loading).
    namespace = component_name if component_name else None

    @functools.wraps(func)
    def handle(*args, **kwargs):
        with require_current_template_directory_relative_to(template_target), \
                require_current_widget_namespace_being(namespace):
            return func(*args, **kwargs)

    routes.setdefault(self.website, []).append(Route(
        route_handler=handle,
        method=self.method,
        path_template=self.path_template,
        tags=self.tags,
        **self.path_template_params
    ))
    return handle
def __call__(self, provider):
    """Register *provider* as the provider of this fixture type.

    Records the fixture-type dependency for the loading component, then wraps
    the callable in a FixtureProvider named after ``self.fixture_name`` or,
    failing that, the provider function's own name.
    """
    record_dynamic_dependency_provider(get_loading_component_name(), 'fixture-type', self.fixture_type)
    fixture_name = self.fixture_name if self.fixture_name else provider.__name__
    return FixtureProvider(self.fixture_type, fixture_name, provider)
def wrapper(func):
    """Decorator body for a tasktiger-backed job.

    Records the job-queue dependency for the loading component, then wraps
    *func* so it can run either eagerly (ALWAYS_EAGER) or via the tasktiger
    worker, decoding JSON-encoded worker payloads and dropping tasks whose
    ``expired_at`` deadline has passed. Attaches ``.delay`` and
    ``.delay_after`` enqueue helpers to the registered task.

    NOTE(review): relies on closure variables from the enclosing decorator
    factory (job_queue, queue, hard_timeout, unique, lock, lock_key, retry,
    retry_on, retry_method, schedule, batch, ENQUEUE_AFTER_TIMEDELTA) — the
    enclosing definition is outside this view.
    """
    record_dynamic_dependency_provider(get_loading_component_name(), 'job', queue)

    def _delay(f, when=None):
        # Build a `.delay(...)` entry point: scheduling overrides are popped
        # out of kwargs (so they are not forwarded to the task itself) and the
        # remaining args/kwargs are enqueued with f.
        @functools.wraps(f)
        def _delay_inner(*args, **kwargs):
            _queue = kwargs.pop('queue', None)
            _hard_timeout = kwargs.pop('hard_timeout', None)
            _unique = kwargs.pop('unique', None)
            _lock = kwargs.pop('lock', None)
            _lock_key = kwargs.pop('lock_key', None)
            # Caller-supplied 'when' wins; otherwise use the default bound at
            # _delay creation time (used by delay_after below).
            _when = kwargs.pop('when', None) or when
            _retry = kwargs.pop('retry', None)
            _retry_on = kwargs.pop('retry_on', None)
            _retry_method = kwargs.pop('retry_method', None)
            return job_queue.delay(f, args=args, kwargs=kwargs, queue=_queue, hard_timeout=_hard_timeout, unique=_unique, lock=_lock, lock_key=_lock_key, when=_when, retry=_retry, retry_on=_retry_on, retry_method=_retry_method)
        return _delay_inner

    def _wrap(f):
        @functools.wraps(f)
        def func_wrapper(*_args, **_kwargs):
            # Inspect the immediate caller to decide whether this invocation
            # came from the tasktiger worker (async execution path).
            frm = inspect.stack()[1]
            mod = inspect.getmodule(frm[0])
            # FIX: inspect.getmodule() may return None (e.g. frames from
            # exec'd or synthesized code); guard it so we don't crash with
            # AttributeError on mod.__name__.
            called_from_worker = mod is not None and mod.__name__ == 'tasktiger.worker'
            if called_from_worker or job_queue.config['ALWAYS_EAGER']:  # ALWAYS_EAGER means sync and async call, tasktiger.worker means async call
                # Worker payloads arrive as one dict {'a': [...], 'k': {...}}
                # of JSON-encoded arguments; decode them before calling f.
                if _args and isinstance(_args[0], dict) and 'a' in _args[0] and 'k' in _args[0]:
                    a = [from_json(a) for a in _args[0]['a']]
                    k = {k: from_json(v) for k, v in _args[0]['k'].items()}
                else:
                    a = _args
                    k = _kwargs
                # Drop tasks whose expiry deadline has already passed.
                expired_at = k.pop('expired_at', None)
                current_time = get_current_time()
                if expired_at and expired_at <= current_time:
                    LOGGER.debug('ignore expired task: %(expired_at)s, %(current)s', {'expired_at': expired_at, 'current': current_time})
                    return
            else:
                # Plain synchronous call: forward arguments untouched
                # (expired_at, if present, is NOT stripped on this path).
                a = _args
                k = _kwargs
            return f(*a, **k)

        # Register the wrapper as a tasktiger task and attach the enqueue
        # helpers: .delay (immediate) and .delay_after (fixed delta).
        _func_wrapper = job_queue.task(queue=queue, hard_timeout=hard_timeout, unique=unique, lock=lock, lock_key=lock_key, retry=retry, retry_on=retry_on, retry_method=retry_method, schedule=schedule, batch=batch)(func_wrapper)
        _func_wrapper.delay = _delay(_func_wrapper)
        _func_wrapper.delay_after = _delay(_func_wrapper, when=ENQUEUE_AFTER_TIMEDELTA)
        return _func_wrapper

    return _wrap(func)