def iterate(self, timeout=None, propagate=True, interval=0.5):
    """Iterate over the return values of the tasks as they finish
    one by one.

    :raises: The exception if any of the tasks raised an exception.

    """
    elapsed = 0.0
    results = OrderedDict((result.id, copy(result))
                          for result in self.results)

    while results:
        removed = set()
        for task_id, result in items(results):
            if result.ready():
                yield result.get(timeout=timeout and timeout - elapsed,
                                 propagate=propagate)
                removed.add(task_id)
            else:
                if result.backend.subpolling_interval:
                    time.sleep(result.backend.subpolling_interval)
        for task_id in removed:
            results.pop(task_id, None)
        time.sleep(interval)
        elapsed += interval
        if timeout and elapsed >= timeout:
            raise TimeoutError('The operation timed out')
def iterate(self, timeout=None, propagate=True, interval=0.5):
    """Deprecated method, use :meth:`get` with a callback argument."""
    elapsed = 0.0
    results = OrderedDict((result.id, copy(result))
                          for result in self.results)

    while results:
        removed = set()
        for task_id, result in items(results):
            if result.ready():
                yield result.get(timeout=timeout and timeout - elapsed,
                                 propagate=propagate)
                removed.add(task_id)
            else:
                if result.backend.subpolling_interval:
                    time.sleep(result.backend.subpolling_interval)
        for task_id in removed:
            results.pop(task_id, None)
        time.sleep(interval)
        elapsed += interval
        if timeout and elapsed >= timeout:
            raise TimeoutError('The operation timed out')
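# Usage sketch (hedged): `ResultSet` and the `add` task below are
# hypothetical placeholders, not defined in these snippets, and a
# configured result backend is assumed.  The docstring above points to
# get() with a callback as the non-deprecated replacement.

results = ResultSet([add.delay(i, i) for i in range(10)])

# iterate() yields each return value as soon as its task is ready and
# raises TimeoutError once `timeout` seconds elapse without completion.
for value in results.iterate(timeout=30, interval=0.5):
    print(value)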
class LRUCache(UserDict):
    """LRU Cache implementation using a doubly linked list to track access.

    :keyword limit: The maximum number of keys to keep in the cache.
        When a new key is inserted and the limit has been exceeded,
        the *Least Recently Used* key will be discarded from the
        cache.

    """

    def __init__(self, limit=None):
        self.limit = limit
        self.mutex = threading.RLock()
        self.data = OrderedDict()

    def __getitem__(self, key):
        with self.mutex:
            value = self[key] = self.data.pop(key)
            return value

    def keys(self):
        # userdict.keys in py3k calls __getitem__
        return self.data.keys()

    def values(self):
        return list(self._iterate_values())

    def items(self):
        return list(self._iterate_items())

    def __setitem__(self, key, value):
        # remove least recently used key.
        with self.mutex:
            if self.limit and len(self.data) >= self.limit:
                self.data.pop(next(iter(self.data)))
            self.data[key] = value

    def __iter__(self):
        return iter(self.data)

    def _iterate_items(self):
        for k in self:
            try:
                yield (k, self.data[k])
            except KeyError:  # pragma: no cover
                pass
    iteritems = _iterate_items

    def _iterate_values(self):
        for k in self:
            try:
                yield self.data[k]
            except KeyError:  # pragma: no cover
                pass
    itervalues = _iterate_values

    def incr(self, key, delta=1):
        with self.mutex:
            # this acts as memcached does: store as a string, but return an
            # integer as long as it exists and we can cast it
            newval = int(self.data.pop(key)) + delta
            self[key] = str(newval)
            return newval
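# A minimal sketch of the core LRU behaviour above, assuming the
# LRUCache class is defined in the current module with its imports
# (OrderedDict, threading, UserDict) in place.

cache = LRUCache(limit=2)
cache['a'] = 1
cache['b'] = 2
cache['a']      # reading 'a' re-inserts it, making it most recently used
cache['c'] = 3  # limit exceeded: the least recently used key ('b') goes

assert list(cache.data) == ['a', 'c']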
class LRUCache(UserDict):
    """LRU Cache implementation using a doubly linked list to track access.

    :keyword limit: The maximum number of keys to keep in the cache.
        When a new key is inserted and the limit has been exceeded,
        the *Least Recently Used* key will be discarded from the
        cache.

    """

    def __init__(self, limit=None):
        self.limit = limit
        self.mutex = threading.RLock()
        self.data = OrderedDict()

    def __getitem__(self, key):
        with self.mutex:
            value = self[key] = self.data.pop(key)
            return value

    def keys(self):
        # userdict.keys in py3k calls __getitem__
        return self.data.keys()

    def values(self):
        return list(self._iterate_values())

    def items(self):
        return list(self._iterate_items())

    def update(self, *args, **kwargs):
        with self.mutex:
            data, limit = self.data, self.limit
            data.update(*args, **kwargs)
            if limit and len(data) > limit:
                # pop additional items in case limit exceeded
                # negative overflow will lead to an empty list
                for item in islice(iter(data), len(data) - limit):
                    data.pop(item)

    def __setitem__(self, key, value):
        # remove least recently used key.
        with self.mutex:
            if self.limit and len(self.data) >= self.limit:
                self.data.pop(next(iter(self.data)))
            self.data[key] = value

    def __iter__(self):
        return iter(self.data)

    def _iterate_items(self):
        for k in self:
            try:
                yield (k, self.data[k])
            except KeyError:  # pragma: no cover
                pass
    iteritems = _iterate_items

    def _iterate_values(self):
        for k in self:
            try:
                yield self.data[k]
            except KeyError:  # pragma: no cover
                pass
    itervalues = _iterate_values

    def incr(self, key, delta=1):
        with self.mutex:
            # this acts as memcached does: store as a string, but return an
            # integer as long as it exists and we can cast it
            newval = int(self.data.pop(key)) + delta
            self[key] = str(newval)
            return newval

    def __getstate__(self):
        d = dict(vars(self))
        d.pop('mutex')
        return d

    def __setstate__(self, state):
        self.__dict__ = state
        self.mutex = threading.RLock()
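# Sketch of the pickling support added by __getstate__/__setstate__:
# the unpicklable RLock is dropped on dump and recreated on load.
# Assumes LRUCache is defined at module level as above.

import pickle

cache = LRUCache(limit=10)
cache['x'] = 1

restored = pickle.loads(pickle.dumps(cache))
assert dict(restored.data) == {'x': 1}
assert restored.mutex is not None   # fresh RLock created by __setstate__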
class LRUCache(UserDict):
    """LRU Cache implementation using a doubly linked list to track access.

    :keyword limit: The maximum number of keys to keep in the cache.
        When a new key is inserted and the limit has been exceeded,
        the *Least Recently Used* key will be discarded from the
        cache.

    """

    def __init__(self, limit=None):
        self.limit = limit
        self.mutex = threading.RLock()
        self.data = OrderedDict()

    def __getitem__(self, key):
        with self.mutex:
            value = self[key] = self.data.pop(key)
            return value

    def update(self, *args, **kwargs):
        with self.mutex:
            data, limit = self.data, self.limit
            data.update(*args, **kwargs)
            if limit and len(data) > limit:
                # pop additional items in case limit exceeded
                # negative overflow will lead to an empty list
                for item in islice(iter(data), len(data) - limit):
                    data.pop(item)

    def __setitem__(self, key, value):
        # remove least recently used key.
        with self.mutex:
            if self.limit and len(self.data) >= self.limit:
                self.data.pop(next(iter(self.data)))
            self.data[key] = value

    def __iter__(self):
        return iter(self.data)

    def _iterate_items(self):
        for k in self:
            try:
                yield (k, self.data[k])
            except KeyError:  # pragma: no cover
                pass
    iteritems = _iterate_items

    def _iterate_values(self):
        for k in self:
            try:
                yield self.data[k]
            except KeyError:  # pragma: no cover
                pass
    itervalues = _iterate_values

    def _iterate_keys(self):
        # userdict.keys in py3k calls __getitem__
        return keys(self.data)
    iterkeys = _iterate_keys

    def incr(self, key, delta=1):
        with self.mutex:
            # this acts as memcached does: store as a string, but return an
            # integer as long as it exists and we can cast it
            newval = int(self.data.pop(key)) + delta
            self[key] = str(newval)
            return newval

    def __getstate__(self):
        d = dict(vars(self))
        d.pop('mutex')
        return d

    def __setstate__(self, state):
        self.__dict__ = state
        self.mutex = threading.RLock()

    if sys.version_info[0] == 3:  # pragma: no cover
        keys = _iterate_keys
        values = _iterate_values
        items = _iterate_items
    else:  # noqa

        def keys(self):
            return list(self._iterate_keys())

        def values(self):
            return list(self._iterate_values())

        def items(self):
            return list(self._iterate_items())
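# Sketch of the memcached-style counter behaviour of incr(), assuming
# the LRUCache class above is available in the current module.

cache = LRUCache(limit=10)
cache['hits'] = '0'

assert cache.incr('hits') == 1            # stored as '1', returned as int
assert cache.incr('hits', delta=4) == 5
assert cache.data['hits'] == '5'          # the backing value stays a string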
class QoS(object):
    """Quality of Service guarantees.

    Only supports `prefetch_count` at this point.

    :param channel: AMQ Channel.
    :keyword prefetch_count: Initial prefetch count (defaults to 0).

    """

    #: current prefetch count value
    prefetch_count = 0

    #: :class:`~collections.OrderedDict` of active messages.
    _delivered = None

    def __init__(self, channel, prefetch_count=0):
        self.channel = channel
        self.prefetch_count = prefetch_count or 0

        self._delivered = OrderedDict()
        self._delivered.restored = False
        self._on_collect = Finalize(
            self, self.restore_unacked_once, exitpriority=1)

    def can_consume(self):
        """Returns true if the channel can be consumed from.

        Used to ensure the client adheres to currently active
        prefetch limits.

        """
        pcount = self.prefetch_count
        return (not pcount or len(self._delivered) < pcount)

    def append(self, message, delivery_tag):
        """Append message to transactional state."""
        self._delivered[delivery_tag] = message

    def ack(self, delivery_tag):
        """Acknowledge message and remove from transactional state."""
        self._delivered.pop(delivery_tag, None)

    def reject(self, delivery_tag, requeue=False):
        """Remove from transactional state and requeue message."""
        message = self._delivered.pop(delivery_tag)
        if requeue:
            self.channel._restore(message)

    def restore_unacked(self):
        """Restore all unacknowledged messages."""
        delivered = self._delivered
        errors = []

        while delivered:
            try:
                _, message = delivered.popitem()
            except KeyError:  # pragma: no cover
                break

            try:
                self.channel._restore(message)
            except (KeyboardInterrupt, SystemExit, Exception) as exc:
                errors.append((exc, message))
        delivered.clear()
        return errors
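# A minimal sketch of the transactional flow above, using a hypothetical
# stand-in channel: QoS only calls `_restore()` on it when a message is
# requeued or restored.  Assumes the full QoS class (including the
# restore_unacked_once method referenced by __init__) and its imports
# (OrderedDict, multiprocessing.util.Finalize) are available.

class DummyChannel(object):

    def __init__(self):
        self.restored = []

    def _restore(self, message):
        self.restored.append(message)


qos = QoS(DummyChannel(), prefetch_count=2)

qos.append({'body': 'task-1'}, delivery_tag=1)
qos.append({'body': 'task-2'}, delivery_tag=2)
assert not qos.can_consume()    # prefetch limit of 2 reached

qos.ack(1)                      # acknowledged: dropped from the state
assert qos.can_consume()

qos.reject(2, requeue=True)     # requeued through channel._restore()
assert qos.channel.restored == [{'body': 'task-2'}]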
class LRUCache(UserDict):
    """LRU Cache implementation using a doubly linked list to track access.

    :keyword limit: The maximum number of keys to keep in the cache.
        When a new key is inserted and the limit has been exceeded,
        the *Least Recently Used* key will be discarded from the
        cache.

    """

    def __init__(self, limit=None):
        self.limit = limit
        self.mutex = threading.RLock()
        self.data = OrderedDict()

    def __getitem__(self, key):
        with self.mutex:
            value = self[key] = self.data.pop(key)
            return value

    def update(self, *args, **kwargs):
        with self.mutex:
            data, limit = self.data, self.limit
            data.update(*args, **kwargs)
            if limit and len(data) > limit:
                # pop additional items in case limit exceeded
                for _ in range(len(data) - limit):
                    data.popitem(last=False)

    def popitem(self, last=True, _needs_lock=IS_PYPY):
        if not _needs_lock:
            return self.data.popitem(last)
        with self.mutex:
            return self.data.popitem(last)

    def __setitem__(self, key, value):
        # remove least recently used key.
        with self.mutex:
            if self.limit and len(self.data) >= self.limit:
                self.data.pop(next(iter(self.data)))
            self.data[key] = value

    def __iter__(self):
        return iter(self.data)

    def _iterate_items(self, _need_lock=IS_PYPY):
        with self.mutex if _need_lock else DummyContext():
            for k in self:
                try:
                    yield (k, self.data[k])
                except KeyError:  # pragma: no cover
                    pass
    iteritems = _iterate_items

    def _iterate_values(self, _need_lock=IS_PYPY):
        with self.mutex if _need_lock else DummyContext():
            for k in self:
                try:
                    yield self.data[k]
                except KeyError:  # pragma: no cover
                    pass
    itervalues = _iterate_values

    def _iterate_keys(self):
        # userdict.keys in py3k calls __getitem__
        return keys(self.data)
    iterkeys = _iterate_keys

    def incr(self, key, delta=1):
        with self.mutex:
            # this acts as memcached does: store as a string, but return an
            # integer as long as it exists and we can cast it
            newval = int(self.data.pop(key)) + delta
            self[key] = str(newval)
            return newval

    def __getstate__(self):
        d = dict(vars(self))
        d.pop('mutex')
        return d

    def __setstate__(self, state):
        self.__dict__ = state
        self.mutex = threading.RLock()

    if sys.version_info[0] == 3:  # pragma: no cover
        keys = _iterate_keys
        values = _iterate_values
        items = _iterate_items
    else:  # noqa

        def keys(self):
            return list(self._iterate_keys())

        def values(self):
            return list(self._iterate_values())

        def items(self):
            return list(self._iterate_items())
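# Sketch of the revised bulk update() path, which trims the oldest
# entries with popitem(last=False) instead of popping keys while
# iterating over the dict (the weakness of the earlier islice version).
# Assumes the class above is defined together with its helpers
# (IS_PYPY, DummyContext).

cache = LRUCache(limit=3)
cache.update([('a', 1), ('b', 2), ('c', 3), ('d', 4), ('e', 5)])

assert list(cache.data) == ['c', 'd', 'e']   # 'a' and 'b' were trimmed
assert cache.popitem() == ('e', 5)           # LIFO by default, like OrderedDict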