def test_copy():
    """A NotifyingQueue copy must mirror the contents without consuming them."""
    q = NotifyingQueue()
    assert q.copy() == []

    q.put(1)
    assert q.copy() == [1]
    assert q.peek() == 1, 'copy must preserve the queue'

    q.put(2)
    assert q.copy() == [1, 2], 'copy must preserve the items order'
def test_copy():
    """Copying a NotifyingQueue is non-destructive and order-preserving."""
    notifying_queue = NotifyingQueue()
    assert notifying_queue.copy() == []

    notifying_queue.put(1)
    assert notifying_queue.copy() == [1]
    assert notifying_queue.peek() == 1, 'copy must preserve the queue'

    notifying_queue.put(2)
    assert notifying_queue.copy() == [1, 2], 'copy must preserve the items order'
def test_event_must_be_set():
    """event_first_of must fire once an element is put on the queue."""
    notifying_queue = NotifyingQueue()
    stop = Event()
    data_or_stop = event_first_of(notifying_queue, stop)

    delay_seconds = 1
    item = 1
    # Schedule the producer in the background; wait() below blocks until
    # the queued element sets the combined event.
    gevent.spawn_later(delay_seconds, add_element_to_queue, notifying_queue, item)

    assert data_or_stop.wait()
def init_queue_for(
        self,
        queue_identifier: QueueIdentifier,
        items: typing.List[QueueItem_T],
) -> Queue_T:
    """ Create the queue identified by the queue_identifier
    and initialize it with `items`.
    """
    recipient = queue_identifier.recipient

    # A queue must be initialized at most once per identifier.
    assert self.queueids_to_queues.get(queue_identifier) is None

    queue = NotifyingQueue(items=items)
    self.queueids_to_queues[queue_identifier] = queue

    events = self.get_health_events(recipient)
    greenlet_queue = gevent.spawn(
        single_queue_send,
        self,
        recipient,
        queue,
        queue_identifier,
        self.event_stop,
        events.event_healthy,
        events.event_unhealthy,
        self.retries_before_backoff,
        self.retry_interval,
        self.retry_interval * 10,
    )

    # Name the greenlet for easier debugging of stuck queues.
    if queue_identifier.channel_identifier == CHANNEL_IDENTIFIER_GLOBAL_QUEUE:
        greenlet_queue.name = f'Queue for {pex(recipient)} - global'
    else:
        greenlet_queue.name = (
            f'Queue for {pex(recipient)} - {queue_identifier.channel_identifier}'
        )

    greenlet_queue.link_exception(self.on_error)
    self.greenlets.append(greenlet_queue)

    log.debug(
        'new queue created for',
        node=pex(self.raiden.address),
        queue_identifier=queue_identifier,
        items_qty=len(items),
    )

    return queue
def init_queue_for(
        self,
        recipient: typing.Address,
        queue_name: bytes,
        items: typing.List[QueueItem_T],
) -> Queue_T:
    """ Create the queue identified by the pair `(recipient, queue_name)`
    and initialize it with `items`.
    """
    queueid = (recipient, queue_name)

    # Each queue id must be initialized exactly once.
    assert self.queueids_to_queues.get(queueid) is None

    queue = NotifyingQueue(items=items)
    self.queueids_to_queues[queueid] = queue

    events = self.get_health_events(recipient)
    greenlet_queue = gevent.spawn(
        single_queue_send,
        self,
        recipient,
        queue,
        self.event_stop,
        events.event_healthy,
        events.event_unhealthy,
        self.retries_before_backoff,
        self.retry_interval,
        self.retry_interval * 10,
    )

    # Name the greenlet for easier debugging.
    if queue_name == b'global':
        greenlet_queue.name = f'Queue for {pex(recipient)} - global'
    else:
        greenlet_queue.name = f'Queue for {pex(recipient)} - {pex(queue_name)}'

    self.greenlets.append(greenlet_queue)

    log.debug(
        'new queue created for',
        node=pex(self.raiden.address),
        token=pex(queue_name),
        to=pex(recipient),
    )

    return queue
def __init__(
        self,
        handle_messages_callback: Callable[[MatrixSyncMessages], bool],
        handle_member_join_callback: Callable[[Room], None],
        base_url: str,
        token: str = None,
        user_id: str = None,
        valid_cert_check: bool = True,
        sync_filter_limit: int = 20,
        cache_level: CACHE = CACHE.ALL,
        http_pool_maxsize: int = 10,
        http_retry_timeout: int = 60,
        http_retry_delay: Callable[[], Iterable[float]] = lambda: repeat(1),
        environment: Environment = Environment.PRODUCTION,
        user_agent: str = None,
) -> None:
    """Initialize the Matrix client wrapper.

    Args:
        handle_messages_callback: Invoked with batches of sync messages.
        handle_member_join_callback: Invoked when a member joins a room.
        base_url: Homeserver base URL passed to the HTTP API layer.
        token / user_id: Optional credentials forwarded to the parent class.
        valid_cert_check: Whether TLS certificates are validated.
        http_retry_delay: Factory returning an iterable of retry delays
            (defaults to a constant one-second delay).
    """
    # NOTE(review): self.token is reset to None here even though `token` is
    # also forwarded to super().__init__ below — presumably the parent sets
    # its own token attribute; confirm before changing.
    self.token: Optional[str] = None
    self.environment = environment
    self.handle_messages_callback = handle_messages_callback
    self._handle_member_join_callback = handle_member_join_callback
    # Queue of (request id, response, received-at) tuples produced by syncs.
    self.response_queue: NotifyingQueue[Tuple[
        UUID, JSONResponse, datetime]] = NotifyingQueue()
    self.stop_event = Event()
    super().__init__(base_url, token, user_id, valid_cert_check, sync_filter_limit, cache_level)
    # Dedicated HTTP API instance; "/sync" is marked as a long-polling path.
    self.api = GMatrixHttpApi(
        base_url,
        token,
        pool_maxsize=http_pool_maxsize,
        retry_timeout=http_retry_timeout,
        retry_delay=http_retry_delay,
        long_paths=("/sync", ),
        user_agent=user_agent,
    )
    self.api.validate_certificate(valid_cert_check)
    # Monotonically increasing id to ensure that presence updates are processed in order.
    self._presence_update_ids: Iterator[int] = itertools.count()
    self._worker_pool = gevent.pool.Pool(size=20)
    # Gets incremented every time a sync loop is completed. This is useful since the sync token
    # can remain constant over multiple loops (if no events occur).
    self.sync_progress = SyncProgress(self.response_queue)
    self._sync_filter_id: Optional[int] = None
def get_queue_for(self, recipient, queue_name):
    """ Return the queue identified by the pair `(recipient, queue_name)`.

    If the queue doesn't exist it will be instantiated.
    """
    queueid = (recipient, queue_name)
    existing = self.queueid_to_queue.get(queueid)
    if existing is not None:
        return existing

    queue = NotifyingQueue()
    self.queueid_to_queue[queueid] = queue

    events = self.get_health_events(recipient)
    sender = gevent.spawn(
        single_queue_send,
        self,
        recipient,
        queue,
        self.event_stop,
        events.event_healthy,
        events.event_unhealthy,
        self.retries_before_backoff,
        self.retry_interval,
        self.retry_interval * 10,
    )
    self.greenlets.append(sender)

    if log.isEnabledFor(logging.DEBUG):
        log.debug(
            'new queue created for',
            node=pex(self.raiden.address),
            token=pex(queue_name),
            to=pex(recipient),
        )

    return queue
def get_channel_queue(self, receiver_address, token_address):
    """Return the per-channel send queue, creating it (and its sender
    greenlet) on first use.
    """
    key = (receiver_address, token_address)
    existing = self.channel_queue.get(key)
    if existing is not None:
        return existing

    queue = NotifyingQueue()
    self.channel_queue[key] = queue

    events = self.get_health_events(receiver_address)
    sender = gevent.spawn(
        single_queue_send,
        self,
        receiver_address,
        queue,
        self.event_stop,
        events.event_healthy,
        events.event_unhealthy,
        self.retries_before_backoff,
        self.retry_interval,
        self.retry_interval * 10,
    )
    self.greenlets.append(sender)

    if log.isEnabledFor(logging.DEBUG):
        log.debug(
            'new queue created for',
            node=pex(self.raiden.address),
            token=pex(token_address),
            to=pex(receiver_address),
        )

    return queue
def init_queue_for(self, recipient, queue_name, items):
    """ Create the queue identified by the pair `(recipient, queue_name)`
    and initialize it with `items`.
    """
    queueid = (recipient, queue_name)

    # Initialization must happen at most once per queue id.
    assert self.queueids_to_queues.get(queueid) is None

    queue = NotifyingQueue(items=items)
    self.queueids_to_queues[queueid] = queue

    events = self.get_health_events(recipient)
    sender = gevent.spawn(
        single_queue_send,
        self,
        recipient,
        queue,
        self.event_stop,
        events.event_healthy,
        events.event_unhealthy,
        self.retries_before_backoff,
        self.retry_interval,
        self.retry_interval * 10,
    )
    self.greenlets.append(sender)

    if log.isEnabledFor(logging.DEBUG):
        log.debug(
            'new queue created for',
            node=pex(self.raiden.address),
            token=pex(queue_name),
            to=pex(recipient),
        )

    return queue
def test_not_empty():
    """A queue created with initial items starts in the set state."""
    prefilled = NotifyingQueue(items=[1, 2])
    assert prefilled.is_set()
def test_queue():
    """Exercise the copy/peek/get ordering guarantees of NotifyingQueue."""
    q = NotifyingQueue()
    assert q.copy() == []

    q.put(1)
    assert q.copy() == [1]
    assert q.peek() == 1, "copy must preserve the queue"

    q.put(2)
    assert q.copy() == [1, 2], "copy must preserve the items order"
    assert q.peek() == 1, "copy must preserve the queue"
    assert q.get() == 1, "get should return first item"
    assert q.peek() == 2, "get must remove first item"
def test_not_empty():
    """Constructing with items must leave the queue's event flag set."""
    seeded = NotifyingQueue(items=[1, 2])
    assert seeded.is_set()