async def wait_for_announcement(self) -> Tuple[LESPeer, les.HeadInfo]:
    """Wait for a new announcement from any of our connected peers.

    Returns a tuple containing the LESPeer on which the announcement was
    received and the announcement info.

    Raises StopRequested when LightChain.stop() has been called.
    """
    should_stop = False

    async def wait_for_stop_event() -> None:
        nonlocal should_stop
        await self._should_stop.wait()
        should_stop = True

    # Wait for either a new announcement or the _should_stop event.
    # NOTE: asyncio.wait() requires tasks/futures -- passing bare coroutines
    # was deprecated in Python 3.8 and raises TypeError since 3.11, so we
    # wrap each coroutine with ensure_future().
    done, pending = await asyncio.wait(
        [
            asyncio.ensure_future(self._announcement_queue.get()),
            asyncio.ensure_future(wait_for_stop_event()),
        ],
        return_when=asyncio.FIRST_COMPLETED)
    # The asyncio.wait() call above may return both tasks as done, but never both as pending,
    # although to be future-proof (in case more than 2 tasks are passed in to wait()), we
    # iterate over all pending tasks and cancel all of them.
    for task in pending:
        task.cancel()
    if should_stop:
        raise StopRequested()
    return done.pop().result()
async def wait_for_announcement(self) -> Tuple[LESPeer, les.HeadInfo]:
    """Wait for a new announcement from any of our connected peers.

    Returns a tuple containing the LESPeer on which the announcement was
    received and the announcement info.

    Raises StopRequested when LightChain.stop() has been called.
    """
    should_stop = False

    async def wait_for_stop_event() -> None:
        nonlocal should_stop
        await self._should_stop.wait()
        should_stop = True

    # Wait for either a new announcement or the _should_stop event.
    # NOTE: asyncio.wait() requires tasks/futures -- passing bare coroutines
    # was deprecated in Python 3.8 and raises TypeError since 3.11, so we
    # wrap each coroutine with ensure_future().
    done, pending = await asyncio.wait(
        [
            asyncio.ensure_future(self._announcement_queue.get()),
            asyncio.ensure_future(wait_for_stop_event()),
        ],
        return_when=asyncio.FIRST_COMPLETED)
    # asyncio.wait() may return with BOTH tasks in the <done> set (e.g. when an
    # announcement arrives just as stop() is called), so <pending> can be empty.
    # The previous pending.pop().cancel() would raise KeyError in that case;
    # cancel whatever is actually still pending instead.
    for task in pending:
        task.cancel()
    if should_stop:
        raise StopRequested()
    return done.pop().result()
async def fetch_headers(self, start_block: int, peer: LESPeer) -> List[BlockHeader]:
    """Fetch headers starting at the given block number from the given peer.

    Retries up to self.max_consecutive_timeouts times when the request times
    out, sleeping briefly between attempts.

    Raises StopRequested when LightChain.stop() has been called, and
    TooManyTimeouts when every attempt timed out.
    """
    for attempt in range(self.max_consecutive_timeouts):
        if self._should_stop.is_set():
            raise StopRequested()
        try:
            return await peer.fetch_headers_starting_at(start_block)
        except asyncio.TimeoutError:
            self.logger.info(
                "Timeout when fetching headers from %s (attempt %d of %d)",
                peer, attempt + 1, self.max_consecutive_timeouts)
            # Only sleep when another attempt follows; previously we also
            # slept 0.5s after the final failure, delaying the raise for
            # no benefit.
            if attempt + 1 < self.max_consecutive_timeouts:
                # TODO: Figure out what's a good value to use here.
                await asyncio.sleep(0.5)
    raise TooManyTimeouts()