def __init__(self,
             chain: BaseAsyncChain,
             db: BaseAsyncChainDB,
             peer_pool: ETHPeerPool,
             header_syncer: HeaderSyncerAPI,
             token: CancelToken = None) -> None:
    """Set up block-body download state for the syncer."""
    super().__init__(token=token)
    self.chain = chain
    self.db = db
    self._peer_pool = peer_pool
    self._pending_bodies = {}
    self._header_syncer = header_syncer

    # Idle peers, ordered by how quickly they have returned block bodies.
    self._body_peers: WaitingPeers[ETHPeer] = WaitingPeers(commands.BlockBodies)

    # Incomplete body-download tasks: keep several requests-worth of
    # headers buffered, and hand out lower block numbers first.
    max_buffered = MAX_BODIES_FETCH * REQUEST_BUFFER_MULTIPLIER
    self._block_body_tasks = TaskQueue(max_buffered, attrgetter('block_number'))

    # Set whenever there is room in the DB buffer for more imported blocks.
    self._db_buffer_capacity = asyncio.Event()
    self._db_buffer_capacity.set()  # start with capacity

    # Set once the first header has been received.
    self._got_first_header = asyncio.Event()
def __init__(self,
             chain: BaseAsyncChain,
             db: BaseAsyncChainDB,
             peer_pool: ETHPeerPool,
             header_syncer: HeaderSyncerAPI,
             token: CancelToken = None) -> None:
    """Wire up receipt downloading and ordered block persistence."""
    super().__init__(chain, db, peer_pool, token)
    self._header_syncer = header_syncer

    # Idle peers, ordered by how quickly they have returned receipts.
    self._receipt_peers: WaitingPeers[ETHPeer] = WaitingPeers(commands.Receipts)

    # Receipt-download tasks: keep several requests-worth of headers
    # buffered, and hand out lower block numbers first.
    max_buffered = MAX_RECEIPTS_FETCH * REQUEST_BUFFER_MULTIPLIER
    self._receipt_tasks = TaskQueue(max_buffered, attrgetter('block_number'))

    # A block is persisted only once both bodies and receipts arrived,
    # and never before its parent block has been persisted.
    self._block_persist_tracker = OrderedTaskPreparation(
        BlockPersistPrereqs,
        id_extractor=attrgetter('hash'),
        dependency_extractor=attrgetter('parent_hash'),
    )

    # Flipped to True when the fast chain syncer reaches its goal.
    self.is_complete = False
def __init__(self,
             chain: AsyncChainAPI,
             db: BaseAsyncChainDB,
             peer_pool: ETHPeerPool,
             header_syncer: HeaderSyncerAPI,
             token: CancelToken = None) -> None:
    """Set up block-body download state plus a parent-state-root cache."""
    super().__init__(token=token)
    self.chain = chain
    self.db = db
    self._peer_pool = peer_pool
    self._pending_bodies = {}
    self._header_syncer = header_syncer

    # Idle peers, ordered by how quickly they have returned block bodies.
    self._body_peers: WaitingPeers[ETHPeer] = WaitingPeers(commands.BlockBodies)

    # Incomplete body-download tasks: keep several requests-worth of
    # headers buffered, and hand out lower block numbers first.
    max_buffered = MAX_BODIES_FETCH * REQUEST_BUFFER_MULTIPLIER
    self._block_body_tasks = TaskQueue(max_buffered, attrgetter('block_number'))

    # Set whenever there is room in the DB buffer for more imported blocks.
    self._db_buffer_capacity = asyncio.Event()
    self._db_buffer_capacity.set()  # start with capacity

    # Set once the first header has been received.
    self._got_first_header = asyncio.Event()

    # Old state roots kept around for previewing transactions: preview
    # needs a header's *parent's* state root, and the parent header may
    # not be in the database yet, so it is looked up here instead.
    self._block_hash_to_state_root: Dict[Hash32, Hash32] = {}
async def test_wait_empty_queue():
    """get() on an empty queue must block until the wait times out."""
    q = TaskQueue()
    with pytest.raises(asyncio.TimeoutError):
        await wait(q.get())
async def test_get_nowait(tasks, get_size, expected_tasks):
    """get_nowait returns the expected tasks, and complete() removes them."""
    q = TaskQueue()
    await q.add(tasks)

    batch, taken = q.get_nowait(get_size)
    assert taken == expected_tasks

    await q.complete(batch, taken)
    for task in taken:
        assert task not in q
async def test_unfinished_tasks_readded():
    """Tasks taken from the queue but not completed become pending again."""
    q = TaskQueue()
    await wait(q.add((2, 1, 3)))

    batch, tasks = await wait(q.get())

    # Fix: complete() is a coroutine (it is awaited everywhere else in this
    # file); without `await` the completion never actually ran and the
    # coroutine object leaked with a RuntimeWarning.
    await q.complete(batch, (2, ))

    # Only the completed task (2) is gone; 1 and 3 are available again.
    batch, tasks = await wait(q.get())
    assert tasks == (1, 3)
async def test_cannot_complete_batch_with_wrong_task():
    """complete() rejects any task that wasn't issued with the batch."""
    q = TaskQueue()
    await wait(q.add((1, 2)))
    batch, issued = await wait(q.get())

    # Tasks that were never handed out cannot be completed.
    with pytest.raises(ValidationError):
        await q.complete(batch, (3, 4))

    # A partially-invalid completion is rejected wholesale, leaving the
    # valid task still incomplete.
    with pytest.raises(ValidationError):
        await q.complete(batch, (1, 3))
    assert 1 in q
def __init__(self,
             chain: BaseAsyncChain,
             peer_pool: BaseChainPeerPool,
             stitcher: HeaderStitcher,
             token: CancelToken) -> None:
    """Prepare header-gap filling state."""
    super().__init__(token=token)
    self._chain = chain
    self._stitcher = stitcher

    # Pending gap-fill tasks, ordered by the block number of the parent
    # header (the first element of each task tuple).
    max_pending_fillers = 50
    self._filler_header_tasks = TaskQueue(
        max_pending_fillers,
        compose(attrgetter('block_number'), itemgetter(0)),
    )

    # Idle peers, ordered by how quickly they have returned headers.
    self._waiting_peers: WaitingPeers[TChainPeer] = WaitingPeers(BaseBlockHeaders)
    self._peer_pool = peer_pool
def __init__(self,
             chain: AsyncChainAPI,
             peer_pool: BaseChainPeerPool,
             stitcher: HeaderStitcher) -> None:
    # Prepare header-gap filling state: a queue of fill tasks and a pool
    # of idle peers to serve them.
    self.logger = get_logger('trinity.sync.common.headers.SkeletonSyncer')
    self._chain = chain
    self._stitcher = stitcher
    max_pending_fillers = 50
    self._filler_header_tasks = TaskQueue(
        max_pending_fillers,
        # order tasks by block number of the parent header (first tuple element)
        compose(attrgetter('block_number'), itemgetter(0)),
    )
    # Queue up idle peers, ordered by the speed they return block headers.
    # NOTE(review): the original comment said "block bodies", but this pool
    # waits on header command classes — confirm intent.
    self._waiting_peers: WaitingPeers[TChainPeer] = WaitingPeers(
        # NOTE(review): 'LESBlockHEaders' has unusual capitalization — verify
        # it matches the imported name (possible typo for 'LESBlockHeaders').
        (ETHBlockHeaders, LESBlockHEaders),
    )
    self._peer_pool = peer_pool
    # NOTE(review): annotated as SyncProgress but initialized to None —
    # Optional[SyncProgress] would describe this more accurately.
    self.sync_progress: SyncProgress = None
async def test_queue_contains_task_until_complete(tasks):
    """A task is `in` the queue from add() until complete(), including while in progress."""
    q = TaskQueue(order_fn=id)
    first_task = tasks[0]
    assert first_task not in q

    await wait(q.add(tasks))
    assert first_task in q

    batch, pending_tasks = await wait(q.get())
    # In-progress tasks still count as contained.
    assert first_task in q

    # Fix: complete() is a coroutine (it is awaited everywhere else in this
    # file); without `await` the completion never ran, so the final
    # containment check exercised nothing.
    await q.complete(batch, pending_tasks)
    assert first_task not in q
async def test_queue_get_cap(start_tasks, get_max, expected, remainder):
    """get(max) returns at most `max` tasks; the rest stay queued."""
    q = TaskQueue()
    await wait(q.add(start_tasks))

    batch, first_batch = await wait(q.get(get_max))
    assert first_batch == expected

    if not remainder:
        # Nothing left: a further get() must time out rather than return.
        try:
            _, leftover = await wait(q.get())
        except asyncio.TimeoutError:
            pass
        else:
            raise AssertionError(f"No more tasks to get, but got {leftover!r}")
    else:
        _, leftover = await wait(q.get())
        assert leftover == remainder
async def test_cannot_complete_batch_unless_pending():
    """complete() requires the exact batch id that get() handed out."""
    q = TaskQueue()
    await wait(q.add((1, 2)))

    # No batch was issued yet, so a null batch id is rejected.
    with pytest.raises(ValidationError):
        await q.complete(None, (1, 2))
    assert 1 in q

    batch, issued = await wait(q.get())

    # A bogus batch id is also rejected, leaving the task incomplete.
    with pytest.raises(ValidationError):
        await q.complete(batch + 1, (1, 2))
    assert 1 in q
async def test_unfinished_tasks_readded():
    """num_pending tracks tasks that were taken but then only partially completed."""
    q = TaskQueue()
    await wait(q.add((2, 1, 3)))
    assert q.num_pending() == 3

    # Taking the whole batch leaves nothing pending...
    batch, taken = await wait(q.get())
    assert q.num_pending() == 0

    # ...but completing only task 2 re-queues the other two.
    await q.complete(batch, (2, ))
    assert q.num_pending() == 2

    batch, taken = await wait(q.get())
    assert taken == (1, 3)
    assert q.num_pending() == 0
def __init__(self,
             chain: BaseAsyncChain,
             db: BaseAsyncChainDB,
             peer_pool: ETHPeerPool,
             token: CancelToken = None) -> None:
    """Set up block-body download state."""
    super().__init__(token=token)
    self.chain = chain
    self.db = db
    self._peer_pool = peer_pool
    self._pending_bodies = {}

    # Idle peers, ordered by how quickly they have returned block bodies.
    self._body_peers: WaitingPeers[ETHPeer] = WaitingPeers(commands.BlockBodies)

    # Incomplete body-download tasks: keep several requests-worth of
    # headers buffered, and hand out lower block numbers first.
    max_buffered = MAX_BODIES_FETCH * REQUEST_BUFFER_MULTIPLIER
    self._block_body_tasks = TaskQueue(max_buffered, attrgetter('block_number'))
async def test_queue_size_reset_after_complete():
    """Completing a batch frees maxsize capacity for new adds."""
    q = TaskQueue(maxsize=2)
    await wait(q.add((1, 2)))
    batch, taken = await wait(q.get())

    # While the batch is in progress the queue still counts as full...
    try:
        await wait(q.add((3, )))
    except asyncio.TimeoutError:
        pass
    else:
        raise AssertionError("should not be able to add task past maxsize")

    # ...do imaginary work, then complete it all; capacity returns.
    await q.complete(batch, taken)
    await wait(q.add((3, )))
async def test_unlimited_queue_by_default():
    """With no maxsize, even a very large add() completes without blocking."""
    q = TaskQueue()
    many_tasks = tuple(range(100001))
    await wait(q.add(many_tasks))
async def test_no_asyncio_exception_leaks(operations, queue_size, add_size, get_size, event_loop):
    """
    This could be made much more general, at the cost of simplicity.
    For now, this mimics real usage enough to hopefully catch the big issues.

    Some examples for more generality:

    - different get sizes on each call
    - complete varying amounts of tasks at each call
    """
    # One consumer: waits for its "get" event, pulls a batch, then waits for
    # its "complete" event before completing that batch.
    async def getter(queue, num_tasks, get_event, complete_event, cancel_token):
        with trap_operation_cancelled():
            # wait to run the get
            await cancel_token.cancellable_wait(get_event.wait())
            batch, tasks = await cancel_token.cancellable_wait(
                queue.get(num_tasks))
            get_event.clear()
            # wait to run the completion
            await cancel_token.cancellable_wait(complete_event.wait())
            await queue.complete(batch, tasks)
            complete_event.clear()

    # One producer: waits for its "add" event, then adds a batch of random
    # integer tasks.
    async def adder(queue, add_size, add_event, cancel_token):
        with trap_operation_cancelled():
            # wait to run the add
            await cancel_token.cancellable_wait(add_event.wait())
            await cancel_token.cancellable_wait(
                queue.add(
                    tuple(random.randint(0, 2**32) for _ in range(add_size))))
            add_event.clear()

    # Drives the scenario: fires each operation's event in the given order,
    # optionally yielding to the loop between steps, then cancels everything.
    async def operation_order(operations, events, cancel_token):
        for operation_id, pause in operations:
            events[operation_id].set()
            if pause:
                await asyncio.sleep(0)
        await asyncio.sleep(0)
        cancel_token.trigger()

    q = TaskQueue(queue_size)
    # Six trigger events: two adders, two getters, two completers.
    events = tuple(Event() for _ in range(6))
    add_event, add2_event, get_event, get2_event, complete_event, complete2_event = events
    cancel_token = CancelToken('end test')

    # Run everything concurrently; stop at the first raised exception so a
    # leaked asyncio error surfaces instead of being swallowed.
    done, pending = await asyncio.wait([
        getter(q, get_size, get_event, complete_event, cancel_token),
        getter(q, get_size, get2_event, complete2_event, cancel_token),
        adder(q, add_size, add_event, cancel_token),
        adder(q, add_size, add2_event, cancel_token),
        operation_order(operations, events, cancel_token),
    ], return_when=asyncio.FIRST_EXCEPTION)

    # Re-raise anything a task swallowed into its result, and confirm the
    # cancel token actually wound everything down.
    for task in done:
        exc = task.exception()
        if exc:
            raise exc
    assert not pending
async def test_default_priority_order():
    """Without an order_fn, tasks come back in their natural sort order."""
    q = TaskQueue(maxsize=4)
    await wait(q.add((2, 1, 3)))
    batch, ordered = await wait(q.get())
    assert ordered == (1, 2, 3)
def test_get_nowait_queuefull(get_size):
    """get_nowait on an empty queue raises asyncio.QueueFull."""
    empty_queue = TaskQueue()
    with pytest.raises(asyncio.QueueFull):
        empty_queue.get_nowait(get_size)
async def test_cannot_readd_same_task():
    """Adding a task the queue already tracks is a validation error."""
    q = TaskQueue()
    await q.add((1, 2))
    with pytest.raises(ValidationError):
        await q.add((2, ))
async def test_two_pending_adds_one_release():
    # Two add() calls compete for a maxsize-2 queue: the second add blocks
    # until a consumer completes the first batch. The sleep(0)/sleep(0.01)
    # calls yield to the event loop at precise points, so statement order
    # matters throughout.
    q = TaskQueue(2)
    asyncio.ensure_future(q.add((3, 1, 2)))

    # wait for ^ to run and pause
    await asyncio.sleep(0)
    # note that the highest-priority items are queued first
    assert 1 in q
    assert 2 in q
    assert 3 not in q

    # two tasks are queued, none are started
    assert len(q) == 2
    assert q.num_in_progress() == 0

    # second add: blocks because the queue is at maxsize
    asyncio.ensure_future(q.add((0, 4)))
    # wait for ^ to run and pause
    await asyncio.sleep(0)

    # task consumer 1 completes the first two pending
    batch, tasks = await wait(q.get())
    assert tasks == (1, 2)

    # both tasks started
    assert len(q) == 2
    assert q.num_in_progress() == 2

    await q.complete(batch, tasks)

    # tasks are drained, but new ones aren't added yet...
    assert q.num_in_progress() == 0
    assert len(q) == 0

    # give the blocked add() a chance to wake up and run
    await asyncio.sleep(0.01)

    # Now the tasks are added
    assert q.num_in_progress() == 0
    assert len(q) == 2

    # task consumer 2 gets the next two, in priority order (0 before the
    # leftover 3 from the first add)
    batch, tasks = await wait(q.get())
    assert len(tasks) == 2
    assert tasks == (0, 3)
    assert q.num_in_progress() == 2
    assert len(q) == 2

    # clean up, so the pending get() call can complete
    await q.complete(batch, tasks)

    # All current tasks finished
    assert q.num_in_progress() == 0
    await asyncio.sleep(0)

    # only task 4 remains
    assert q.num_in_progress() == 0
    assert len(q) == 1
async def test_custom_priority_order():
    """A custom order_fn (negation here) reverses the retrieval order."""
    q = TaskQueue(maxsize=4, order_fn=lambda value: -value)
    await wait(q.add((2, 1, 3)))
    batch, ordered = await wait(q.get())
    assert ordered == (3, 2, 1)
async def test_valid_priority_order(order_fn):
    """A sortable order_fn is accepted: add() must simply not raise."""
    q = TaskQueue(order_fn=order_fn)
    await wait(q.add((1, )))
async def test_cannot_add_single_non_tuple_task():
    """add() takes a collection of tasks; a bare int is rejected."""
    q = TaskQueue()
    with pytest.raises(ValidationError):
        await wait(q.add(1))
async def test_invalid_priority_order(order_fn):
    """An order_fn producing unsortable keys is rejected at add() time."""
    q = TaskQueue(order_fn=order_fn)
    with pytest.raises(ValidationError):
        await wait(q.add((1, )))