Example #1
import asyncio
from typing import Type

from lahja import BaseEvent, EndpointAPI


async def _do_mock_response(request_type: Type[BaseEvent], response: BaseEvent,
                            event_bus: EndpointAPI,
                            ready: asyncio.Event) -> None:
    ready.set()
    async for req in event_bus.stream(request_type):
        await event_bus.broadcast(response, req.broadcast_config())
        break
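
A minimal sketch of how a test might drive this mock responder, assuming lahja's request/response event API; ExampleRequest, ExampleResponse, and use_mock_response are hypothetical names invented for illustration:

import asyncio
from typing import Type

from lahja import BaseEvent, BaseRequestResponseEvent, EndpointAPI


class ExampleResponse(BaseEvent):
    # Hypothetical response event, for illustration only.
    pass


class ExampleRequest(BaseRequestResponseEvent[ExampleResponse]):
    # Hypothetical request event, for illustration only.
    @staticmethod
    def expected_response_type() -> Type[ExampleResponse]:
        return ExampleResponse


async def use_mock_response(event_bus: EndpointAPI) -> None:
    # Start the responder task and wait until it signals readiness.
    ready = asyncio.Event()
    responder = asyncio.ensure_future(
        _do_mock_response(ExampleRequest, ExampleResponse(), event_bus, ready)
    )
    await ready.wait()

    # request() broadcasts the event and returns the reply that the responder
    # sends back via req.broadcast_config().
    response = await event_bus.request(ExampleRequest())
    assert isinstance(response, ExampleResponse)
    await responder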
Example #2
    async def serve(self, event_bus: EndpointAPI,
                    beam_chain: BeamChain) -> None:
        """
        Listen for DoStatelessBlockImport events, and import the block when one is received.
        Reply with StatelessBlockImportDone when the import is complete.
        """

        loop = asyncio.get_event_loop()
        async for event in event_bus.stream(DoStatelessBlockImport):
            # launch in a new thread, so we don't block the event loop!
            import_completion = loop.run_in_executor(
                # Maybe build the pausing chain inside the new process?
                None,
                partial_import_block(beam_chain, event.block),
            )

            # Wrapped in `asyncio.shield` because we want to keep the service from
            #   shutting down until the block import is complete.
            # In the tests, for example, we call cancel() on this service and await it,
            #   so that we know the in-progress block import is complete. Then below, we
            #   do not send back the import completion (so the import server won't get
            #   triggered again).
            await asyncio.shield(import_completion)

            if self.manager.is_running:
                _broadcast_import_complete(
                    event_bus,
                    event.block,
                    event.broadcast_config(),
                    import_completion,  # type: ignore
                )
            else:
                break
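
The docstring above describes a request/response round trip over the event bus. A hedged caller-side sketch, assuming DoStatelessBlockImport is a request/response event constructed from the block and answered by StatelessBlockImportDone; request_block_import is a hypothetical helper name:

from lahja import EndpointAPI

# DoStatelessBlockImport is the Trinity event type streamed by the server above.


async def request_block_import(event_bus: EndpointAPI, block):
    # Broadcast DoStatelessBlockImport and wait for the StatelessBlockImportDone
    # reply that _broadcast_import_complete() sends to event.broadcast_config().
    return await event_bus.request(DoStatelessBlockImport(block))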
Example #3
    async def serve(
            self,
            event_bus: EndpointAPI,
            beam_chain: BeamChain) -> None:
        """
        Listen to DoStatelessBlockPreview events, and execute the transactions to prefill
        all the needed state data.
        """
        with futures.ThreadPoolExecutor(
            max_workers=MAX_SPECULATIVE_EXECUTIONS_PER_PROCESS,
            thread_name_prefix="trinity-spec-exec-",
        ) as speculative_thread_executor:

            async for event in event_bus.stream(DoStatelessBlockPreview):
                if event.header.block_number % NUM_PREVIEW_SHARDS != self._shard_num:
                    continue

                self.logger.debug(
                    "DoStatelessBlockPreview-%d is previewing new block: %s",
                    self._shard_num,
                    event.header,
                )
                # Parallel Execution:
                # Run a complete block end-to-end
                asyncio.get_event_loop().run_in_executor(
                    # Maybe build the pausing chain inside the new process,
                    # so we can use process pool?
                    None,
                    partial_trigger_missing_state_downloads(
                        beam_chain,
                        event.header,
                        event.transactions,
                    )
                )

                # Speculative Execution:
                # Split transactions into groups by sender, and run them independently.
                # This effectively assumes that the transactions by each sender are not
                #   affected by any other transactions in the block. This is often true,
                #   so it helps speed up the search for data.
                # Being able to retrieve this predicted data in parallel, and to ask for
                #   more trie nodes in each GetNodeData request, can make the difference
                #   between keeping up with the network and falling behind.
                transaction_groups = groupby(attrgetter('sender'), event.transactions)
                for sender_transactions in transaction_groups.values():
                    asyncio.get_event_loop().run_in_executor(
                        speculative_thread_executor,
                        partial_speculative_execute(
                            beam_chain,
                            event.header,
                            sender_transactions,
                        )
                    )
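
The speculative path above groups transactions by sender with a toolz-style groupby(key, seq), which returns a dict keyed by the result of the key function. A small standalone illustration of that grouping; the Tx namedtuple and its values are made up:

from collections import namedtuple
from operator import attrgetter

from toolz import groupby  # the snippet above likely gets this from eth_utils.toolz

# Hypothetical stand-in for real transaction objects.
Tx = namedtuple("Tx", ["sender", "nonce"])

transactions = (
    Tx(sender=b"alice", nonce=0),
    Tx(sender=b"bob", nonce=0),
    Tx(sender=b"alice", nonce=1),
)

# groupby(key, seq) -> {key_value: [items in original order]}
transaction_groups = groupby(attrgetter("sender"), transactions)
assert transaction_groups == {
    b"alice": [Tx(b"alice", 0), Tx(b"alice", 1)],
    b"bob": [Tx(b"bob", 0)],
}

# Each sender's list can then be handed to its own executor task, since
# transactions from different senders are assumed not to affect each other.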
Example #4
    async def serve(
            self,
            event_bus: EndpointAPI,
            beam_chain: BaseAsyncChain) -> None:
        """
        Listen for DoStatelessBlockImport events, and import the block when one is received.
        Reply with StatelessBlockImportDone when the import is complete.
        """

        async for event in self.wait_iter(event_bus.stream(DoStatelessBlockImport)):
            # launch in a new thread, so we don't block the event loop!
            import_completion = self.get_event_loop().run_in_executor(
                # Maybe build the pausing chain inside the new process?
                None,
                partial(
                    beam_chain.import_block,
                    event.block,
                    perform_validation=True,
                ),
            )

            # Intentionally don't use .wait() below, because we want to keep the service from
            #   shutting down until the block import is complete.
            # In the tests, for example, we call cancel() on this service and await it,
            #   so that we know the in-progress block import is complete. Then below, we
            #   do not send back the import completion (so the import server won't get
            #   triggered again).
            await import_completion

            if self.is_running:
                _broadcast_import_complete(  # type: ignore
                    event_bus,
                    event.block,
                    event.broadcast_config(),
                    import_completion,
                )
            else:
                break
Example #5
    async def _auto_connect_new_announced_endpoints(
        self,
        endpoint: EndpointAPI,
    ) -> None:
        """
        Connect the given endpoint to all new endpoints on the given stream
        """
        async for ev in endpoint.stream(AvailableEndpointsUpdated):
            # We only connect to Endpoints that appear after our own Endpoint in the set.
            # This ensures that we don't try to connect to an Endpoint while that remote
            # Endpoint also wants to connect to us.
            endpoints_to_connect_to = tuple(
                connection_config
                for index, val in enumerate(ev.available_endpoints)
                if val.name == endpoint.name
                for connection_config in ev.available_endpoints[index:]
                if not endpoint.is_connected_to(connection_config.name))
            if not endpoints_to_connect_to:
                continue

            endpoint_names = ",".join(
                (config.name for config in endpoints_to_connect_to))
            self.logger.debug(
                "EventBus Endpoint %s connecting to other Endpoints: %s",
                endpoint.name,
                endpoint_names,
            )
            try:
                await endpoint.connect_to_endpoints(*endpoints_to_connect_to)
            except Exception as e:
                self.logger.warning(
                    "Failed to connect %s to one of %s: %s",
                    endpoint.name,
                    endpoint_names,
                    e,
                )
                raise
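
The generator expression above slices the announced endpoints so that each endpoint only dials peers listed from its own position onward, which prevents two endpoints from dialing each other at the same time. A tiny illustration of that selection rule; FakeConfig, pick_peers_to_dial, and the names are made up, and the real code filters on is_connected_to() rather than on the endpoint's own name:

from typing import NamedTuple, Tuple


class FakeConfig(NamedTuple):
    # Hypothetical stand-in for a lahja ConnectionConfig.
    name: str


def pick_peers_to_dial(own_name: str,
                       available: Tuple[FakeConfig, ...]) -> Tuple[FakeConfig, ...]:
    # Take everything from our own entry onward, then drop entries we should
    # not dial (here just ourselves; the real code drops already-connected peers).
    return tuple(
        config
        for index, val in enumerate(available)
        if val.name == own_name
        for config in available[index:]
        if config.name != own_name
    )


available = (FakeConfig("bootnode"), FakeConfig("networking"), FakeConfig("sync"))
assert pick_peers_to_dial("networking", available) == (FakeConfig("sync"),)
assert pick_peers_to_dial("sync", available) == ()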
Example #6
    async def _handle_sync_status_requests(self,
                                           event_bus: EndpointAPI) -> None:
        async for req in self.wait_iter(event_bus.stream(SyncingRequest)):
            await event_bus.broadcast(
                SyncingResponse(*self._get_sync_status()),
                req.broadcast_config())
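
On the requesting side of the bus, a caller would typically use the request/response helper instead of streaming. A minimal sketch, assuming SyncingRequest is a request/response event with no constructor arguments, answered by SyncingResponse; get_sync_status is a hypothetical helper name:

from lahja import EndpointAPI

# SyncingRequest / SyncingResponse are the Trinity events used by the server above.


async def get_sync_status(event_bus: EndpointAPI):
    # Broadcast SyncingRequest and return the SyncingResponse that the server
    # sends back to req.broadcast_config().
    return await event_bus.request(SyncingRequest())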