async def mock_handle_postprocess(app_config, *, payload, expected,
                                  expected_response):
    handler = EventHandler(app_config=app_config,
                           plugins=[],
                           effective_events=app_config.events)
    context = EventContext(app_config=app_config,
                           plugin_config=app_config,
                           event_name='plugin_event',
                           track_ids={},
                           auth_info={
                               'auth_type': AuthType.UNSECURED,
                               'allowed': 'true'
                           })
    intermediate_result = None
    async for res in handler.handle_async_event(context=context,
                                                query_args={},
                                                payload=None):
        intermediate_result = res
    hook = PostprocessHook()
    response = await handler.postprocess(context=context,
                                         payload=intermediate_result,
                                         response=hook)
    assert hook.headers == expected_response['headers']
    assert hook.cookies == expected_response['cookies']
    assert hook.status == expected_response['status']
    assert response == expected
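
A minimal sketch of how a pytest coroutine might drive the helper above (the
`mock_app_config` fixture and the expected values are assumptions, not part of
the engine API):

import pytest

@pytest.mark.asyncio
async def test_postprocess(mock_app_config):
    await mock_handle_postprocess(
        mock_app_config,
        payload=None,
        expected='ok',  # assumed result of the event's __postprocess__
        expected_response={'headers': {}, 'cookies': {}, 'status': 200})
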
async def mock_handle_request_response_event(app_config, *, payload, expected):
    handler = EventHandler(app_config=app_config,
                           plugins=[],
                           effective_events=app_config.events)
    context = EventContext(app_config=app_config,
                           plugin_config=app_config,
                           event_name='mock_post_event',
                           track_ids=MockStreamManager.test_track_ids,
                           auth_info={
                               'auth_type': AuthType.UNSECURED,
                               'allowed': 'true'
                           })
    async for response in handler.handle_async_event(
            context=context,
            query_args={"query_arg1": payload.value},
            payload=payload):
        assert response == expected
async def mock_handle_spawn_event(app_config, *, payload, expected,
                                  stream_name):
    handler = EventHandler(app_config=app_config,
                           plugins=[],
                           effective_events=app_config.events)
    context = EventContext(app_config=app_config,
                           plugin_config=app_config,
                           event_name='mock_spawn_event',
                           track_ids=MockStreamManager.test_track_ids,
                           auth_info={
                               'auth_type': AuthType.UNSECURED,
                               'allowed': 'true'
                           })
    event_count = 0
    async for result in handler.handle_async_event(context=context,
                                                   query_args={},
                                                   payload=payload.value):
        assert result.value.startswith(expected.value)
        event_count += 1
    assert event_count == 3
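
For reference, a hedged sketch of the kind of event module the spawn helper above
could exercise: a step returning `Spawn[...]` that yields three items, matching the
`event_count == 3` assertion (the module layout and `MockData` are assumptions):

from dataclasses import dataclass
from hopeit.app.context import EventContext
from hopeit.app.events import Spawn
from hopeit.dataobjects import dataobject

__steps__ = ['spawn_many']

@dataobject
@dataclass
class MockData:
    value: str

async def spawn_many(payload: str, context: EventContext) -> Spawn[MockData]:
    # yield three items so the helper counts event_count == 3
    for i in range(3):
        yield MockData(value=f"stream: {payload}.{i}")
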
def _apply_mocks(context: EventContext, handler: EventHandler, event_name: str,
                 effective_events: Dict[str, EventDescriptor],
                 preprocess_hook: Optional[PreprocessHook],
                 postprocess_hook: Optional[PostprocessHook],
                 mocks: List[Callable[[ModuleType, EventContext], None]]):
    """
    Execute a list of functions to mock module properties.
    """
    module, _, _ = handler.modules[event_name]
    logger.debug(
        context,
        f"[test.apps] executing mocks for module={module.__name__}...")
    for mock in mocks:
        hooks: Dict[str, Any] = {}
        if preprocess_hook is not None:
            hooks['preprocess_hook'] = preprocess_hook
        if postprocess_hook is not None:
            hooks['postprocess_hook'] = postprocess_hook
        mock(module, context, **hooks)  # type: ignore
    handler.load_modules(effective_events=effective_events)
    logger.debug(context, '[test.apps] mocking done.')
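
A hedged example of a function compatible with the `mocks` list above: it receives
the event module and context (plus any hook kwargs) and replaces a module attribute
in place; `save_item` is a hypothetical name:

from types import ModuleType

def mock_save_item(module: ModuleType, context: EventContext, **hooks) -> None:
    # replace a module-level function so the event step runs without real side effects
    module.save_item = lambda item: None

Such a function can then be passed as `mocks=[mock_save_item]` to `execute_event` below.
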
class AppEngine:
    """
    Engine that handles a Hopeit Application
    """
    def __init__(self,
                 *,
                 app_config: AppConfig,
                 plugins: List[AppConfig],
                 streams_enabled: bool = True):
        """
        Creates an instance of the AppEngine

        :param app_config: AppConfig, Hopeit application configuration as specified in config module
        """
        self.app_config = app_config
        self.effective_events = self._config_effective_events(app_config)
        self.plugins = plugins
        self.event_handler: Optional[EventHandler] = None
        self.streams_enabled = streams_enabled
        self.stream_manager: Optional[StreamManager] = None
        self._running: Dict[str, asyncio.Lock] = {
            event_name: asyncio.Lock()
            for event_name, event_info in self.effective_events.items()
            if event_info.type in (EventType.STREAM, EventType.SERVICE)
        }
        logger.init_app(app_config, plugins)

    async def start(self):
        """
        Starts handlers, services and pools for this application
        """
        self.event_handler = EventHandler(
            app_config=self.app_config,
            plugins=self.plugins,
            effective_events=self.effective_events)
        streams_present = any(
            True for _, event_info in self.effective_events.items()
            if (event_info.type == EventType.STREAM) or (
                event_info.write_stream is not None))
        if streams_present and self.streams_enabled:
            mgr = StreamManager.create(self.app_config.server.streams)
            self.stream_manager = await mgr.connect()
        return self

    async def stop(self):
        """
        Stops and cleans up handlers
        """
        logger.info(__name__, f"Stopping app={self.app_config.app_key()}...")
        for event_name in self._running.keys():
            await self.stop_event(event_name)
        if self.stream_manager:
            await asyncio.sleep(self.app_config.engine.read_stream_timeout + 5)
            await self.stream_manager.close()
        logger.info(__name__, f"Stopped app={self.app_config.app_key()}")

    async def execute(
            self, *, context: EventContext, query_args: Optional[dict],
            payload: Optional[EventPayload]) -> Optional[EventPayload]:
        """
        Executes a configured event of type GET or POST using the received payload as input,
        respecting the configured timeout.

        :param context: EventContext, info about app, event and tracking
        :param query_args: dict, containing query arguments to be passed to every step of event
        :param payload: EventPayload, payload to send to event handler
        :return: EventPayload, result from executing the event
        :raise: TimeoutError in case the configured timeout is exceeded before getting the result
        """
        assert self.event_handler is not None, "event_handler not created. Call `start()`."
        timeout = context.event_info.config.response_timeout
        try:
            return await asyncio.wait_for(self._execute_event(
                context, query_args, payload),
                                          timeout=timeout)
        except asyncio.TimeoutError as e:
            raise TimeoutError(
                f"Response timeout exceeded seconds={timeout}") from e

    async def _execute_event(
            self,
            context: EventContext,
            query_args: Optional[dict],
            payload: Optional[EventPayload],
            queue: str = StreamQueue.AUTO) -> Optional[EventPayload]:
        """
        Process results of executing the event specified in context for a given input payload.
        In case the event yields multiple results (i.e. Spawn[...]), they are collected in batches
        of the configured batch_size and written to the output stream if configured.

        :return: result of executing the event. In case the event yields multiple results,
        the last item will be returned. If no items are yielded, None will be returned.
        """
        assert self.event_handler is not None, "event_handler not created. Call `start()`."
        if self.streams_enabled and (context.event_info.write_stream
                                     is not None):
            assert self.stream_manager, "stream_manager not initialized. Call `start()`."
        event_info = self.effective_events[context.event_name]
        batch_size = event_info.config.stream.batch_size
        batch = []
        result = None
        async for result in self.event_handler.handle_async_event(
                context=context, query_args=query_args, payload=payload):
            if result is not None:
                batch.append(result)
            if (len(batch) >= batch_size) and (event_info.write_stream
                                               is not None):
                await self._write_stream_batch(batch=batch,
                                               context=context,
                                               event_info=event_info,
                                               queue=queue)
                batch.clear()
        if (len(batch) > 0) and (event_info.write_stream is not None):
            await self._write_stream_batch(batch=batch,
                                           context=context,
                                           event_info=event_info,
                                           queue=queue)
        return result

    async def _write_stream_batch(self, *, batch: List[EventPayload],
                                  context: EventContext,
                                  event_info: EventDescriptor, queue: str):
        await asyncio.gather(*[
            self._write_stream(payload=item,
                               context=context,
                               event_info=event_info,
                               upstream_queue=queue) for item in batch
        ])

    async def _write_stream(self, *, payload: EventPayload,
                            context: EventContext, event_info: EventDescriptor,
                            upstream_queue: str):
        """
        Publish the payload to one or more configured queues for a given stream
        """
        assert self.stream_manager is not None, "stream_manager not created. Call `start()`."
        assert event_info.write_stream is not None, "write_stream name not configured"
        assert event_info.config.stream.compression, "stream compression not configured"
        assert event_info.config.stream.serialization, "stream serialization not configured"
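        # Worked example (assumed configuration): with write_stream.name="orders",
        # queues=["AUTO"] and an upstream_queue of "high":
        #   - with queue_strategy=PROPAGATE the payload goes to stream "orders.high", queue="high"
        #   - with queue_strategy=DROP it goes to stream "orders", queue="AUTO"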

        for configured_queue in event_info.write_stream.queues:

            stream_name = event_info.write_stream.name
            if (upstream_queue != StreamQueue.AUTO
                    and configured_queue == StreamQueue.AUTO
                    and event_info.write_stream.queue_strategy
                    == StreamQueueStrategy.PROPAGATE):
                stream_name += f".{upstream_queue}"
            elif configured_queue != StreamQueue.AUTO:
                stream_name += f".{configured_queue}"

            queue_name = (configured_queue
                          if event_info.write_stream.queue_strategy
                          == StreamQueueStrategy.DROP else upstream_queue)

            await self.stream_manager.write_stream(
                stream_name=stream_name,
                queue=queue_name,
                payload=StreamManager.as_data_event(payload),
                track_ids=context.track_ids,
                auth_info=context.auth_info,
                compression=event_info.config.stream.compression,
                serialization=event_info.config.stream.serialization,
                target_max_len=event_info.config.stream.target_max_len)

    async def preprocess(self, *, context: EventContext,
                         query_args: Optional[Dict[str, Any]],
                         payload: Optional[EventPayload],
                         request: PreprocessHook) -> Optional[EventPayload]:
        assert self.event_handler, "event_handler not created. Call `start()`."
        return await self.event_handler.preprocess(context=context,
                                                   query_args=query_args,
                                                   payload=payload,
                                                   request=request)

    async def postprocess(self, *, context: EventContext,
                          payload: Optional[EventPayload],
                          response: PostprocessHook) -> Optional[EventPayload]:
        assert self.event_handler, "event_handler not created. Call `start()`."
        return await self.event_handler.postprocess(context=context,
                                                    payload=payload,
                                                    response=response)

    async def _process_stream_event_with_timeout(
            self, stream_event: StreamEvent, stream_info: ReadStreamDescriptor,
            stream_name: str, queue: str, context: EventContext,
            stats: StreamStats,
            log_info: Dict[str, str]) -> Union[EventPayload, Exception]:
        """
        Invokes _process_stream_event with a configured timeout
        :return: result of _process_stream_event, or a TimeoutError instance if the event
            is not processed within the configured timeout (the error is returned, not raised)
        """
        timeout = context.event_info.config.stream.timeout
        try:
            return await asyncio.wait_for(self._process_stream_event(
                stream_event=stream_event,
                stream_info=stream_info,
                stream_name=stream_name,
                queue=queue,
                context=context,
                stats=stats,
                log_info=log_info),
                                          timeout=timeout)
        except asyncio.TimeoutError:
            terr = TimeoutError(
                f'Stream processing timeout exceeded seconds={timeout}')
            logger.error(context,
                         str(terr),
                         extra=extra(prefix='stream.',
                                     **{
                                         **log_info, 'name': stream_name,
                                         'queue': queue
                                     }))
            return terr

    async def _read_stream_cycle(
        self, event_name: str, event_config: EventDescriptor,
        stream_info: ReadStreamDescriptor, datatypes: Dict[str, type],
        offset: str, stats: StreamStats, log_info: Dict[str, str],
        test_mode: bool, last_err: Optional[StreamOSError]
    ) -> Tuple[Optional[Union[EventPayload, Exception]],
               Optional[EventContext], Optional[StreamOSError]]:
        """
        Single read_stream cycle, used from the read_stream while-loop to allow waiting and
        retrying/recovering on failures. Reads from multiple queues if configured, always
        starting from the first queue and stopping when batch_size is reached.
        """
        assert self.stream_manager is not None
        assert stream_info.consumer_group is not None

        last_res, last_context = None, None

        try:
            batch: List[Awaitable[Union[EventPayload, Exception]]] = []

            for queue in stream_info.queues:

                stream_name = stream_info.name
                if queue != StreamQueue.AUTO:
                    stream_name += f".{queue}"

                for stream_event in await self.stream_manager.read_stream(
                        stream_name=stream_name,
                        consumer_group=stream_info.consumer_group,
                        datatypes=datatypes,
                        track_headers=self.app_config.engine.track_headers,
                        offset=offset,
                        batch_size=event_config.config.stream.batch_size,
                        timeout=self.app_config.engine.read_stream_timeout,
                        batch_interval=self.app_config.engine.
                        read_stream_interval):

                    stats.ensure_start()

                    if isinstance(stream_event, Exception):
                        logger.error(__name__, stream_event)
                        stats.inc(error=True)
                    else:
                        context = EventContext(
                            app_config=self.app_config,
                            plugin_config=self.app_config,
                            event_name=event_name,
                            track_ids=stream_event.track_ids,
                            auth_info=stream_auth_info(stream_event))
                        last_context = context
                        logger.start(context,
                                     extra=extra(prefix='stream.',
                                                 **{
                                                     **log_info, 'name':
                                                     stream_name,
                                                     'queue': queue
                                                 }))
                        batch.append(
                            self._process_stream_event_with_timeout(
                                stream_event=stream_event,
                                stream_info=stream_info,
                                stream_name=stream_name,
                                queue=stream_event.queue,
                                context=context,
                                stats=stats,
                                log_info=log_info))

            if len(batch) != 0:
                for result in await asyncio.gather(*batch):
                    last_res = result
            if last_context:
                logger.stats(last_context,
                             extra=extra(prefix='metrics.stream.',
                                         **stats.calc()))
            if test_mode:
                self._running[event_name].release()
            if last_err is not None:
                logger.warning(
                    __name__, f"Recovered read stream for event={event_name}.")
                last_err = None

        except StreamOSError as e:
            retry_in = self.app_config.engine.read_stream_interval // 1000
            retry_in += random.randint(1, max(3, min(30, retry_in)))
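            # e.g. with read_stream_interval=5000 ms, retry_in starts at 5 and
            # a random 1..5 is added, so the wait lands between 6 and 10 seconds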
            if type(e) != type(last_err):  # pylint: disable=unidiomatic-typecheck
                logger.error(__name__, e)
            logger.error(
                __name__,
                f"Cannot read stream for event={event_name}. Waiting seconds={int(retry_in)}..."
            )
            last_err = e
            await asyncio.sleep(retry_in)  # TODO: Replace with circuit breaker

        return last_res, last_context, last_err

    async def read_stream(
            self,
            *,
            event_name: str,
            test_mode: bool = False
    ) -> Optional[Union[EventPayload, Exception]]:
        """
        Listens to a stream specified by an event of type STREAM, and executes
        the event handler for each event received on the stream.

        When invoked, the stream is read continuously, consuming events as they
        arrive and processing them according to the configured steps.
        To interrupt listening for events, call `stop_event(event_name)`.

        :param event_name: str, an event name contained in app_config
        :param test_mode: bool, set to True to immediately stop and return results for testing
        :return: last result or exception, only intended to be used in test_mode
        """
        assert self.app_config.server is not None
        stats = StreamStats()
        log_info = {
            'app_key': self.app_config.app_key(),
            'event_name': event_name
        }
        wait = self.app_config.server.streams.delay_auto_start_seconds
        if wait > 0:
            wait = int(wait / 2) + random.randint(0, wait) - random.randint(
                0, int(wait / 2))
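            # e.g. with delay_auto_start_seconds=10 this yields a jittered wait
            # between 0 and 15 seconds, spreading start times across instances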
            logger.info(__name__,
                        f"Start reading stream: waiting seconds={wait}...",
                        extra=extra(prefix='stream.', **log_info))
            await asyncio.sleep(wait)
        logger.info(__name__,
                    "Starting reading stream...",
                    extra=extra(prefix='stream.', **log_info))
        try:
            assert self.event_handler, "event_handler not created. Call `start()`."
            assert self.stream_manager, "No active stream manager. Call `start()`"
            assert not self._running[event_name].locked(
            ), "Event already running. Call `stop_event(...)`"

            event_config = self.effective_events[event_name]
            stream_info = event_config.read_stream
            assert stream_info, f"No read_stream section in config for event={event_name}"
            for queue in stream_info.queues:
                await self.stream_manager.ensure_consumer_group(
                    stream_name=(f"{stream_info.name}.{queue}"
                                 if queue != StreamQueue.AUTO else
                                 stream_info.name),
                    consumer_group=stream_info.consumer_group)

            datatypes = self._find_stream_datatype_handlers(event_name)
            log_info['name'] = stream_info.name
            log_info['consumer_group'] = stream_info.consumer_group
            assert not self._running[event_name].locked(
            ), f"Event already running {event_name}"
            await self._running[event_name].acquire()
            logger.info(__name__,
                        "Consuming stream...",
                        extra=extra(prefix='stream.', **log_info))
            offset = '>'
            last_res, last_context, last_err = None, None, None
            while self._running[event_name].locked():
                last_res, last_context, last_err = await self._read_stream_cycle(
                    event_name, event_config, stream_info, datatypes, offset,
                    stats, log_info, test_mode, last_err)
            logger.info(__name__,
                        'Stopped read_stream.',
                        extra=extra(prefix='stream.', **log_info))
            if last_context is None:
                logger.warning(__name__,
                               f"No stream events consumed in {event_name}")
            return last_res
        except (AssertionError, NotImplementedError) as e:
            logger.error(__name__, e)
            logger.error(
                __name__,
                f"Unexpectedly stopped read stream for event={event_name}")
            return e

    async def _process_stream_event(
            self, *, stream_event: StreamEvent,
            stream_info: ReadStreamDescriptor, stream_name: str, queue: str,
            context: EventContext, stats: StreamStats,
            log_info: Dict[str,
                           str]) -> Optional[Union[EventPayload, Exception]]:
        """
        Process a single stream event: execute the event steps, ack the event if it did not
        fail, and log an error on failure.

        :return: results of executing the event, or the Exception raised during processing
        """
        try:
            assert self.event_handler
            assert self.stream_manager

            result = await self._execute_event(context, None,
                                               stream_event.payload,
                                               stream_event.queue)
            await self.stream_manager.ack_read_stream(
                stream_name=stream_name,
                consumer_group=stream_info.consumer_group,
                stream_event=stream_event)
            logger.done(context,
                        extra=combined(
                            extra(prefix='stream.',
                                  **{
                                      **log_info, 'name': stream_name,
                                      'queue': queue
                                  }), metrics(context),
                            stream_metrics(context)))
            stats.inc()
            return result
        except CancelledError as e:
            extra_info = {**log_info, 'name': stream_name, 'queue': queue}
            logger.error(context,
                         'Cancelled',
                         extra=extra(prefix='stream.', **extra_info))
            logger.failed(context, extra=extra(prefix='stream.', **extra_info))
            stats.inc(error=True)
            return e
        except Exception as e:  # pylint: disable=broad-except
            extra_info = {**log_info, 'name': stream_name, 'queue': queue}
            logger.error(context,
                         e,
                         extra=extra(prefix='stream.', **extra_info))
            logger.failed(context, extra=extra(prefix='stream.', **extra_info))
            stats.inc(error=True)
            return e

    def _service_event_context(
            self,
            event_name: str,
            previous_context: Optional[EventContext] = None):
        if previous_context is None:
            track_ids = {
                'track.request_id':
                str(uuid.uuid4()),
                'track.request_ts':
                datetime.now().astimezone(timezone.utc).isoformat()
            }
        else:
            track_ids = previous_context.track_ids
        return EventContext(app_config=self.app_config,
                            plugin_config=self.app_config,
                            event_name=event_name,
                            track_ids={
                                **track_ids,
                                'track.operation_id':
                                str(uuid.uuid4()),
                            },
                            auth_info={})

    async def service_loop(
            self,
            *,
            event_name: str,
            test_mode: bool = False
    ) -> Optional[Union[EventPayload, Exception]]:
        """
        Service loop: executes the `__service__` handler in the event module and runs the
        event steps for each yielded payload.

        :param event_name: str, an event name contained in app_config
        :param test_mode: bool, set to True to immediately stop and return results for testing
        :return: last result or exception, only intended to be used in test_mode
        """
        assert self.app_config.server is not None
        log_info = {
            'app_key': self.app_config.app_key(),
            'event_name': event_name
        }
        assert not self._running[event_name].locked(
        ), f"Cannot start service, event already running {event_name}"
        await self._running[event_name].acquire()
        wait = self.app_config.server.streams.delay_auto_start_seconds
        if wait > 0:
            wait = int(wait / 2) + random.randint(0, wait) - random.randint(
                0, int(wait / 2))
            logger.info(__name__,
                        f"Start service: waiting seconds={wait}...",
                        extra=extra(prefix='service.', **log_info))
            await asyncio.sleep(wait)
        logger.info(__name__,
                    "Starting service...",
                    extra=extra(prefix='service.', **log_info))
        impl = find_event_handler(app_config=self.app_config,
                                  event_name=event_name)
        service_handler = getattr(impl, '__service__')
        assert service_handler is not None, \
            f"{event_name} must implement method `__service__(context) -> Spawn[...]` to run as a service"
        context = self._service_event_context(event_name=event_name)
        last_result = None
        if self._running[event_name].locked():
            async for payload in service_handler(context):
                try:
                    context = self._service_event_context(
                        event_name=event_name, previous_context=context)
                    logger.start(context,
                                 extra=extra(prefix='service.', **log_info))
                    last_result = await self.execute(context=context,
                                                     query_args=None,
                                                     payload=payload)
                    logger.done(context,
                                extra=extra(prefix='service.', **log_info))
                    if not self._running[event_name].locked():
                        logger.info(__name__,
                                    "Stopped service.",
                                    extra=extra(prefix='service.', **log_info))
                        break
                except CancelledError as e:
                    logger.error(context,
                                 'Cancelled',
                                 extra=extra(prefix='service.', **log_info))
                    logger.failed(context,
                                  extra=extra(prefix='service.', **log_info))
                    last_result = e
                except Exception as e:  # pylint: disable=broad-except
                    logger.error(context,
                                 e,
                                 extra=extra(prefix='service.', **log_info))
                    logger.failed(context,
                                  extra=extra(prefix='service.', **log_info))
                    last_result = e
                if test_mode:
                    self._running[event_name].release()
                    return last_result
        else:
            logger.info(__name__,
                        "Stopped service.",
                        extra=extra(prefix='service.', **log_info))
        logger.info(__name__,
                    "Finished service.",
                    extra=extra(prefix='service.', **log_info))
        return last_result

    async def stop_event(self, event_name: str):
        """
        Sets running state to stopped for a continuously-running event.
        This acts as a stop signal for STREAM events.

        :param event_name: name of the event to signal stop
        """
        if self._running[event_name].locked():
            self._running[event_name].release()

    @staticmethod
    def _config_effective_events(
            app_config: AppConfig) -> Dict[str, EventDescriptor]:
        effective_events: Dict[str, EventDescriptor] = {}
        for event_name, event_info in app_config.events.items():
            impl = find_event_handler(app_config=app_config,
                                      event_name=event_name)
            splits = split_event_stages(app_config.app, event_name, event_info,
                                        impl)
            effective_events.update(**splits)
        return effective_events

    def _find_stream_datatype_handlers(self,
                                       event_name: str) -> Dict[str, type]:
        """
        Computes a dictionary of `{datatype name: datatype class}` that event steps
        can handle when consuming from a stream.
        """
        base_event, _ = event_and_step(event_name)
        impl = find_event_handler(app_config=self.app_config,
                                  event_name=base_event)
        all_steps = extract_module_steps(impl)
        steps = effective_steps(event_name, all_steps)
        datatypes = {}
        for _, step in steps.items():
            _, datatype, _ = step
            if hasattr(datatype, '__stream_event__'):
                datatypes[datatype.__name__] = datatype
        if len(datatypes) == 0:
            raise NotImplementedError(
                f"No data types found to read from stream in even={event_name}. "
                "Dataclasses must be decorated with `@dataobject` to be used in streams"
            )
        return datatypes
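
A minimal usage sketch for `AppEngine` (assumed wiring: the event name and auth
values mirror the test helpers above and are illustrative only):

async def run_app_once(app_config: AppConfig) -> Optional[EventPayload]:
    # start the engine without stream connections, run one event, then stop
    engine = await AppEngine(app_config=app_config, plugins=[],
                             streams_enabled=False).start()
    try:
        context = EventContext(app_config=app_config,
                               plugin_config=app_config,
                               event_name='my_event',  # hypothetical event name
                               track_ids={},
                               auth_info={'auth_type': AuthType.UNSECURED,
                                          'allowed': 'true'})
        return await engine.execute(context=context, query_args=None,
                                    payload=None)
    finally:
        await engine.stop()
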
async def execute_event(
    app_config: AppConfig,
    event_name: str,
    payload: Optional[EventPayload],
    mocks: Optional[List[Callable[[ModuleType, EventContext], None]]] = None,
    *,
    fields: Optional[Dict[str, str]] = None,
    upload: Optional[Dict[str, bytes]] = None,
    preprocess: bool = False,
    postprocess: bool = False,
    **kwargs
) -> Union[Optional[EventPayload], List[EventPayload], Tuple[
        Optional[EventPayload], EventPayload, PostprocessHook], Tuple[
            List[EventPayload], EventPayload, PostprocessHook]]:
    """
    Test executes an app event.

    Note that the event implementation file needs to be saved to disk, since this simulates
    execution similar to how the engine actually executes events. Writing to streams is ignored.

    :param app_config: AppConfig, load using `app_config = config('path/to/app-config.json')`
    :param event_name: str, name of the event / module to execute
    :param payload: test payload to send to the initial step
    :param mocks: list of functions to execute in order to mock functionality
    :param fields: optional form fields made available through the preprocess hook
    :param upload: optional file uploads made available through the preprocess hook
    :param preprocess: enables testing `__preprocess__`, called with the given payload
    :param postprocess: enables testing `__postprocess__`, called with the last step result or
        the result before a SHUFFLE step if present.
    :param kwargs: query args that will be forwarded to the initial step of the event
    :return: the results of executing the event; for simple events it will be a single object,
        for events with an initial Spawn[...] the results will be collected as a list.
        If postprocess is true, a tuple of 3 elements is returned: the first element is the
        results as described above, the second the output of the call to `__postprocess__`,
        and the third a PostprocessHook with the response information used during the call
        to `__postprocess__`.
    """
    async def _postprocess(hook: PostprocessHook,
                           results: List[EventPayload]) -> EventPayload:
        pp_payload = results[-1] if len(results) > 0 else None
        return await handler.postprocess(context=context,
                                         payload=pp_payload,
                                         response=hook)

    async def _preprocess(hook: PreprocessHook,
                          payload: EventPayload) -> EventPayload:
        return await handler.preprocess(context=context,
                                        query_args=kwargs,
                                        payload=payload,
                                        request=hook)

    context = create_test_context(app_config, event_name)
    impl = find_event_handler(app_config=app_config, event_name=event_name)

    event_info = app_config.events[event_name]
    effective_events = {
        **split_event_stages(app_config.app, event_name, event_info, impl)
    }
    handler = EventHandler(app_config=app_config,
                           plugins=[],
                           effective_events=effective_events)

    preprocess_hook, postprocess_hook = None, None
    if preprocess:
        preprocess_hook = PreprocessHook(
            headers=CIMultiDictProxy(CIMultiDict()),
            multipart_reader=MockMultipartReader(fields or {}, upload
                                                 or {}),  # type: ignore
            file_hook_factory=MockFileHook)
    if postprocess:
        postprocess_hook = PostprocessHook()
    if mocks is not None:
        _apply_mocks(context, handler, event_name, effective_events,
                     preprocess_hook, postprocess_hook, mocks)

    if preprocess_hook:
        payload = await _preprocess(preprocess_hook, payload)
        if postprocess_hook and preprocess_hook.status is not None:
            postprocess_hook.set_status(preprocess_hook.status)
    datatype = find_datatype_handler(app_config=app_config,
                                     event_name=event_name)
    if datatype is None:
        if payload is not None:
            return (payload, payload,
                    postprocess_hook) if postprocess else payload
    elif not isinstance(payload, datatype):
        return (payload, payload, postprocess_hook) if postprocess else payload

    on_queue, pp_result, pp_called = [payload], None, False
    for effective_event_name, event_info in effective_events.items():
        context = create_test_context(app_config, effective_event_name)
        stage_results = []
        for elem in on_queue:
            async for res in handler.handle_async_event(context=context,
                                                        query_args=kwargs,
                                                        payload=elem):
                stage_results.append(res)
        on_queue = stage_results if len(stage_results) > 0 else on_queue
        if postprocess_hook and not pp_called:
            pp_called = True
            pp_result = await _postprocess(postprocess_hook, on_queue)
        kwargs = {}

    if postprocess:
        if len(on_queue) == 0:
            return None, pp_result, postprocess_hook
        if len(on_queue) == 1:
            return on_queue[0], pp_result, postprocess_hook
        return list(on_queue), pp_result, postprocess_hook

    if len(on_queue) == 0:
        return None
    if len(on_queue) == 1:
        return on_queue[0]
    return list(on_queue)
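
A hedged usage sketch for `execute_event` (the config path, event name and query
argument are placeholders following the patterns above, not a real app):

async def run_event_test():
    app_config = config('path/to/app-config.json')  # loader referenced in the docstring
    # kwargs such as query_arg1 are forwarded as query args to the initial step
    result = await execute_event(app_config, 'my_event', None,
                                 query_arg1='value')
    assert result is not None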