def _find_stream_datatype_handlers(
        self, event_name: str, event_info: EventDescriptor) -> Dict[str, type]:
    """
    Computes a dictionary of `{datatype name: datatype class}` that event steps
    can handle when consuming from a stream.

    :param event_name: effective event name, possibly containing a step suffix
    :param event_info: event descriptor, used to resolve the implementation module
        and the configured `dataobjects` list
    :raises NotImplementedError: when no step input datatype qualifies as a stream event
    """
    # Resolve the base event (strips any `$step` suffix) to find its implementation
    base_event, _ = event_and_step(event_name)
    impl = find_event_handler(
        app_config=self.app_config, event_name=base_event, event_info=event_info)
    all_steps = extract_module_steps(impl)
    # Only the steps effective for this (possibly staged) event name
    steps = effective_steps(event_name, all_steps)
    datatypes = {}
    for _, _, step in steps:
        _, datatype, _, _ = step
        if hasattr(datatype, '__stream_event__'):
            # Concrete @dataobject type: keyed by fully-qualified name
            datatypes[
                f"{datatype.__module__}.{datatype.__qualname__}"] = datatype
        elif datatype is DataObject:
            # Generic DataObject input: register every dataobject type declared
            # in the event configuration instead
            for type_name in event_info.dataobjects:
                datatype = find_datobject_type(type_name)
                datatypes[
                    f"{datatype.__module__}.{datatype.__qualname__}"] = datatype
    if len(datatypes) == 0:
        raise NotImplementedError(
            f"No data types found to read from stream in event={event_name}. "
            "Dataclasses must be decorated with `@dataobject` to be used in streams"
        )
    return datatypes
def test_split_event_stages(mock_app_config):  # noqa: F811
    """Splitting a SHUFFLE event yields a GET producer stage plus a STREAM consume stage."""
    event_info = mock_app_config.events['mock_shuffle_event']
    impl = find_event_handler(
        app_config=mock_app_config,
        event_name='mock_shuffle_event',
        event_info=event_info
    )
    stream_name = 'mock_app.test.mock_shuffle_event.produce_messages'
    expected = {
        'mock_shuffle_event': EventDescriptor(
            type=EventType.GET,
            read_stream=event_info.read_stream,
            write_stream=WriteStreamDescriptor(
                name=stream_name,
                queues=['AUTO'],
                queue_strategy=StreamQueueStrategy.PROPAGATE
            ),
            auth=[]
        ),
        'mock_shuffle_event$consume_stream': EventDescriptor(
            type=EventType.STREAM,
            read_stream=ReadStreamDescriptor(
                name=stream_name,
                consumer_group='mock_app.test.mock_shuffle_event.consume_stream',
                queues=['AUTO']
            ),
            write_stream=event_info.write_stream,
            auth=[]
        )
    }
    stages = split_event_stages(
        mock_app_config.app, event_name='mock_shuffle_event',
        event_info=event_info, impl=impl)
    assert stages == expected
async def config_graph(collector: Collector, context: EventContext) -> Optional[Graph]:
    """
    Generates Graph object with nodes and edges from server runtime active configuration
    """
    options: VisualizationOptions = await collector['payload']
    all_apps: RuntimeApps = await collector['runtime_apps']
    events = {}
    for runtime_info in all_apps.apps.values():
        # Skip apps filtered out by app-prefix or host filters
        if not (_filter_apps(runtime_info, options) and _filter_hosts(runtime_info, options)):
            continue
        app_config = runtime_info.app_config
        app_key = app_config.app_key()
        for event_name, event_info in app_config.events.items():
            impl = find_event_handler(app_config=app_config, event_name=event_name)
            splits = split_event_stages(app_config.app, event_name, event_info, impl)
            for name, info in splits.items():
                events[f"{app_key}.{name}"] = info
    nodes = get_nodes(events, expand_queues=options.expand_queues)
    return Graph(nodes=nodes, edges=get_edges(nodes))
async def execute_service(app_config: AppConfig, event_name: str, max_events: int = 1,
                          mocks: Optional[List[Callable[[ModuleType, EventContext], None]]] = None) \
        -> List[Union[EventPayload, Exception]]:
    """
    Executes `__service__` handler of an event, processing a maximum of `max_events`.

    :param app_config: AppConfig
    :param event_name: event_name, for a SERVICE event
    :param max_events: int, default 1, number of events to process as part of the test
    :param mocks: mocks to be forwarded to `execute_event`
    :return: list with the result of processing each available payload
    """
    context = create_test_context(app_config, event_name)
    module = find_event_handler(app_config=app_config, event_name=event_name)
    service_handler = getattr(module, '__service__')
    results = []
    async for payload in service_handler(context):
        results.append(await execute_event(app_config, event_name, payload, mocks))
        if len(results) >= max_events:
            break
    return results
def find_datatype_handler(*, app_config: AppConfig, event_name: str, event_info: EventDescriptor):
    """
    Returns the input datatype expected by the step where `event_name` starts executing.
    """
    base_event, from_step = event_and_step(event_name)
    handler_module = find_event_handler(
        app_config=app_config, event_name=base_event, event_info=event_info)
    return extract_input_type(handler_module, from_step=from_step)
def _config_effective_events(
        app_config: AppConfig) -> Dict[str, EventDescriptor]:
    """
    Computes effective events from app config, expanding SHUFFLE splits into stages.
    """
    effective_events: Dict[str, EventDescriptor] = {}
    for name, info in app_config.events.items():
        handler = find_event_handler(app_config=app_config, event_name=name)
        effective_events.update(split_event_stages(app_config.app, name, info, handler))
    return effective_events
def _extract_event_api_spec(app_config: AppConfig, event_name: str,
                            event_info: EventDescriptor) -> Optional[dict]:
    """
    Extract __api__ definition from event implementation.

    :return: the spec dict when `__api__` is a dict, the result of calling it when it
        is a callable spec builder, or None when the event module defines no
        usable `__api__` attribute.
    """
    module = find_event_handler(
        app_config=app_config, event_name=event_name, event_info=event_info)
    # Single attribute lookup with default instead of hasattr + getattr; this also
    # avoids a TypeError if a module explicitly sets `__api__ = None`.
    method_spec = getattr(module, '__api__', None)
    if method_spec is None:
        return None
    if isinstance(method_spec, dict):
        return method_spec
    return method_spec(module, app_config, event_name, None)
def test_collector_steps_descriptor(mock_app_config):  # noqa: F811
    """Collector descriptor exposes ordered steps, generated name, input type and step names."""
    module = find_event_handler(app_config=mock_app_config, event_name='mock_collector')
    collector: CollectorStepsDescriptor = extract_module_steps(module)[0][0]
    assert isinstance(collector, CollectorStepsDescriptor)
    expected_names = ['step1', 'step2', 'step3']
    assert collector.steps == [
        (name, getattr(mock_collector, name)) for name in expected_names
    ]
    assert collector.__name__ == "collector@step1"
    assert collector.input_type is MockData
    assert collector.step_names == expected_names
def _generate_schemas(app_config: AppConfig, event_name: str) -> dict:
    """
    Generates all schemas for a given event, based on step signatures plus the
    postprocess and preprocess handlers.
    """
    module = find_event_handler(app_config=app_config, event_name=event_name)
    schemas: dict = {}
    for _, step_info in extract_module_steps(module):
        _update_step_schemas(schemas, step_info)
    # Postprocess first, then preprocess, mirroring engine handler extraction
    for extract_handler in (extract_postprocess_handler, extract_preprocess_handler):
        _update_step_schemas(schemas, extract_handler(module))
    return schemas
def load_modules(self, effective_events: Dict[str, EventDescriptor]):
    """
    Loads event implementation modules and registers their steps,
    pre/postprocess handlers and app logger.
    """
    for event_name, event_info in effective_events.items():
        base_event, _ = event_and_step(event_name)
        module = find_event_handler(app_config=self.app_config, event_name=base_event)
        module_steps = extract_module_steps(module)
        self.modules[base_event] = (module, False, module_steps)
        self.preprocess_handlers[base_event] = extract_preprocess_handler(module)
        self.postprocess_handlers[base_event] = extract_postprocess_handler(module)
        setup_app_logger(
            module, app_config=self.app_config, name=base_event, event_info=event_info)
        self.steps[event_name] = effective_steps(event_name, module_steps)
def load_modules(self, effective_events: Dict[str, EventDescriptor]):
    """
    Load and setup event implementation modules. Set logger and load settings.
    """
    for event_name in effective_events:
        base_event, _ = event_and_step(event_name)
        module = find_event_handler(
            app_config=self.app_config, event_name=base_event,
            event_info=effective_events[event_name])
        module_steps = extract_module_steps(module)
        self.modules[base_event] = (module, False, module_steps)
        self.preprocess_handlers[base_event] = extract_preprocess_handler(module)
        self.postprocess_handlers[base_event] = extract_postprocess_handler(module)
        setup_app_logger(
            module, app_config=self.app_config, name=base_event,
            event_settings=get_event_settings(self.settings, event_name))
        self.steps[event_name] = effective_steps(event_name, module_steps)
def _find_stream_datatype_handlers(self, event_name: str) -> Dict[str, type]:
    """
    Computes a dictionary of `{datatype name: datatype class}` that event steps
    can handle when consuming from a stream.

    :param event_name: effective event name, possibly containing a step suffix
    :raises NotImplementedError: when no step input datatype is marked as a stream event
    """
    base_event, _ = event_and_step(event_name)
    impl = find_event_handler(app_config=self.app_config, event_name=base_event)
    all_steps = extract_module_steps(impl)
    steps = effective_steps(event_name, all_steps)
    datatypes = {}
    for _, step in steps.items():
        _, datatype, _ = step
        if hasattr(datatype, '__stream_event__'):
            datatypes[datatype.__name__] = datatype
    if len(datatypes) == 0:
        # Fixed typo in message: `even=` -> `event=`
        raise NotImplementedError(
            f"No data types found to read from stream in event={event_name}. "
            "Dataclasses must be decorated with `@dataobject` to be used in streams"
        )
    return datatypes
def _config_effective_events(
        app_config: AppConfig, enabled_groups: List[str]) -> Dict[str, EventDescriptor]:
    """
    Return effective events computed from user app config.

    Effective events can be the result of splitting a single event into stages using
    the "SHUFFLE" keyword, which internally generates 2 events. For STREAM events
    that implement the `__service__` method, both a STREAM and a SERVICE event
    are generated.

    Only events with groups defined in the `enabled_groups` list will be returned.
    If `enabled_groups` is empty, all events will be considered.
    """
    assert app_config.server
    effective_events: Dict[str, EventDescriptor] = {}
    for event_name, event_info in app_config.events.items():
        group_enabled = (
            not enabled_groups
            or event_info.group == EventDescriptor.DEFAULT_GROUP
            or event_info.group in enabled_groups
        )
        if not group_enabled:
            continue
        impl = find_event_handler(
            app_config=app_config, event_name=event_name, event_info=event_info)
        # Events resulting from splitting steps on SHUFFLE (stages)
        effective_events.update(
            split_event_stages(app_config.app, event_name, event_info, impl))
        # Associated SERVICE event for streams implementing `__service__`
        if event_info.type == EventType.STREAM and hasattr(impl, "__service__"):
            effective_events[f"{event_name}$__service__"] = EventDescriptor(
                type=EventType.SERVICE,
                connections=event_info.connections,
                impl=event_info.impl,
            )
    return effective_events
def test_find_event_handler_custom_impl(mock_app_config):  # noqa: F811
    """Events configured with a custom `impl` resolve to the custom implementation module."""
    event_name = 'mock_event_custom'
    handler = find_event_handler(
        app_config=mock_app_config,
        event_name=event_name,
        event_info=mock_app_config.events[event_name])
    assert handler is mock_event_custom_impl
async def service_loop(
        self, *, event_name: str, test_mode: bool = False
) -> Optional[Union[EventPayload, Exception]]:
    """
    Service loop, executes `__service__` handler in event and execute event steps
    for each yielded payload.

    :param event_name: str, an event name contained in app_config
    :param test_mode: bool, set to True to immediately stop and return results for testing
    :return: last result or exception, only intended to be used in test_mode
    """
    assert self.app_config.server is not None
    log_info = {
        'app_key': self.app_config.app_key(),
        'event_name': event_name
    }
    # The per-event lock doubles as the running flag: locked means the service is active
    assert not self._running[event_name].locked(
    ), f"Cannot start service, event already running {event_name}"
    await self._running[event_name].acquire()
    wait = self.app_config.server.streams.delay_auto_start_seconds
    if wait > 0:
        # Randomized jitter around the configured delay so instances don't start in lockstep
        wait = int(wait / 2) + random.randint(0, wait) - random.randint(
            0, int(wait / 2))
        logger.info(__name__, f"Start service: waiting seconds={wait}...",
                    extra=extra(prefix='service.', **log_info))
        await asyncio.sleep(wait)
    logger.info(__name__, "Starting service...",
                extra=extra(prefix='service.', **log_info))
    impl = find_event_handler(app_config=self.app_config, event_name=event_name)
    service_handler = getattr(impl, '__service__')
    assert service_handler is not None, \
        f"{event_name} must implement method `__service__(context) -> Spawn[...]` to run as a service"
    context = self._service_event_context(event_name=event_name)
    last_result = None
    if self._running[event_name].locked():
        # Consume payloads yielded by the service generator, executing the event for each
        async for payload in service_handler(context):
            try:
                # Fresh context per payload, chained from the previous one
                context = self._service_event_context(
                    event_name=event_name, previous_context=context)
                logger.start(context, extra=extra(prefix='service.', **log_info))
                last_result = await self.execute(context=context,
                                                 query_args=None,
                                                 payload=payload)
                logger.done(context, extra=extra(prefix='service.', **log_info))
                if not self._running[event_name].locked():
                    # Lock released externally (stop requested): exit the loop
                    logger.info(__name__, "Stopped service.",
                                extra=extra(prefix='service.', **log_info))
                    break
            except CancelledError as e:
                logger.error(context, 'Cancelled',
                             extra=extra(prefix='service.', **log_info))
                logger.failed(context, extra=extra(prefix='service.', **log_info))
                last_result = e
            except Exception as e:  # pylint: disable=broad-except
                # Keep the service alive on handler errors; expose last exception as result
                logger.error(context, e, extra=extra(prefix='service.', **log_info))
                logger.failed(context, extra=extra(prefix='service.', **log_info))
                last_result = e
            if test_mode:
                # Stop after the first payload and return its result for assertions
                self._running[event_name].release()
                return last_result
    else:
        # Lock was released before the loop started: service stopped before running
        logger.info(__name__, "Stopped service.",
                    extra=extra(prefix='service.', **log_info))
    logger.info(__name__, "Finished service.",
                extra=extra(prefix='service.', **log_info))
    return last_result
def test_find_event_handler_not_found(mock_app_config):  # noqa: F811
    """Unknown event names make handler lookup fail with ImportError."""
    pytest.raises(
        ImportError, find_event_handler,
        app_config=mock_app_config, event_name='unknown')
async def execute_event(
        app_config: AppConfig,
        event_name: str,
        payload: Optional[EventPayload],
        mocks: Optional[List[Callable[[ModuleType, EventContext], None]]] = None,
        *,
        fields: Optional[Dict[str, str]] = None,
        upload: Optional[Dict[str, bytes]] = None,
        preprocess: bool = False,
        postprocess: bool = False,
        **kwargs) -> Union[
            Optional[EventPayload],
            List[EventPayload],
            Tuple[Optional[EventPayload], EventPayload, PostprocessHook],
            Tuple[List[EventPayload], EventPayload, PostprocessHook]]:
    """
    Test executes an app event.

    Notice that event implementation file needs to be saved to disk since this will
    simulate execution similar to how engine actually execute events. Writing to
    stream will be ignored.

    :param app_config: AppConfig, load using `app_config = config('path/to/app-config.json')`
    :param event_name: str, name of the event / module to execute
    :param payload: test payload to send to initial step
    :param mocks: lists of functions to execute in order to mock functionality
    :param fields: optional form fields forwarded to the mocked multipart reader
        when `preprocess` is enabled
    :param upload: optional `{name: bytes}` uploads forwarded to the mocked
        multipart reader when `preprocess` is enabled
    :param preprocess: enables testing __preprocess__ with a mocked request hook
    :param postprocess: enables testing __postprocess__ called with last step result
        or result before a SHUFFLE step if present.
    :param kwargs: that will be forwarded to the initial step of the event
    :return: the results of executing the event, for simple events it will be a single object,
        for events with initial Spawn[...] the results will be collected as a list.
        If postprocess is true, a tuple of 3 elements is return, first element is results
        as described above, second element the output of call to __postprocess__, and third
        one a PostprocessHook with response information used during call to __postprocess__
    """
    async def _postprocess(hook: PostprocessHook,
                           results: List[EventPayload]) -> EventPayload:
        # __postprocess__ receives the last result (or None when nothing was produced)
        pp_payload = results[-1] if len(results) > 0 else None
        return await handler.postprocess(context=context,
                                         payload=pp_payload,
                                         response=hook)

    async def _preprocess(hook: PreprocessHook,
                          payload: EventPayload) -> EventPayload:
        return await handler.preprocess(context=context,
                                        query_args=kwargs,
                                        payload=payload,
                                        request=hook)

    context = create_test_context(app_config, event_name)
    impl = find_event_handler(app_config=app_config, event_name=event_name)
    event_info = app_config.events[event_name]
    # Expand SHUFFLE splits so each stage is executed in order, like the engine does
    effective_events = {
        **split_event_stages(app_config.app, event_name, event_info, impl)
    }
    handler = EventHandler(app_config=app_config,
                           plugins=[],
                           effective_events=effective_events)

    preprocess_hook, postprocess_hook = None, None
    if preprocess:
        # Mocked request hook carrying provided form fields and file uploads
        preprocess_hook = PreprocessHook(
            headers=CIMultiDictProxy(CIMultiDict()),
            multipart_reader=MockMultipartReader(fields or {}, upload or {}),  # type: ignore
            file_hook_factory=MockFileHook)
    if postprocess:
        postprocess_hook = PostprocessHook()
    if mocks is not None:
        # Apply user-provided mocks to the module/handlers before execution
        _apply_mocks(context, handler, event_name, effective_events,
                     preprocess_hook, postprocess_hook, mocks)

    if preprocess_hook:
        payload = await _preprocess(preprocess_hook, payload)
        # Propagate status set during __preprocess__ to the response hook
        if postprocess_hook and preprocess_hook.status is not None:
            postprocess_hook.set_status(preprocess_hook.status)
    datatype = find_datatype_handler(app_config=app_config, event_name=event_name)
    # Short-circuit: skip step execution when payload doesn't match the expected
    # input datatype (e.g. __preprocess__ already produced a response)
    if datatype is None:
        if payload is not None:
            return (payload, payload, postprocess_hook) if postprocess else payload
    elif not isinstance(payload, datatype):
        return (payload, payload, postprocess_hook) if postprocess else payload

    # Run each effective event stage, feeding previous stage results into the next
    on_queue, pp_result, pp_called = [payload], None, False
    for effective_event_name, event_info in effective_events.items():
        context = create_test_context(app_config, effective_event_name)
        stage_results = []
        for elem in on_queue:
            async for res in handler.handle_async_event(context=context,
                                                        query_args=kwargs,
                                                        payload=elem):
                stage_results.append(res)
        on_queue = stage_results if len(stage_results) > 0 else on_queue
        if postprocess_hook and not pp_called:
            # __postprocess__ runs once, after the first stage (i.e. before SHUFFLE)
            pp_called = True
            pp_result = await _postprocess(postprocess_hook, on_queue)
        # query args only apply to the initial stage
        kwargs = {}

    if postprocess:
        if len(on_queue) == 0:
            return None, pp_result, postprocess_hook
        if len(on_queue) == 1:
            return on_queue[0], pp_result, postprocess_hook
        return list(on_queue), pp_result, postprocess_hook

    if len(on_queue) == 0:
        return None
    if len(on_queue) == 1:
        return on_queue[0]
    return list(on_queue)
def test_find_event_handler(mock_app_config):  # noqa: F811
    """Plain configured events resolve to their implementation module."""
    assert find_event_handler(
        app_config=mock_app_config, event_name='mock_event') is mock_event