def _find_stream_datatype_handlers(
        self, event_name: str, event_info: EventDescriptor) -> Dict[str, type]:
    """
    Build a mapping of fully-qualified datatype names to datatype classes
    that the event steps accept when consuming from a stream.

    :raises NotImplementedError: if no step consumes a stream-capable datatype
    """
    base_event, _ = event_and_step(event_name)
    impl = find_event_handler(
        app_config=self.app_config, event_name=base_event, event_info=event_info
    )
    candidate_steps = effective_steps(event_name, extract_module_steps(impl))
    handlers: Dict[str, type] = {}
    for _, _, step in candidate_steps:
        _, datatype, _, _ = step
        if hasattr(datatype, '__stream_event__'):
            handlers[f"{datatype.__module__}.{datatype.__qualname__}"] = datatype
        elif datatype is DataObject:
            # Generic DataObject input: accept every dataobject declared for this event
            for type_name in event_info.dataobjects:
                resolved = find_datobject_type(type_name)
                handlers[f"{resolved.__module__}.{resolved.__qualname__}"] = resolved
    if not handlers:
        raise NotImplementedError(
            f"No data types found to read from stream in event={event_name}. "
            "Dataclasses must be decorated with `@dataobject` to be used in streams"
        )
    return handlers
def test_extract_event_steps():
    """Steps of a linear event module are extracted in declaration order with their signatures."""
    impl = importlib.import_module('mock_app.mock_event')
    # Direct attribute access instead of getattr with a constant string (ruff B009)
    f1 = impl.entry_point
    f2 = impl.handle_ok_case
    f3 = impl.handle_special_case
    steps = extract_module_steps(impl)
    assert steps == [
        ('entry_point', (f1, None, Union[MockData, str], False)),
        ('handle_ok_case', (f2, MockData, str, False)),
        ('handle_special_case', (f3, str, str, False))
    ]
def test_extract_event_steps_with_shuffle():
    """Steps around a SHUFFLE marker are extracted with the marker kept in place."""
    impl = importlib.import_module('mock_app.mock_shuffle_event')
    # Direct attribute access instead of getattr with a constant string (ruff B009)
    f1 = impl.produce_messages
    f2 = impl.consume_stream
    f3 = impl.generate_default
    steps = extract_module_steps(impl)
    assert steps == [
        ('produce_messages', (f1, str, Spawn[MockData], True)),
        (SHUFFLE, None),
        ('consume_stream', (f2, MockData, Optional[MockResult], False)),
        ('generate_default', (f3, None, MockResult, False))
    ]
def test_collector_steps_descriptor(mock_app_config):  # noqa: F811
    """Collector descriptor exposes its ordered steps, input type and step names."""
    impl = find_event_handler(app_config=mock_app_config, event_name='mock_collector')
    first_step = extract_module_steps(impl)[0][0]
    assert isinstance(first_step, CollectorStepsDescriptor)
    expected_steps = [
        ('step1', mock_collector.step1),
        ('step2', mock_collector.step2),
        ('step3', mock_collector.step3)
    ]
    assert first_step.steps == expected_steps
    assert first_step.__name__ == "collector@step1"
    assert first_step.input_type is MockData
    assert first_step.step_names == ['step1', 'step2', 'step3']
def _generate_schemas(app_config: AppConfig, event_name: str) -> dict:
    """
    Generate all schemas for a given event, derived from the signatures of its
    steps and its post/pre-process handlers.
    """
    module = find_event_handler(app_config=app_config, event_name=event_name)
    schemas: dict = {}
    for _, step_info in extract_module_steps(module):
        _update_step_schemas(schemas, step_info)
    # Post-process handler schemas first, then pre-process, as in the original flow
    for handler_info in (extract_postprocess_handler(module),
                         extract_preprocess_handler(module)):
        _update_step_schemas(schemas, handler_info)
    return schemas
def load_modules(self, effective_events: Dict[str, EventDescriptor]):
    """
    Load event implementation modules, registering their steps and
    pre/post-process handlers, and set up a logger per base event.
    """
    for event_name, event_info in effective_events.items():
        base_event, _ = event_and_step(event_name)
        module = find_event_handler(app_config=self.app_config, event_name=base_event)
        module_steps = extract_module_steps(module)
        self.modules[base_event] = (module, False, module_steps)
        self.preprocess_handlers[base_event] = extract_preprocess_handler(module)
        self.postprocess_handlers[base_event] = extract_postprocess_handler(module)
        setup_app_logger(
            module, app_config=self.app_config, name=base_event, event_info=event_info
        )
        self.steps[event_name] = effective_steps(event_name, module_steps)
def load_modules(self, effective_events: Dict[str, EventDescriptor]):
    """
    Load and setup event implementation modules. Set logger and load settings.
    """
    for event_name, event_info in effective_events.items():
        base_event, _ = event_and_step(event_name)
        module = find_event_handler(
            app_config=self.app_config, event_name=base_event, event_info=event_info
        )
        module_steps = extract_module_steps(module)
        self.modules[base_event] = (module, False, module_steps)
        self.preprocess_handlers[base_event] = extract_preprocess_handler(module)
        self.postprocess_handlers[base_event] = extract_postprocess_handler(module)
        # Resolve per-event settings so the logger is configured with them
        event_settings = get_event_settings(self.settings, event_name)
        setup_app_logger(
            module, app_config=self.app_config, name=base_event, event_settings=event_settings
        )
        self.steps[event_name] = effective_steps(event_name, module_steps)
def _find_stream_datatype_handlers(self, event_name: str) -> Dict[str, type]:
    """
    Computes a dictionary of `{datatype name: datatype class}` that event steps
    can handle when consuming from a stream.

    :raises NotImplementedError: if no step consumes a type marked as a stream event
    """
    base_event, _ = event_and_step(event_name)
    impl = find_event_handler(app_config=self.app_config, event_name=base_event)
    all_steps = extract_module_steps(impl)
    steps = effective_steps(event_name, all_steps)
    datatypes = {}
    for _, step in steps.items():
        _, datatype, _ = step
        # Only types marked as stream events (`@dataobject`-decorated) can be
        # deserialized from a stream payload.
        if hasattr(datatype, '__stream_event__'):
            datatypes[datatype.__name__] = datatype
    if not datatypes:
        raise NotImplementedError(
            # Fixed typo in error message: "even=" -> "event=" (matches the
            # wording used by the sibling implementation of this method)
            f"No data types found to read from stream in event={event_name}. "
            "Dataclasses must be decorated with `@dataobject` to be used in streams"
        )
    return datatypes