async def _handle_get_invocation(app_engine: AppEngine, impl: AppEngine, event_name: str,
                                 auth_types: List[AuthType], request: web.Request) -> ResponseType:
    """
    Handler to execute GET calls
    """
    context = None
    try:
        settings = get_event_settings(app_engine.settings, event_name)
        context = _request_start(app_engine, impl, event_name, settings, request)
        _validate_authorization(app_engine.app_config, context, auth_types, request)
        args = dict(request.query)
        # 'payload' query arg, if present, is extracted and passed separately
        body = args.get('payload')
        if body is not None:
            del args['payload']
        hook: PreprocessHook[NoopMultiparReader] = PreprocessHook(headers=request.headers)
        return await _request_execute(
            impl, event_name, context, args,
            payload=body, preprocess_hook=hook, request=request
        )
    except Unauthorized as e:
        return _ignored_response(context, 401, e)
    except BadRequest as e:
        return _ignored_response(context, 400, e)
    except Exception as e:  # pylint: disable=broad-except
        return _failed_response(context, e)
def _setup_client_context(app_config: AppConfig, server_app_config: AppConfig,
                          register_client_key: bool = True) -> EventContext:
    """
    Initializes engine logging and auth keys for `app_config` and returns a
    'mock_event' EventContext. When `register_client_key` is True, the generated
    client public key file is moved into the server app's secrets location.
    """
    _init_engine_logger(app_config)
    assert app_config.server
    assert server_app_config.server
    auth_config = AuthConfig(
        secrets_location=f"/tmp/{uuid.uuid4()}",
        auth_passphrase='test_passphrase',
        enabled=True,
        create_keys=True
    )
    app_config.server.auth = auth_config
    auth.init(app_config.app_key(), auth_config)
    if register_client_key:
        key_file_name = f'{app_config.app_key()}_pub.pem'
        src = pathlib.Path(auth_config.secrets_location) / 'public' / key_file_name
        dst = pathlib.Path(server_app_config.server.auth.secrets_location) / 'public' / key_file_name
        os.rename(src, dst)
    return EventContext(
        app_config=app_config,
        plugin_config=app_config,
        event_name='mock_event',
        settings=get_event_settings(app_config.effective_settings, 'mock_event'),  # type: ignore
        track_ids={},
        auth_info={}
    )
def create_test_context(app_config: AppConfig, event_name: str,
                        track_ids: Optional[dict] = None,
                        auth_info: Optional[dict] = None) -> EventContext:
    """
    Creates an EventContext object to be used in tests

    :param app_config: AppConfig, app configuration
    :param event_name: str, event_name to be called
    :param track_ids: dict, optional: additional key/values to add to track_ids section.
        By default required track_ids will be generated with default test or empty values.
    :param auth_info: dict, optional: additional auth_info to inject.
        By default Unsecured auth_info will be generated.
    """
    effective_track_ids = {k: '' for k in app_config.engine.track_headers}
    effective_track_ids.update({
        'track.operation_id': 'test_operation_id',
        'track.request_id': 'test_request_id',
        'track.request_ts': datetime.now(tz=timezone.utc).isoformat(),
    })
    if track_ids is not None:
        effective_track_ids.update(track_ids)
    effective_auth_info = {'auth_type': AuthType.UNSECURED, 'allowed': 'true'}
    if auth_info is not None:
        effective_auth_info.update(auth_info)
    return EventContext(
        app_config=app_config,
        plugin_config=app_config,
        event_name=event_name,
        settings=get_event_settings(app_config.effective_settings, event_name),  # type: ignore
        track_ids=effective_track_ids,
        auth_info=effective_auth_info
    )
async def _handle_multipart_invocation(app_engine: AppEngine, impl: AppEngine, event_name: str,
                                       datatype: Optional[Type[DataObject]],
                                       auth_types: List[AuthType],
                                       request: web.Request) -> ResponseType:
    """
    Handler to execute POST calls
    """
    context = None
    try:
        settings = get_event_settings(app_engine.settings, event_name)
        context = _request_start(app_engine, impl, event_name, settings, request)
        args = dict(request.query)
        _validate_authorization(app_engine.app_config, context, auth_types, request)
        # Body is delivered to the event through the multipart reader on the hook
        reader = await request.multipart()  # type: ignore
        hook = PreprocessHook(headers=request.headers, multipart_reader=reader)  # type: ignore
        return await _request_execute(
            impl, event_name, context, args,
            payload=None, preprocess_hook=hook, request=request
        )
    except Unauthorized as e:
        return _ignored_response(context, 401, e)
    except BadRequest as e:
        return _ignored_response(context, 400, e)
    except Exception as e:  # pylint: disable=broad-except
        return _failed_response(context, e)
async def mock_handle_postprocess(app_config, *, payload, expected, expected_response):
    """
    Runs 'plugin_event' through an EventHandler, then checks the postprocess hook's
    headers/cookies/status and the final response against the given expectations.
    """
    settings = get_event_settings(app_config.effective_settings, 'plugin_event')
    handler = EventHandler(app_config=app_config,
                           plugins=[],
                           effective_events=app_config.events,
                           settings=app_config.effective_settings)
    context = EventContext(app_config=app_config,
                           plugin_config=app_config,
                           event_name='plugin_event',
                           settings=settings,
                           track_ids={},
                           auth_info={'auth_type': AuthType.UNSECURED, 'allowed': 'true'})
    # Drain the async generator, keeping only the last yielded result
    result = None
    async for item in handler.handle_async_event(context=context, query_args={}, payload=None):
        result = item
    hook = PostprocessHook()
    response = await handler.postprocess(context=context, payload=result, response=hook)
    assert hook.headers == expected_response['headers']
    assert hook.cookies == expected_response['cookies']
    assert hook.status == expected_response['status']
    assert response == expected
async def invoke_execute(engine: AppEngine, from_app: AppConfig, event_name: str,
                         query_args: dict, payload: DataObject, expected: DataObject,
                         track_ids: Dict[str, str], postprocess_expected: Optional[dict] = None):
    """
    Executes `event_name` on the engine with the given payload and asserts the result
    equals `expected`; optionally runs postprocess and checks hook fields.
    """
    settings = get_event_settings(from_app.effective_settings, event_name)  # type: ignore
    context = EventContext(app_config=from_app,
                           plugin_config=engine.app_config,
                           event_name=event_name,
                           settings=settings,
                           track_ids=track_ids,
                           auth_info={'auth_type': AuthType.UNSECURED, 'allowed': 'true'})
    result = await engine.execute(context=context, query_args=query_args, payload=payload)
    assert result == expected
    if postprocess_expected:
        hook = PostprocessHook()
        await engine.postprocess(context=context, payload=result, response=hook)
        for field in ('headers', 'cookies', 'status'):
            assert getattr(hook, field) == postprocess_expected[field]
def _get_app_logger(monkeypatch, mock_app_config):  # noqa: F811
    """Patches logging, wires the engine app logger into mock_event and returns it."""
    _patch_logger(monkeypatch)
    mock_event.logger = app_logger()
    event_settings = get_event_settings(mock_app_config.effective_settings, 'mock_event')
    setup_app_logger(mock_event,
                     app_config=mock_app_config,
                     name='mock_event',
                     event_settings=event_settings)
    return mock_event.logger
def _event_context(mock_app_config, plugin_config):  # noqa: F811
    """EventContext fixture for the 'login' plugin event carrying a BASIC auth payload."""
    login_settings = get_event_settings(plugin_config.effective_settings, "login")
    return EventContext(
        app_config=mock_app_config,
        plugin_config=plugin_config,
        event_name='login',
        settings=login_settings,
        track_ids={},
        auth_info={
            'allowed': True,
            'auth_type': AuthType.BASIC,
            'payload': 'dGVzdDpwYXNz'
        }
    )
def test_context() -> EventContext:
    """Builds a minimal 'test_steps' AppConfig and returns an empty EventContext for it."""
    config = AppConfig(
        app=AppDescriptor(name='test_steps', version='test_version'),
        events={'test_steps': EventDescriptor(type=EventType.POST)}
    ).setup()
    return EventContext(
        app_config=config,
        plugin_config=config,
        event_name='test_steps',
        settings=get_event_settings(config.effective_settings, 'test_steps'),
        track_ids={},
        auth_info={}
    )
def test_metrics(mock_app_config):  # noqa: F811
    """Checks metrics.metrics formats duration from creation_ts to the mocked 'now'."""
    settings = get_event_settings(mock_app_config.effective_settings, 'mock_event')
    context = EventContext(
        app_config=mock_app_config,
        plugin_config=mock_app_config,
        event_name='mock_event',
        settings=settings,
        track_ids={},
        auth_info={'auth_type': AuthType.UNSECURED, 'allowed': 'true'}
    )
    context.creation_ts = ZERO_TS
    # Freeze "now" at t=3.0 via the mocked datetime
    metrics.datetime = MockDatetime
    MockDatetime.ts = 3.0
    entry = metrics.metrics(context)
    assert entry['extra'] == 'metrics.duration=3000.000'
async def test_async_collector(mock_app_config):  # noqa: F811
    """Runs AsyncCollector through three steps and checks the combined step3 result."""
    settings = get_event_settings(mock_app_config.effective_settings, 'mock_event')
    context = EventContext(app_config=mock_app_config,
                           plugin_config=mock_app_config,
                           event_name='mock_event',
                           settings=settings,
                           track_ids={},
                           auth_info={})
    pipeline = AsyncCollector().input("0").steps(
        ('step1', step1),
        ('step2', step2),
        ('step3', step3)
    )
    collector = await pipeline.run(context)
    final = await collector['step3']
    assert final == "((0+mock_event+step1)&(0+mock_event+step2)+mock_event+step3)"
def _event_context(mock_app_config):  # noqa: F811
    """EventContext fixture for 'mock_event_logging' with fixed test track ids."""
    settings = get_event_settings(mock_app_config.effective_settings, 'mock_event_logging')
    return EventContext(
        app_config=mock_app_config,
        plugin_config=mock_app_config,
        event_name='mock_event_logging',
        settings=settings,
        track_ids={
            'track.operation_id': 'test_operation_id',
            'track.request_id': 'test_request_id',
            'track.request_ts': '2020-01-01T00:00:00Z',
            'track.session_id': 'test_session_id'
        },
        auth_info={
            'auth_type': AuthType.UNSECURED,
            'allowed': 'true'
        }
    )
def _setup_server_context(app_config: AppConfig) -> EventContext:
    """
    Initializes engine logging and server auth keys for `app_config`
    and returns a 'mock_event' EventContext.
    """
    _init_engine_logger(app_config)
    assert app_config.server
    auth_config = AuthConfig(
        secrets_location=f"/tmp/{uuid.uuid4()}",
        auth_passphrase='test_passphrase',
        enabled=True,
        create_keys=True
    )
    app_config.server.auth = auth_config
    auth.init(app_config.app_key(), auth_config)
    return EventContext(
        app_config=app_config,
        plugin_config=app_config,
        event_name='mock_event',
        settings=get_event_settings(app_config.effective_settings, 'mock_event'),  # type: ignore
        track_ids={},
        auth_info={}
    )
def test_stream_metrics(mock_app_config):  # noqa: F811
    """Checks stream_metrics output for stream age and request elapsed time."""
    settings = get_event_settings(mock_app_config.effective_settings, 'mock_event')
    context = EventContext(
        app_config=mock_app_config,
        plugin_config=mock_app_config,
        event_name='mock_event',
        settings=settings,
        track_ids={'track.request_ts': ZERO_TS.isoformat()},
        auth_info={'auth_type': AuthType.UNSECURED, 'allowed': 'true'}
    )
    context.track_ids.update({
        'stream.submit_ts': ONE_TS.isoformat(),
        'stream.read_ts': TWO_TS.isoformat()
    })
    context.creation_ts = ZERO_TS
    # Freeze "now" at t=3.0 via the mocked datetime
    metrics.datetime = MockDatetime
    MockDatetime.ts = 3.0
    entry = metrics.stream_metrics(context)
    assert entry['extra'] == 'metrics.stream_age=1000.000 | metrics.request_elapsed=3000.000'
def test_get_event_settings(mock_app_config):  # noqa: F811
    """Checks effective settings resolved for 'mock_stream_event', including custom sections."""
    settings = get_event_settings(mock_app_config.effective_settings, "mock_stream_event")
    assert settings.response_timeout == 60.0
    expected_logging = EventLoggingConfig(
        extra_fields=['value'],
        stream_fields=['stream.msg_id']
    )
    assert settings.logging == expected_logging
    expected_stream = EventStreamConfig(
        timeout=60.0,
        target_max_len=0,
        throttle_ms=0,
        step_delay=0,
        batch_size=100,
        compression=Compression.LZ4,
        serialization=Serialization.JSON_BASE64
    )
    assert settings.stream == expected_stream
    expected_custom = CustomEventSettings(custom_setting=CustomSetting(custom="value"))
    # Typed access works both with the default key and an explicit settings key
    assert settings(datatype=CustomEventSettings) == expected_custom
    assert settings(key="custom_extra_settings", datatype=CustomEventSettings) == expected_custom
async def mock_handle_request_response_event(app_config, *, payload, expected):
    """Feeds payload through the 'mock_post_event' handler and checks each yielded response."""
    settings = get_event_settings(app_config.effective_settings, 'mock_post_event')
    handler = EventHandler(app_config=app_config,
                           plugins=[],
                           effective_events=app_config.events,
                           settings=app_config.effective_settings)
    context = EventContext(app_config=app_config,
                           plugin_config=app_config,
                           event_name='mock_post_event',
                           settings=settings,
                           track_ids=MockStreamManager.test_track_ids,
                           auth_info={'auth_type': AuthType.UNSECURED, 'allowed': 'true'})
    results = handler.handle_async_event(
        context=context,
        query_args={"query_arg1": payload.value},
        payload=payload)
    async for response in results:
        assert response == expected
def _event_context(mock_app_config, plugin_config):  # noqa: F811
    """EventContext fixture for 'logout' carrying a REFRESH auth payload with iat/exp."""
    settings = get_event_settings(plugin_config.effective_settings, "logout")
    auth_settings = settings(key='auth', datatype=AuthSettings)
    issued_at = datetime.now(tz=timezone.utc)
    expires_at = issued_at + timedelta(seconds=auth_settings.access_token_expiration)
    return EventContext(
        app_config=mock_app_config,
        plugin_config=plugin_config,
        event_name='logout',
        settings=settings,
        track_ids={},
        auth_info={
            'allowed': True,
            'auth_type': AuthType.REFRESH,
            'payload': {
                'id': 'id',
                'user': '******',
                'email': 'test@email',
                'iat': issued_at,
                'exp': expires_at
            }
        }
    )
async def mock_handle_spawn_event(app_config, *, payload, expected, stream_name):
    """Runs 'mock_spawn_event' and checks it yields exactly 3 results prefixed by expected.value."""
    settings = get_event_settings(app_config.effective_settings, 'mock_spawn_event')
    handler = EventHandler(app_config=app_config,
                           plugins=[],
                           effective_events=app_config.events,
                           settings=app_config.effective_settings)
    context = EventContext(app_config=app_config,
                           plugin_config=app_config,
                           event_name='mock_spawn_event',
                           settings=settings,
                           track_ids=MockStreamManager.test_track_ids,
                           auth_info={'auth_type': AuthType.UNSECURED, 'allowed': 'true'})
    count = 0
    async for item in handler.handle_async_event(context=context, query_args={},
                                                 payload=payload.value):
        assert item.value.startswith(expected.value)
        count += 1
    assert count == 3
def test_get_event_settings_split_event(mock_app_config):  # noqa: F811
    """Settings of a shuffle event and its $consume_stream sub-event must resolve identically."""
    effective = mock_app_config.effective_settings
    base = get_event_settings(effective, "mock_shuffle_event")
    split = get_event_settings(effective, "mock_shuffle_event$consume_stream")
    assert base == split
async def service_loop(
        self, *, event_name: str, test_mode: bool = False
) -> Optional[Union[EventPayload, Exception]]:
    """
    Service loop, executes `__service__` handler in event
    and execute event steps for each yielded payload.

    :param event_name: str, an event name contained in app_config
    :param test_mode: bool, set to True to immediately stop and return results for testing
    :return: last result or exception, only intended to be used in test_mode
    """
    assert self.app_config.server is not None
    log_info = {'app_key': self.app_key, 'event_name': event_name}
    # Guard against double-start: the per-event lock is held for the whole service run
    assert not self._running[event_name].locked(
    ), f"Cannot start service, event already running {event_name}"
    await self._running[event_name].acquire()
    # Optional randomized startup delay: jittered value in roughly [0, 1.5 * wait]
    wait = self.app_config.server.streams.delay_auto_start_seconds
    if wait > 0:
        wait = int(wait / 2) + random.randint(0, wait) - random.randint(
            0, int(wait / 2))
        logger.info(__name__, f"Start service: waiting seconds={wait}...",
                    extra=extra(prefix='service.', **log_info))
        await asyncio.sleep(wait)
    logger.info(__name__, "Starting service...",
                extra=extra(prefix='service.', **log_info))
    event_config = self.effective_events[event_name]
    impl = find_event_handler(app_config=self.app_config,
                              event_name=event_name,
                              event_info=event_config)
    # The event module must expose an async generator `__service__` yielding payloads
    service_handler = getattr(impl, '__service__')
    assert service_handler is not None, \
        f"{event_name} must implement method `__service__(context) -> Spawn[...]` to run as a service"
    event_settings = get_event_settings(self.settings, event_name)
    context = self._service_event_context(event_name=event_name,
                                          event_settings=event_settings)
    last_result = None
    if self._running[event_name].locked():
        async for payload in service_handler(context):
            try:
                # Fresh context per yielded payload, chained from the previous one
                context = self._service_event_context(
                    event_name=event_name,
                    event_settings=event_settings,
                    previous_context=context)
                logger.start(context, extra=extra(prefix='service.', **log_info))
                last_result = await self.execute(context=context,
                                                 query_args=None,
                                                 payload=payload)
                logger.done(context,
                            extra=extra(prefix='service.', **log_info))
                # Releasing the lock externally (stop_event) ends the loop
                if not self._running[event_name].locked():
                    logger.info(__name__, "Stopped service.",
                                extra=extra(prefix='service.', **log_info))
                    break
            except CancelledError as e:
                logger.error(context, 'Cancelled',
                             extra=extra(prefix='service.', **log_info))
                logger.failed(context, extra=extra(prefix='service.', **log_info))
                last_result = e
            except Exception as e:  # pylint: disable=broad-except
                # Errors are logged and kept as the last result; the loop continues
                logger.error(context, e,
                             extra=extra(prefix='service.', **log_info))
                logger.failed(context, extra=extra(prefix='service.', **log_info))
                last_result = e
            if test_mode:
                # Test mode: stop after the first processed payload
                self._running[event_name].release()
                return last_result
    else:
        logger.info(__name__, "Stopped service.",
                    extra=extra(prefix='service.', **log_info))
    logger.info(__name__, "Finished service.",
                extra=extra(prefix='service.', **log_info))
    return last_result
async def read_stream(
        self, *, event_name: str, test_mode: bool = False
) -> Optional[Union[EventPayload, Exception]]:
    """
    Listens to a stream specified by event of type STREAM, and executes
    the event handler for each received event in the stream.
    When invoked, stream will be read continuously consuming events that are arriving,
    processing it according to configured steps.
    To interrupt listening for events, call `stop_event(event_name)`

    :param event_name: str, an event name contained in app_config
    :param test_mode: bool, set to True to immediately stop and return results for testing
    :return: last result or exception, only intended to be used in test_mode
    """
    assert self.app_config.server is not None
    stats = StreamStats()
    log_info = {'app_key': self.app_key, 'event_name': event_name}
    # Optional randomized startup delay: jittered value in roughly [0, 1.5 * wait]
    wait = self.app_config.server.streams.delay_auto_start_seconds
    if wait > 0:
        wait = int(wait / 2) + random.randint(0, wait) - random.randint(
            0, int(wait / 2))
        logger.info(__name__, f"Start reading stream: waiting seconds={wait}...",
                    extra=extra(prefix='stream.', **log_info))
        await asyncio.sleep(wait)
    logger.info(__name__, "Starting reading stream...",
                extra=extra(prefix='stream.', **log_info))
    try:
        assert self.event_handler, "event_handler not created. Call `start()`."
        assert self.stream_manager, "No active stream manager. Call `start()`"
        assert not self._running[event_name].locked(
        ), "Event already running. Call `stop_event(...)`"
        event_config = self.effective_events[event_name]
        stream_info = event_config.read_stream
        assert stream_info, f"No read_stream section in config for event={event_name}"
        event_settings = get_event_settings(self.settings, event_name)
        # Ensure a consumer group exists for every configured queue;
        # AUTO queue maps to the base stream name, others get a `.{queue}` suffix
        for queue in stream_info.queues:
            await self.stream_manager.ensure_consumer_group(
                stream_name=(f"{stream_info.name}.{queue}"
                             if queue != StreamQueue.AUTO else stream_info.name),
                consumer_group=stream_info.consumer_group)
        datatypes = self._find_stream_datatype_handlers(event_name, event_config)
        log_info['name'] = stream_info.name
        log_info['consumer_group'] = stream_info.consumer_group
        # The per-event lock is held while consuming; releasing it stops the loop
        assert not self._running[event_name].locked(
        ), f"Event already running {event_name}"
        await self._running[event_name].acquire()
        logger.info(__name__, "Consuming stream...",
                    extra=extra(prefix='stream.', **log_info))
        # NOTE(review): '>' offset presumably means "only new messages"
        # (Redis streams consumer-group convention) — confirm with stream manager impl
        offset = '>'
        last_res, last_context, last_err = None, None, None
        while self._running[event_name].locked():
            last_res, last_context, last_err = await self._read_stream_cycle(
                event_name, event_settings, stream_info, datatypes, offset,
                stats, log_info, test_mode, last_err)
        logger.info(__name__, 'Stopped read_stream.',
                    extra=extra(prefix='stream.', **log_info))
        if last_context is None:
            logger.warning(__name__, f"No stream events consumed in {event_name}")
        return last_res
    except (AssertionError, NotImplementedError) as e:
        # Configuration/state errors are returned (not raised) so callers/tests can inspect them
        logger.error(__name__, e)
        logger.error(
            __name__, f"Unexpectedly stopped read stream for event={event_name}")
        return e