def test_to_json_invalid_types():
    """Serialization raises ValueError when field values don't match the dataobject schema."""
    with pytest.raises(ValueError):
        # id has the wrong type: int instead of str
        Payload.to_json(MockData(
            id=42, value='ok-value',
            nested=MockNested(ts=datetime.now(tz=timezone.utc))))  # type: ignore
    with pytest.raises(ValueError):
        # nested.ts has the wrong type: str instead of datetime
        Payload.to_json(MockData(
            id='1', value='ok-value',
            nested=MockNested(ts="NOT A DATETIME")))  # type: ignore
async def store(self, key: str, value: DataObject, **kwargs):
    """
    Serializes `value` to json and stores it in Redis under `key`.

    :param key: str
    :param value: DataObject, instance of dataclass annotated with @dataobject
    :param **kwargs: options forwarded to the `set` method of the aioredis library, i.e.:
        ex sets an expire flag on key name for ex seconds.
        px sets an expire flag on key name for px milliseconds.
        nx if set to True, set the value at key name to value only if it does not exist.
        xx if set to True, set the value at key name to value only if it already exists.
        keepttl if True, retain the time to live associated with the key.
        (Available since Redis 6.0).
        *These arguments may vary depending on the version of aioredis installed.

    i.e. store object:
    ```
    redis_store.store(key='my_key', value=my_dataobject)
    ```

    i.e. store object with kwargs option, adding `ex=60` redis set a ttl
    of 60 seconds for the object:
    ```
    redis_store.store(key='my_key', value=my_dataobject, ex=60)
    ```
    """
    assert self._conn
    serialized = str(Payload.to_json(value))
    await self._conn.set(key, serialized, **kwargs)
def test_to_json_dataobject():
    """A @dataobject instance serializes to its plain json object representation."""
    obj = MockData(
        id='test', value='ok',
        nested=MockNested(ts=datetime.fromtimestamp(0, tz=timezone.utc)))
    expected = '{"id": "test", "value": "ok", "nested": {"ts": "1970-01-01T00:00:00+00:00"}}'
    assert Payload.to_json(obj) == expected
def test_to_json_dict_dataobject():
    """A dict whose values are dataobjects serializes each value recursively."""
    epoch = datetime.fromtimestamp(0, tz=timezone.utc)
    source = {
        'item1': MockData(id='1', value='ok-1', nested=MockNested(ts=epoch)),
        'item2': MockData(id='2', value='ok-2', nested=MockNested(ts=epoch)),
    }
    expected = {
        "item1": {
            "id": "1", "value": "ok-1",
            "nested": {"ts": "1970-01-01T00:00:00+00:00"},
        },
        "item2": {
            "id": "2", "value": "ok-2",
            "nested": {"ts": "1970-01-01T00:00:00+00:00"},
        },
    }
    assert json.loads(Payload.to_json(source)) == expected
def _ignored_response(context: Optional[EventContext], status: int,
                      e: BaseException) -> web.Response:
    """
    Builds a json error response for an exception that the event marks as ignored.

    :param context: optional EventContext; when present the error is also logged
        against the context and `logger.ignored` is emitted
    :param status: int, HTTP status code for the response
    :param e: the exception to report
    :return: web.Response with ErrorInfo serialized as json body
    """
    if context is None:
        logger.error(__name__, e)
    else:
        logger.error(context, e)
        logger.ignored(context)
    info = ErrorInfo.from_exception(e)
    return web.Response(status=status, body=Payload.to_json(info))
def _failed_response(context: Optional[EventContext], e: Exception) -> web.Response:
    """
    Builds a 500 json error response for an unhandled event failure.

    :param context: optional EventContext; when present the error is also logged
        against the context and `logger.failed` is emitted
    :param e: the exception to report
    :return: web.Response with status 500 and ErrorInfo serialized as json body
    """
    if context is None:
        logger.error(__name__, e)
    else:
        logger.error(context, e)
        logger.failed(context)
    info = ErrorInfo.from_exception(e)
    return web.Response(status=500, body=Payload.to_json(info))
async def _save_partition(partition_key: str, items: List[DataObject], context: EventContext):
    """
    Writes `items` as newline-delimited json into a new uniquely-named file
    under the folder for `partition_key`, creating the folder if needed.

    :param partition_key: str, relative folder under the configured storage path
    :param items: list of dataobjects to serialize, one json line each
    :param context: EventContext used to resolve FileStorageSettings and for logging
    """
    settings = context.settings(datatype=FileStorageSettings)
    path = Path(settings.path) / partition_key
    # uuid file name avoids collisions between concurrent saves to the same partition
    file = path / f"{uuid.uuid4()}{SUFFIX}"
    logger.info(context, f"Saving {file}...")
    os.makedirs(path.resolve(), exist_ok=True)
    async with aiofiles.open(file, 'w') as partition_file:
        for item in items:
            await partition_file.write(Payload.to_json(item) + "\n")
async def store(self, key: str, value: DataObject) -> str:
    """
    Stores value under specified key

    :param key: str
    :param value: DataObject, instance of dataclass annotated with @dataobject
    :return: str, path where the object was stored
    """
    json_str = Payload.to_json(value)
    target = self.path
    if self.partition_dateformat:
        # partitioned storage: place the file under a date-derived subfolder
        target = target / get_partition_key(value, self.partition_dateformat)
    os.makedirs(target.resolve().as_posix(), exist_ok=True)
    return await self._save_file(json_str, path=target, file_name=key + SUFFIX)
def test_to_json_dict_mixed():
    """A dict mixing plain values and dataobjects serializes both kinds correctly."""
    epoch = datetime.fromtimestamp(0, tz=timezone.utc)
    source = {
        "item1": {
            'item': 1,
            'data': MockData(id='1', value='ok-1', nested=MockNested(ts=epoch)),
        },
        "item2": {
            'item': 2,
            'data': MockData(id='2', value='ok-2', nested=MockNested(ts=epoch)),
        },
    }
    expected = {
        'item1': {
            'item': 1,
            'data': {'id': '1', 'value': 'ok-1',
                     'nested': {'ts': '1970-01-01T00:00:00+00:00'}},
        },
        'item2': {
            'item': 2,
            'data': {'id': '2', 'value': 'ok-2',
                     'nested': {'ts': '1970-01-01T00:00:00+00:00'}},
        },
    }
    assert json.loads(Payload.to_json(source)) == expected
def test_to_json_list_dataobject():
    """A list of dataobjects serializes to a json array of objects."""
    epoch = datetime.fromtimestamp(0, tz=timezone.utc)
    items = [
        MockData(id='1', value='ok-1', nested=MockNested(ts=epoch)),
        MockData(id='2', value='ok-2', nested=MockNested(ts=epoch)),
    ]
    expected = [
        {"id": "1", "value": "ok-1",
         "nested": {"ts": "1970-01-01T00:00:00+00:00"}},
        {"id": "2", "value": "ok-2",
         "nested": {"ts": "1970-01-01T00:00:00+00:00"}},
    ]
    assert json.loads(Payload.to_json(items)) == expected
async def store_item():
    """Stored value is serialized with Payload.to_json and kept under the given key."""
    redis = RedisStorage().connect(address=test_url)
    await redis.store(test_key, test_redis)
    conn = redis._conn
    assert test_key in conn.items
    assert conn.items[test_key] == Payload.to_json(test_redis)
async def call(self, event_name: str, *, datatype: Type[EventPayloadType],
               payload: Optional[EventPayload], context: EventContext,
               **kwargs) -> List[EventPayloadType]:
    """
    Invokes event on external app linked in config `app_connections` section.
    Target event must also be configured in event `client` section

    :param: app_connection, str: name of configured app_connection to connect to
    :param: event_name, str: target event name to invoke, configured in events section
    :datatype: Type[EventPayloadType]: expected return type
    :payload: optional payload to send when calling external event
    :context: EventContext of current application
    **kwargs: any other argument to be sent as query args when calling event
    :return: datatype, returned data from invoked event, converted to datatype
    :raises RuntimeError: if the client was not started before calling
    :raises AppsClientException: when all configured retries are exhausted
    """
    # Client must be started by the engine before it can be used
    if self.conn_state is None or self.session is None:
        raise RuntimeError(
            "AppsClient not started: `client.start()` must be called from engine."
        )
    now_ts = self._now_ts()
    event_info = self._get_event_connection(context, event_name)
    headers = {
        **self._request_headers(context),
        **self._auth_headers(context, now_ts=now_ts)
    }
    # Retry loop: first iteration is the initial attempt, remaining are retries
    for retry_count in range(self.settings.retries + 1):
        # Load balancer picks a host not currently held open by the circuit breaker
        host_index = self._next_available_host(self.conn_state, now_ts, context)
        host = self.conn_state.load_balancer.host(host_index)
        route = self.routes[event_name]
        url = host + route
        logger.info(
            context,
            f"{'Calling' if retry_count == 0 else 'Retrying call to'} external app...",
            extra=extra(app_connection=self.app_conn_key, event=event_name,
                        url=url, retry_count=retry_count))
        try:
            # GET: kwargs travel as query args only
            if event_info.type == EventConnectionType.GET:
                request_func = self.session.get(url, headers=headers, params=kwargs)
                return await self._request(request_func, context, datatype,
                                           event_name, host_index)
            # POST: payload serialized as json body, kwargs as query args
            if event_info.type == EventConnectionType.POST:
                request_func = self.session.post(
                    url, headers=headers, data=Payload.to_json(payload), params=kwargs)
                return await self._request(request_func, context, datatype,
                                           event_name, host_index)
            raise NotImplementedError(
                f"Event type {event_info.type.value} not supported")
        except (ServerException, IOError) as e:
            # Record the failure so the circuit breaker can open this host
            self.conn_state.load_balancer.failure(
                host_index, now_ts,
                self.settings.circuit_breaker_open_failures,
                self.settings.circuit_breaker_failure_reset_seconds,
                self.settings.circuit_breaker_open_seconds)
            if retry_count == self.settings.retries:
                # Out of retries: surface the last error to the caller
                raise AppsClientException(
                    f"Server or IO Error: {e} ({retry_count} retries)"
                ) from e
            logger.error(context, e)
            # Back off before retrying (setting expressed in milliseconds)
            await asyncio.sleep(0.001 * self.settings.retry_backoff_ms)
    # Defensive: loop always returns or raises; this line should be unreachable
    raise RuntimeError("Unexpected missing result after retry loop")
def _application_json_response(result: DataObject, key: str, *args, **kwargs) -> str:
    """
    Renders `result` as a json string wrapped under `key`.

    Extra positional and keyword arguments are accepted for handler-signature
    compatibility and are not used.
    """
    body = Payload.to_json(result, key=key)
    return body
def _ser_json_utf8(data: EventPayload, level: int) -> bytes:
    """Serializes `data` to json and returns it encoded as utf-8 bytes (`level` is not used)."""
    json_str = Payload.to_json(data)
    return json_str.encode('utf-8')
def test_to_json_python_types():
    """Plain python values serialize to json; an optional `key` wraps them in an object."""
    # scalars without a key serialize to bare json values
    for value, expected in [('str', '"str"'), (123, '123'),
                            (123.45, '123.45'), (True, 'true')]:
        assert Payload.to_json(value, key=None) == expected
    # scalars with a key are wrapped into a single-entry json object
    assert Payload.to_json('str', key='test') == '{"test": "str"}'
    assert Payload.to_json(123, key='test') == '{"test": 123}'
    assert Payload.to_json(123.45, key='test') == '{"test": 123.45}'
    assert Payload.to_json(True, key='test') == '{"test": true}'
    # containers: dict and list keep order, set order is not guaranteed
    assert Payload.to_json({'test': 'dict'}) == '{"test": "dict"}'
    assert set(json.loads(Payload.to_json({'test2', 'test1'}))) == {"test1", "test2"}
    assert Payload.to_json(['test1', 'test2']) == '["test1", "test2"]'
async def store_item_extra_args():
    """Extra kwargs passed to store are forwarded to the underlying redis `set` call."""
    redis = RedisStorage().connect(address=test_url)
    await redis.store(test_key, test_redis, ex=60)  # ttl 60 seconds
    conn = redis._conn
    assert conn.set_called_with == {'ex': 60}
    assert test_key in conn.items
    assert conn.items[test_key] == Payload.to_json(test_redis)
async def execute_event(
        app_config: AppConfig,
        event_name: str,
        payload: Optional[EventPayload],
        mocks: Optional[List[Callable[[ModuleType, EventContext], None]]] = None,
        *,
        fields: Optional[Dict[str, str]] = None,
        upload: Optional[Dict[str, bytes]] = None,
        preprocess: bool = False,
        postprocess: bool = False,
        context: Optional[EventContext] = None,
        **kwargs
) -> Union[Optional[EventPayload], List[EventPayload],
           Tuple[Optional[EventPayload], EventPayload, PostprocessHook],
           Tuple[List[EventPayload], EventPayload, PostprocessHook]]:
    """
    Test executes an app event.

    Notice that event implementation file needs to be saved to disk since this will
    simulate execution similar to how engine actually execute events. Writing to stream
    will be ignored.

    :param app_config: AppConfig, load using `app_config = config('path/to/app-config.json')`
    :param event_name: str, name of the event / module to execute
    :param payload: test payload to send to initial step
    :param mocks: lists of functions to execute in order to mock functionality
    :param fields: optional form fields made available to __preprocess__
    :param upload: optional uploaded files (name to raw bytes) for __preprocess__
    :param preprocess: enables testing __preprocess__ with the given fields/upload
    :param postprocess: enables testing __postprocess__ called with last step result or
        result before a SHUFFLE step if present.
    :param context: EventContext, optional EventContext to use when calling event. If
        not provided a default context will be created.
    :param kwargs: that will be forwarded to the initial step of the event
    :return: the results of executing the event, for simple events it will be a single object,
        for events with initial Spawn[...] the results will be collected as a list.
        If postprocess is true, a tuple of 3 elements is return, first element is results
        as described above, second element the output of call to __postprocess__, and
        third one a PostprocessHook with response information used during call
        to __postprocess__
    """
    async def _postprocess(hook: PostprocessHook,
                           results: List[EventPayload]) -> EventPayload:
        # __postprocess__ receives the last produced result (or None if none)
        assert context is not None
        pp_payload = results[-1] if len(results) > 0 else None
        return await handler.postprocess(context=context, payload=pp_payload, response=hook)

    async def _preprocess(hook: PreprocessHook, payload: EventPayload) -> EventPayload:
        assert context is not None
        return await handler.preprocess(
            context=context, query_args=kwargs, payload=payload, request=hook)

    if context is None:
        context = create_test_context(app_config, event_name)

    event_info = app_config.events[event_name]
    impl = find_event_handler(
        app_config=app_config, event_name=event_name, event_info=event_info)
    # Events containing SHUFFLE are split into sequential effective stages
    effective_events = {
        **split_event_stages(app_config.app, event_name, event_info, impl)
    }
    handler = EventHandler(
        app_config=app_config, plugins=[],
        effective_events=effective_events,
        settings=app_config.effective_settings  # type: ignore
    )

    preprocess_hook, postprocess_hook = None, None
    if preprocess:
        # Simulates an incoming request with form fields / multipart uploads
        preprocess_hook = PreprocessHook(
            headers=CIMultiDictProxy(CIMultiDict()),
            multipart_reader=MockMultipartReader(fields or {}, upload or {}),  # type: ignore
            file_hook_factory=MockFileHook,
            payload_raw=b'' if payload is None else Payload.to_json(payload).encode())
    if postprocess:
        postprocess_hook = PostprocessHook()
    if mocks is not None:
        _apply_mocks(context, handler, event_name, effective_events,
                     preprocess_hook, postprocess_hook, mocks)

    datatype = find_datatype_handler(
        app_config=app_config, event_name=event_name, event_info=event_info)
    if preprocess_hook:
        payload = await _preprocess(preprocess_hook, payload)
        if postprocess_hook and preprocess_hook.status is not None:
            # __preprocess__ set an explicit response status: propagate it
            postprocess_hook.set_status(preprocess_hook.status)
        elif datatype is None:
            # No datatype expected: a non-None preprocess result short-circuits execution
            if payload is not None:
                return (payload, payload, postprocess_hook) if postprocess else payload
        elif not (datatype is DataObject or isinstance(payload, datatype)):
            # Preprocess produced a value of an unexpected type: return it directly
            return (payload, payload, postprocess_hook) if postprocess else payload

    # Run each effective stage, feeding the previous stage's results into the next
    on_queue, pp_result, pp_called = [payload], None, False
    for effective_event_name, event_info in effective_events.items():
        context = create_test_context(
            app_config, effective_event_name,
            track_ids=context.track_ids, auth_info=context.auth_info)
        stage_results = []
        for elem in on_queue:
            async for res in handler.handle_async_event(
                    context=context, query_args=kwargs, payload=elem):
                stage_results.append(res)
        # Keep previous queue if a stage produced no results
        on_queue = stage_results if len(stage_results) > 0 else on_queue
        if postprocess_hook and not pp_called:
            # __postprocess__ runs once, after the first stage (i.e. before SHUFFLE)
            pp_called = True
            pp_result = await _postprocess(postprocess_hook, on_queue)
        # Query args only apply to the initial stage
        kwargs = {}

    if postprocess:
        if len(on_queue) == 0:
            return None, pp_result, postprocess_hook
        if len(on_queue) == 1:
            return on_queue[0], pp_result, postprocess_hook
        return list(on_queue), pp_result, postprocess_hook

    if len(on_queue) == 0:
        return None
    if len(on_queue) == 1:
        return on_queue[0]
    return list(on_queue)
def test_to_json_dataobject_do_not_validate():
    """Dataobjects declared with validation disabled serialize invalid values as-is."""
    invalid = MockDataDoNotValidate(id='test', value='not-ok')  # type: ignore
    assert Payload.to_json(invalid) == '{"id": "test", "value": "not-ok"}'
def test_to_json_dataobject_validate():
    """Dataobjects declared with validation enabled reject invalid values on serialization."""
    invalid = MockDataValidate(id='test', value='not-ok')  # type: ignore
    with pytest.raises(ValueError):
        Payload.to_json(invalid)