def test_from_json_malformed():
    with pytest.raises(ValueError):
        Payload.from_json(
            '{"item1": {"id": "1", "value": "ok-1", "nested": {"ts": "1970-01-01T00:00:00+00:00"}',
            MockData)
    with pytest.raises(ValueError):
        Payload.from_json('BAD STRING', MockData)

def test_from_json_missing_fields():
    with pytest.raises(ValueError):
        Payload.from_json('{"id": "1", "value": "ok-1", "nested": {}}', MockData)
    with pytest.raises(ValueError):
        Payload.from_json(
            '{"value": 42, "nested": {"ts": "1970-01-01T00:00:00+00:00"}}',
            MockData)

def test_from_json_invalid_types():
    with pytest.raises(ValueError):
        Payload.from_json(
            '{"id": "1", "value": "ok-1", "nested": {"ts": "INVALID DATE"}}',
            MockData)
    with pytest.raises(ValueError):
        Payload.from_json(
            '{"id": 42, "value": 42, "nested": {"ts": "1970-01-01T00:00:00+00:00"}}',
            MockData)

async def test_buffer_object_and_flush_signal(app_config, test_objs):  # noqa: F811
    test_save_path = app_config.settings["test_stream_batch_storage"]["path"]
    # Buffer single object
    test_obj = test_objs[0]
    result = await execute_event(app_config=app_config,
                                 event_name='test_stream_batch_storage',
                                 payload=test_obj)
    assert result is None
    # Send flush partition signal to force flush of the single buffered object
    partition_key = test_obj.object_ts.strftime("%Y/%m/%d/%H") + '/'
    signal = FlushSignal(partition_key)
    result = await execute_event(app_config=app_config,
                                 event_name='test_stream_batch_storage',
                                 payload=signal)
    assert result is None
    # Load saved object and check it is correct
    await asyncio.sleep(1)  # Allow aiofiles to save
    saved_objects = {}
    for file_name in glob(f'{test_save_path}/{partition_key}/*.jsonlines'):
        with open(file_name) as f:
            for line in f:
                obj = Payload.from_json(line, datatype=MyObject)
                saved_objects[obj.object_id] = obj
    assert len(saved_objects) == 1
    assert test_obj == saved_objects[test_obj.object_id]

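# A minimal sketch (an assumption, not the actual test fixture) of the MyObject
# dataobject used by the stream batch storage tests: the tests above only rely
# on an `object_id` field and a datetime `object_ts` field used to build
# partition keys. Shown in hopeit.engine's dataclass-based @dataobject style.
from dataclasses import dataclass
from datetime import datetime

from hopeit.dataobjects import dataobject


@dataobject
@dataclass
class MyObject:
    object_id: str
    object_ts: datetime
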
def test_from_json_python_types():
    assert Payload.from_json('{"value": "str"}', str) == "str"
    assert Payload.from_json('{"value": 123}', int) == int(123)
    assert Payload.from_json('{"value": 123.45}', float) == float(123.45)
    assert Payload.from_json('{"value": true}', bool) is True
    assert Payload.from_json('{"custom": "str"}', str, key='custom') == "str"
    assert Payload.from_json('{"test": "dict"}', dict) == {'test': 'dict'}
    assert Payload.from_json('["test1", "test2"]', set) == {'test2', 'test1'}
    assert Payload.from_json('["test1", "test2"]', list) == ['test1', 'test2']

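# Illustrative round trip only (assumption: Payload.to_json applies the same
# "value" wrapping convention for plain Python types that from_json expects in
# the test above; not asserted by the source snippets):
def _roundtrip_primitive_sketch():
    raw = Payload.to_json(123)                  # expected form: '{"value": 123}'
    assert Payload.from_json(raw, int) == 123
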
def test_from_json_dataobject():
    assert Payload.from_json(
        '{"id": "test", "value": "ok", "nested": {"ts": "1970-01-01T00:00:00+00:00"}}',
        MockData) == MockData(
            id='test',
            value='ok',
            nested=MockNested(ts=datetime.fromtimestamp(0.0).astimezone(timezone.utc)))

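# A minimal sketch, assuming hopeit.engine's dataclass-based @dataobject style,
# of the MockData and MockNested test types exercised above. The actual fixtures
# may declare additional options (e.g. validation settings for the
# MockDataValidate / MockDataDoNotValidate variants used further below).
from dataclasses import dataclass
from datetime import datetime

from hopeit.dataobjects import dataobject


@dataobject
@dataclass
class MockNested:
    ts: datetime


@dataobject
@dataclass
class MockData:
    id: str
    value: str
    nested: MockNested
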
def _get_runtime_simple_example(url: str, source: str):
    with open(Path(__file__).parent / source) as f:
        res = f.read()
        res = res.replace("${HOST_NAME}", socket.gethostname())
        res = res.replace("${PID}", str(os.getpid()))
        res = res.replace("${URL}", url)
        res = res.replace("${ENGINE_VERSION}", ENGINE_VERSION)
        res = res.replace("${APPS_API_VERSION}", APPS_API_VERSION)
        res = res.replace("${APPS_ROUTE_VERSION}", APPS_ROUTE_VERSION)
        result = Payload.from_json(res, RuntimeApps)
        return result

async def get(self, key: str, *, datatype: Type[DataObject]) -> Optional[DataObject]:
    """
    Retrieves value under specified key, converted to datatype

    :param key: str
    :param datatype: dataclass implementing @dataobject (@see DataObject)
    :return: instance of datatype or None if not found
    """
    assert self._conn
    payload_str = await self._conn.get(key)
    if payload_str:
        return Payload.from_json(payload_str, datatype)
    return None

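# Hypothetical usage of the get() method above. `store` is assumed to be an
# already-connected instance of the storage class this method belongs to, and
# MyObject a @dataobject dataclass; the key name is illustrative only.
async def read_my_object(store):
    obj = await store.get("my-key", datatype=MyObject)
    if obj is None:
        print("not found")
    return obj
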
async def _request_process_payload(
        context: EventContext,
        datatype: Optional[Type[EventPayloadType]],
        request: web.Request
) -> Tuple[Optional[EventPayloadType], Optional[bytes]]:
    """
    Extract payload from request.
    Returns payload if parsing succeeded. Raises BadRequest if payload fails to parse
    """
    try:
        payload_raw = await request.read()
        if datatype is not None:
            return Payload.from_json(payload_raw, datatype), payload_raw  # type: ignore
        return None, payload_raw
    except ValueError as e:
        logger.error(context, e)
        raise BadRequest(e) from e

async def get(
        self,
        key: str,
        *,
        datatype: Type[DataObject],
        partition_key: Optional[str] = None
) -> Optional[DataObject]:
    """
    Retrieves value under specified key, converted to datatype

    :param key: str
    :param datatype: dataclass implementing @dataobject (@see DataObject)
    :param partition_key: partition path to be appended to base path
    :return: instance of datatype or None if not found
    """
    path = self.path / partition_key if partition_key else self.path
    payload_str = await self._load_file(path=path, file_name=key + SUFFIX)
    if payload_str:
        return Payload.from_json(payload_str, datatype)
    return None

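# Hypothetical usage of the partitioned file-based get() above: reading a key
# from a date/hour partition. `storage` is assumed to be an instance of the
# class this method belongs to; key and partition names are illustrative only.
async def read_partitioned(storage):
    obj = await storage.get(
        "my-key", datatype=MyObject, partition_key="2020/05/01/00")
    if obj is None:
        print("not found")
    return obj
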
def expected_log_entries() -> LogBatch:
    return Payload.from_json("""
    {
        "entries": [
            {
                "ts": "2021-06-02 18:01:44,290",
                "msg": "START",
                "app_name": "simple-example",
                "app_version": "${APPS_API_VERSION}",
                "event_name": "query_something",
                "event": "simple_example.${APP_VERSION}.query_something",
                "extra": {
                    "track.operation_id": "f2659a30-5ac4-4dd4-b1f7-9a00db0bf7d5",
                    "track.request_id": "7ee59fa7-c1e4-4a60-a79b-a25dbbd6cb82",
                    "track.request_ts": "2021-06-02T18:01:44.289394+00:00",
                    "track.caller": "test",
                    "track.session_id": "test"
                },
                "host": "host",
                "pid": "17031"
            },
            {
                "ts": "2021-06-02 18:01:44,303",
                "msg": "DONE",
                "app_name": "simple-example",
                "app_version": "${APPS_API_VERSION}",
                "event_name": "query_something",
                "event": "simple_example.${APP_VERSION}.query_something",
                "extra": {
                    "response.status": "404",
                    "metrics.duration": "13.057",
                    "track.operation_id": "f2659a30-5ac4-4dd4-b1f7-9a00db0bf7d5",
                    "track.request_id": "7ee59fa7-c1e4-4a60-a79b-a25dbbd6cb82",
                    "track.request_ts": "2021-06-02T18:01:44.289394+00:00",
                    "track.caller": "test",
                    "track.session_id": "test"
                },
                "host": "host",
                "pid": "17031"
            }
        ]
    }
    """.replace("${APPS_API_VERSION}", APPS_API_VERSION).replace("${APP_VERSION}", APP_VERSION), LogBatch)

def post(self, url: str, data: str, headers: dict, params: dict) -> Union[MockResponseList, MockResponse]:
    self._check_headers(headers)
    host = self._host(url)
    self.call_log[host] += 1
    if self.failure.get(host):
        return MockResponse(self.failure.get(host, 0), "Mock server error")
    if url in self.responses:
        payload = Payload.from_json(data, MockPayloadData)
        return MockResponseList(
            200,
            items=[
                MockResponseData(
                    value=f"{payload.value} {self.responses[url]}",
                    param=str(params.get("test_param", "")),
                    host=host,
                    log=self.call_log)
            ])
    raise IOError("Test error")

async def test_buffer_objects_and_flush_partitions(app_config, test_objs):  # noqa: F811
    test_save_path = app_config.settings["test_stream_batch_storage"]["path"]
    # Buffer 10 objects that should create 5 partitions
    for test_obj in test_objs:
        result = await execute_event(app_config=app_config,
                                     event_name='test_stream_batch_storage',
                                     payload=test_obj)
        assert result is None
    await asyncio.sleep(1)  # Allow aiofiles to save
    # Load saved data from disk and compare to input
    saved_objects = {}
    for file_name in glob(f'{test_save_path}/2020/05/01/**/*.jsonlines'):
        with open(file_name) as f:
            for line in f:
                obj = Payload.from_json(line, datatype=MyObject)
                saved_objects[obj.object_id] = obj
    assert len(saved_objects) == len(test_objs)
    for obj in test_objs:
        saved = saved_objects[obj.object_id]
        assert obj == saved

def test_from_json_dataobject_do_not_validate_missing():
    data = '{"id": "test"}'
    assert Payload.from_json(data, MockDataDoNotValidate) \
        == MockDataDoNotValidate(id='test', value=None)  # type: ignore

def _deser_json_utf8(data: bytes, datatype: Type[EventPayloadType]) -> EventPayload:
    return Payload.from_json(data.decode('utf-8'), datatype)

def test_from_json_dataobject_validate_null():
    data = '{"id": "test", "value": null}'
    with pytest.raises(ValueError):
        Payload.from_json(data, MockDataValidate)

def test_from_json_dataobject_validate_missing():
    data = '{"id": "test"}'
    with pytest.raises(ValueError):
        Payload.from_json(data, MockDataValidate)