def test_stream_logging(context, capsys):
    """Log lines from a stream app include timestamp, request id, level, asset and AC."""

    @stream
    def app(event, api, cache):
        Logger.warning('Hello, World!')

    raw_event = RawStreamTimeEvent(
        records=[
            RawTimeRecord(
                asset_id=0,
                company_id=int(),
                collection=str(),
                timestamp=int(),
            ),
        ],
        metadata=RawMetadata(
            app_stream_id=int(),
            apps={SETTINGS.APP_KEY: RawAppMetadata(app_connection_id=1)},
            log_type=LogType.time,
        ),
    )

    frozen_at = datetime.datetime(2021, 1, 2, 3, 4, 5, 678910)
    with freezegun.freeze_time(frozen_at):
        app([raw_event.dict()], context)

    expected = (
        f'2021-01-02T03:04:05.678Z {context.aws_request_id} WARNING '
        f'ASSET={raw_event.asset_id} AC={raw_event.app_connection_id} | Hello, World!\n'
    )
    assert capsys.readouterr().out == expected
def test_custom_log_handler(context, capsys):
    """A user-supplied logging handler receives records in addition to the default sink."""

    @stream(handler=logging.StreamHandler())
    def app(event, api, cache):
        Logger.info('Info message!')

    raw_event = RawStreamTimeEvent(
        records=[
            RawTimeRecord(
                asset_id=0,
                company_id=int(),
                collection=str(),
                timestamp=int(),
            ),
        ],
        metadata=RawMetadata(
            app_stream_id=int(),
            apps={SETTINGS.APP_KEY: RawAppMetadata(app_connection_id=1)},
            log_type=LogType.time,
        ),
    )

    app([raw_event.dict()], context)

    captured = capsys.readouterr()
    # Default sink (stdout) still logs; the extra StreamHandler writes to stderr.
    assert captured.out.endswith('Info message!\n')
    assert captured.err == 'Info message!\n'
def test_max_message_count_reached(max_message_count, expected, mocker: MockerFixture, context, capsys):
    """Logging output respects the configured LOG_THRESHOLD_MESSAGE_COUNT limit."""

    @stream
    def app(event, api, cache):
        Logger.warning('Hello, World!')

    raw_event = RawStreamTimeEvent(
        records=[
            RawTimeRecord(
                asset_id=0,
                company_id=int(),
                collection=str(),
                timestamp=int(),
            ),
        ],
        metadata=RawMetadata(
            app_stream_id=int(),
            apps={SETTINGS.APP_KEY: RawAppMetadata(app_connection_id=1)},
            log_type=LogType.time,
        ),
    )

    mocker.patch.object(SETTINGS, 'LOG_THRESHOLD_MESSAGE_COUNT', max_message_count)

    frozen_at = datetime.datetime(2021, 1, 2, 3, 4, 5, 678910)
    with freezegun.freeze_time(frozen_at):
        app([raw_event.dict()], context)

    assert capsys.readouterr().out == expected
def test_custom_log_level(log_level, expected, context, capsys, mocker: MockerFixture):
    """Only messages at or above the configured LOG_LEVEL are emitted."""

    @stream
    def app(event, api, cache):
        Logger.debug('Debug message.')
        Logger.info('Info message.')

    raw_event = RawStreamTimeEvent(
        records=[
            RawTimeRecord(
                asset_id=0,
                company_id=int(),
                collection=str(),
                timestamp=int(),
            ),
        ],
        metadata=RawMetadata(
            app_stream_id=int(),
            apps={SETTINGS.APP_KEY: RawAppMetadata(app_connection_id=1)},
            log_type=LogType.time,
        ),
    )

    mocker.patch.object(SETTINGS, 'LOG_LEVEL', log_level)

    frozen_at = datetime.datetime(2021, 1, 2, 3, 4, 5, 678910)
    with freezegun.freeze_time(frozen_at):
        app([raw_event.dict()], context)

    assert capsys.readouterr().out == expected
def test_set_cached_max_record_value_should_not_fail_lambda(mocker: MockerFixture, context):
    """A failure while caching the max record value must not crash the lambda."""

    @stream
    def stream_app(event, api, cache):
        pass

    payload = [
        RawStreamTimeEvent(
            records=[
                RawTimeRecord(
                    collection=str(),
                    timestamp=int(),
                    asset_id=int(),
                    company_id=int(),
                )
            ],
            metadata=RawMetadata(
                app_stream_id=int(),
                apps={SETTINGS.APP_KEY: RawAppMetadata(app_connection_id=int())},
                log_type=LogType.time,
            ),
        ).dict()
    ]

    cache_patch = mocker.patch.object(
        RawStreamEvent, 'set_cached_max_record_value', side_effect=Exception
    )

    # Must complete without raising despite the cache failure.
    stream_app(payload, context)

    cache_patch.assert_called_once()
def test_early_return_if_no_records_after_filtering(mocker: MockerFixture, context):
    """If filtering removes every record, the wrapped user function is never invoked.

    Bug fix: the previous version created ``spy = mocker.Mock(stream_app,
    wraps=stream_app)`` but then called ``stream_app`` directly, so the spy was
    never exercised and ``spy.assert_not_called()`` passed vacuously. We now
    spy on the *inner* user function, which is the thing the decorator decides
    to call (or skip).
    """
    # Sentinel recording whether the decorated user code ran.
    inner = mocker.Mock()

    @stream
    def stream_app(event, api, cache):
        inner(event)

    payload = [
        RawStreamTimeEvent(
            records=[
                RawTimeRecord(
                    asset_id=int(),
                    company_id=int(),
                    collection=str(),
                    timestamp=int(),
                )
            ],
            metadata=RawMetadata(
                app_stream_id=int(),
                apps={SETTINGS.APP_KEY: RawAppMetadata(app_connection_id=int())},
                log_type=LogType.time,
            ),
        ).dict()
    ]

    # Force the filter to drop everything so the decorator should short-circuit.
    filter_patch = mocker.patch.object(RawStreamEvent, 'filter_records', return_value=[])

    stream_app(payload, context)

    filter_patch.assert_called_once()
    inner.assert_not_called()
def test_set_attr_in_raw_stream_event(attr, value, context, mocker: MockerFixture):
    """A parametrized record attribute survives the round trip through the stream app."""

    @stream
    def stream_app(event, api, cache):
        return event

    # Defaults first; when ``attr`` collides with a default key the later
    # entry wins, exactly like the original nested dict merge.
    record_kwargs = {'asset_id': int(), 'company_id': int(), attr: value}

    payload = [
        RawStreamTimeEvent(
            records=[
                RawTimeRecord(
                    collection=str(),
                    timestamp=int(),
                    **record_kwargs,
                )
            ],
            metadata=RawMetadata(
                app_stream_id=int(),
                apps={SETTINGS.APP_KEY: RawAppMetadata(app_connection_id=int())},
                log_type=LogType.time,
            ),
        ).dict()
    ]

    result_event: StreamEvent = stream_app(payload, context)[0]

    assert getattr(result_event, attr) == value
def test_each_app_invoke_has_separate_logger(context, capsys, mocker: MockerFixture):
    """Each app invocation gets its own logger, so one invoke hitting the
    message limit does not silence subsequent invokes."""

    @stream
    def app(event, api, cache):
        Logger.warning('Hello, World!')
        Logger.warning('This should not be printed as logging is disabled.')

    raw_event = RawStreamTimeEvent(
        records=[
            RawTimeRecord(
                asset_id=0,
                company_id=int(),
                collection=str(),
                timestamp=int(),
            ),
        ],
        metadata=RawMetadata(
            app_stream_id=int(),
            apps={SETTINGS.APP_KEY: RawAppMetadata(app_connection_id=1)},
            log_type=LogType.time,
        ),
    )

    # Keep filtering inert: otherwise the app would not run a second time.
    mocker.patch.object(RawStreamEvent, 'filter_records', return_value=raw_event.records)
    # Limit of 1 means the first message exhausts the budget; with a shared
    # logger the second invoke would therefore print nothing at all.
    mocker.patch.object(SETTINGS, 'LOG_THRESHOLD_MESSAGE_COUNT', 1)

    frozen_at = datetime.datetime(2021, 1, 2, 3, 4, 5, 678910)
    with freezegun.freeze_time(frozen_at):
        app([raw_event.dict()] * 2, context)

    expected = (
        '2021-01-02T03:04:05.678Z qwerty WARNING ASSET=0 AC=1 | Hello, World!\n'
        '2021-01-02T03:04:05.678Z qwerty WARNING ASSET=0 AC=1 | Disabling the '
        'logging as maximum number of logged messages was reached: 1.\n') * 2
    assert capsys.readouterr().out == expected
def test_log_if_unable_to_set_cached_max_record_value(mocker: MockerFixture, context, capsys):
    """A cache write failure is logged (with asset/AC context) instead of raised."""

    @stream
    def stream_app(event, api, cache):
        pass

    payload = [
        RawStreamTimeEvent(
            records=[
                RawTimeRecord(
                    collection=str(),
                    timestamp=int(),
                    asset_id=int(),
                    company_id=int(),
                )
            ],
            metadata=RawMetadata(
                app_stream_id=int(),
                apps={SETTINGS.APP_KEY: RawAppMetadata(app_connection_id=int())},
                log_type=LogType.time,
            ),
        ).dict()
    ]

    cache_patch = mocker.patch.object(
        RawStreamEvent, 'set_cached_max_record_value', side_effect=Exception
    )

    stream_app(payload, context)

    captured = capsys.readouterr()
    assert 'ASSET=0 AC=0' in captured.out
    assert 'Could not save data to cache.' in captured.out
    cache_patch.assert_called_once()
[ ( -1, [ RawStreamTimeEvent( records=[ RawTimeRecord( asset_id=int(), company_id=int(), collection=str(), timestamp=int(), ), RawTimeRecord( asset_id=int(), company_id=int(), collection='wits.completed', timestamp=int(), ), ], metadata=RawMetadata( app_stream_id=int(), apps={ SETTINGS.APP_KEY: RawAppMetadata(app_connection_id=int()) }, log_type=LogType.time, ), ).dict() ], [ RawTimeRecord( asset_id=int(),