def test_s3_event_handler_with_s3_exception(create_event_handler,
                                            s3_get_object_mock,
                                            create_s3_event):
    """A batch where one key raises: the handler re-raises, yet the good key
    still gets a DONE EventLog entry and the bad one a FAILED entry."""
    handler, _args, _logger = create_event_handler()
    event = create_s3_event(['my-json-key', 'my-key-with-exception'])

    # The failing key's exception propagates out of the handler.
    with pytest.raises(Exception, match='my-key-with-exception'):
        handler(event, Context(function_name='my-lambda'))

    done = list(EventLog.status_index.query(STATUS_DONE))
    assert len(done) == 1
    assert_similar(
        done[0].attribute_values, {
            'function': 'my-lambda',
            'gzip': False,
            'region': 'no region set',
            's3_bucket': 'my-bucket',
            's3_key': 'my-json-key',
            'status': STATUS_DONE,
            'trace_id': re.compile(r'\w{32}'),
        })

    failed = list(EventLog.status_index.query(STATUS_FAILED))
    assert len(failed) == 1
    assert_similar(
        failed[0].attribute_values, {
            'function': 'my-lambda',
            'gzip': False,
            'region': 'no region set',
            's3_bucket': 'my-bucket',
            's3_key': 'my-key-with-exception',
            'status': STATUS_FAILED,
            'error': 'my-key-with-exception',
        })
def test_s3_event_handler(create_event_handler, s3_get_object_mock,
                          create_s3_event):
    """With parse_json disabled the handler receives the raw body string,
    the originating S3 record, and the fetched S3 object."""
    handler, handler_args, _ = create_event_handler(parse_json=False)
    event = create_s3_event(['my-key'])

    handler(event, Context(function_name='my-lambda'))

    assert_similar(handler_args, [{
        'body': 'foo bar',
        'record': {
            's3': {
                'bucket': {'name': 'my-bucket'},
                'object': {'key': 'my-key'},
            },
        },
        's3_obj': {
            # (callable, expected) pair: assert_similar applies the callable.
            'Body': (type, StreamingBody),
            'Metadata': {},
        },
    }])
    s3_get_object_mock.assert_called_once_with(Bucket='my-bucket',
                                               Key='my-key')
def test_s3_event_handler_logging(create_event_handler, s3_get_object_mock,
                                  create_s3_event):
    """The logger records a warning for the unparseable key and the full
    debug/info trail (including trace id and functional key) for the good one."""
    handler, _args, logger = create_event_handler()
    event = create_s3_event(
        ['my-key-with-parse-error', 'my-json-key-with-trace'])

    handler(event, Context(function_name='my-lambda'))

    assert_similar(logger.method_calls, [
        # First record: fails JSON parsing and is logged as a warning.
        ('append_keys', ignore, {
            's3_bucket': 'my-bucket',
            's3_key': 'my-key-with-parse-error',
        }),
        ('debug', ('processing s3 event', )),
        ('append_keys', ignore, {'traceId': re.compile(r'\w{32}')}),
        ('warning',
         ('processing s3 object failed: S3 object my-key-with-parse-error in bucket '
          'my-bucket is not valid Json: Expecting value: line 1 column 1 (char 0)',
          )),
        # Second record: parses fine and carries an explicit trace id.
        ('append_keys', ignore, {
            's3_bucket': 'my-bucket',
            's3_key': 'my-json-key-with-trace',
        }),
        ('debug', ('processing s3 event', )),
        ('append_keys', ignore, {'traceId': TRACE_ID}),
        ('append_keys', ignore, {'func_key': "{'foo': 'bar'}"}),
        ('info', ('processed s3 object', )),
    ])
def test_assert_similar_nested():
    """assert_similar on nested structures: regex/approx/ignore matchers,
    attribute comparison for plain objects, and an aggregated diff report."""

    class C:
        def __init__(self):
            self.a = 1
            self.b = 'b'

    actual = [{
        'foo': 'bar',
        'amount': 0.1,
        'ignored': (1, 2, 'c'),
        'nested': [{'regex': '123 abc'}, 1, 'string', C()],
    }, (1, 2, 'c')]

    # Happy path: every matcher variant lines up with its actual value.
    assert_similar(actual, [{
        'foo': 'bar',
        'amount': pytest.approx(1 / 10),
        'ignored': ignore,
        'nested': [{'regex': re.compile(r'\d{3}\s\w{3}')}, 1, 'string', {
            'a': 1,
            'b': 'b',
        }],
    }, (1, 2, 'c')])

    # Mismatch path: every difference is collected into one AssertionError.
    with pytest.raises(
            AssertionError,
            match=re.escape(
                'found 5 differences:\n'
                'expected[0].not: "present", actual[0].not: "None"\n'
                'expected[0].nested[0]: "1", actual[0].nested[0]: "{\'regex\': \'123 abc\'}"\n'
                'expected[0].nested[1]: "string", actual[0].nested[1]: "1"\n'
                'expected[0].nested[2].a: "1", actual[0].nested[2].a: "None"\n'
                'expected[0].nested[2].nope: "c", actual[0].nested[2].nope: "None"'
            )):
        assert_similar(actual, [{
            'foo': 'bar',
            'amount': pytest.approx(1 / 10),
            'ignored': ignore,
            'not': 'present',
            'nested': [1, 'string', {
                'a': 1,
                'nope': 'c',
            }],
        }, (1, 2, 'c')])
def test_with_event_log(event_log_table):
    """A clean pass through the event_log context persists a DONE entry,
    including the functional key set inside the block."""
    with event_log('my-bucket', 'my-key', 'my-lambda', logger) as event_log_:
        event_log_.set_functional_key('funky', 'music')

    assert_similar(
        EventLog.get('my-key').attribute_values, {
            'function': 'my-lambda',
            's3_bucket': 'my-bucket',
            's3_key': 'my-key',
            'functional_key_name': 'funky',
            'functional_key_value': 'music',
            'gzip': False,
            'region': 'no region set',
            'status': 'DONE',
        })
def test_s3_event_handler_event_log(create_event_handler, s3_get_object_mock,
                                    create_s3_event):
    """A parse failure yields one FAILED EventLog entry with the parse error,
    while the valid key yields one DONE entry carrying its trace id."""
    handler, _args, _ = create_event_handler()
    event = create_s3_event(
        ['my-key-with-parse-error', 'my-json-key-with-trace'])

    handler(event, Context(function_name='my-lambda'))

    done = list(EventLog.status_index.query(STATUS_DONE))
    assert len(done) == 1
    assert_similar(
        done[0].attribute_values, {
            'function': 'my-lambda',
            'gzip': False,
            'region': 'no region set',
            's3_bucket': 'my-bucket',
            's3_key': 'my-json-key-with-trace',
            'status': STATUS_DONE,
            'trace_id': TRACE_ID,
        })

    failed = list(EventLog.status_index.query(STATUS_FAILED))
    assert len(failed) == 1
    assert_similar(
        failed[0].attribute_values, {
            'function': 'my-lambda',
            'gzip': False,
            'region': 'no region set',
            's3_bucket': 'my-bucket',
            's3_key': 'my-key-with-parse-error',
            'status': STATUS_FAILED,
            'error':
            'S3 object my-key-with-parse-error in bucket my-bucket is not valid '
            'Json: Expecting value: line 1 column 1 (char 0)',
        })
def test_with_event_log_raising_exception(event_log_table):
    """An exception raised inside the event_log context propagates and the
    persisted entry is marked FAILED with the exception message as 'error'.

    Rewritten with pytest.raises (the pattern used throughout this file)
    instead of try/except with an unreachable `assert False` after the raise
    and a no-op `assert True` in the except clause.
    """
    with pytest.raises(ValueError, match='my-error'):
        with event_log('my-bucket', 'my-error-key', 'my-lambda',
                       logger) as event_log_:
            event_log_.set_functional_key('funky', 'music')
            raise ValueError('my-error')

    assert_similar(
        EventLog.get('my-error-key').attribute_values, {
            'function': 'my-lambda',
            's3_bucket': 'my-bucket',
            's3_key': 'my-error-key',
            'functional_key_name': 'funky',
            'functional_key_value': 'music',
            'gzip': False,
            'region': 'no region set',
            'status': 'FAILED',
            'error': 'my-error',
        })
def test_assert_similar_list():
    """List comparison: identical lists match, extra actual items are
    tolerated, and a missing expected index is reported in the diff."""
    assert_similar([1, 'two', 3], [1, 'two', 3])
    assert_similar([1, 'two', 3], [1, 'two'])
    with pytest.raises(
            AssertionError,
            match=re.escape(
                'found 1 differences:\n'
                'expected[2]: "3", actual[2]: "index 2 not found"')):
        assert_similar([1, 'two'], [1, 'two', 3])


def test_assert_similar_dict():
    """Dict comparison: identical dicts match, extra actual keys are
    tolerated, and a missing expected key is reported in the diff.

    Previously this definition was fused behind a stray '#' onto the list
    test; restored as its own test so pytest collects it separately.
    """
    assert_similar({1: 'one', 2: 'two'}, {1: 'one', 2: 'two'})
    assert_similar({1: 'one', 2: 'two'}, {2: 'two'})
    with pytest.raises(AssertionError,
                       match=re.escape('found 1 differences:\n'
                                       'expected.1: "one", actual.1: "None"')):
        assert_similar({2: 'two'}, {1: 'one', 2: 'two'})
def test_assert_similar_with_callable():
    """A (callable, value) pair in the expected structure is treated as a
    matcher — here, presumably str(3) == '3'; see assert_similar for details."""
    assert_similar([1, 'two', 3], [1, 'two', (str, '3')])