def test_batcher_one_object(self):
    """A lone S3 object is forwarded as a single one-record SQS message."""
    # Seed the mock bucket with exactly one object.
    self._bucket.put_object(Body=b'Object 1', Key='key1')

    object_count = self.batcher_main.batch_lambda_handler(
        {}, boto3_mocks.MockLambdaContext())

    # The handler reports one object processed...
    self.assertEqual(1, object_count)
    # ...and the queue holds a single message wrapping that one key.
    expected_messages = [{'Records': [{'s3': {'object': {'key': 'key1'}}}]}]
    self.assertEqual(expected_messages, self._sqs_messages())
def test_batcher_sqs_errors(self):
    """Verify SQS errors are logged and reported to CloudWatch."""
    # Swap in an SQS client whose sends always fail.
    self.batcher_main.SQS_CLIENT = MockSQSErrorsClient()

    # Populate the mock bucket with three objects.
    for key, body in (('key1', b'Object 1'),
                      ('key2', b'Object 2'),
                      ('key3', b'Object 3')):
        self._bucket.put_object(Body=body, Key=key)

    object_count = self.batcher_main.batch_lambda_handler(
        {}, boto3_mocks.MockLambdaContext())

    # All three objects are still counted even though every enqueue failed.
    self.assertEqual(3, object_count)
def test_batcher_one_full_batch(self):
    """Batcher enqueues the configured maximum number of objects in one SQS message."""
    # Two objects == one full batch for the test configuration.
    for key, body in (('key1', b'Object 1'), ('key2', b'Object 2')):
        self._bucket.put_object(Body=body, Key=key)

    object_count = self.batcher_main.batch_lambda_handler(
        {}, boto3_mocks.MockLambdaContext())

    self.assertEqual(2, object_count)

    # Both keys land in the Records list of a single SQS message.
    expected_messages = [{
        'Records': [
            {'s3': {'object': {'key': 'key1'}}},
            {'s3': {'object': {'key': 'key2'}}},
        ],
    }]
    self.assertEqual(expected_messages, self._sqs_messages())
def test_batcher_one_batch_plus_one(self):
    """Batcher enqueues more than 1 full batch; less than 2."""
    # Three objects: one full two-record batch plus a one-record remainder.
    for key, body in (('key1', b'Object 1'),
                      ('key2', b'Object 2'),
                      ('key3', b'Object 3')):
        self._bucket.put_object(Body=body, Key=key)

    object_count = self.batcher_main.batch_lambda_handler(
        {}, boto3_mocks.MockLambdaContext())

    self.assertEqual(3, object_count)

    # First message carries the full batch; second carries the overflow key.
    full_batch = {'Records': [{'s3': {'object': {'key': 'key1'}}},
                              {'s3': {'object': {'key': 'key2'}}}]}
    remainder = {'Records': [{'s3': {'object': {'key': 'key3'}}}]}
    self.assertEqual([full_batch, remainder], self._sqs_messages())
# --- Fixture constants shared by the tests in this module ---------------------

# Canned value returned for an S3 object's last-modified timestamp.
FILE_MODIFIED_TIME = 'test-last-modified'

# A benign ("good") test file: its contents, metadata, and S3 key.
GOOD_FILE_CONTENTS = 'Hello, world!\n'
GOOD_FILE_METADATA = {'filepath': 'win32'}
# Key deliberately contains a space and a '+' — presumably to exercise
# URL encoding/decoding of S3 keys; TODO confirm against the handler.
GOOD_S3_OBJECT_KEY = 'space plus+file.test'

# A malicious ("evil") test file: its contents, metadata, and S3 key.
EVIL_FILE_CONTENTS = 'Hello, evil world!\n'
EVIL_FILE_METADATA = {'filepath': '/path/to/mock-evil.exe'}
EVIL_S3_OBJECT_KEY = 'evil.exe'

# Mock AWS resource identifiers used when wiring up the handler under test.
MOCK_DYNAMO_TABLE_NAME = 'mock-dynamo-table'
MOCK_SNS_TOPIC_ARN = 's3:mock-sns-arn'
MOCK_SQS_URL = 'https://sqs.mock.url'
MOCK_SQS_RECEIPTS = ['sqs_receipt1', 'sqs_receipt2']

# Mimics minimal parts of S3:ObjectAdded event that triggers the lambda function.
LAMBDA_VERSION = 1
TEST_CONTEXT = boto3_mocks.MockLambdaContext(LAMBDA_VERSION)


class MockS3Object(object):
    """Simple mock for boto3.resource('s3').Object"""

    def __init__(self, bucket_name, object_key):
        # type: (str, str) -> None
        """Record the bucket name and object key for later lookups."""
        self.name = bucket_name
        self.key = object_key

    def download_file(self, download_path):
        # type: (str) -> None
        """Write canned contents to download_path.

        Writes GOOD_FILE_CONTENTS when this object's key is the "good" key,
        otherwise EVIL_FILE_CONTENTS.
        """
        with open(download_path, 'w') as f:
            f.write(GOOD_FILE_CONTENTS if self.key == GOOD_S3_OBJECT_KEY else EVIL_FILE_CONTENTS)

    # NOTE(review): the body of this property lies outside the visible chunk;
    # it presumably returns FILE_MODIFIED_TIME — confirm in the full file.
    @property
    def last_modified(self):
def test_batcher_empty_bucket(self):
    """An empty bucket yields zero objects and no SQS traffic."""
    object_count = self.batcher_main.batch_lambda_handler(
        {}, boto3_mocks.MockLambdaContext())

    # Nothing to batch: zero reported and nothing enqueued.
    self.assertEqual(0, object_count)
    self.assertEqual([], self._sqs_messages())