Example #1
def test_firehose_s3(env=ENV_DEV):

    s3_resource = aws_stack.connect_to_resource('s3', env=env)
    firehose = aws_stack.connect_to_service('firehose', env=env)

    s3_prefix = '/testdata'
    bucket_name = 'test_bucket'
    test_data = b'{"test": "data123"}'
    # create Firehose stream
    stream = firehose.create_delivery_stream(
        DeliveryStreamName=TEST_FIREHOSE_NAME,
        S3DestinationConfiguration={
            'RoleARN': aws_stack.iam_resource_arn('firehose'),
            'BucketARN': aws_stack.s3_bucket_arn(bucket_name),
            'Prefix': s3_prefix
        }
    )
    # create target S3 bucket
    s3_resource.create_bucket(Bucket=bucket_name)

    # put records
    firehose.put_record(
        DeliveryStreamName=TEST_FIREHOSE_NAME,
        Record={
            'Data': test_data
        }
    )
    # check records in target bucket
    all_objects = testutil.list_all_s3_objects()
    testutil.assert_objects(json.loads(test_data), all_objects)
Example #2
def test_kinesis_lambda_forward_chain():
    kinesis = aws_stack.connect_to_service('kinesis')
    s3 = aws_stack.connect_to_service('s3')

    aws_stack.create_kinesis_stream(TEST_CHAIN_STREAM1_NAME, delete=True)
    aws_stack.create_kinesis_stream(TEST_CHAIN_STREAM2_NAME, delete=True)
    s3.create_bucket(Bucket=TEST_BUCKET_NAME)

    # deploy test lambdas connected to Kinesis streams
    zip_file = testutil.create_lambda_archive(load_file(TEST_LAMBDA_PYTHON), get_content=True,
        libs=TEST_LAMBDA_LIBS, runtime=LAMBDA_RUNTIME_PYTHON27)
    testutil.create_lambda_function(func_name=TEST_CHAIN_LAMBDA1_NAME, zip_file=zip_file,
        event_source_arn=get_event_source_arn(TEST_CHAIN_STREAM1_NAME), runtime=LAMBDA_RUNTIME_PYTHON27)
    testutil.create_lambda_function(func_name=TEST_CHAIN_LAMBDA2_NAME, zip_file=zip_file,
        event_source_arn=get_event_source_arn(TEST_CHAIN_STREAM2_NAME), runtime=LAMBDA_RUNTIME_PYTHON27)

    # publish test record
    test_data = {'test_data': 'forward_chain_data_%s' % short_uid()}
    data = clone(test_data)
    data[lambda_integration.MSG_BODY_MESSAGE_TARGET] = 'kinesis:%s' % TEST_CHAIN_STREAM2_NAME
    kinesis.put_record(Data=to_bytes(json.dumps(data)), PartitionKey='testId', StreamName=TEST_CHAIN_STREAM1_NAME)

    # check results
    time.sleep(5)
    all_objects = testutil.list_all_s3_objects()
    testutil.assert_objects(test_data, all_objects)
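Note: Examples #1 and #2 verify delivery either immediately or after a fixed time.sleep(), which makes the checks timing-sensitive. A minimal polling sketch, assuming the same testutil import as above (the poll_condition helper itself is illustrative, not part of these examples):

import time

def poll_condition(condition, timeout=10, interval=0.5):
    # evaluate condition() repeatedly until it is truthy or the timeout expires
    deadline = time.time() + timeout
    while time.time() < deadline:
        if condition():
            return True
        time.sleep(interval)
    return False

# usage: wait until at least one object has been delivered before asserting contents
assert poll_condition(lambda: len(testutil.list_all_s3_objects()) > 0)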
Example #3
def test_firehose_s3():

    s3_resource = aws_stack.connect_to_resource('s3')
    firehose = aws_stack.connect_to_service('firehose')

    s3_prefix = '/testdata'
    test_data = '{"test": "firehose_data_%s"}' % short_uid()
    # create Firehose stream
    stream = firehose.create_delivery_stream(
        DeliveryStreamName=TEST_FIREHOSE_NAME,
        S3DestinationConfiguration={
            'RoleARN': aws_stack.iam_resource_arn('firehose'),
            'BucketARN': aws_stack.s3_bucket_arn(TEST_BUCKET_NAME),
            'Prefix': s3_prefix
        }
    )
    assert stream
    assert TEST_FIREHOSE_NAME in firehose.list_delivery_streams()['DeliveryStreamNames']
    # create target S3 bucket
    s3_resource.create_bucket(Bucket=TEST_BUCKET_NAME)

    # put records
    firehose.put_record(
        DeliveryStreamName=TEST_FIREHOSE_NAME,
        Record={
            'Data': to_bytes(test_data)
        }
    )
    # check records in target bucket
    all_objects = testutil.list_all_s3_objects()
    testutil.assert_objects(json.loads(to_str(test_data)), all_objects)
Example #4
    def test_firehose_s3(self):
        s3_resource = aws_stack.connect_to_resource("s3")
        firehose = aws_stack.connect_to_service("firehose")

        s3_prefix = "/testdata"
        test_data = '{"test": "firehose_data_%s"}' % short_uid()
        # create Firehose stream
        stream = firehose.create_delivery_stream(
            DeliveryStreamName=TEST_FIREHOSE_NAME,
            S3DestinationConfiguration={
                "RoleARN": aws_stack.iam_resource_arn("firehose"),
                "BucketARN": aws_stack.s3_bucket_arn(TEST_BUCKET_NAME),
                "Prefix": s3_prefix,
            },
            Tags=TEST_TAGS,
        )
        self.assertTrue(stream)
        self.assertIn(TEST_FIREHOSE_NAME, firehose.list_delivery_streams()["DeliveryStreamNames"])
        tags = firehose.list_tags_for_delivery_stream(DeliveryStreamName=TEST_FIREHOSE_NAME)
        self.assertEqual(TEST_TAGS, tags["Tags"])
        # create target S3 bucket
        s3_resource.create_bucket(Bucket=TEST_BUCKET_NAME)

        # put records
        firehose.put_record(
            DeliveryStreamName=TEST_FIREHOSE_NAME, Record={"Data": to_bytes(test_data)}
        )
        # check records in target bucket
        all_objects = testutil.list_all_s3_objects()
        testutil.assert_objects(json.loads(to_str(test_data)), all_objects)
        # check file layout in target bucket
        all_objects = testutil.map_all_s3_objects(buckets=[TEST_BUCKET_NAME])
        for key in all_objects.keys():
            self.assertRegex(key, r".*/\d{4}/\d{2}/\d{2}/\d{2}/.*\-\d{4}\-\d{2}\-\d{2}\-\d{2}.*")
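Note: the regular expression in the last assertion encodes Firehose's default S3 key layout, a YYYY/MM/DD/HH prefix followed by a timestamped object name. A self-contained check against an illustrative key (the key value is made up, not taken from a real run):

import re

PATTERN = r".*/\d{4}/\d{2}/\d{2}/\d{2}/.*\-\d{4}\-\d{2}\-\d{2}\-\d{2}.*"
sample_key = "testdata/2021/04/06/12/test-firehose-2021-04-06-12-00-00-abcd"  # illustrative
assert re.match(PATTERN, sample_key)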
Example #5
def receive_assert_delete(queue_url, assertions, sqs_client=None):
    if not sqs_client:
        sqs_client = aws_stack.connect_to_service('sqs')

    response = sqs_client.receive_message(QueueUrl=queue_url)
    messages = [json.loads(to_str(m['Body'])) for m in response['Messages']]
    testutil.assert_objects(assertions, messages)
    for message in response['Messages']:
        sqs_client.delete_message(QueueUrl=queue_url, ReceiptHandle=message['ReceiptHandle'])
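Note: receive_message omits the 'Messages' key entirely when the queue is empty, so this helper (and the variants below) raises a KeyError rather than a clean assertion failure. A defensive sketch under the same imports, using long polling to reduce empty responses (WaitTimeSeconds is a standard SQS parameter; the helper name and the rest are illustrative):

def receive_assert_delete_safe(queue_url, assertions, sqs_client=None):
    if not sqs_client:
        sqs_client = aws_stack.connect_to_service('sqs')
    # long polling: wait up to 5 seconds for a message instead of returning immediately
    response = sqs_client.receive_message(QueueUrl=queue_url, WaitTimeSeconds=5)
    messages = [json.loads(to_str(m['Body'])) for m in response.get('Messages', [])]
    assert messages, 'no messages received from %s' % queue_url
    testutil.assert_objects(assertions, messages)
    for message in response['Messages']:
        sqs_client.delete_message(QueueUrl=queue_url, ReceiptHandle=message['ReceiptHandle'])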
Example #6
def receive_assert_delete(queue_url, assertions, sqs_client=None, required_subject=None):
    if not sqs_client:
        sqs_client = aws_stack.connect_to_service('sqs')

    response = sqs_client.receive_message(QueueUrl=queue_url)

    messages = [json.loads(to_str(m['Body'])) for m in response['Messages']]
    testutil.assert_objects(assertions, messages)
    for message in response['Messages']:
        sqs_client.delete_message(QueueUrl=queue_url, ReceiptHandle=message['ReceiptHandle'])
Example #7
        def verify():
            response = sqs_client.receive_message(QueueUrl=queue_url)
            for message in response['Messages']:
                snsObj = json.loads(message['Body'])
                testutil.assert_object({'Subject': 'Amazon S3 Notification'},
                                       snsObj)
                notificationObj = json.loads(snsObj['Message'])
                testutil.assert_objects(
                    [{
                        'key': test_key2
                    }, {
                        'name': TEST_BUCKET_NAME_WITH_NOTIFICATIONS
                    }], notificationObj['Records'])

                sqs_client.delete_message(
                    QueueUrl=queue_url, ReceiptHandle=message['ReceiptHandle'])
Example #8
    def _receive_assert_delete(self,
                               queue_url,
                               assertions,
                               sqs_client=None,
                               required_subject=None):
        if not sqs_client:
            sqs_client = aws_stack.create_external_boto_client("sqs")

        response = sqs_client.receive_message(QueueUrl=queue_url)

        messages = [
            json.loads(to_str(m["Body"])) for m in response["Messages"]
        ]
        testutil.assert_objects(assertions, messages)
        for message in response["Messages"]:
            sqs_client.delete_message(QueueUrl=queue_url,
                                      ReceiptHandle=message["ReceiptHandle"])
Example #9
        def verify():
            response = sqs_client.receive_message(QueueUrl=queue_url)
            for message in response["Messages"]:
                sns_obj = json.loads(message["Body"])
                testutil.assert_object({"Subject": "Amazon S3 Notification"},
                                       sns_obj)
                notification_obj = json.loads(sns_obj["Message"])
                testutil.assert_objects(
                    [{
                        "key": test_key2
                    }, {
                        "name": TEST_BUCKET_NAME_WITH_NOTIFICATIONS
                    }],
                    notification_obj["Records"],
                )

                sqs_client.delete_message(
                    QueueUrl=queue_url, ReceiptHandle=message["ReceiptHandle"])
Example #10
    def _receive_assert_delete(self,
                               queue_url,
                               assertions,
                               sqs_client=None,
                               required_subject=None):
        if not sqs_client:
            sqs_client = aws_stack.create_external_boto_client("sqs")

        response = sqs_client.receive_message(QueueUrl=queue_url,
                                              MessageAttributeNames=["All"],
                                              VisibilityTimeout=0)
        messages = []
        for m in response["Messages"]:
            message = json.loads(to_str(m["Body"]))
            messages.append(message)
        testutil.assert_objects(assertions, messages)
        for message in response["Messages"]:
            sqs_client.delete_message(QueueUrl=queue_url,
                                      ReceiptHandle=message["ReceiptHandle"])
Example #11
    def test_firehose_kinesis_to_s3(self):
        kinesis = aws_stack.connect_to_service('kinesis')
        s3_resource = aws_stack.connect_to_resource('s3')
        firehose = aws_stack.connect_to_service('firehose')

        aws_stack.create_kinesis_stream(TEST_STREAM_NAME, delete=True)

        s3_prefix = '/testdata'
        test_data = '{"test": "firehose_data_%s"}' % short_uid()

        # create Firehose stream
        stream = firehose.create_delivery_stream(
            DeliveryStreamType='KinesisStreamAsSource',
            KinesisStreamSourceConfiguration={
                'RoleARN': aws_stack.iam_resource_arn('firehose'),
                'KinesisStreamARN': aws_stack.kinesis_stream_arn(TEST_STREAM_NAME)
            },
            DeliveryStreamName=TEST_FIREHOSE_NAME,
            S3DestinationConfiguration={
                'RoleARN': aws_stack.iam_resource_arn('firehose'),
                'BucketARN': aws_stack.s3_bucket_arn(TEST_BUCKET_NAME),
                'Prefix': s3_prefix
            }
        )
        self.assertTrue(stream)
        self.assertIn(TEST_FIREHOSE_NAME, firehose.list_delivery_streams()['DeliveryStreamNames'])

        # create target S3 bucket
        s3_resource.create_bucket(Bucket=TEST_BUCKET_NAME)

        # put records
        kinesis.put_record(
            Data=to_bytes(test_data),
            PartitionKey='testId',
            StreamName=TEST_STREAM_NAME
        )

        time.sleep(3)

        # check records in target bucket
        all_objects = testutil.list_all_s3_objects()
        testutil.assert_objects(json.loads(to_str(test_data)), all_objects)
Example #12
    def test_firehose_kinesis_to_s3(self):
        kinesis = aws_stack.create_external_boto_client("kinesis")
        s3_resource = aws_stack.connect_to_resource("s3")
        firehose = aws_stack.create_external_boto_client("firehose")

        aws_stack.create_kinesis_stream(TEST_STREAM_NAME, delete=True)

        s3_prefix = "/testdata"
        test_data = '{"test": "firehose_data_%s"}' % short_uid()

        # create Firehose stream
        stream = firehose.create_delivery_stream(
            DeliveryStreamType="KinesisStreamAsSource",
            KinesisStreamSourceConfiguration={
                "RoleARN": aws_stack.iam_resource_arn("firehose"),
                "KinesisStreamARN":
                aws_stack.kinesis_stream_arn(TEST_STREAM_NAME),
            },
            DeliveryStreamName=TEST_FIREHOSE_NAME,
            S3DestinationConfiguration={
                "RoleARN": aws_stack.iam_resource_arn("firehose"),
                "BucketARN": aws_stack.s3_bucket_arn(TEST_BUCKET_NAME),
                "Prefix": s3_prefix,
            },
        )
        self.assertTrue(stream)
        self.assertIn(TEST_FIREHOSE_NAME,
                      firehose.list_delivery_streams()["DeliveryStreamNames"])

        # create target S3 bucket
        s3_resource.create_bucket(Bucket=TEST_BUCKET_NAME)

        # put records
        kinesis.put_record(Data=to_bytes(test_data),
                           PartitionKey="testId",
                           StreamName=TEST_STREAM_NAME)

        time.sleep(3)

        # check records in target bucket
        all_objects = testutil.list_all_s3_objects()
        testutil.assert_objects(json.loads(to_str(test_data)), all_objects)
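Note: both Kinesis-to-S3 examples rely on a fixed time.sleep(3) before checking the bucket. A complementary robustness step is to wait for the delivery stream to become ACTIVE before publishing; the describe_delivery_stream call and its response shape are standard boto3, while the polling loop is illustrative:

import time

def wait_for_delivery_stream_active(firehose, stream_name, timeout=30):
    # poll DescribeDeliveryStream until the stream reports ACTIVE status
    deadline = time.time() + timeout
    while time.time() < deadline:
        description = firehose.describe_delivery_stream(DeliveryStreamName=stream_name)
        if description['DeliveryStreamDescription']['DeliveryStreamStatus'] == 'ACTIVE':
            return
        time.sleep(1)
    raise TimeoutError('delivery stream %s not ACTIVE after %s seconds' % (stream_name, timeout))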
Example #13
def test_bucket_notifications():

    env = ENV_DEV
    s3_resource = aws_stack.connect_to_resource('s3', env=env)
    s3_client = aws_stack.connect_to_service('s3', env=env)
    sqs_client = aws_stack.connect_to_service('sqs', env=env)

    # create test bucket and queue
    s3_resource.create_bucket(Bucket=TEST_BUCKET_NAME_WITH_NOTIFICATIONS)
    sqs_client.create_queue(QueueName=TEST_QUEUE_NAME)

    # create notification on bucket
    queue_url = 'http://%s:%s/%s/%s' % (HOSTNAME, PORT_SQS,
                                        TEST_AWS_ACCOUNT_ID, TEST_QUEUE_NAME)
    s3_client.put_bucket_notification_configuration(
        Bucket=TEST_BUCKET_NAME_WITH_NOTIFICATIONS,
        NotificationConfiguration={
            'QueueConfigurations': [{
                'Id': 'id123456',
                'QueueArn': queue_url,  # note: this passes the queue URL where a queue ARN is expected
                'Events': ['s3:ObjectCreated:*', 's3:ObjectRemoved:Delete']
            }]
        })

    # upload file to S3
    test_prefix = '/testdata'
    test_data = b'{"test": "bucket_notification"}'
    s3_client.upload_fileobj(BytesIO(test_data),
                             TEST_BUCKET_NAME_WITH_NOTIFICATIONS, test_prefix)

    # receive, assert, and delete message from SQS
    response = sqs_client.receive_message(QueueUrl=queue_url)
    messages = [json.loads(to_str(m['Body'])) for m in response['Messages']]
    testutil.assert_objects({'name': TEST_BUCKET_NAME_WITH_NOTIFICATIONS},
                            messages)
    for message in response['Messages']:
        sqs_client.delete_message(QueueUrl=queue_url,
                                  ReceiptHandle=message['ReceiptHandle'])
Example #14
    def test_firehose_s3(self):

        s3_resource = aws_stack.connect_to_resource('s3')
        firehose = aws_stack.connect_to_service('firehose')

        s3_prefix = '/testdata'
        test_data = '{"test": "firehose_data_%s"}' % short_uid()
        # create Firehose stream
        stream = firehose.create_delivery_stream(
            DeliveryStreamName=TEST_FIREHOSE_NAME,
            S3DestinationConfiguration={
                'RoleARN': aws_stack.iam_resource_arn('firehose'),
                'BucketARN': aws_stack.s3_bucket_arn(TEST_BUCKET_NAME),
                'Prefix': s3_prefix
            },
            Tags=TEST_TAGS
        )
        self.assertTrue(stream)
        self.assertIn(TEST_FIREHOSE_NAME, firehose.list_delivery_streams()['DeliveryStreamNames'])
        tags = firehose.list_tags_for_delivery_stream(DeliveryStreamName=TEST_FIREHOSE_NAME)
        self.assertEqual(TEST_TAGS, tags['Tags'])
        # create target S3 bucket
        s3_resource.create_bucket(Bucket=TEST_BUCKET_NAME)

        # put records
        firehose.put_record(
            DeliveryStreamName=TEST_FIREHOSE_NAME,
            Record={
                'Data': to_bytes(test_data)
            }
        )
        # check records in target bucket
        all_objects = testutil.list_all_s3_objects()
        testutil.assert_objects(json.loads(to_str(test_data)), all_objects)
        # check file layout in target bucket
        all_objects = testutil.map_all_s3_objects(buckets=[TEST_BUCKET_NAME])
        for key in all_objects.keys():
            self.assertRegex(key, r'.*/\d{4}/\d{2}/\d{2}/\d{2}/.*\-\d{4}\-\d{2}\-\d{2}\-\d{2}.*')
Example #15
def check_results():
    all_objects = testutil.list_all_s3_objects()
    testutil.assert_objects(test_data, all_objects)
Example #16
def check_results():
    LOGGER.debug("check results")
    all_objects = testutil.list_all_s3_objects()
    testutil.assert_objects(test_data, all_objects)
Example #17
def test_bucket_notifications():

    s3_resource = aws_stack.connect_to_resource('s3')
    s3_client = aws_stack.connect_to_service('s3')
    sqs_client = aws_stack.connect_to_service('sqs')

    # create test bucket and queue
    s3_resource.create_bucket(Bucket=TEST_BUCKET_NAME_WITH_NOTIFICATIONS)
    queue_info = sqs_client.create_queue(QueueName=TEST_QUEUE_NAME_FOR_S3)

    # create notification on bucket
    queue_url = queue_info['QueueUrl']
    queue_arn = aws_stack.sqs_queue_arn(TEST_QUEUE_NAME_FOR_S3)
    events = ['s3:ObjectCreated:*', 's3:ObjectRemoved:Delete']
    filter_rules = {
        'FilterRules': [{
            'Name': 'prefix',
            'Value': 'testupload/'
        }, {
            'Name': 'suffix',
            'Value': 'testfile.txt'
        }]
    }
    s3_client.put_bucket_notification_configuration(
        Bucket=TEST_BUCKET_NAME_WITH_NOTIFICATIONS,
        NotificationConfiguration={
            'QueueConfigurations': [{
                'Id': 'id123456',
                'QueueArn': queue_arn,
                'Events': events,
                'Filter': {
                    'Key': filter_rules
                }
            }]
        }
    )

    # retrieve and check notification config
    config = s3_client.get_bucket_notification_configuration(Bucket=TEST_BUCKET_NAME_WITH_NOTIFICATIONS)
    config = config['QueueConfigurations'][0]
    assert events == config['Events']
    assert filter_rules == config['Filter']['Key']

    # upload file to S3 (this should NOT trigger a notification)
    test_key1 = '/testdata'
    test_data1 = b'{"test": "bucket_notification1"}'
    s3_client.upload_fileobj(BytesIO(test_data1), TEST_BUCKET_NAME_WITH_NOTIFICATIONS, test_key1)

    # upload file to S3 (this should trigger a notification)
    test_key2 = 'testupload/dir1/testfile.txt'
    test_data2 = b'{"test": "bucket_notification2"}'
    s3_client.upload_fileobj(BytesIO(test_data2), TEST_BUCKET_NAME_WITH_NOTIFICATIONS, test_key2)

    # receive, assert, and delete message from SQS
    receive_assert_delete(queue_url, [{'key': test_key2}, {'name': TEST_BUCKET_NAME_WITH_NOTIFICATIONS}], sqs_client)

    # delete notification config
    _delete_notification_config()

    # put notification config with single event type
    event = 's3:ObjectCreated:*'
    s3_client.put_bucket_notification_configuration(Bucket=TEST_BUCKET_NAME_WITH_NOTIFICATIONS,
        NotificationConfiguration={
            'QueueConfigurations': [{
                'Id': 'id123456',
                'QueueArn': queue_arn,
                'Events': [event]
            }]
        }
    )
    config = s3_client.get_bucket_notification_configuration(Bucket=TEST_BUCKET_NAME_WITH_NOTIFICATIONS)
    config = config['QueueConfigurations'][0]
    assert config['Events'] == [event]

    # put notification config with single event type and prefix filter
    event = 's3:ObjectCreated:*'
    filter_rules = {
        'FilterRules': [{
            'Name': 'prefix',
            'Value': 'testupload/'
        }]
    }
    s3_client.put_bucket_notification_configuration(Bucket=TEST_BUCKET_NAME_WITH_NOTIFICATIONS,
        NotificationConfiguration={
            'QueueConfigurations': [{
                'Id': 'id123456',
                'QueueArn': queue_arn,
                'Events': [event],
                'Filter': {
                    'Key': filter_rules
                }
            }]
        }
    )
    config = s3_client.get_bucket_notification_configuration(Bucket=TEST_BUCKET_NAME_WITH_NOTIFICATIONS)
    config = config['QueueConfigurations'][0]
    assert config['Events'] == [event]
    assert filter_rules == config['Filter']['Key']

    # upload file to S3 (this should trigger a notification)
    test_key2 = 'testupload/dir1/testfile.txt'
    test_data2 = b'{"test": "bucket_notification2"}'
    s3_client.upload_fileobj(BytesIO(test_data2), TEST_BUCKET_NAME_WITH_NOTIFICATIONS, test_key2)
    # receive, assert, and delete message from SQS
    receive_assert_delete(queue_url, [{'key': test_key2}, {'name': TEST_BUCKET_NAME_WITH_NOTIFICATIONS}], sqs_client)

    # delete notification config
    _delete_notification_config()

    #
    # Tests s3->sns->sqs notifications
    #
    sns_client = aws_stack.connect_to_service('sns')
    topic_info = sns_client.create_topic(Name=TEST_S3_TOPIC_NAME)

    s3_client.put_bucket_notification_configuration(
        Bucket=TEST_BUCKET_NAME_WITH_NOTIFICATIONS,
        NotificationConfiguration={
            'TopicConfigurations': [
                {
                    'Id': 'id123',
                    'Events': ['s3:ObjectCreated:*'],
                    'TopicArn': topic_info['TopicArn']
                }
            ]
        })

    sns_client.subscribe(TopicArn=topic_info['TopicArn'], Protocol='sqs', Endpoint=queue_arn)

    test_key2 = 'testupload/dir1/testfile.txt'
    test_data2 = b'{"test": "bucket_notification2"}'

    s3_client.upload_fileobj(BytesIO(test_data2), TEST_BUCKET_NAME_WITH_NOTIFICATIONS, test_key2)

    # verify subject and records

    response = sqs_client.receive_message(QueueUrl=queue_url)
    for message in response['Messages']:
        snsObj = json.loads(message['Body'])
        testutil.assert_object({'Subject': 'Amazon S3 Notification'}, snsObj)
        notificationObj = json.loads(snsObj['Message'])
        testutil.assert_objects(
            [
                {'key': test_key2},
                {'name': TEST_BUCKET_NAME_WITH_NOTIFICATIONS}
            ], notificationObj['Records'])

        sqs_client.delete_message(QueueUrl=queue_url, ReceiptHandle=message['ReceiptHandle'])

    _delete_notification_config()
Example #18
def test_bucket_notifications():

    s3_resource = aws_stack.connect_to_resource('s3')
    s3_client = aws_stack.connect_to_service('s3')
    sqs_client = aws_stack.connect_to_service('sqs')

    # create test bucket and queue
    s3_resource.create_bucket(Bucket=TEST_BUCKET_NAME_WITH_NOTIFICATIONS)
    queue_info = sqs_client.create_queue(QueueName=TEST_QUEUE_NAME_FOR_S3)

    # create notification on bucket
    queue_url = queue_info['QueueUrl']
    queue_arn = aws_stack.sqs_queue_arn(TEST_QUEUE_NAME_FOR_S3)
    events = ['s3:ObjectCreated:*', 's3:ObjectRemoved:Delete']
    filter_rules = {
        'FilterRules': [{
            'Name': 'prefix',
            'Value': 'testupload/'
        }, {
            'Name': 'suffix',
            'Value': 'testfile.txt'
        }]
    }
    s3_client.put_bucket_notification_configuration(
        Bucket=TEST_BUCKET_NAME_WITH_NOTIFICATIONS,
        NotificationConfiguration={
            'QueueConfigurations': [{
                'Id': 'id123456',
                'QueueArn': queue_arn,
                'Events': events,
                'Filter': {
                    'Key': filter_rules
                }
            }]
        })

    # retrieve and check notification config
    config = s3_client.get_bucket_notification_configuration(
        Bucket=TEST_BUCKET_NAME_WITH_NOTIFICATIONS)
    config = config['QueueConfigurations'][0]
    assert events == config['Events']
    assert filter_rules == config['Filter']['Key']

    # upload file to S3 (this should NOT trigger a notification)
    test_key1 = '/testdata'
    test_data1 = b'{"test": "bucket_notification1"}'
    s3_client.upload_fileobj(BytesIO(test_data1),
                             TEST_BUCKET_NAME_WITH_NOTIFICATIONS, test_key1)

    # upload file to S3 (this should trigger a notification)
    test_key2 = 'testupload/dir1/testfile.txt'
    test_data2 = b'{"test": "bucket_notification2"}'
    s3_client.upload_fileobj(BytesIO(test_data2),
                             TEST_BUCKET_NAME_WITH_NOTIFICATIONS, test_key2)

    # receive, assert, and delete message from SQS
    receive_assert_delete(queue_url,
                          [{
                              'key': test_key2
                          }, {
                              'name': TEST_BUCKET_NAME_WITH_NOTIFICATIONS
                          }], sqs_client)

    # delete notification config
    _delete_notification_config()

    # put notification config with single event type
    event = 's3:ObjectCreated:*'
    s3_client.put_bucket_notification_configuration(
        Bucket=TEST_BUCKET_NAME_WITH_NOTIFICATIONS,
        NotificationConfiguration={
            'QueueConfigurations': [{
                'Id': 'id123456',
                'QueueArn': queue_arn,
                'Events': [event]
            }]
        })
    config = s3_client.get_bucket_notification_configuration(
        Bucket=TEST_BUCKET_NAME_WITH_NOTIFICATIONS)
    config = config['QueueConfigurations'][0]
    assert config['Events'] == [event]

    # put notification config with single event type and prefix filter
    event = 's3:ObjectCreated:*'
    filter_rules = {
        'FilterRules': [{
            'Name': 'prefix',
            'Value': 'testupload/'
        }]
    }
    s3_client.put_bucket_notification_configuration(
        Bucket=TEST_BUCKET_NAME_WITH_NOTIFICATIONS,
        NotificationConfiguration={
            'QueueConfigurations': [{
                'Id': 'id123456',
                'QueueArn': queue_arn,
                'Events': [event],
                'Filter': {
                    'Key': filter_rules
                }
            }]
        })
    config = s3_client.get_bucket_notification_configuration(
        Bucket=TEST_BUCKET_NAME_WITH_NOTIFICATIONS)
    config = config['QueueConfigurations'][0]
    assert config['Events'] == [event]
    assert filter_rules == config['Filter']['Key']

    # upload file to S3 (this should trigger a notification)
    test_key2 = 'testupload/dir1/testfile.txt'
    test_data2 = b'{"test": "bucket_notification2"}'
    s3_client.upload_fileobj(BytesIO(test_data2),
                             TEST_BUCKET_NAME_WITH_NOTIFICATIONS, test_key2)
    # receive, assert, and delete message from SQS
    receive_assert_delete(queue_url,
                          [{
                              'key': test_key2
                          }, {
                              'name': TEST_BUCKET_NAME_WITH_NOTIFICATIONS
                          }], sqs_client)

    # delete notification config
    _delete_notification_config()

    #
    # Tests s3->sns->sqs notifications
    #
    sns_client = aws_stack.connect_to_service('sns')
    topic_info = sns_client.create_topic(Name=TEST_S3_TOPIC_NAME)

    s3_client.put_bucket_notification_configuration(
        Bucket=TEST_BUCKET_NAME_WITH_NOTIFICATIONS,
        NotificationConfiguration={
            'TopicConfigurations': [{
                'Id': 'id123',
                'Events': ['s3:ObjectCreated:*'],
                'TopicArn': topic_info['TopicArn']
            }]
        })

    sns_client.subscribe(TopicArn=topic_info['TopicArn'],
                         Protocol='sqs',
                         Endpoint=queue_arn)

    test_key2 = 'testupload/dir1/testfile.txt'
    test_data2 = b'{"test": "bucket_notification2"}'

    s3_client.upload_fileobj(BytesIO(test_data2),
                             TEST_BUCKET_NAME_WITH_NOTIFICATIONS, test_key2)

    # verify subject and records

    response = sqs_client.receive_message(QueueUrl=queue_url)
    for message in response['Messages']:
        snsObj = json.loads(message['Body'])
        testutil.assert_object({'Subject': 'Amazon S3 Notification'}, snsObj)
        notificationObj = json.loads(snsObj['Message'])
        testutil.assert_objects([{
            'key': test_key2
        }, {
            'name': TEST_BUCKET_NAME_WITH_NOTIFICATIONS
        }], notificationObj['Records'])

        sqs_client.delete_message(QueueUrl=queue_url,
                                  ReceiptHandle=message['ReceiptHandle'])

    _delete_notification_config()
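Note: both bucket-notification examples call _delete_notification_config() without showing its definition. A minimal sketch, assuming the helper simply clears the bucket's notification configuration (this body is a plausible reconstruction, not the original implementation):

def _delete_notification_config():
    s3_client = aws_stack.connect_to_service('s3')
    # an empty NotificationConfiguration removes all notification settings from the bucket
    s3_client.put_bucket_notification_configuration(
        Bucket=TEST_BUCKET_NAME_WITH_NOTIFICATIONS, NotificationConfiguration={})
    config = s3_client.get_bucket_notification_configuration(
        Bucket=TEST_BUCKET_NAME_WITH_NOTIFICATIONS)
    assert not config.get('QueueConfigurations')
    assert not config.get('TopicConfigurations')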