def test_firehose_s3(self):
    s3_resource = aws_stack.connect_to_resource("s3")
    firehose = aws_stack.connect_to_service("firehose")
    s3_prefix = "/testdata"
    test_data = '{"test": "firehose_data_%s"}' % short_uid()

    # create Firehose stream
    stream = firehose.create_delivery_stream(
        DeliveryStreamName=TEST_FIREHOSE_NAME,
        S3DestinationConfiguration={
            "RoleARN": aws_stack.iam_resource_arn("firehose"),
            "BucketARN": aws_stack.s3_bucket_arn(TEST_BUCKET_NAME),
            "Prefix": s3_prefix,
        },
        Tags=TEST_TAGS,
    )
    self.assertTrue(stream)
    self.assertIn(TEST_FIREHOSE_NAME, firehose.list_delivery_streams()["DeliveryStreamNames"])

    tags = firehose.list_tags_for_delivery_stream(DeliveryStreamName=TEST_FIREHOSE_NAME)
    self.assertEqual(TEST_TAGS, tags["Tags"])

    # create target S3 bucket
    s3_resource.create_bucket(Bucket=TEST_BUCKET_NAME)

    # put records
    firehose.put_record(
        DeliveryStreamName=TEST_FIREHOSE_NAME, Record={"Data": to_bytes(test_data)}
    )

    # check records in target bucket
    all_objects = testutil.list_all_s3_objects()
    testutil.assert_objects(json.loads(to_str(test_data)), all_objects)

    # check file layout in target bucket
    all_objects = testutil.map_all_s3_objects(buckets=[TEST_BUCKET_NAME])
    for key in all_objects.keys():
        self.assertRegex(key, r".*/\d{4}/\d{2}/\d{2}/\d{2}/.*\-\d{4}\-\d{2}\-\d{2}\-\d{2}.*")
def test_firehose_s3():
    s3_resource = aws_stack.connect_to_resource('s3')
    firehose = aws_stack.connect_to_service('firehose')
    s3_prefix = '/testdata'
    test_data = '{"test": "firehose_data_%s"}' % short_uid()

    # create Firehose stream
    stream = firehose.create_delivery_stream(
        DeliveryStreamName=TEST_FIREHOSE_NAME,
        S3DestinationConfiguration={
            'RoleARN': aws_stack.iam_resource_arn('firehose'),
            'BucketARN': aws_stack.s3_bucket_arn(TEST_BUCKET_NAME),
            'Prefix': s3_prefix
        }
    )
    assert stream
    assert TEST_FIREHOSE_NAME in firehose.list_delivery_streams()['DeliveryStreamNames']

    # create target S3 bucket
    s3_resource.create_bucket(Bucket=TEST_BUCKET_NAME)

    # put records
    firehose.put_record(
        DeliveryStreamName=TEST_FIREHOSE_NAME,
        Record={
            'Data': to_bytes(test_data)
        }
    )

    # check records in target bucket
    all_objects = testutil.list_all_s3_objects()
    testutil.assert_objects(json.loads(to_str(test_data)), all_objects)
def test_firehose_s3(env=ENV_DEV):
    s3_resource = aws_stack.connect_to_resource('s3', env=env)
    s3_client = aws_stack.connect_to_service('s3', env=env)
    firehose = aws_stack.connect_to_service('firehose', env=env)

    s3_prefix = '/testdata'
    bucket_name = 'test_bucket'
    test_data = b'{"test": "data123"}'

    # create Firehose stream
    stream = firehose.create_delivery_stream(
        DeliveryStreamName=TEST_FIREHOSE_NAME,
        S3DestinationConfiguration={
            'RoleARN': aws_stack.iam_resource_arn('firehose'),
            'BucketARN': aws_stack.s3_bucket_arn(bucket_name),
            'Prefix': s3_prefix
        }
    )

    # create target S3 bucket
    s3_resource.create_bucket(Bucket=bucket_name)

    # put records
    firehose.put_record(
        DeliveryStreamName=TEST_FIREHOSE_NAME,
        Record={
            'Data': test_data
        }
    )

    # check records in target bucket
    all_objects = testutil.list_all_s3_objects()
    testutil.assert_objects(json.loads(test_data), all_objects)
def test_firehose_kinesis_to_s3(self):
    kinesis = aws_stack.connect_to_service('kinesis')
    s3_resource = aws_stack.connect_to_resource('s3')
    firehose = aws_stack.connect_to_service('firehose')

    aws_stack.create_kinesis_stream(TEST_STREAM_NAME, delete=True)

    s3_prefix = '/testdata'
    test_data = '{"test": "firehose_data_%s"}' % short_uid()

    # create Firehose stream
    stream = firehose.create_delivery_stream(
        DeliveryStreamType='KinesisStreamAsSource',
        KinesisStreamSourceConfiguration={
            'RoleARN': aws_stack.iam_resource_arn('firehose'),
            'KinesisStreamARN': aws_stack.kinesis_stream_arn(TEST_STREAM_NAME)
        },
        DeliveryStreamName=TEST_FIREHOSE_NAME,
        S3DestinationConfiguration={
            'RoleARN': aws_stack.iam_resource_arn('firehose'),
            'BucketARN': aws_stack.s3_bucket_arn(TEST_BUCKET_NAME),
            'Prefix': s3_prefix
        }
    )
    self.assertTrue(stream)
    self.assertIn(TEST_FIREHOSE_NAME, firehose.list_delivery_streams()['DeliveryStreamNames'])

    # create target S3 bucket
    s3_resource.create_bucket(Bucket=TEST_BUCKET_NAME)

    # put records
    kinesis.put_record(
        Data=to_bytes(test_data),
        PartitionKey='testId',
        StreamName=TEST_STREAM_NAME
    )
    time.sleep(3)

    # check records in target bucket
    all_objects = testutil.list_all_s3_objects()
    testutil.assert_objects(json.loads(to_str(test_data)), all_objects)
def test_firehose_kinesis_to_s3(self):
    kinesis = aws_stack.create_external_boto_client("kinesis")
    s3_resource = aws_stack.connect_to_resource("s3")
    firehose = aws_stack.create_external_boto_client("firehose")

    aws_stack.create_kinesis_stream(TEST_STREAM_NAME, delete=True)

    s3_prefix = "/testdata"
    test_data = '{"test": "firehose_data_%s"}' % short_uid()

    # create Firehose stream
    stream = firehose.create_delivery_stream(
        DeliveryStreamType="KinesisStreamAsSource",
        KinesisStreamSourceConfiguration={
            "RoleARN": aws_stack.iam_resource_arn("firehose"),
            "KinesisStreamARN": aws_stack.kinesis_stream_arn(TEST_STREAM_NAME),
        },
        DeliveryStreamName=TEST_FIREHOSE_NAME,
        S3DestinationConfiguration={
            "RoleARN": aws_stack.iam_resource_arn("firehose"),
            "BucketARN": aws_stack.s3_bucket_arn(TEST_BUCKET_NAME),
            "Prefix": s3_prefix,
        },
    )
    self.assertTrue(stream)
    self.assertIn(TEST_FIREHOSE_NAME, firehose.list_delivery_streams()["DeliveryStreamNames"])

    # create target S3 bucket
    s3_resource.create_bucket(Bucket=TEST_BUCKET_NAME)

    # put records
    kinesis.put_record(
        Data=to_bytes(test_data), PartitionKey="testId", StreamName=TEST_STREAM_NAME
    )
    time.sleep(3)

    # check records in target bucket
    all_objects = testutil.list_all_s3_objects()
    testutil.assert_objects(json.loads(to_str(test_data)), all_objects)
def test_firehose_s3(self):
    s3_resource = aws_stack.connect_to_resource('s3')
    firehose = aws_stack.connect_to_service('firehose')
    s3_prefix = '/testdata'
    test_data = '{"test": "firehose_data_%s"}' % short_uid()

    # create Firehose stream
    stream = firehose.create_delivery_stream(
        DeliveryStreamName=TEST_FIREHOSE_NAME,
        S3DestinationConfiguration={
            'RoleARN': aws_stack.iam_resource_arn('firehose'),
            'BucketARN': aws_stack.s3_bucket_arn(TEST_BUCKET_NAME),
            'Prefix': s3_prefix
        },
        Tags=TEST_TAGS
    )
    self.assertTrue(stream)
    self.assertIn(TEST_FIREHOSE_NAME, firehose.list_delivery_streams()['DeliveryStreamNames'])

    tags = firehose.list_tags_for_delivery_stream(DeliveryStreamName=TEST_FIREHOSE_NAME)
    self.assertEqual(TEST_TAGS, tags['Tags'])

    # create target S3 bucket
    s3_resource.create_bucket(Bucket=TEST_BUCKET_NAME)

    # put records
    firehose.put_record(
        DeliveryStreamName=TEST_FIREHOSE_NAME,
        Record={
            'Data': to_bytes(test_data)
        }
    )

    # check records in target bucket
    all_objects = testutil.list_all_s3_objects()
    testutil.assert_objects(json.loads(to_str(test_data)), all_objects)

    # check file layout in target bucket
    all_objects = testutil.map_all_s3_objects(buckets=[TEST_BUCKET_NAME])
    for key in all_objects.keys():
        self.assertRegex(key, r'.*/\d{4}/\d{2}/\d{2}/\d{2}/.*\-\d{4}\-\d{2}\-\d{2}\-\d{2}.*')
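# The Firehose tests above reference module-level fixtures that are defined elsewhere in the
# original test module. The assignments below are illustrative placeholders only (hypothetical
# values, not taken from the source) so the snippets can be read in isolation; the TEST_TAGS
# shape follows the Firehose Tags format ([{"Key": ..., "Value": ...}]) that
# list_tags_for_delivery_stream returns under "Tags".
TEST_FIREHOSE_NAME = "test-firehose"  # placeholder delivery stream name
TEST_BUCKET_NAME = "test-target-bucket"  # placeholder S3 destination bucket
TEST_STREAM_NAME = "test-kinesis-stream"  # placeholder Kinesis source stream
TEST_TAGS = [{"Key": "MyTag", "Value": "TestValue"}]  # placeholder tag list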
def test_put_events_with_target_firehose(self):
    s3_bucket = 's3-{}'.format(short_uid())
    s3_prefix = 'testeventdata'
    stream_name = 'firehose-{}'.format(short_uid())
    rule_name = 'rule-{}'.format(short_uid())
    target_id = 'target-{}'.format(short_uid())

    # create firehose target bucket
    s3_client = aws_stack.connect_to_service('s3')
    s3_client.create_bucket(Bucket=s3_bucket)

    # create firehose delivery stream to s3
    firehose_client = aws_stack.connect_to_service('firehose')
    stream = firehose_client.create_delivery_stream(
        DeliveryStreamName=stream_name,
        S3DestinationConfiguration={
            'RoleARN': aws_stack.iam_resource_arn('firehose'),
            'BucketARN': aws_stack.s3_bucket_arn(s3_bucket),
            'Prefix': s3_prefix
        })
    stream_arn = stream['DeliveryStreamARN']

    self.events_client.create_event_bus(Name=TEST_EVENT_BUS_NAME)
    self.events_client.put_rule(
        Name=rule_name,
        EventBusName=TEST_EVENT_BUS_NAME,
        EventPattern=json.dumps(TEST_EVENT_PATTERN))

    rs = self.events_client.put_targets(
        Rule=rule_name,
        EventBusName=TEST_EVENT_BUS_NAME,
        Targets=[{
            'Id': target_id,
            'Arn': stream_arn
        }])

    self.assertIn('FailedEntryCount', rs)
    self.assertIn('FailedEntries', rs)
    self.assertEqual(rs['FailedEntryCount'], 0)
    self.assertEqual(rs['FailedEntries'], [])

    self.events_client.put_events(
        Entries=[{
            'EventBusName': TEST_EVENT_BUS_NAME,
            'Source': TEST_EVENT_PATTERN['Source'],
            'DetailType': TEST_EVENT_PATTERN['DetailType'],
            'Detail': TEST_EVENT_PATTERN['Detail']
        }])

    # run tests
    bucket_contents = s3_client.list_objects(Bucket=s3_bucket)['Contents']
    self.assertEqual(len(bucket_contents), 1)
    key = bucket_contents[0]['Key']
    s3_object = s3_client.get_object(Bucket=s3_bucket, Key=key)
    self.assertEqual((s3_object['Body'].read()).decode(), str(TEST_EVENT_PATTERN['Detail']))

    # clean up
    firehose_client.delete_delivery_stream(DeliveryStreamName=stream_name)
    # empty and delete bucket
    s3_client.delete_object(Bucket=s3_bucket, Key=key)
    s3_client.delete_bucket(Bucket=s3_bucket)

    self.events_client.remove_targets(
        Rule=rule_name, EventBusName=TEST_EVENT_BUS_NAME, Ids=[target_id], Force=True)
    self.events_client.delete_rule(
        Name=rule_name, EventBusName=TEST_EVENT_BUS_NAME, Force=True)
    self.events_client.delete_event_bus(Name=TEST_EVENT_BUS_NAME)
def test_put_events_with_target_firehose(self, events_client, s3_client, firehose_client):
    s3_bucket = "s3-{}".format(short_uid())
    s3_prefix = "testeventdata"
    stream_name = "firehose-{}".format(short_uid())
    rule_name = "rule-{}".format(short_uid())
    target_id = "target-{}".format(short_uid())
    bus_name = "bus-{}".format(short_uid())

    # create firehose target bucket
    s3_client.create_bucket(Bucket=s3_bucket)

    # create firehose delivery stream to s3
    stream = firehose_client.create_delivery_stream(
        DeliveryStreamName=stream_name,
        S3DestinationConfiguration={
            "RoleARN": aws_stack.iam_resource_arn("firehose"),
            "BucketARN": aws_stack.s3_bucket_arn(s3_bucket),
            "Prefix": s3_prefix,
        },
    )
    stream_arn = stream["DeliveryStreamARN"]

    events_client.create_event_bus(Name=bus_name)
    events_client.put_rule(
        Name=rule_name,
        EventBusName=bus_name,
        EventPattern=json.dumps(TEST_EVENT_PATTERN),
    )

    rs = events_client.put_targets(
        Rule=rule_name,
        EventBusName=bus_name,
        Targets=[{"Id": target_id, "Arn": stream_arn}],
    )

    assert "FailedEntryCount" in rs
    assert "FailedEntries" in rs
    assert rs["FailedEntryCount"] == 0
    assert rs["FailedEntries"] == []

    events_client.put_events(
        Entries=[
            {
                "EventBusName": bus_name,
                "Source": TEST_EVENT_PATTERN["source"][0],
                "DetailType": TEST_EVENT_PATTERN["detail-type"][0],
                "Detail": json.dumps(EVENT_DETAIL),
            }
        ]
    )

    # run tests
    bucket_contents = s3_client.list_objects(Bucket=s3_bucket)["Contents"]
    assert len(bucket_contents) == 1
    key = bucket_contents[0]["Key"]
    s3_object = s3_client.get_object(Bucket=s3_bucket, Key=key)

    actual_event = json.loads(s3_object["Body"].read().decode())
    self.assert_valid_event(actual_event)
    assert actual_event["detail"] == EVENT_DETAIL

    # clean up
    firehose_client.delete_delivery_stream(DeliveryStreamName=stream_name)
    # empty and delete bucket
    s3_client.delete_object(Bucket=s3_bucket, Key=key)
    s3_client.delete_bucket(Bucket=s3_bucket)
    self.cleanup(bus_name, rule_name, target_id)
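# Illustrative placeholders (hypothetical, not from the original source) for the EventBridge
# constants used by the two put_events tests above. The later variant indexes
# TEST_EVENT_PATTERN["source"][0] and ["detail-type"][0], so the pattern is assumed to use the
# EventBridge event-pattern shape with list-valued keys; the earlier variant instead reads
# capitalized scalar keys ('Source', 'DetailType', 'Detail') from its own version of the
# constant, along with a TEST_EVENT_BUS_NAME defined at module level.
TEST_EVENT_BUS_NAME = "test-event-bus"  # placeholder bus name for the earlier variant
TEST_EVENT_PATTERN = {
    "source": ["my.test.source"],
    "detail-type": ["MyTestDetailType"],
    "detail": {"command": "update"},
}
EVENT_DETAIL = {"command": "update", "payload": {"id": "123"}}  # placeholder event detail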