def test_kinesis_lambda_forward_chain():
    """Publish a record to stream1 and verify it is forwarded, via two
    chained Lambda functions, through stream2 and finally into S3."""
    kinesis_client = aws_stack.connect_to_service('kinesis')
    s3_client = aws_stack.connect_to_service('s3')
    aws_stack.create_kinesis_stream(TEST_CHAIN_STREAM1_NAME, delete=True)
    aws_stack.create_kinesis_stream(TEST_CHAIN_STREAM2_NAME, delete=True)
    s3_client.create_bucket(Bucket=TEST_BUCKET_NAME)

    # deploy test lambdas connected to Kinesis streams
    zip_file = testutil.create_lambda_archive(
        load_file(TEST_LAMBDA_PYTHON), get_content=True,
        libs=TEST_LAMBDA_LIBS, runtime=LAMBDA_RUNTIME_PYTHON27)
    lambda_sources = (
        (TEST_CHAIN_LAMBDA1_NAME, TEST_CHAIN_STREAM1_NAME),
        (TEST_CHAIN_LAMBDA2_NAME, TEST_CHAIN_STREAM2_NAME),
    )
    for func_name, stream_name in lambda_sources:
        testutil.create_lambda_function(
            func_name=func_name, zip_file=zip_file,
            event_source_arn=get_event_source_arn(stream_name),
            runtime=LAMBDA_RUNTIME_PYTHON27)

    # publish test record, tagged so the first Lambda forwards it to stream2
    test_data = {'test_data': 'forward_chain_data_%s' % short_uid()}
    payload = clone(test_data)
    payload[lambda_integration.MSG_BODY_MESSAGE_TARGET] = 'kinesis:%s' % TEST_CHAIN_STREAM2_NAME
    kinesis_client.put_record(
        Data=to_bytes(json.dumps(payload)),
        PartitionKey='testId',
        StreamName=TEST_CHAIN_STREAM1_NAME)

    # check results (fixed wait for the async chain to complete)
    time.sleep(5)
    all_objects = testutil.list_all_s3_objects()
    testutil.assert_objects(test_data, all_objects)
def test_firehose_s3():
    """Create a Firehose delivery stream targeting S3, put one record,
    and assert it lands in the target bucket."""
    s3_resource = aws_stack.connect_to_resource('s3')
    firehose = aws_stack.connect_to_service('firehose')

    s3_prefix = '/testdata'
    test_data = '{"test": "firehose_data_%s"}' % short_uid()

    # create Firehose stream
    destination_config = {
        'RoleARN': aws_stack.iam_resource_arn('firehose'),
        'BucketARN': aws_stack.s3_bucket_arn(TEST_BUCKET_NAME),
        'Prefix': s3_prefix
    }
    stream = firehose.create_delivery_stream(
        DeliveryStreamName=TEST_FIREHOSE_NAME,
        S3DestinationConfiguration=destination_config
    )
    assert stream
    stream_names = firehose.list_delivery_streams()['DeliveryStreamNames']
    assert TEST_FIREHOSE_NAME in stream_names

    # create target S3 bucket
    s3_resource.create_bucket(Bucket=TEST_BUCKET_NAME)

    # put records
    record = {'Data': to_bytes(test_data)}
    firehose.put_record(DeliveryStreamName=TEST_FIREHOSE_NAME, Record=record)

    # check records in target bucket
    all_objects = testutil.list_all_s3_objects()
    testutil.assert_objects(json.loads(to_str(test_data)), all_objects)
def test_firehose_s3(self):
    """Create a tagged Firehose delivery stream targeting S3, put one record,
    then verify the record content and the time-partitioned key layout."""
    s3_resource = aws_stack.connect_to_resource("s3")
    firehose = aws_stack.connect_to_service("firehose")

    s3_prefix = "/testdata"
    test_data = '{"test": "firehose_data_%s"}' % short_uid()

    # create Firehose stream
    s3_destination = {
        "RoleARN": aws_stack.iam_resource_arn("firehose"),
        "BucketARN": aws_stack.s3_bucket_arn(TEST_BUCKET_NAME),
        "Prefix": s3_prefix,
    }
    stream = firehose.create_delivery_stream(
        DeliveryStreamName=TEST_FIREHOSE_NAME,
        S3DestinationConfiguration=s3_destination,
        Tags=TEST_TAGS,
    )
    self.assertTrue(stream)
    stream_names = firehose.list_delivery_streams()["DeliveryStreamNames"]
    self.assertIn(TEST_FIREHOSE_NAME, stream_names)
    tags = firehose.list_tags_for_delivery_stream(DeliveryStreamName=TEST_FIREHOSE_NAME)
    self.assertEqual(TEST_TAGS, tags["Tags"])

    # create target S3 bucket
    s3_resource.create_bucket(Bucket=TEST_BUCKET_NAME)

    # put records
    firehose.put_record(
        DeliveryStreamName=TEST_FIREHOSE_NAME, Record={"Data": to_bytes(test_data)}
    )

    # check records in target bucket
    all_objects = testutil.list_all_s3_objects()
    testutil.assert_objects(json.loads(to_str(test_data)), all_objects)

    # check file layout in target bucket: keys must carry the yyyy/mm/dd/hh partitioning
    all_objects = testutil.map_all_s3_objects(buckets=[TEST_BUCKET_NAME])
    key_pattern = r".*/\d{4}/\d{2}/\d{2}/\d{2}/.*\-\d{4}\-\d{2}\-\d{2}\-\d{2}.*"
    for object_key in all_objects:
        self.assertRegex(object_key, key_pattern)
def test_firehose_s3(env=ENV_DEV):
    """Create a Firehose delivery stream targeting S3 in the given *env*,
    put one record, and assert it lands in the target bucket.

    :param env: environment descriptor passed through to the aws_stack
        connection helpers (defaults to ENV_DEV).
    """
    s3_resource = aws_stack.connect_to_resource('s3', env=env)
    firehose = aws_stack.connect_to_service('firehose', env=env)
    s3_prefix = '/testdata'
    # NOTE(review): underscores are not valid in real S3 bucket names —
    # presumably accepted by the local test stack; confirm if run against AWS
    bucket_name = 'test_bucket'
    test_data = b'{"test": "data123"}'

    # create Firehose stream
    stream = firehose.create_delivery_stream(
        DeliveryStreamName=TEST_FIREHOSE_NAME,
        S3DestinationConfiguration={
            'RoleARN': aws_stack.iam_resource_arn('firehose'),
            'BucketARN': aws_stack.s3_bucket_arn(bucket_name),
            'Prefix': s3_prefix
        }
    )
    # sibling tests assert stream creation succeeded; do the same here
    assert stream

    # create target S3 bucket
    s3_resource.create_bucket(Bucket=bucket_name)

    # put records
    firehose.put_record(
        DeliveryStreamName=TEST_FIREHOSE_NAME,
        Record={
            'Data': test_data
        }
    )

    # check records in target bucket
    all_objects = testutil.list_all_s3_objects()
    testutil.assert_objects(json.loads(test_data), all_objects)
def test_firehose_kinesis_to_s3(self):
    """Create a Firehose stream sourced from a Kinesis stream and targeting S3,
    put one Kinesis record, and assert it arrives in the target bucket."""
    kinesis = aws_stack.connect_to_service('kinesis')
    s3_resource = aws_stack.connect_to_resource('s3')
    firehose = aws_stack.connect_to_service('firehose')

    aws_stack.create_kinesis_stream(TEST_STREAM_NAME, delete=True)
    s3_prefix = '/testdata'
    test_data = '{"test": "firehose_data_%s"}' % short_uid()

    # create Firehose stream
    source_config = {
        'RoleARN': aws_stack.iam_resource_arn('firehose'),
        'KinesisStreamARN': aws_stack.kinesis_stream_arn(TEST_STREAM_NAME)
    }
    destination_config = {
        'RoleARN': aws_stack.iam_resource_arn('firehose'),
        'BucketARN': aws_stack.s3_bucket_arn(TEST_BUCKET_NAME),
        'Prefix': s3_prefix
    }
    stream = firehose.create_delivery_stream(
        DeliveryStreamType='KinesisStreamAsSource',
        KinesisStreamSourceConfiguration=source_config,
        DeliveryStreamName=TEST_FIREHOSE_NAME,
        S3DestinationConfiguration=destination_config
    )
    self.assertTrue(stream)
    stream_names = firehose.list_delivery_streams()['DeliveryStreamNames']
    self.assertIn(TEST_FIREHOSE_NAME, stream_names)

    # create target S3 bucket
    s3_resource.create_bucket(Bucket=TEST_BUCKET_NAME)

    # put records
    kinesis.put_record(
        Data=to_bytes(test_data),
        PartitionKey='testId',
        StreamName=TEST_STREAM_NAME
    )
    # fixed wait for the async Kinesis -> Firehose -> S3 delivery
    time.sleep(3)

    # check records in target bucket
    all_objects = testutil.list_all_s3_objects()
    testutil.assert_objects(json.loads(to_str(test_data)), all_objects)
def test_firehose_kinesis_to_s3(self):
    """Create a Kinesis-sourced Firehose stream delivering to S3, put one
    record into the Kinesis stream, and assert it shows up in the bucket."""
    kinesis = aws_stack.create_external_boto_client("kinesis")
    s3_resource = aws_stack.connect_to_resource("s3")
    firehose = aws_stack.create_external_boto_client("firehose")

    aws_stack.create_kinesis_stream(TEST_STREAM_NAME, delete=True)
    s3_prefix = "/testdata"
    test_data = '{"test": "firehose_data_%s"}' % short_uid()

    # create Firehose stream
    kinesis_source = {
        "RoleARN": aws_stack.iam_resource_arn("firehose"),
        "KinesisStreamARN": aws_stack.kinesis_stream_arn(TEST_STREAM_NAME),
    }
    s3_destination = {
        "RoleARN": aws_stack.iam_resource_arn("firehose"),
        "BucketARN": aws_stack.s3_bucket_arn(TEST_BUCKET_NAME),
        "Prefix": s3_prefix,
    }
    stream = firehose.create_delivery_stream(
        DeliveryStreamType="KinesisStreamAsSource",
        KinesisStreamSourceConfiguration=kinesis_source,
        DeliveryStreamName=TEST_FIREHOSE_NAME,
        S3DestinationConfiguration=s3_destination,
    )
    self.assertTrue(stream)
    stream_names = firehose.list_delivery_streams()["DeliveryStreamNames"]
    self.assertIn(TEST_FIREHOSE_NAME, stream_names)

    # create target S3 bucket
    s3_resource.create_bucket(Bucket=TEST_BUCKET_NAME)

    # put records
    kinesis.put_record(
        Data=to_bytes(test_data), PartitionKey="testId", StreamName=TEST_STREAM_NAME
    )
    # fixed wait for the async Kinesis -> Firehose -> S3 delivery
    time.sleep(3)

    # check records in target bucket
    all_objects = testutil.list_all_s3_objects()
    testutil.assert_objects(json.loads(to_str(test_data)), all_objects)
def test_firehose_s3(self):
    """Create a tagged Firehose delivery stream targeting S3, put one record,
    then verify the record content, the stream tags, and the time-partitioned
    S3 key layout.

    Fix: ``assertEquals`` and ``assertRegexpMatches`` are deprecated aliases
    that were removed in Python 3.12 — replaced with ``assertEqual`` and
    ``assertRegex`` (consistent with the sibling test variant in this file).
    """
    s3_resource = aws_stack.connect_to_resource('s3')
    firehose = aws_stack.connect_to_service('firehose')

    s3_prefix = '/testdata'
    test_data = '{"test": "firehose_data_%s"}' % short_uid()

    # create Firehose stream
    stream = firehose.create_delivery_stream(
        DeliveryStreamName=TEST_FIREHOSE_NAME,
        S3DestinationConfiguration={
            'RoleARN': aws_stack.iam_resource_arn('firehose'),
            'BucketARN': aws_stack.s3_bucket_arn(TEST_BUCKET_NAME),
            'Prefix': s3_prefix
        },
        Tags=TEST_TAGS
    )
    self.assertTrue(stream)
    self.assertIn(TEST_FIREHOSE_NAME, firehose.list_delivery_streams()['DeliveryStreamNames'])
    tags = firehose.list_tags_for_delivery_stream(DeliveryStreamName=TEST_FIREHOSE_NAME)
    self.assertEqual(TEST_TAGS, tags['Tags'])

    # create target S3 bucket
    s3_resource.create_bucket(Bucket=TEST_BUCKET_NAME)

    # put records
    firehose.put_record(
        DeliveryStreamName=TEST_FIREHOSE_NAME,
        Record={
            'Data': to_bytes(test_data)
        }
    )

    # check records in target bucket
    all_objects = testutil.list_all_s3_objects()
    testutil.assert_objects(json.loads(to_str(test_data)), all_objects)

    # check file layout in target bucket: keys must carry the yyyy/mm/dd/hh partitioning
    all_objects = testutil.map_all_s3_objects(buckets=[TEST_BUCKET_NAME])
    for key in all_objects.keys():
        self.assertRegex(key, r'.*/\d{4}/\d{2}/\d{2}/\d{2}/.*\-\d{4}\-\d{2}\-\d{2}\-\d{2}.*')
def check_results():
    # list every object currently in S3 and assert the expected payload is among them
    objects = testutil.list_all_s3_objects()
    testutil.assert_objects(test_data, objects)
def check_results():
    # list every object currently in S3 and assert the expected payload is among them
    LOGGER.debug("check results")
    objects = testutil.list_all_s3_objects()
    testutil.assert_objects(test_data, objects)