Example #1
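Publishes a record to a Kinesis stream whose Lambda forwards it to a second stream; the second stream's Lambda processes the payload, and the test asserts that the original data arrives in the S3 bucket.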
def test_kinesis_lambda_forward_chain():
    kinesis = aws_stack.connect_to_service('kinesis')
    s3 = aws_stack.connect_to_service('s3')

    aws_stack.create_kinesis_stream(TEST_CHAIN_STREAM1_NAME, delete=True)
    aws_stack.create_kinesis_stream(TEST_CHAIN_STREAM2_NAME, delete=True)
    s3.create_bucket(Bucket=TEST_BUCKET_NAME)

    # deploy test lambdas connected to Kinesis streams
    zip_file = testutil.create_lambda_archive(load_file(TEST_LAMBDA_PYTHON), get_content=True,
        libs=TEST_LAMBDA_LIBS, runtime=LAMBDA_RUNTIME_PYTHON27)
    testutil.create_lambda_function(func_name=TEST_CHAIN_LAMBDA1_NAME, zip_file=zip_file,
        event_source_arn=get_event_source_arn(TEST_CHAIN_STREAM1_NAME), runtime=LAMBDA_RUNTIME_PYTHON27)
    testutil.create_lambda_function(func_name=TEST_CHAIN_LAMBDA2_NAME, zip_file=zip_file,
        event_source_arn=get_event_source_arn(TEST_CHAIN_STREAM2_NAME), runtime=LAMBDA_RUNTIME_PYTHON27)

    # publish test record
    test_data = {'test_data': 'forward_chain_data_%s' % short_uid()}
    data = clone(test_data)
    data[lambda_integration.MSG_BODY_MESSAGE_TARGET] = 'kinesis:%s' % TEST_CHAIN_STREAM2_NAME
    kinesis.put_record(Data=to_bytes(json.dumps(data)), PartitionKey='testId', StreamName=TEST_CHAIN_STREAM1_NAME)

    # check results
    time.sleep(5)
    all_objects = testutil.list_all_s3_objects()
    testutil.assert_objects(test_data, all_objects)
Example #2
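Tests Kinesis error injection: put_records reports no failures by default, and one failed record after config.KINESIS_ERROR_PROBABILITY is set to 1.0.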
def test_kinesis_error_injection():
    if not do_run():
        return

    kinesis = aws_stack.connect_to_service('kinesis')
    aws_stack.create_kinesis_stream(TEST_STREAM_NAME)

    records = [
        {
            'Data': '0',
            'ExplicitHashKey': '0',
            'PartitionKey': '0'
        }
    ]

    # by default, no errors
    test_no_errors = kinesis.put_records(StreamName=TEST_STREAM_NAME, Records=records)
    assert_equal(test_no_errors['FailedRecordCount'], 0)

    # with a probability of 1, always throw errors
    config.KINESIS_ERROR_PROBABILITY = 1.0
    test_all_errors = kinesis.put_records(StreamName=TEST_STREAM_NAME, Records=records)
    assert_equal(test_all_errors['FailedRecordCount'], 1)

    # reset probability to zero
    config.KINESIS_ERROR_PROBABILITY = 0.0
Example #3
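Helper that enables a DynamoDB stream backed by a Kinesis stream and registers the stream metadata under the table ARN in DDB_STREAMS.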
def add_dynamodb_stream(table_name, view_type='NEW_AND_OLD_IMAGES', enabled=True):
    if enabled:
        # create kinesis stream as a backend
        stream_name = get_kinesis_stream_name(table_name)
        aws_stack.create_kinesis_stream(stream_name)
        stream = {
            'StreamArn': aws_stack.dynamodb_stream_arn(table_name=table_name),
            'TableName': table_name,
            'StreamLabel': 'TODO',
            'StreamStatus': 'ENABLED',
            'KeySchema': [],
            'Shards': []
        }
        table_arn = aws_stack.dynamodb_table_arn(table_name)
        DDB_STREAMS[table_arn] = stream
Example #4
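Creates a Kinesis stream, connects an API Gateway endpoint to it, POSTs a batch of test records through the gateway URL, and asserts that no records fail.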
def test_api_gateway_kinesis_integration():
    # create target Kinesis stream
    aws_stack.create_kinesis_stream(TEST_STREAM_KINESIS_API_GW)

    # create API Gateway and connect it to the target stream
    result = connect_api_gateway_to_kinesis('test_gateway1', TEST_STREAM_KINESIS_API_GW)

    # generate test data
    test_data = {'records': [
        {'data': '{"foo": "bar1"}'},
        {'data': '{"foo": "bar2"}'},
        {'data': '{"foo": "bar3"}'}
    ]}

    url = INBOUND_GATEWAY_URL_PATTERN.format(api_id=result['id'],
        stage_name=TEST_STAGE_NAME, path=API_PATH_DATA_INBOUND)
    result = requests.post(url, data=json.dumps(test_data))
    result = json.loads(to_str(result.content))
    assert result['FailedRecordCount'] == 0
    assert len(result['Records']) == len(test_data['records'])
Example #5
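End-to-end test across DynamoDB Streams, Kinesis, and SNS: a background KCL consumer collects events from table puts/updates (including non-ASCII batch writes), stream records, and SNS messages; CloudWatch Lambda metrics are checked at the end.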
def test_kinesis_lambda_sns_ddb_streams():

    ddb_lease_table_suffix = '-kclapp'
    dynamodb = aws_stack.connect_to_resource('dynamodb')
    dynamodb_service = aws_stack.connect_to_service('dynamodb')
    dynamodbstreams = aws_stack.connect_to_service('dynamodbstreams')
    kinesis = aws_stack.connect_to_service('kinesis')
    sns = aws_stack.connect_to_service('sns')

    LOGGER.info('Creating test streams...')
    run_safe(lambda: dynamodb_service.delete_table(TableName=TEST_STREAM_NAME +
                                                   ddb_lease_table_suffix),
             print_error=False)
    aws_stack.create_kinesis_stream(TEST_STREAM_NAME, delete=True)
    aws_stack.create_kinesis_stream(TEST_LAMBDA_SOURCE_STREAM_NAME)

    # subscribe to inbound Kinesis stream
    def process_records(records, shard_id):
        EVENTS.extend(records)

    # start the KCL client process in the background
    kinesis_connector.listen_to_kinesis(
        TEST_STREAM_NAME,
        listener_func=process_records,
        wait_until_started=True,
        ddb_lease_table_suffix=ddb_lease_table_suffix)

    LOGGER.info('Kinesis consumer initialized.')

    # create table with stream forwarding config
    testutil.create_dynamodb_table(TEST_TABLE_NAME,
                                   partition_key=PARTITION_KEY,
                                   stream_view_type='NEW_AND_OLD_IMAGES')

    # list DDB streams and make sure the table stream is there
    streams = dynamodbstreams.list_streams()
    ddb_event_source_arn = None
    for stream in streams['Streams']:
        if stream['TableName'] == TEST_TABLE_NAME:
            ddb_event_source_arn = stream['StreamArn']
    assert ddb_event_source_arn

    # deploy test lambda connected to DynamoDB Stream
    zip_file = testutil.create_lambda_archive(load_file(TEST_LAMBDA_PYTHON),
                                              get_content=True,
                                              libs=TEST_LAMBDA_LIBS,
                                              runtime=LAMBDA_RUNTIME_PYTHON27)
    testutil.create_lambda_function(func_name=TEST_LAMBDA_NAME_DDB,
                                    zip_file=zip_file,
                                    event_source_arn=ddb_event_source_arn,
                                    runtime=LAMBDA_RUNTIME_PYTHON27)
    # make sure we cannot create Lambda with same name twice
    assert_raises(Exception,
                  testutil.create_lambda_function,
                  func_name=TEST_LAMBDA_NAME_DDB,
                  zip_file=zip_file,
                  event_source_arn=ddb_event_source_arn,
                  runtime=LAMBDA_RUNTIME_PYTHON27)

    # deploy test lambda connected to Kinesis Stream
    kinesis_event_source_arn = kinesis.describe_stream(
        StreamName=TEST_LAMBDA_SOURCE_STREAM_NAME
    )['StreamDescription']['StreamARN']
    testutil.create_lambda_function(func_name=TEST_LAMBDA_NAME_STREAM,
                                    zip_file=zip_file,
                                    event_source_arn=kinesis_event_source_arn,
                                    runtime=LAMBDA_RUNTIME_PYTHON27)

    # put items to table
    num_events_ddb = 15
    num_put_items = 7
    num_batch_items = 3
    num_updates_ddb = num_events_ddb - num_put_items - num_batch_items
    LOGGER.info('Putting %s items to table...' % num_events_ddb)
    table = dynamodb.Table(TEST_TABLE_NAME)
    for i in range(0, num_put_items):
        table.put_item(Item={
            PARTITION_KEY: 'testId%s' % i,
            'data': 'foobar123'
        })
    # batch write some items containing non-ASCII characters
    dynamodb.batch_write_item(
        RequestItems={
            TEST_TABLE_NAME: [{
                'PutRequest': {
                    'Item': {
                        PARTITION_KEY: short_uid(),
                        'data': 'foobar123 ✓'
                    }
                }
            }, {
                'PutRequest': {
                    'Item': {
                        PARTITION_KEY: short_uid(),
                        'data': 'foobar123 £'
                    }
                }
            }, {
                'PutRequest': {
                    'Item': {
                        PARTITION_KEY: short_uid(),
                        'data': 'foobar123 ¢'
                    }
                }
            }]
        })
    # update some items, which also triggers notification events
    for i in range(0, num_updates_ddb):
        dynamodb_service.update_item(
            TableName=TEST_TABLE_NAME,
            Key={PARTITION_KEY: {
                'S': 'testId%s' % i
            }},
            AttributeUpdates={
                'data': {
                    'Action': 'PUT',
                    'Value': {
                        'S': 'foobar123_updated'
                    }
                }
            })

    # put items to stream
    num_events_kinesis = 10
    LOGGER.info('Putting %s items to stream...' % num_events_kinesis)
    kinesis.put_records(Records=[{
        'Data': '{}',
        'PartitionKey': 'testId%s' % i
    } for i in range(0, num_events_kinesis)],
                        StreamName=TEST_LAMBDA_SOURCE_STREAM_NAME)

    # put 1 item to stream that will trigger an error in the Lambda
    kinesis.put_record(Data='{"%s": 1}' %
                       lambda_integration.MSG_BODY_RAISE_ERROR_FLAG,
                       PartitionKey='testIderror',
                       StreamName=TEST_LAMBDA_SOURCE_STREAM_NAME)

    # create SNS topic, connect it to the Lambda, publish test message
    num_events_sns = 3
    response = sns.create_topic(Name=TEST_TOPIC_NAME)
    sns.subscribe(
        TopicArn=response['TopicArn'],
        Protocol='lambda',
        Endpoint=aws_stack.lambda_function_arn(TEST_LAMBDA_NAME_STREAM))
    for i in range(0, num_events_sns):
        sns.publish(TopicArn=response['TopicArn'],
                    Message='test message %s' % i)

    # get latest records
    latest = aws_stack.kinesis_get_latest_records(
        TEST_LAMBDA_SOURCE_STREAM_NAME,
        shard_id='shardId-000000000000',
        count=10)
    assert len(latest) == 10

    LOGGER.info('Waiting some time before finishing test.')
    time.sleep(2)

    num_events = num_events_ddb + num_events_kinesis + num_events_sns

    def check_events():
        if len(EVENTS) != num_events:
            LOGGER.warning(
                ('DynamoDB and Kinesis updates retrieved ' +
                 '(actual/expected): %s/%s') % (len(EVENTS), num_events))
        assert len(EVENTS) == num_events

    # this can take a long time in CI, make sure we give it enough time/retries
    retry(check_events, retries=7, sleep=3)

    # check cloudwatch notifications
    stats1 = get_lambda_metrics(TEST_LAMBDA_NAME_STREAM)
    assert len(stats1['Datapoints']) == 2 + num_events_sns
    stats2 = get_lambda_metrics(TEST_LAMBDA_NAME_STREAM, 'Errors')
    assert len(stats2['Datapoints']) == 1
    stats3 = get_lambda_metrics(TEST_LAMBDA_NAME_DDB)
    assert len(stats3['Datapoints']) == num_events_ddb
Example #6
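Variant of the DDB/Kinesis streams test that additionally deploys a Java Lambda and, when Docker is available, a Node.js Lambda against the same Kinesis source stream.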
def test_kinesis_lambda_ddb_streams():

    env = ENV_DEV
    ddb_lease_table_suffix = '-kclapp'
    dynamodb = aws_stack.connect_to_resource('dynamodb', env=env)
    dynamodb_service = aws_stack.connect_to_service('dynamodb', env=env)
    dynamodbstreams = aws_stack.connect_to_service('dynamodbstreams', env=env)
    kinesis = aws_stack.connect_to_service('kinesis', env=env)

    print('Creating test streams...')
    run_safe(lambda: dynamodb_service.delete_table(TableName=TEST_STREAM_NAME +
                                                   ddb_lease_table_suffix),
             print_error=False)
    aws_stack.create_kinesis_stream(TEST_STREAM_NAME, delete=True)
    aws_stack.create_kinesis_stream(TEST_LAMBDA_SOURCE_STREAM_NAME)

    # subscribe to inbound Kinesis stream
    def process_records(records, shard_id):
        EVENTS.extend(records)

    # start the KCL client process in the background
    kinesis_connector.listen_to_kinesis(
        TEST_STREAM_NAME,
        listener_func=process_records,
        wait_until_started=True,
        ddb_lease_table_suffix=ddb_lease_table_suffix)

    print("Kinesis consumer initialized.")

    # create table with stream forwarding config
    testutil.create_dynamodb_table(TEST_TABLE_NAME,
                                   partition_key=PARTITION_KEY,
                                   env=env,
                                   stream_view_type='NEW_AND_OLD_IMAGES')

    # list DDB streams and make sure the table stream is there
    streams = dynamodbstreams.list_streams()
    ddb_event_source_arn = None
    for stream in streams['Streams']:
        if stream['TableName'] == TEST_TABLE_NAME:
            ddb_event_source_arn = stream['StreamArn']
    assert ddb_event_source_arn

    # deploy test lambda (Python) connected to DynamoDB Stream
    zip_file = testutil.create_lambda_archive(TEST_LAMBDA_PYTHON,
                                              get_content=True,
                                              libs=['localstack'],
                                              runtime=LAMBDA_RUNTIME_PYTHON27)
    testutil.create_lambda_function(func_name=TEST_LAMBDA_NAME_DDB,
                                    zip_file=zip_file,
                                    event_source_arn=ddb_event_source_arn,
                                    runtime=LAMBDA_RUNTIME_PYTHON27)
    # make sure we cannot create Lambda with same name twice
    assert_raises(Exception,
                  testutil.create_lambda_function,
                  func_name=TEST_LAMBDA_NAME_DDB,
                  zip_file=zip_file,
                  event_source_arn=ddb_event_source_arn,
                  runtime=LAMBDA_RUNTIME_PYTHON27)

    # deploy test lambda (Python) connected to Kinesis Stream
    kinesis_event_source_arn = kinesis.describe_stream(
        StreamName=TEST_LAMBDA_SOURCE_STREAM_NAME
    )['StreamDescription']['StreamARN']
    testutil.create_lambda_function(func_name=TEST_LAMBDA_NAME_STREAM_PY,
                                    zip_file=zip_file,
                                    event_source_arn=kinesis_event_source_arn,
                                    runtime=LAMBDA_RUNTIME_PYTHON27)

    # deploy test lambda (Java) connected to Kinesis Stream
    zip_file = testutil.create_zip_file(TEST_LAMBDA_JAVA, get_content=True)
    kinesis_event_source_arn = kinesis.describe_stream(
        StreamName=TEST_LAMBDA_SOURCE_STREAM_NAME
    )['StreamDescription']['StreamARN']
    testutil.create_lambda_function(
        func_name=TEST_LAMBDA_NAME_STREAM_JAVA,
        zip_file=zip_file,
        event_source_arn=kinesis_event_source_arn,
        runtime=LAMBDA_RUNTIME_JAVA8,
        handler='com.atlassian.localstack.sample.KinesisHandler')

    if use_docker():
        # deploy test lambda (Node.js) connected to Kinesis Stream
        zip_file = testutil.create_lambda_archive(
            TEST_LAMBDA_NODEJS,
            get_content=True,
            runtime=LAMBDA_RUNTIME_NODEJS)
        kinesis_event_source_arn = kinesis.describe_stream(
            StreamName=TEST_LAMBDA_SOURCE_STREAM_NAME
        )['StreamDescription']['StreamARN']
        testutil.create_lambda_function(
            func_name=TEST_LAMBDA_NAME_STREAM_JS,
            zip_file=zip_file,
            event_source_arn=kinesis_event_source_arn,
            runtime=LAMBDA_RUNTIME_NODEJS)

    # put items to table
    num_events_ddb = 10
    print('Putting %s items to table...' % num_events_ddb)
    table = dynamodb.Table(TEST_TABLE_NAME)
    for i in range(0, num_events_ddb):
        table.put_item(Item={
            PARTITION_KEY: 'testId%s' % i,
            'data': 'foobar123'
        })

    # put items to stream
    num_events_kinesis = 10
    print('Putting %s items to stream...' % num_events_kinesis)
    kinesis.put_records(Records=[{
        'Data': '{}',
        'PartitionKey': 'testId%s' % i
    } for i in range(0, num_events_kinesis)],
                        StreamName=TEST_LAMBDA_SOURCE_STREAM_NAME)

    # get latest records
    latest = aws_stack.kinesis_get_latest_records(
        TEST_LAMBDA_SOURCE_STREAM_NAME,
        shard_id='shardId-000000000000',
        count=10)
    assert len(latest) == 10

    print("Waiting some time before finishing test.")
    time.sleep(2)

    num_events = num_events_ddb + num_events_kinesis
    print('DynamoDB and Kinesis updates retrieved (actual/expected): %s/%s' %
          (len(EVENTS), num_events))
    assert len(EVENTS) == num_events
Example #7
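Exercises DynamoDB batch writes, deletes, and transact_write_items on a streaming table, then asserts the exact counts and old/new images of the INSERT, MODIFY, and REMOVE events received.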
    def test_lambda_streams_batch_and_transactions(self):
        ddb_lease_table_suffix = "-kclapp2"
        table_name = TEST_TABLE_NAME + "lsbat" + ddb_lease_table_suffix
        stream_name = TEST_STREAM_NAME
        lambda_ddb_name = "lambda-ddb-%s" % short_uid()
        dynamodb = aws_stack.create_external_boto_client("dynamodb",
                                                         client=True)
        dynamodb_service = aws_stack.create_external_boto_client("dynamodb")
        dynamodbstreams = aws_stack.create_external_boto_client(
            "dynamodbstreams")

        LOGGER.info("Creating test streams...")
        run_safe(
            lambda: dynamodb_service.delete_table(TableName=stream_name +
                                                  ddb_lease_table_suffix),
            print_error=False,
        )
        aws_stack.create_kinesis_stream(stream_name, delete=True)

        events = []

        # subscribe to inbound Kinesis stream
        def process_records(records, shard_id):
            events.extend(records)

        # start the KCL client process in the background
        kinesis_connector.listen_to_kinesis(
            stream_name,
            listener_func=process_records,
            wait_until_started=True,
            ddb_lease_table_suffix=ddb_lease_table_suffix,
        )

        LOGGER.info("Kinesis consumer initialized.")

        # create table with stream forwarding config
        aws_stack.create_dynamodb_table(
            table_name,
            partition_key=PARTITION_KEY,
            stream_view_type="NEW_AND_OLD_IMAGES",
        )

        # list DDB streams and make sure the table stream is there
        streams = dynamodbstreams.list_streams()
        ddb_event_source_arn = None
        for stream in streams["Streams"]:
            if stream["TableName"] == table_name:
                ddb_event_source_arn = stream["StreamArn"]
        self.assertTrue(ddb_event_source_arn)

        # deploy test lambda connected to DynamoDB Stream
        testutil.create_lambda_function(
            handler_file=TEST_LAMBDA_PYTHON,
            libs=TEST_LAMBDA_LIBS,
            func_name=lambda_ddb_name,
            event_source_arn=ddb_event_source_arn,
            starting_position="TRIM_HORIZON",
            delete=True,
        )

        # submit a batch with writes
        dynamodb.batch_write_item(
            RequestItems={
                table_name: [
                    {
                        "PutRequest": {
                            "Item": {
                                PARTITION_KEY: {
                                    "S": "testId0"
                                },
                                "data": {
                                    "S": "foobar123"
                                },
                            }
                        }
                    },
                    {
                        "PutRequest": {
                            "Item": {
                                PARTITION_KEY: {
                                    "S": "testId1"
                                },
                                "data": {
                                    "S": "foobar123"
                                },
                            }
                        }
                    },
                    {
                        "PutRequest": {
                            "Item": {
                                PARTITION_KEY: {
                                    "S": "testId2"
                                },
                                "data": {
                                    "S": "foobar123"
                                },
                            }
                        }
                    },
                ]
            })

        # submit a batch with writes and deletes
        dynamodb.batch_write_item(
            RequestItems={
                table_name: [
                    {
                        "PutRequest": {
                            "Item": {
                                PARTITION_KEY: {
                                    "S": "testId3"
                                },
                                "data": {
                                    "S": "foobar123"
                                },
                            }
                        }
                    },
                    {
                        "PutRequest": {
                            "Item": {
                                PARTITION_KEY: {
                                    "S": "testId4"
                                },
                                "data": {
                                    "S": "foobar123"
                                },
                            }
                        }
                    },
                    {
                        "PutRequest": {
                            "Item": {
                                PARTITION_KEY: {
                                    "S": "testId5"
                                },
                                "data": {
                                    "S": "foobar123"
                                },
                            }
                        }
                    },
                    {
                        "DeleteRequest": {
                            "Key": {
                                PARTITION_KEY: {
                                    "S": "testId0"
                                }
                            }
                        }
                    },
                    {
                        "DeleteRequest": {
                            "Key": {
                                PARTITION_KEY: {
                                    "S": "testId1"
                                }
                            }
                        }
                    },
                    {
                        "DeleteRequest": {
                            "Key": {
                                PARTITION_KEY: {
                                    "S": "testId2"
                                }
                            }
                        }
                    },
                ]
            })

        # submit a transaction with writes and delete
        dynamodb.transact_write_items(TransactItems=[
            {
                "Put": {
                    "TableName": table_name,
                    "Item": {
                        PARTITION_KEY: {
                            "S": "testId6"
                        },
                        "data": {
                            "S": "foobar123"
                        },
                    },
                }
            },
            {
                "Put": {
                    "TableName": table_name,
                    "Item": {
                        PARTITION_KEY: {
                            "S": "testId7"
                        },
                        "data": {
                            "S": "foobar123"
                        },
                    },
                }
            },
            {
                "Put": {
                    "TableName": table_name,
                    "Item": {
                        PARTITION_KEY: {
                            "S": "testId8"
                        },
                        "data": {
                            "S": "foobar123"
                        },
                    },
                }
            },
            {
                "Delete": {
                    "TableName": table_name,
                    "Key": {
                        PARTITION_KEY: {
                            "S": "testId3"
                        }
                    },
                }
            },
            {
                "Delete": {
                    "TableName": table_name,
                    "Key": {
                        PARTITION_KEY: {
                            "S": "testId4"
                        }
                    },
                }
            },
            {
                "Delete": {
                    "TableName": table_name,
                    "Key": {
                        PARTITION_KEY: {
                            "S": "testId5"
                        }
                    },
                }
            },
        ])

        # submit a batch with a put over existing item
        dynamodb.transact_write_items(TransactItems=[
            {
                "Put": {
                    "TableName": table_name,
                    "Item": {
                        PARTITION_KEY: {
                            "S": "testId6"
                        },
                        "data": {
                            "S": "foobar123_updated1"
                        },
                    },
                }
            },
        ])

        # submit a transaction with a put over existing item
        dynamodb.transact_write_items(TransactItems=[
            {
                "Put": {
                    "TableName": table_name,
                    "Item": {
                        PARTITION_KEY: {
                            "S": "testId7"
                        },
                        "data": {
                            "S": "foobar123_updated1"
                        },
                    },
                }
            },
        ])

        # submit a transaction with updates
        dynamodb.transact_write_items(TransactItems=[
            {
                "Update": {
                    "TableName": table_name,
                    "Key": {
                        PARTITION_KEY: {
                            "S": "testId6"
                        }
                    },
                    "UpdateExpression": "SET #0 = :0",
                    "ExpressionAttributeNames": {
                        "#0": "data"
                    },
                    "ExpressionAttributeValues": {
                        ":0": {
                            "S": "foobar123_updated2"
                        }
                    },
                }
            },
            {
                "Update": {
                    "TableName": table_name,
                    "Key": {
                        PARTITION_KEY: {
                            "S": "testId7"
                        }
                    },
                    "UpdateExpression": "SET #0 = :0",
                    "ExpressionAttributeNames": {
                        "#0": "data"
                    },
                    "ExpressionAttributeValues": {
                        ":0": {
                            "S": "foobar123_updated2"
                        }
                    },
                }
            },
            {
                "Update": {
                    "TableName": table_name,
                    "Key": {
                        PARTITION_KEY: {
                            "S": "testId8"
                        }
                    },
                    "UpdateExpression": "SET #0 = :0",
                    "ExpressionAttributeNames": {
                        "#0": "data"
                    },
                    "ExpressionAttributeValues": {
                        ":0": {
                            "S": "foobar123_updated2"
                        }
                    },
                }
            },
        ])

        LOGGER.info("Waiting some time before finishing test.")
        time.sleep(2)

        num_insert = 9
        num_modify = 5
        num_delete = 6
        num_events = num_insert + num_modify + num_delete

        def check_events():
            if len(events) != num_events:
                msg = "DynamoDB updates retrieved (actual/expected): %s/%s" % (
                    len(events),
                    num_events,
                )
                LOGGER.warning(msg)
            self.assertEqual(num_events, len(events))
            event_items = [
                json.loads(base64.b64decode(e["data"])) for e in events
            ]
            # make sure we have the right number of expected event types
            inserts = [
                e for e in event_items if e.get("__action_type") == "INSERT"
            ]
            modifies = [
                e for e in event_items if e.get("__action_type") == "MODIFY"
            ]
            removes = [
                e for e in event_items if e.get("__action_type") == "REMOVE"
            ]
            self.assertEqual(num_insert, len(inserts))
            self.assertEqual(num_modify, len(modifies))
            self.assertEqual(num_delete, len(removes))

            # assert that all inserts were received

            for i, event in enumerate(inserts):
                self.assertNotIn("old_image", event)
                item_id = "testId%d" % i
                matching = [
                    i for i in inserts if i["new_image"]["id"] == item_id
                ][0]
                self.assertEqual({
                    "id": item_id,
                    "data": "foobar123"
                }, matching["new_image"])

            # assert that all updates were received

            def assert_updates(expected_updates, modifies):
                def found(update):
                    for modif in modifies:
                        if modif["old_image"]["id"] == update["id"]:
                            self.assertEqual(
                                modif["old_image"],
                                {
                                    "id": update["id"],
                                    "data": update["old"]
                                },
                            )
                            self.assertEqual(
                                modif["new_image"],
                                {
                                    "id": update["id"],
                                    "data": update["new"]
                                },
                            )
                            return True

                for update in expected_updates:
                    self.assertTrue(found(update))

            updates1 = [
                {
                    "id": "testId6",
                    "old": "foobar123",
                    "new": "foobar123_updated1"
                },
                {
                    "id": "testId7",
                    "old": "foobar123",
                    "new": "foobar123_updated1"
                },
            ]
            updates2 = [
                {
                    "id": "testId6",
                    "old": "foobar123_updated1",
                    "new": "foobar123_updated2",
                },
                {
                    "id": "testId7",
                    "old": "foobar123_updated1",
                    "new": "foobar123_updated2",
                },
                {
                    "id": "testId8",
                    "old": "foobar123",
                    "new": "foobar123_updated2"
                },
            ]

            assert_updates(updates1, modifies[:2])
            assert_updates(updates2, modifies[2:])

            # assert that all removes were received

            for i, event in enumerate(removes):
                self.assertNotIn("new_image", event)
                item_id = "testId%d" % i
                matching = [
                    i for i in removes if i["old_image"]["id"] == item_id
                ][0]
                self.assertEqual({
                    "id": item_id,
                    "data": "foobar123"
                }, matching["old_image"])

        # this can take a long time in CI, make sure we give it enough time/retries
        retry(check_events, retries=30, sleep=4)

        # clean up
        testutil.delete_lambda_function(lambda_ddb_name)
Example #8
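Extends the multi-service streams test with an SQS-triggered Lambda, then verifies INSERT/MODIFY event counts and CloudWatch invocation and error metrics.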
    def test_kinesis_lambda_sns_ddb_sqs_streams(self):
        ddb_lease_table_suffix = '-kclapp'
        dynamodb = aws_stack.connect_to_resource('dynamodb')
        dynamodb_service = aws_stack.connect_to_service('dynamodb')
        dynamodbstreams = aws_stack.connect_to_service('dynamodbstreams')
        kinesis = aws_stack.connect_to_service('kinesis')
        sns = aws_stack.connect_to_service('sns')
        sqs = aws_stack.connect_to_service('sqs')

        LOGGER.info('Creating test streams...')
        run_safe(lambda: dynamodb_service.delete_table(
            TableName=TEST_STREAM_NAME + ddb_lease_table_suffix),
                 print_error=False)
        aws_stack.create_kinesis_stream(TEST_STREAM_NAME, delete=True)
        aws_stack.create_kinesis_stream(TEST_LAMBDA_SOURCE_STREAM_NAME)

        # subscribe to inbound Kinesis stream
        def process_records(records, shard_id):
            EVENTS.extend(records)

        # start the KCL client process in the background
        kinesis_connector.listen_to_kinesis(
            TEST_STREAM_NAME,
            listener_func=process_records,
            wait_until_started=True,
            ddb_lease_table_suffix=ddb_lease_table_suffix)

        LOGGER.info('Kinesis consumer initialized.')

        # create table with stream forwarding config
        aws_stack.create_dynamodb_table(TEST_TABLE_NAME,
                                        partition_key=PARTITION_KEY,
                                        stream_view_type='NEW_AND_OLD_IMAGES')

        # list DDB streams and make sure the table stream is there
        streams = dynamodbstreams.list_streams()
        ddb_event_source_arn = None
        for stream in streams['Streams']:
            if stream['TableName'] == TEST_TABLE_NAME:
                ddb_event_source_arn = stream['StreamArn']
        self.assertTrue(ddb_event_source_arn)

        # deploy test lambda connected to DynamoDB Stream
        zip_file = testutil.create_lambda_archive(
            load_file(TEST_LAMBDA_PYTHON),
            get_content=True,
            libs=TEST_LAMBDA_LIBS,
            runtime=LAMBDA_RUNTIME_PYTHON27)
        testutil.create_lambda_function(func_name=TEST_LAMBDA_NAME_DDB,
                                        zip_file=zip_file,
                                        event_source_arn=ddb_event_source_arn,
                                        runtime=LAMBDA_RUNTIME_PYTHON27)
        # make sure we cannot create Lambda with same name twice
        assert_raises(Exception,
                      testutil.create_lambda_function,
                      func_name=TEST_LAMBDA_NAME_DDB,
                      zip_file=zip_file,
                      event_source_arn=ddb_event_source_arn,
                      runtime=LAMBDA_RUNTIME_PYTHON27)

        # deploy test lambda connected to Kinesis Stream
        kinesis_event_source_arn = kinesis.describe_stream(
            StreamName=TEST_LAMBDA_SOURCE_STREAM_NAME
        )['StreamDescription']['StreamARN']
        testutil.create_lambda_function(
            func_name=TEST_LAMBDA_NAME_STREAM,
            zip_file=zip_file,
            event_source_arn=kinesis_event_source_arn,
            runtime=LAMBDA_RUNTIME_PYTHON27)

        # deploy test lambda connected to SQS queue
        sqs_queue_info = testutil.create_sqs_queue(TEST_LAMBDA_NAME_QUEUE)
        testutil.create_lambda_function(
            func_name=TEST_LAMBDA_NAME_QUEUE,
            zip_file=zip_file,
            event_source_arn=sqs_queue_info['QueueArn'],
            runtime=LAMBDA_RUNTIME_PYTHON27)

        # set number of items to update/put to table
        num_events_ddb = 15
        num_put_new_items = 5
        num_put_existing_items = 2
        num_batch_items = 3
        num_updates_ddb = num_events_ddb - num_put_new_items - num_put_existing_items - num_batch_items

        LOGGER.info('Putting %s items to table...' % num_events_ddb)
        table = dynamodb.Table(TEST_TABLE_NAME)
        for i in range(0, num_put_new_items):
            table.put_item(Item={
                PARTITION_KEY: 'testId%s' % i,
                'data': 'foobar123'
            })
        # Put items with an already existing ID (fix https://github.com/localstack/localstack/issues/522)
        for i in range(0, num_put_existing_items):
            table.put_item(Item={
                PARTITION_KEY: 'testId%s' % i,
                'data': 'foobar123_put_existing'
            })

        # batch write some items containing non-ASCII characters
        dynamodb.batch_write_item(
            RequestItems={
                TEST_TABLE_NAME: [{
                    'PutRequest': {
                        'Item': {
                            PARTITION_KEY: short_uid(),
                            'data': 'foobar123 ✓'
                        }
                    }
                }, {
                    'PutRequest': {
                        'Item': {
                            PARTITION_KEY: short_uid(),
                            'data': 'foobar123 £'
                        }
                    }
                }, {
                    'PutRequest': {
                        'Item': {
                            PARTITION_KEY: short_uid(),
                            'data': 'foobar123 ¢'
                        }
                    }
                }]
            })
        # update some items, which also triggers notification events
        for i in range(0, num_updates_ddb):
            dynamodb_service.update_item(
                TableName=TEST_TABLE_NAME,
                Key={PARTITION_KEY: {
                    'S': 'testId%s' % i
                }},
                AttributeUpdates={
                    'data': {
                        'Action': 'PUT',
                        'Value': {
                            'S': 'foobar123_updated'
                        }
                    }
                })

        # put items to stream
        num_events_kinesis = 10
        LOGGER.info('Putting %s items to stream...' % num_events_kinesis)
        kinesis.put_records(Records=[{
            'Data': '{}',
            'PartitionKey': 'testId%s' % i
        } for i in range(0, num_events_kinesis)],
                            StreamName=TEST_LAMBDA_SOURCE_STREAM_NAME)

        # put 1 item to stream that will trigger an error in the Lambda
        kinesis.put_record(Data='{"%s": 1}' %
                           lambda_integration.MSG_BODY_RAISE_ERROR_FLAG,
                           PartitionKey='testIderror',
                           StreamName=TEST_LAMBDA_SOURCE_STREAM_NAME)

        # create SNS topic, connect it to the Lambda, publish test messages
        num_events_sns = 3
        response = sns.create_topic(Name=TEST_TOPIC_NAME)
        sns.subscribe(
            TopicArn=response['TopicArn'],
            Protocol='lambda',
            Endpoint=aws_stack.lambda_function_arn(TEST_LAMBDA_NAME_STREAM))
        for i in range(0, num_events_sns):
            sns.publish(TopicArn=response['TopicArn'],
                        Message='test message %s' % i)

        # get latest records
        latest = aws_stack.kinesis_get_latest_records(
            TEST_LAMBDA_SOURCE_STREAM_NAME,
            shard_id='shardId-000000000000',
            count=10)
        self.assertEqual(len(latest), 10)

        # send messages to SQS queue
        num_events_sqs = 4
        for i in range(num_events_sqs):
            sqs.send_message(QueueUrl=sqs_queue_info['QueueUrl'],
                             MessageBody=str(i))

        LOGGER.info('Waiting some time before finishing test.')
        time.sleep(2)

        num_events_lambda = num_events_ddb + num_events_sns + num_events_sqs
        num_events = num_events_lambda + num_events_kinesis

        def check_events():
            if len(EVENTS) != num_events:
                LOGGER.warning((
                    'DynamoDB and Kinesis updates retrieved (actual/expected): %s/%s'
                ) % (len(EVENTS), num_events))
            self.assertEqual(len(EVENTS), num_events)
            event_items = [
                json.loads(base64.b64decode(e['data'])) for e in EVENTS
            ]
            # make sure we have the right number of INSERT/MODIFY event types
            inserts = [
                e for e in event_items if e.get('__action_type') == 'INSERT'
            ]
            modifies = [
                e for e in event_items if e.get('__action_type') == 'MODIFY'
            ]
            self.assertEqual(len(inserts), num_put_new_items + num_batch_items)
            self.assertEqual(len(modifies),
                             num_put_existing_items + num_updates_ddb)

        # this can take a long time in CI, make sure we give it enough time/retries
        retry(check_events, retries=9, sleep=3)

        # check cloudwatch notifications
        num_invocations = get_lambda_invocations_count(TEST_LAMBDA_NAME_STREAM)
        # TODO: It seems that CloudWatch is currently reporting an incorrect number of
        #   invocations, namely the sum over *all* lambdas, not the single one we're asking for.
        #   Also, we need to bear in mind that Kinesis may perform batch updates, i.e., a single
        #   Lambda invocation may happen with a set of Kinesis records, hence we cannot simply
        #   add num_events_ddb to num_events_lambda above!
        # self.assertEqual(num_invocations, 2 + num_events_lambda)
        self.assertGreater(num_invocations, num_events_sns + num_events_sqs)
        num_error_invocations = get_lambda_invocations_count(
            TEST_LAMBDA_NAME_STREAM, 'Errors')
        self.assertEqual(num_error_invocations, 1)
Example #9
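Small wrapper that creates a Kinesis stream and blocks until it is ready for use.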
def create_kinesis_stream(name, delete=False):
    # create the stream (optionally deleting any existing one first)
    # and wait until it becomes available
    stream = aws_stack.create_kinesis_stream(name, delete=delete)
    stream.wait_for()
Example #10
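Earlier variant of the DDB/Kinesis streams test that builds the Lambda archive from the local lambda_integration.py script and checks that all table and stream updates reach the KCL consumer.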
def test_kinesis_lambda_ddb_streams():

    env = ENV_DEV
    dynamodb = aws_stack.connect_to_resource('dynamodb', env=env)
    dynamodbstreams = aws_stack.connect_to_service('dynamodbstreams', env=env)
    kinesis = aws_stack.connect_to_service('kinesis', env=env)

    print('Creating test streams...')
    aws_stack.create_kinesis_stream(TEST_STREAM_NAME)
    aws_stack.create_kinesis_stream(TEST_LAMBDA_SOURCE_STREAM_NAME)

    # subscribe to inbound Kinesis stream
    def process_records(records, shard_id):
        EVENTS.extend(records)

    # start the KCL client process in the background
    kinesis_connector.listen_to_kinesis(TEST_STREAM_NAME, listener_func=process_records,
        wait_until_started=True)

    print("Kinesis consumer initialized.")

    # create table with stream forwarding config
    testutil.create_dynamodb_table(TEST_TABLE_NAME, partition_key=PARTITION_KEY,
        env=env, stream_view_type='NEW_AND_OLD_IMAGES')

    # list DDB streams and make sure the table stream is there
    streams = dynamodbstreams.list_streams()
    ddb_event_source_arn = None
    for stream in streams['Streams']:
        if stream['TableName'] == TEST_TABLE_NAME:
            ddb_event_source_arn = stream['StreamArn']
    assert ddb_event_source_arn

    # deploy test lambda connected to DynamoDB Stream
    script = load_file(os.path.join(LOCALSTACK_ROOT_FOLDER, 'tests', 'lambdas', 'lambda_integration.py'))
    zip_file = testutil.create_lambda_archive(script, get_content=True)
    testutil.create_lambda_function(func_name=TEST_LAMBDA_NAME,
        zip_file=zip_file, event_source_arn=ddb_event_source_arn)

    # deploy test lambda connected to Kinesis Stream
    kinesis_event_source_arn = kinesis.describe_stream(
        StreamName=TEST_LAMBDA_SOURCE_STREAM_NAME)['StreamDescription']['StreamARN']
    testutil.create_lambda_function(func_name=TEST_LAMBDA_SOURCE_STREAM_NAME,
        zip_file=zip_file, event_source_arn=kinesis_event_source_arn)

    # put items to table
    num_events_ddb = 10
    print('Putting %s items to table...' % num_events_ddb)
    table = dynamodb.Table(TEST_TABLE_NAME)
    for i in range(0, num_events_ddb):
        table.put_item(Item={
            PARTITION_KEY: 'testId%s' % i,
            'data': 'foobar123'
        })

    # put items to stream
    num_events_kinesis = 10
    print('Putting %s items to stream...' % num_events_kinesis)
    kinesis.put_records(
        Records=[
            {
                'Data': '{}',
                'PartitionKey': 'testId%s' % i
            } for i in range(0, num_events_kinesis)
        ],
        StreamName=TEST_LAMBDA_SOURCE_STREAM_NAME
    )

    print("Waiting some time before finishing test.")
    time.sleep(4)

    num_events = num_events_ddb + num_events_kinesis
    print('DynamoDB and Kinesis updates retrieved (actual/expected): %s/%s' % (len(EVENTS), num_events))
    if len(EVENTS) != num_events:
        print('ERROR receiving DynamoDB updates.')
    assert len(EVENTS) == num_events
Example #11
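Same batch-and-transactions scenario as Example #7, written in a more compact style with inline request dictionaries.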
    def test_lambda_streams_batch_and_transactions(self):
        ddb_lease_table_suffix = '-kclapp2'
        table_name = TEST_TABLE_NAME + 'lsbat' + ddb_lease_table_suffix
        stream_name = TEST_STREAM_NAME
        lambda_ddb_name = 'lambda-ddb-%s' % short_uid()
        dynamodb = aws_stack.connect_to_service('dynamodb', client=True)
        dynamodb_service = aws_stack.connect_to_service('dynamodb')
        dynamodbstreams = aws_stack.connect_to_service('dynamodbstreams')

        LOGGER.info('Creating test streams...')
        run_safe(lambda: dynamodb_service.delete_table(
            TableName=stream_name + ddb_lease_table_suffix), print_error=False)
        aws_stack.create_kinesis_stream(stream_name, delete=True)

        events = []

        # subscribe to inbound Kinesis stream
        def process_records(records, shard_id):
            events.extend(records)

        # start the KCL client process in the background
        kinesis_connector.listen_to_kinesis(
            stream_name,
            listener_func=process_records,
            wait_until_started=True,
            ddb_lease_table_suffix=ddb_lease_table_suffix
        )

        LOGGER.info('Kinesis consumer initialized.')

        # create table with stream forwarding config
        aws_stack.create_dynamodb_table(
            table_name,
            partition_key=PARTITION_KEY,
            stream_view_type='NEW_AND_OLD_IMAGES'
        )

        # list DDB streams and make sure the table stream is there
        streams = dynamodbstreams.list_streams()
        ddb_event_source_arn = None
        for stream in streams['Streams']:
            if stream['TableName'] == table_name:
                ddb_event_source_arn = stream['StreamArn']
        self.assertTrue(ddb_event_source_arn)

        # deploy test lambda connected to DynamoDB Stream
        testutil.create_lambda_function(
            handler_file=TEST_LAMBDA_PYTHON, libs=TEST_LAMBDA_LIBS, func_name=lambda_ddb_name,
            event_source_arn=ddb_event_source_arn, delete=True)

        # submit a batch with writes
        dynamodb.batch_write_item(RequestItems={table_name: [
            {'PutRequest': {'Item': {PARTITION_KEY: {'S': 'testId0'}, 'data': {'S': 'foobar123'}}}},
            {'PutRequest': {'Item': {PARTITION_KEY: {'S': 'testId1'}, 'data': {'S': 'foobar123'}}}},
            {'PutRequest': {'Item': {PARTITION_KEY: {'S': 'testId2'}, 'data': {'S': 'foobar123'}}}}
        ]})

        # submit a batch with writes and deletes
        dynamodb.batch_write_item(RequestItems={table_name: [
            {'PutRequest': {'Item': {PARTITION_KEY: {'S': 'testId3'}, 'data': {'S': 'foobar123'}}}},
            {'PutRequest': {'Item': {PARTITION_KEY: {'S': 'testId4'}, 'data': {'S': 'foobar123'}}}},
            {'PutRequest': {'Item': {PARTITION_KEY: {'S': 'testId5'}, 'data': {'S': 'foobar123'}}}},
            {'DeleteRequest': {'Key': {PARTITION_KEY: {'S': 'testId0'}}}},
            {'DeleteRequest': {'Key': {PARTITION_KEY: {'S': 'testId1'}}}},
            {'DeleteRequest': {'Key': {PARTITION_KEY: {'S': 'testId2'}}}},
        ]})

        # submit a transaction with writes and delete
        dynamodb.transact_write_items(TransactItems=[
            {'Put': {'TableName': table_name,
                     'Item': {PARTITION_KEY: {'S': 'testId6'}, 'data': {'S': 'foobar123'}}}},
            {'Put': {'TableName': table_name,
                     'Item': {PARTITION_KEY: {'S': 'testId7'}, 'data': {'S': 'foobar123'}}}},
            {'Put': {'TableName': table_name,
                     'Item': {PARTITION_KEY: {'S': 'testId8'}, 'data': {'S': 'foobar123'}}}},
            {'Delete': {'TableName': table_name, 'Key': {PARTITION_KEY: {'S': 'testId3'}}}},
            {'Delete': {'TableName': table_name, 'Key': {PARTITION_KEY: {'S': 'testId4'}}}},
            {'Delete': {'TableName': table_name, 'Key': {PARTITION_KEY: {'S': 'testId5'}}}},
        ])

        # submit a batch with a put over existing item
        dynamodb.transact_write_items(TransactItems=[
            {
                'Put': {
                    'TableName': table_name,
                    'Item': {PARTITION_KEY: {'S': 'testId6'}, 'data': {'S': 'foobar123_updated1'}}
                }
            },
        ])

        # submit a transaction with a put over existing item
        dynamodb.transact_write_items(TransactItems=[
            {'Put': {'TableName': table_name,
                     'Item': {PARTITION_KEY: {'S': 'testId7'}, 'data': {'S': 'foobar123_updated1'}}}},
        ])

        # submit a transaction with updates
        dynamodb.transact_write_items(TransactItems=[
            {'Update': {'TableName': table_name, 'Key': {PARTITION_KEY: {'S': 'testId6'}},
                        'UpdateExpression': 'SET #0 = :0',
                        'ExpressionAttributeNames': {'#0': 'data'},
                        'ExpressionAttributeValues': {':0': {'S': 'foobar123_updated2'}}}},
            {'Update': {'TableName': table_name, 'Key': {PARTITION_KEY: {'S': 'testId7'}},
                        'UpdateExpression': 'SET #0 = :0',
                        'ExpressionAttributeNames': {'#0': 'data'},
                        'ExpressionAttributeValues': {':0': {'S': 'foobar123_updated2'}}}},
            {'Update': {'TableName': table_name, 'Key': {PARTITION_KEY: {'S': 'testId8'}},
                        'UpdateExpression': 'SET #0 = :0',
                        'ExpressionAttributeNames': {'#0': 'data'},
                        'ExpressionAttributeValues': {':0': {'S': 'foobar123_updated2'}}}},
        ])

        LOGGER.info('Waiting some time before finishing test.')
        time.sleep(2)

        num_insert = 9
        num_modify = 5
        num_delete = 6
        num_events = num_insert + num_modify + num_delete

        def check_events():
            if len(events) != num_events:
                msg = 'DynamoDB updates retrieved (actual/expected): %s/%s' % (len(events), num_events)
                LOGGER.warning(msg)
            self.assertEqual(num_events, len(events))
            event_items = [json.loads(base64.b64decode(e['data'])) for e in events]
            # make sure we have the right number of expected event types
            inserts = [e for e in event_items if e.get('__action_type') == 'INSERT']
            modifies = [e for e in event_items if e.get('__action_type') == 'MODIFY']
            removes = [e for e in event_items if e.get('__action_type') == 'REMOVE']
            self.assertEqual(num_insert, len(inserts))
            self.assertEqual(num_modify, len(modifies))
            self.assertEqual(num_delete, len(removes))

            # assert that all inserts were received

            for i, event in enumerate(inserts):
                self.assertNotIn('old_image', event)
                item_id = 'testId%d' % i
                matching = [i for i in inserts if i['new_image']['id'] == item_id][0]
                self.assertEqual({'id': item_id, 'data': 'foobar123'}, matching['new_image'])

            # assert that all updates were received

            def assert_updates(expected_updates, modifies):
                def found(update):
                    for modif in modifies:
                        if modif['old_image']['id'] == update['id']:
                            self.assertEqual(modif['old_image'], {'id': update['id'], 'data': update['old']})
                            self.assertEqual(modif['new_image'], {'id': update['id'], 'data': update['new']})
                            return True
                for update in expected_updates:
                    self.assertTrue(found(update))

            updates1 = [
                {'id': 'testId6', 'old': 'foobar123', 'new': 'foobar123_updated1'},
                {'id': 'testId7', 'old': 'foobar123', 'new': 'foobar123_updated1'}
            ]
            updates2 = [
                {'id': 'testId6', 'old': 'foobar123_updated1', 'new': 'foobar123_updated2'},
                {'id': 'testId7', 'old': 'foobar123_updated1', 'new': 'foobar123_updated2'},
                {'id': 'testId8', 'old': 'foobar123', 'new': 'foobar123_updated2'}
            ]

            assert_updates(updates1, modifies[:2])
            assert_updates(updates2, modifies[2:])

            # assert that all removes were received

            for i, event in enumerate(removes):
                self.assertNotIn('new_image', event)
                item_id = 'testId%d' % i
                matching = [i for i in removes if i['old_image']['id'] == item_id][0]
                self.assertEqual({'id': item_id, 'data': 'foobar123'}, matching['old_image'])

        # this can take a long time in CI, make sure we give it enough time/retries
        retry(check_events, retries=9, sleep=4)

        # clean up
        testutil.delete_lambda_function(lambda_ddb_name)
Example #12
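Verifies per-shard processing order: two batches of 10 Kinesis records trigger two separate Lambda invocations of 10 records each, with the second invocation starting more than 5 seconds after the first.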
    def test_kinesis_lambda_parallelism(self, lambda_client, kinesis_client):
        function_name = f"lambda_func-{short_uid()}"
        stream_name = f"test-foobar-{short_uid()}"

        testutil.create_lambda_function(
            handler_file=TEST_LAMBDA_PARALLEL_FILE,
            func_name=function_name,
            runtime=LAMBDA_RUNTIME_PYTHON36,
        )

        arn = aws_stack.kinesis_stream_arn(stream_name, account_id="000000000000")

        lambda_client.create_event_source_mapping(EventSourceArn=arn, FunctionName=function_name)

        def process_records(record):
            assert record

        aws_stack.create_kinesis_stream(stream_name, delete=True)
        kinesis_connector.listen_to_kinesis(
            stream_name=stream_name,
            listener_func=process_records,
            wait_until_started=True,
        )

        kinesis = aws_stack.create_external_boto_client("kinesis")
        stream_summary = kinesis.describe_stream_summary(StreamName=stream_name)
        assert 1 == stream_summary["StreamDescriptionSummary"]["OpenShardCount"]
        num_events_kinesis = 10
        # ensure the call returns quickly, i.e. records are processed asynchronously
        start = time.perf_counter()
        kinesis.put_records(
            Records=[
                {"Data": '{"batch": 0}', "PartitionKey": f"test_{i}"}
                for i in range(0, num_events_kinesis)
            ],
            StreamName=stream_name,
        )
        assert (time.perf_counter() - start) < 1  # this should not take more than a second
        kinesis.put_records(
            Records=[
                {"Data": '{"batch": 1}', "PartitionKey": f"test_{i}"}
                for i in range(0, num_events_kinesis)
            ],
            StreamName=stream_name,
        )

        def get_events():
            events = get_lambda_log_events(function_name, regex_filter=r"event.*Records")
            assert len(events) == 2
            return events

        events = retry(get_events, retries=5)

        def assertEvent(event, batch_no):
            assert 10 == len(event["event"]["Records"])

            assert "eventID" in event["event"]["Records"][0]
            assert "eventSourceARN" in event["event"]["Records"][0]
            assert "eventSource" in event["event"]["Records"][0]
            assert "eventVersion" in event["event"]["Records"][0]
            assert "eventName" in event["event"]["Records"][0]
            assert "invokeIdentityArn" in event["event"]["Records"][0]
            assert "awsRegion" in event["event"]["Records"][0]
            assert "kinesis" in event["event"]["Records"][0]

            assert {"batch": batch_no} == json.loads(
                base64.b64decode(event["event"]["Records"][0]["kinesis"]["data"]).decode(
                    config.DEFAULT_ENCODING
                )
            )

        assertEvent(events[0], 0)
        assertEvent(events[1], 1)

        assert (events[1]["executionStart"] - events[0]["executionStart"]) > 5

        # cleanup
        lambda_client.delete_function(FunctionName=function_name)
        kinesis_client.delete_stream(StreamName=stream_name)
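
Note: the tests above poll for eventually consistent results via retry(...). A minimal sketch of such a helper, assuming it simply re-invokes the callable until it stops raising (the actual testutil helper may differ):

import time

def retry(function, retries=3, sleep=2, **kwargs):
    # hypothetical sketch of the retry helper used above: re-invoke
    # `function` until it succeeds or the retries are exhausted
    for attempt in range(retries):
        try:
            return function(**kwargs)
        except Exception:
            if attempt >= retries - 1:
                raise
            time.sleep(sleep)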
Example #13
0
def test_kinesis_lambda_sns_ddb_streams():
    ddb_lease_table_suffix = '-kclapp'
    dynamodb = aws_stack.connect_to_resource('dynamodb')
    dynamodb_service = aws_stack.connect_to_service('dynamodb')
    dynamodbstreams = aws_stack.connect_to_service('dynamodbstreams')
    kinesis = aws_stack.connect_to_service('kinesis')
    sns = aws_stack.connect_to_service('sns')

    LOGGER.info('Creating test streams...')
    # remove any leftover KCL lease table from previous runs
    run_safe(lambda: dynamodb_service.delete_table(
        TableName=TEST_STREAM_NAME + ddb_lease_table_suffix), print_error=False)
    aws_stack.create_kinesis_stream(TEST_STREAM_NAME, delete=True)
    aws_stack.create_kinesis_stream(TEST_LAMBDA_SOURCE_STREAM_NAME)

    # subscribe to inbound Kinesis stream
    def process_records(records, shard_id):
        EVENTS.extend(records)

    # start the KCL client process in the background
    kinesis_connector.listen_to_kinesis(TEST_STREAM_NAME, listener_func=process_records,
        wait_until_started=True, ddb_lease_table_suffix=ddb_lease_table_suffix)

    LOGGER.info('Kinesis consumer initialized.')

    # create table with stream forwarding config
    testutil.create_dynamodb_table(TEST_TABLE_NAME, partition_key=PARTITION_KEY,
        stream_view_type='NEW_AND_OLD_IMAGES')

    # list DDB streams and make sure the table stream is there
    streams = dynamodbstreams.list_streams()
    ddb_event_source_arn = None
    for stream in streams['Streams']:
        if stream['TableName'] == TEST_TABLE_NAME:
            ddb_event_source_arn = stream['StreamArn']
    assert ddb_event_source_arn

    # deploy test lambda connected to DynamoDB Stream
    zip_file = testutil.create_lambda_archive(load_file(TEST_LAMBDA_PYTHON), get_content=True,
        libs=TEST_LAMBDA_LIBS, runtime=LAMBDA_RUNTIME_PYTHON27)
    testutil.create_lambda_function(func_name=TEST_LAMBDA_NAME_DDB,
        zip_file=zip_file, event_source_arn=ddb_event_source_arn, runtime=LAMBDA_RUNTIME_PYTHON27)
    # make sure we cannot create a Lambda with the same name twice
    assert_raises(Exception, testutil.create_lambda_function, func_name=TEST_LAMBDA_NAME_DDB,
        zip_file=zip_file, event_source_arn=ddb_event_source_arn, runtime=LAMBDA_RUNTIME_PYTHON27)

    # deploy test lambda connected to Kinesis Stream
    kinesis_event_source_arn = kinesis.describe_stream(
        StreamName=TEST_LAMBDA_SOURCE_STREAM_NAME)['StreamDescription']['StreamARN']
    testutil.create_lambda_function(func_name=TEST_LAMBDA_NAME_STREAM,
        zip_file=zip_file, event_source_arn=kinesis_event_source_arn, runtime=LAMBDA_RUNTIME_PYTHON27)

    # set number of items to update/put to table
    num_events_ddb = 15
    num_put_new_items = 5
    num_put_existing_items = 2
    num_batch_items = 3
    num_updates_ddb = num_events_ddb - num_put_new_items - num_put_existing_items - num_batch_items

    LOGGER.info('Putting %s items to table...' % num_events_ddb)
    table = dynamodb.Table(TEST_TABLE_NAME)
    for i in range(0, num_put_new_items):
        table.put_item(Item={
            PARTITION_KEY: 'testId%s' % i,
            'data': 'foobar123'
        })
    # Put items with an already existing ID (fix https://github.com/localstack/localstack/issues/522)
    for i in range(0, num_put_existing_items):
        table.put_item(Item={
            PARTITION_KEY: 'testId%s' % i,
            'data': 'foobar123_put_existing'
        })

    # batch write some items containing non-ASCII characters
    dynamodb.batch_write_item(RequestItems={TEST_TABLE_NAME: [
        {'PutRequest': {'Item': {PARTITION_KEY: short_uid(), 'data': 'foobar123 ✓'}}},
        {'PutRequest': {'Item': {PARTITION_KEY: short_uid(), 'data': 'foobar123 £'}}},
        {'PutRequest': {'Item': {PARTITION_KEY: short_uid(), 'data': 'foobar123 ¢'}}}
    ]})
    # update some items, which also triggers notification events
    for i in range(0, num_updates_ddb):
        dynamodb_service.update_item(TableName=TEST_TABLE_NAME,
            Key={PARTITION_KEY: {'S': 'testId%s' % i}},
            AttributeUpdates={'data': {
                'Action': 'PUT',
                'Value': {'S': 'foobar123_updated'}
            }})

    # put items to stream
    num_events_kinesis = 10
    LOGGER.info('Putting %s items to stream...' % num_events_kinesis)
    kinesis.put_records(
        Records=[
            {
                'Data': '{}',
                'PartitionKey': 'testId%s' % i
            } for i in range(0, num_events_kinesis)
        ], StreamName=TEST_LAMBDA_SOURCE_STREAM_NAME
    )

    # put 1 item to stream that will trigger an error in the Lambda
    kinesis.put_record(Data='{"%s": 1}' % lambda_integration.MSG_BODY_RAISE_ERROR_FLAG,
        PartitionKey='testIderror', StreamName=TEST_LAMBDA_SOURCE_STREAM_NAME)

    # create SNS topic, connect it to the Lambda, publish test message
    num_events_sns = 3
    response = sns.create_topic(Name=TEST_TOPIC_NAME)
    sns.subscribe(TopicArn=response['TopicArn'], Protocol='lambda',
        Endpoint=aws_stack.lambda_function_arn(TEST_LAMBDA_NAME_STREAM))
    for i in range(0, num_events_sns):
        sns.publish(TopicArn=response['TopicArn'], Message='test message %s' % i)

    # get latest records
    latest = aws_stack.kinesis_get_latest_records(TEST_LAMBDA_SOURCE_STREAM_NAME,
        shard_id='shardId-000000000000', count=10)
    assert len(latest) == 10

    LOGGER.info('Waiting some time before finishing test.')
    time.sleep(2)

    num_events = num_events_ddb + num_events_kinesis + num_events_sns

    def check_events():
        if len(EVENTS) != num_events:
            LOGGER.warning(('DynamoDB and Kinesis updates retrieved ' +
                '(actual/expected): %s/%s') % (len(EVENTS), num_events))
        assert len(EVENTS) == num_events
        event_items = [json.loads(base64.b64decode(e['data'])) for e in EVENTS]
        inserts = [e for e in event_items if e.get('__action_type') == 'INSERT']
        modifies = [e for e in event_items if e.get('__action_type') == 'MODIFY']
        assert len(inserts) == num_put_new_items + num_batch_items
        assert len(modifies) == num_put_existing_items + num_updates_ddb

    # this can take a long time in CI, make sure we give it enough time/retries
    retry(check_events, retries=7, sleep=3)

    # (the INSERT/MODIFY event type counts are asserted inside check_events above)

    # check CloudWatch metrics
    stats1 = get_lambda_metrics(TEST_LAMBDA_NAME_STREAM)
    # expect 2 Kinesis-triggered invocations (the 10-record batch plus the error
    # record) and one invocation per SNS message
    assert len(stats1['Datapoints']) == 2 + num_events_sns
    stats2 = get_lambda_metrics(TEST_LAMBDA_NAME_STREAM, 'Errors')
    assert len(stats2['Datapoints']) == 1
    stats3 = get_lambda_metrics(TEST_LAMBDA_NAME_DDB)
    assert len(stats3['Datapoints']) == num_events_ddb
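
For reference, a hedged sketch of reading records back from a single Kinesis shard with boto3, roughly what a helper like kinesis_get_latest_records might do (the function below is illustrative, not the actual implementation):

import json

def read_shard_records(kinesis, stream_name, shard_id='shardId-000000000000', limit=10):
    # illustrative only: read the records of one shard from the beginning
    iterator = kinesis.get_shard_iterator(
        StreamName=stream_name, ShardId=shard_id,
        ShardIteratorType='TRIM_HORIZON')['ShardIterator']
    response = kinesis.get_records(ShardIterator=iterator, Limit=limit)
    # boto3 returns each record's 'Data' as raw bytes (base64 already decoded)
    return [json.loads(record['Data']) for record in response['Records']]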
Example #14
0
def start_test(env=ENV_DEV):
    try:
        # setup environment
        if env == ENV_DEV:
            infra.start_infra(async=True)
            time.sleep(6)

        dynamodb = aws_stack.connect_to_resource('dynamodb', env=env)
        dynamodbstreams = aws_stack.connect_to_service('dynamodbstreams', env=env)
        kinesis = aws_stack.connect_to_service('kinesis', env=env)

        print('Creating stream...')
        aws_stack.create_kinesis_stream(TEST_STREAM_NAME)

        # subscribe to inbound Kinesis stream
        def process_records(records, shard_id):
            EVENTS.extend(records)

        # start the KCL client process in the background
        kinesis_connector.listen_to_kinesis(TEST_STREAM_NAME, listener_func=process_records,
            wait_until_started=True)

        print("Kinesis consumer initialized.")

        # create table with stream forwarding config
        create_dynamodb_table(TEST_TABLE_NAME, partition_key=PARTITION_KEY,
            env=env, stream_view_type='NEW_AND_OLD_IMAGES')

        # list streams and make sure the table stream is there
        streams = dynamodbstreams.list_streams()
        event_source_arn = None
        for stream in streams['Streams']:
            if stream['TableName'] == TEST_TABLE_NAME:
                event_source_arn = stream['StreamArn']
        assert event_source_arn

        # deploy test lambda
        script = load_file(os.path.join(LOCALSTACK_ROOT_FOLDER, 'tests', 'lambdas', 'lambda_integration.py'))
        zip_file = create_lambda_archive(script, get_content=True)
        create_lambda_function(func_name=TEST_LAMBDA_NAME, zip_file=zip_file, event_source_arn=event_source_arn)

        # put items to table
        num_events = 10
        print('Putting %s items to table...' % num_events)
        table = dynamodb.Table(TEST_TABLE_NAME)
        for i in range(0, num_events):
            table.put_item(Item={
                PARTITION_KEY: 'testId123',
                'data': 'foobar123'
            })

        print("Waiting some time before finishing test.")
        time.sleep(10)

        print('DynamoDB updates retrieved via Kinesis (actual/expected): %s/%s' % (len(EVENTS), num_events))
        if len(EVENTS) != num_events:
            print('ERROR receiving DynamoDB updates. Running processes:')
            print(run("ps aux | grep 'python\|java\|node'"))
        assert len(EVENTS) == num_events

        print("Test finished successfully")
        cleanup(env=env)

    except KeyboardInterrupt, e:
        infra.KILLED = True
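
The cleanup(env=env) helper is not shown in this example. A hypothetical sketch, assuming it only needs to remove the resources created in start_test (the real helper may tear down more):

def cleanup(env=ENV_DEV):
    # hypothetical sketch: delete the stream and table created above
    kinesis = aws_stack.connect_to_service('kinesis', env=env)
    dynamodb = aws_stack.connect_to_service('dynamodb', env=env)
    kinesis.delete_stream(StreamName=TEST_STREAM_NAME)
    dynamodb.delete_table(TableName=TEST_TABLE_NAME)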