コード例 #1
0
    def setUpClass(cls):
        """Create shared AWS clients and deploy the five test Lambdas once
        for the whole test class."""
        cls.lambda_client = aws_stack.create_external_boto_client("lambda")
        cls.s3_client = aws_stack.create_external_boto_client("s3")
        cls.sfn_client = aws_stack.create_external_boto_client("stepfunctions")

        # Two deployment packages: the env-var test handler (functions 1-4)
        # and the echo handler (function 5).
        zip_file = testutil.create_lambda_archive(load_file(TEST_LAMBDA_ENV), get_content=True)
        zip_file2 = testutil.create_lambda_archive(
            load_file(TEST_LAMBDA_PYTHON_ECHO), get_content=True
        )
        testutil.create_lambda_function(
            func_name=TEST_LAMBDA_NAME_1,
            zip_file=zip_file,
            envvars={"Hello": TEST_RESULT_VALUE},
        )
        testutil.create_lambda_function(
            func_name=TEST_LAMBDA_NAME_2,
            zip_file=zip_file,
            envvars={"Hello": TEST_RESULT_VALUE},
        )
        # Function 3 deliberately starts with a placeholder env value,
        # presumably overwritten by one of the tests -- TODO confirm.
        testutil.create_lambda_function(
            func_name=TEST_LAMBDA_NAME_3,
            zip_file=zip_file,
            envvars={"Hello": "Replace Value"},
        )
        testutil.create_lambda_function(
            func_name=TEST_LAMBDA_NAME_4,
            zip_file=zip_file,
            envvars={"Hello": TEST_RESULT_VALUE},
        )
        testutil.create_lambda_function(func_name=TEST_LAMBDA_NAME_5, zip_file=zip_file2)
コード例 #2
0
def test_kinesis_lambda_forward_chain():
    """End-to-end forwarding chain: a record published to stream 1 is consumed
    by Lambda 1, forwarded to stream 2, consumed by Lambda 2, and finally
    asserted to have landed in the S3 bucket."""
    kinesis = aws_stack.connect_to_service('kinesis')
    s3 = aws_stack.connect_to_service('s3')

    aws_stack.create_kinesis_stream(TEST_CHAIN_STREAM1_NAME, delete=True)
    aws_stack.create_kinesis_stream(TEST_CHAIN_STREAM2_NAME, delete=True)
    s3.create_bucket(Bucket=TEST_BUCKET_NAME)

    # deploy test lambdas connected to Kinesis streams
    zip_file = testutil.create_lambda_archive(load_file(TEST_LAMBDA_PYTHON), get_content=True,
        libs=TEST_LAMBDA_LIBS, runtime=LAMBDA_RUNTIME_PYTHON27)
    testutil.create_lambda_function(func_name=TEST_CHAIN_LAMBDA1_NAME, zip_file=zip_file,
        event_source_arn=get_event_source_arn(TEST_CHAIN_STREAM1_NAME), runtime=LAMBDA_RUNTIME_PYTHON27)
    testutil.create_lambda_function(func_name=TEST_CHAIN_LAMBDA2_NAME, zip_file=zip_file,
        event_source_arn=get_event_source_arn(TEST_CHAIN_STREAM2_NAME), runtime=LAMBDA_RUNTIME_PYTHON27)

    # publish test record; the MSG_BODY_MESSAGE_TARGET attribute presumably
    # tells the test Lambda to forward the message to stream 2 -- TODO confirm
    test_data = {'test_data': 'forward_chain_data_%s' % short_uid()}
    data = clone(test_data)
    data[lambda_integration.MSG_BODY_MESSAGE_TARGET] = 'kinesis:%s' % TEST_CHAIN_STREAM2_NAME
    kinesis.put_record(Data=to_bytes(json.dumps(data)), PartitionKey='testId', StreamName=TEST_CHAIN_STREAM1_NAME)

    # check results
    # NOTE(review): a fixed sleep can be flaky on slow machines; a retry loop
    # (as used elsewhere in this suite) would be more robust
    time.sleep(5)
    all_objects = testutil.list_all_s3_objects()
    testutil.assert_objects(test_data, all_objects)
コード例 #3
0
ファイル: test_sns.py プロジェクト: winewise/localstack
    def test_dead_letter_queue(self):
        """SNS -> Lambda DLQ flow: a failing Lambda invocation triggered via
        an SNS topic must deliver an error message (with RequestID/ErrorCode/
        ErrorMessage attributes) to the configured SQS dead-letter queue."""
        lambda_name = 'test-%s' % short_uid()
        lambda_arn = aws_stack.lambda_function_arn(lambda_name)
        topic_name = 'test-%s' % short_uid()
        topic_arn = self.sns_client.create_topic(Name=topic_name)['TopicArn']
        queue_name = 'test-%s' % short_uid()
        queue_url = self.sqs_client.create_queue(QueueName=queue_name)['QueueUrl']
        queue_arn = aws_stack.sqs_queue_arn(queue_name)

        zip_file = testutil.create_lambda_archive(
            load_file(TEST_LAMBDA_PYTHON), get_content=True, libs=TEST_LAMBDA_LIBS, runtime=LAMBDA_RUNTIME_PYTHON36,
        )
        testutil.create_lambda_function(
            func_name=lambda_name, zip_file=zip_file, runtime=LAMBDA_RUNTIME_PYTHON36,
            DeadLetterConfig={'TargetArn': queue_arn},
        )
        self.sns_client.subscribe(TopicArn=topic_arn, Protocol='lambda', Endpoint=lambda_arn)

        # flag instructs the test Lambda to raise an error when invoked
        payload = {
            lambda_integration.MSG_BODY_RAISE_ERROR_FLAG: 1,
        }
        self.sns_client.publish(TopicArn=topic_arn, Message=json.dumps(payload))

        def receive_dlq():
            result = self.sqs_client.receive_message(QueueUrl=queue_url, MessageAttributeNames=['All'])
            # assert non-emptiness BEFORE indexing: an empty receive omits the
            # 'Messages' key entirely, so the original order raised a
            # KeyError/IndexError instead of a clean, retryable assertion
            messages = result.get('Messages', [])
            self.assertGreater(len(messages), 0)
            msg_attrs = messages[0]['MessageAttributes']
            self.assertIn('RequestID', msg_attrs)
            self.assertIn('ErrorCode', msg_attrs)
            self.assertIn('ErrorMessage', msg_attrs)
        retry(receive_dlq, retries=8, sleep=2)
コード例 #4
0
    def test_large_lambda(self, lambda_client, s3_client, s3_bucket):
        """Create a Lambda whose code is just below the unzipped-size limit,
        deployed from an S3 package; then delete it and verify a second delete
        raises ResourceNotFoundException."""
        function_name = f"test_lambda_{short_uid()}"
        bucket_key = "test_lambda.zip"
        # generated Python source sized just under the service limit
        code_str = generate_sized_python_str(FUNCTION_MAX_UNZIPPED_SIZE - 1000)

        # upload zip file to S3
        zip_file = testutil.create_lambda_archive(
            code_str, get_content=True, runtime=LAMBDA_RUNTIME_PYTHON37
        )
        s3_client.upload_fileobj(BytesIO(zip_file), s3_bucket, bucket_key)

        # create lambda function
        result = lambda_client.create_function(
            FunctionName=function_name,
            Runtime=LAMBDA_RUNTIME_PYTHON37,
            Handler=LAMBDA_DEFAULT_HANDLER,
            Role=LAMBDA_TEST_ROLE,
            Code={"S3Bucket": s3_bucket, "S3Key": bucket_key},
            Timeout=10,
        )

        # the returned ARN must carry the partition matching the client region
        function_arn = result["FunctionArn"]
        assert testutil.response_arn_matches_partition(lambda_client, function_arn)

        # clean up; deleting twice must fail with ResourceNotFoundException
        lambda_client.delete_function(FunctionName=function_name)
        with pytest.raises(Exception) as exc:
            lambda_client.delete_function(FunctionName=function_name)
        exc.match("ResourceNotFoundException")
コード例 #5
0
ファイル: test_lambda.py プロジェクト: ssmbcloud/localstack
    def test_python_lambda_running_in_docker(self):
        """Invoke a Python 3 Lambda via the Docker executor and assert it
        returns an empty JSON object with status 200."""
        # NOTE(review): silently returning makes this test "pass" when Docker
        # is unavailable; an explicit skip would be more visible
        if not use_docker():
            return

        zip_file = testutil.create_lambda_archive(
            load_file(TEST_LAMBDA_PYTHON3),
            get_content=True,
            libs=TEST_LAMBDA_LIBS,
            runtime=LAMBDA_RUNTIME_PYTHON36
        )
        testutil.create_lambda_function(
            func_name=TEST_LAMBDA_NAME_PY3,
            zip_file=zip_file,
            runtime=LAMBDA_RUNTIME_PYTHON36
        )

        result = self.lambda_client.invoke(
            FunctionName=TEST_LAMBDA_NAME_PY3, Payload=b'{}')
        result_data = result['Payload'].read()

        self.assertEqual(result['StatusCode'], 200)
        self.assertEqual(to_str(result_data).strip(), '{}')

        # clean up
        testutil.delete_lambda_function(TEST_LAMBDA_NAME_PY3)
コード例 #6
0
ファイル: test_lambda.py プロジェクト: yamachu/localstack
    def test_dead_letter_queue(self):
        """Lambda DLQ flow: an 'Event'-type invocation that raises an error
        must produce a message (with RequestID/ErrorCode/ErrorMessage
        attributes) on the configured SQS dead-letter queue."""
        sqs_client = aws_stack.connect_to_service('sqs')
        lambda_client = aws_stack.connect_to_service('lambda')

        # create DLQ and Lambda function
        queue_name = 'test-%s' % short_uid()
        lambda_name = 'test-%s' % short_uid()
        queue_url = sqs_client.create_queue(QueueName=queue_name)['QueueUrl']
        queue_arn = aws_stack.sqs_queue_arn(queue_name)
        zip_file = testutil.create_lambda_archive(load_file(TEST_LAMBDA_PYTHON),
            get_content=True, libs=TEST_LAMBDA_LIBS, runtime=LAMBDA_RUNTIME_PYTHON36)
        testutil.create_lambda_function(func_name=lambda_name, zip_file=zip_file,
            runtime=LAMBDA_RUNTIME_PYTHON36, DeadLetterConfig={'TargetArn': queue_arn})

        # invoke Lambda, triggering an error
        payload = {
            lambda_integration.MSG_BODY_RAISE_ERROR_FLAG: 1
        }
        lambda_client.invoke(FunctionName=lambda_name,
            Payload=json.dumps(payload), InvocationType='Event')

        # assert that message has been received on the DLQ
        def receive_dlq():
            result = sqs_client.receive_message(QueueUrl=queue_url, MessageAttributeNames=['All'])
            # an empty receive omits the 'Messages' key entirely; default to []
            # so the retry sees a clean assertion failure, not a KeyError
            messages = result.get('Messages', [])
            self.assertGreater(len(messages), 0)
            msg_attrs = messages[0]['MessageAttributes']
            self.assertIn('RequestID', msg_attrs)
            self.assertIn('ErrorCode', msg_attrs)
            self.assertIn('ErrorMessage', msg_attrs)
        retry(receive_dlq, retries=8, sleep=2)
コード例 #7
0
def test_api_gateway_lambda_proxy_integration_any_method():
    """ANY-method proxy integration: every HTTP verb sent through the gateway
    must reach the Lambda backend, which echoes the method back in its body."""
    # create lambda function
    zip_file = testutil.create_lambda_archive(load_file(TEST_LAMBDA_PYTHON), get_content=True,
        libs=TEST_LAMBDA_LIBS, runtime=LAMBDA_RUNTIME_PYTHON27)
    testutil.create_lambda_function(func_name=TEST_LAMBDA_PROXY_BACKEND_ANY_METHOD,
        zip_file=zip_file, runtime=LAMBDA_RUNTIME_PYTHON27)

    # create API Gateway and connect it to the Lambda proxy backend
    lambda_uri = aws_stack.lambda_function_arn(TEST_LAMBDA_PROXY_BACKEND_ANY_METHOD)
    target_uri = aws_stack.apigateway_invocations_arn(lambda_uri)

    result = connect_api_gateway_to_http_with_lambda_proxy('test_gateway3', target_uri,
        methods=['ANY'],
        path=API_PATH_LAMBDA_PROXY_BACKEND_ANY_METHOD)

    # make test request to gateway and check response
    path = API_PATH_LAMBDA_PROXY_BACKEND_ANY_METHOD.replace('{test_param1}', 'foo1')
    url = INBOUND_GATEWAY_URL_PATTERN.format(api_id=result['id'], stage_name=TEST_STAGE_NAME, path=path)
    data = {}

    for method in ('GET', 'POST', 'PUT', 'PATCH', 'DELETE', 'OPTIONS'):
        # only send a request body for verbs that conventionally carry one
        body = json.dumps(data) if method in ('POST', 'PUT', 'PATCH') else None
        result = getattr(requests, method.lower())(url, data=body)
        assert result.status_code == 200
        parsed_body = json.loads(to_str(result.content))
        assert parsed_body.get('httpMethod') == method
コード例 #8
0
    def test_create_lambda_function(self):
        """create_function must persist KMSKeyArn, VpcConfig and Tags, and
        get_function must return them unchanged."""
        func_name = 'lambda_func-{}'.format(short_uid())
        kms_key_arn = 'arn:aws:kms:us-east-1:000000000000:key11'
        vpc_config = {
            'SubnetIds': ['subnet-123456789'],
            'SecurityGroupIds': ['sg-123456789']
        }
        tags = {'env': 'testing'}

        # full create_function request, including the optional config fields
        # under test (KMS key, VPC config, tags)
        kwargs = {
            'FunctionName': func_name,
            'Runtime': LAMBDA_RUNTIME_PYTHON37,
            'Handler': LAMBDA_DEFAULT_HANDLER,
            'Role': LAMBDA_TEST_ROLE,
            'KMSKeyArn': kms_key_arn,
            'Code': {
                'ZipFile':
                create_lambda_archive(load_file(TEST_LAMBDA_PYTHON_ECHO),
                                      get_content=True)
            },
            'Timeout': 3,
            'VpcConfig': vpc_config,
            'Tags': tags
        }

        client = aws_stack.connect_to_service('lambda')
        client.create_function(**kwargs)

        rs = client.get_function(FunctionName=func_name)

        self.assertEqual(rs['Configuration'].get('KMSKeyArn', ''), kms_key_arn)
        self.assertEqual(rs['Configuration'].get('VpcConfig', {}), vpc_config)
        self.assertEqual(rs['Tags'], tags)

        # clean up
        client.delete_function(FunctionName=func_name)
コード例 #9
0
def test_upload_lambda_from_s3():
    """Deploy a Lambda from a deployment package stored in S3 and verify an
    invocation round-trips its JSON payload."""

    s3_client = aws_stack.connect_to_service('s3')
    lambda_client = aws_stack.connect_to_service('lambda')

    lambda_name = 'test_lambda_%s' % short_uid()
    bucket_name = 'test_bucket_lambda'
    bucket_key = 'test_lambda.zip'

    # upload zip file to S3
    zip_file = testutil.create_lambda_archive(load_file(TEST_LAMBDA_PYTHON),
                                              get_content=True,
                                              libs=TEST_LAMBDA_LIBS,
                                              runtime=LAMBDA_RUNTIME_PYTHON27)
    s3_client.create_bucket(Bucket=bucket_name)
    s3_client.upload_fileobj(BytesIO(zip_file), bucket_name, bucket_key)

    # create lambda function referencing the S3 object instead of ZipFile bytes
    lambda_client.create_function(FunctionName=lambda_name,
                                  Handler='handler.handler',
                                  Runtime=lambda_api.LAMBDA_RUNTIME_PYTHON27,
                                  Role='r1',
                                  Code={
                                      'S3Bucket': bucket_name,
                                      'S3Key': bucket_key
                                  })

    # invoke lambda function; the test handler presumably echoes its input,
    # so output JSON must equal input JSON -- TODO confirm handler behavior
    data_before = b'{"foo": "bar"}'
    result = lambda_client.invoke(FunctionName=lambda_name,
                                  Payload=data_before)
    data_after = result['Payload'].read()
    assert json.loads(to_str(data_before)) == json.loads(to_str(data_after))
コード例 #10
0
    def setUpClass(cls):
        """Create shared service clients and deploy four copies of the Python
        test Lambda, differing only in their environment variables."""
        cls.lambda_client = aws_stack.connect_to_service('lambda')
        cls.s3_client = aws_stack.connect_to_service('s3')
        cls.sfn_client = aws_stack.connect_to_service('stepfunctions')

        # single archive shared by all four functions
        zip_file = testutil.create_lambda_archive(
            load_file(TEST_LAMBDA_PYTHON),
            get_content=True,
            runtime=LAMBDA_RUNTIME_PYTHON36)
        testutil.create_lambda_function(func_name=TEST_LAMBDA_NAME_1,
                                        zip_file=zip_file,
                                        runtime=LAMBDA_RUNTIME_PYTHON36,
                                        envvars={'Hello': TEST_RESULT_VALUE})
        testutil.create_lambda_function(func_name=TEST_LAMBDA_NAME_2,
                                        zip_file=zip_file,
                                        runtime=LAMBDA_RUNTIME_PYTHON36,
                                        envvars={'Hello': TEST_RESULT_VALUE})
        # function 3 starts with a placeholder value, presumably replaced by
        # one of the tests -- TODO confirm
        testutil.create_lambda_function(func_name=TEST_LAMBDA_NAME_3,
                                        zip_file=zip_file,
                                        runtime=LAMBDA_RUNTIME_PYTHON36,
                                        envvars={'Hello': 'Replace Value'})
        testutil.create_lambda_function(func_name=TEST_LAMBDA_NAME_4,
                                        zip_file=zip_file,
                                        runtime=LAMBDA_RUNTIME_PYTHON36,
                                        envvars={'Hello': TEST_RESULT_VALUE})
コード例 #11
0
ファイル: test_lambda.py プロジェクト: bbc/localstack
def test_upload_lambda_from_s3():
    """Deploy a Lambda from a deployment package stored in S3 and verify an
    invocation round-trips its JSON payload."""

    s3_client = aws_stack.connect_to_service('s3')
    lambda_client = aws_stack.connect_to_service('lambda')

    lambda_name = 'test_lambda_%s' % short_uid()
    bucket_name = 'test_bucket_lambda'
    bucket_key = 'test_lambda.zip'

    # upload zip file to S3
    zip_file = testutil.create_lambda_archive(load_file(TEST_LAMBDA_PYTHON), get_content=True,
        libs=TEST_LAMBDA_LIBS, runtime=LAMBDA_RUNTIME_PYTHON27)
    s3_client.create_bucket(Bucket=bucket_name)
    s3_client.upload_fileobj(BytesIO(zip_file), bucket_name, bucket_key)

    # create lambda function referencing the S3 object instead of ZipFile bytes
    lambda_client.create_function(
        FunctionName=lambda_name, Handler='handler.handler',
        Runtime=lambda_api.LAMBDA_RUNTIME_PYTHON27, Role='r1',
        Code={
            'S3Bucket': bucket_name,
            'S3Key': bucket_key
        }
    )

    # invoke lambda function; the test handler presumably echoes its input,
    # so output JSON must equal input JSON -- TODO confirm handler behavior
    data_before = b'{"foo": "bar"}'
    result = lambda_client.invoke(FunctionName=lambda_name, Payload=data_before)
    data_after = result['Payload'].read()
    assert json.loads(to_str(data_before)) == json.loads(to_str(data_after))
コード例 #12
0
ファイル: test_lambda.py プロジェクト: zabawaba99/localstack
def test_lambda_runtimes():
    """Deploy and invoke Lambdas across runtimes (Python, Java, and -- when
    Docker is available -- Node.js), asserting each returns status 200 and an
    empty JSON object."""

    lambda_client = aws_stack.connect_to_service('lambda')

    # deploy and invoke lambda - Python
    zip_file = testutil.create_lambda_archive(load_file(TEST_LAMBDA_PYTHON), get_content=True,
        libs=['localstack'], runtime=LAMBDA_RUNTIME_PYTHON27)
    # (the create_lambda_function return value was previously bound to an
    # unused `response` local; dropped)
    testutil.create_lambda_function(func_name=TEST_LAMBDA_NAME_PY,
        zip_file=zip_file, runtime=LAMBDA_RUNTIME_PYTHON27)
    result = lambda_client.invoke(FunctionName=TEST_LAMBDA_NAME_PY, Payload='{}')
    assert result['StatusCode'] == 200
    result_data = result['Payload'].read()
    assert to_str(result_data).strip() == '{}'

    # deploy and invoke lambda - Java
    zip_file = testutil.create_zip_file(TEST_LAMBDA_JAVA, get_content=True)
    testutil.create_lambda_function(func_name=TEST_LAMBDA_NAME_JAVA, zip_file=zip_file,
        runtime=LAMBDA_RUNTIME_JAVA8, handler='com.atlassian.localstack.sample.LambdaHandler')
    result = lambda_client.invoke(FunctionName=TEST_LAMBDA_NAME_JAVA, Payload='{}')
    assert result['StatusCode'] == 200
    result_data = result['Payload'].read()
    assert to_str(result_data).strip() == '{}'

    if use_docker():
        # deploy and invoke lambda - Node.js (Docker executor only)
        zip_file = testutil.create_zip_file(TEST_LAMBDA_NODEJS, get_content=True)
        testutil.create_lambda_function(func_name=TEST_LAMBDA_NAME_JS,
            zip_file=zip_file, runtime=LAMBDA_RUNTIME_NODEJS)
        result = lambda_client.invoke(FunctionName=TEST_LAMBDA_NAME_JS, Payload='{}')
        assert result['StatusCode'] == 200
        result_data = result['Payload'].read()
        assert to_str(result_data).strip() == '{}'
コード例 #13
0
ファイル: test_lambda.py プロジェクト: lucianthorr/localstack
    def test_lambda_environment(self):
        """Deploy a Lambda with environment variables and assert that (a) the
        invocation result echoes them and (b) get_function_configuration
        reports them unchanged."""
        # renamed from `vars` to avoid shadowing the `vars` builtin
        env_vars = {'Hello': 'World'}
        zip_file = testutil.create_lambda_archive(
            load_file(TEST_LAMBDA_ENV),
            get_content=True,
            libs=TEST_LAMBDA_LIBS,
            runtime=LAMBDA_RUNTIME_PYTHON27)
        testutil.create_lambda_function(func_name=TEST_LAMBDA_NAME_ENV,
                                        zip_file=zip_file,
                                        runtime=LAMBDA_RUNTIME_PYTHON27,
                                        envvars=env_vars)

        # invoke function and assert result contains env vars
        result = self.lambda_client.invoke(FunctionName=TEST_LAMBDA_NAME_ENV,
                                           Payload=b'{}')
        result_data = result['Payload']
        self.assertEqual(result['StatusCode'], 200)
        self.assertDictEqual(json.load(result_data), env_vars)

        # get function config and assert result contains env vars
        result = self.lambda_client.get_function_configuration(
            FunctionName=TEST_LAMBDA_NAME_ENV)
        self.assertEqual(result['Environment'], {'Variables': env_vars})

        # clean up
        testutil.delete_lambda_function(TEST_LAMBDA_NAME_ENV)
コード例 #14
0
    def test_code_updated_on_redeployment(self):
        """update_function_code must take effect on the next invocation rather
        than serving a stale copy of the previously deployed code."""
        # reset executor state first; presumably clears cached function
        # containers from earlier tests -- see LAMBDA_EXECUTOR.cleanup
        lambda_api.LAMBDA_EXECUTOR.cleanup()

        func_name = "test_code_updated_on_redeployment"

        # deploy function for the first time
        testutil.create_lambda_function(
            func_name=func_name,
            handler_file=TEST_LAMBDA_ENV,
            libs=TEST_LAMBDA_LIBS,
            envvars={"Hello": "World"},
        )

        # test first invocation
        result = self.lambda_client.invoke(FunctionName=func_name, Payload=b"{}")
        payload = json.loads(to_str(result["Payload"].read()))

        assert payload["Hello"] == "World"

        # replacement code: a one-line handler returning a fixed dict
        updated_handler = "handler = lambda event, context: {'Hello': 'Elon Musk'}"
        updated_handler = testutil.create_lambda_archive(
            updated_handler, libs=TEST_LAMBDA_LIBS, get_content=True
        )
        self.lambda_client.update_function_code(FunctionName=func_name, ZipFile=updated_handler)

        # second invocation should exec updated lambda code
        result = self.lambda_client.invoke(FunctionName=func_name, Payload=b"{}")
        payload = json.loads(to_str(result["Payload"].read()))

        assert payload["Hello"] == "Elon Musk"
コード例 #15
0
def test_api_gateway_lambda_proxy_integration():
    """Lambda proxy integration: backend-controlled status code, headers and
    path parameters must all be propagated through the gateway."""
    # create lambda function
    zip_file = testutil.create_lambda_archive(load_file(TEST_LAMBDA_PYTHON),
                                              get_content=True,
                                              libs=TEST_LAMBDA_LIBS,
                                              runtime=LAMBDA_RUNTIME_PYTHON27)
    testutil.create_lambda_function(func_name=TEST_LAMBDA_PROXY_BACKEND,
                                    zip_file=zip_file,
                                    runtime=LAMBDA_RUNTIME_PYTHON27)

    # create API Gateway and connect it to the Lambda proxy backend
    lambda_uri = aws_stack.lambda_function_arn(TEST_LAMBDA_PROXY_BACKEND)
    target_uri = 'arn:aws:apigateway:%s:lambda:path/2015-03-31/functions/%s/invocations' % (
        DEFAULT_REGION, lambda_uri)
    result = connect_api_gateway_to_http_with_lambda_proxy(
        'test_gateway2', target_uri, path=API_PATH_LAMBDA_PROXY_BACKEND)

    # make test request to gateway and check response
    path = API_PATH_LAMBDA_PROXY_BACKEND.replace('{test_param1}', 'foo1')
    url = INBOUND_GATEWAY_URL_PATTERN.format(api_id=result['id'],
                                             stage_name=TEST_STAGE_NAME,
                                             path=path)
    # the test Lambda returns the status code / headers requested in the body
    data = {'return_status_code': 203, 'return_headers': {'foo': 'bar123'}}
    result = requests.post(url, data=json.dumps(data))
    assert result.status_code == 203
    assert result.headers.get('foo') == 'bar123'
    parsed_body = json.loads(to_str(result.content))
    assert parsed_body.get('return_status_code') == 203
    assert parsed_body.get('return_headers') == {'foo': 'bar123'}
    assert parsed_body.get('pathParameters') == {'test_param1': 'foo1'}
    # DELETE is expected to 404 -- presumably not among the methods configured
    # by connect_api_gateway_to_http_with_lambda_proxy; TODO confirm
    result = requests.delete(url, data=json.dumps(data))
    assert result.status_code == 404
コード例 #16
0
ファイル: test_integration.py プロジェクト: bbc/localstack
def test_kinesis_lambda_forward_chain():
    """End-to-end forwarding chain: a record published to stream 1 is consumed
    by Lambda 1, forwarded to stream 2, consumed by Lambda 2, and finally
    asserted to have landed in the S3 bucket."""
    kinesis = aws_stack.connect_to_service('kinesis')
    s3 = aws_stack.connect_to_service('s3')

    aws_stack.create_kinesis_stream(TEST_CHAIN_STREAM1_NAME, delete=True)
    aws_stack.create_kinesis_stream(TEST_CHAIN_STREAM2_NAME, delete=True)
    s3.create_bucket(Bucket=TEST_BUCKET_NAME)

    # deploy test lambdas connected to Kinesis streams
    zip_file = testutil.create_lambda_archive(load_file(TEST_LAMBDA_PYTHON), get_content=True,
        libs=TEST_LAMBDA_LIBS, runtime=LAMBDA_RUNTIME_PYTHON27)
    testutil.create_lambda_function(func_name=TEST_CHAIN_LAMBDA1_NAME, zip_file=zip_file,
        event_source_arn=get_event_source_arn(TEST_CHAIN_STREAM1_NAME), runtime=LAMBDA_RUNTIME_PYTHON27)
    testutil.create_lambda_function(func_name=TEST_CHAIN_LAMBDA2_NAME, zip_file=zip_file,
        event_source_arn=get_event_source_arn(TEST_CHAIN_STREAM2_NAME), runtime=LAMBDA_RUNTIME_PYTHON27)

    # publish test record; the MSG_BODY_MESSAGE_TARGET attribute presumably
    # tells the test Lambda to forward the message to stream 2 -- TODO confirm
    test_data = {'test_data': 'forward_chain_data_%s' % short_uid()}
    data = clone(test_data)
    data[lambda_integration.MSG_BODY_MESSAGE_TARGET] = 'kinesis:%s' % TEST_CHAIN_STREAM2_NAME
    kinesis.put_record(Data=to_bytes(json.dumps(data)), PartitionKey='testId', StreamName=TEST_CHAIN_STREAM1_NAME)

    # check results
    # NOTE(review): a fixed sleep can be flaky on slow machines; a retry loop
    # would be more robust
    time.sleep(5)
    all_objects = testutil.list_all_s3_objects()
    testutil.assert_objects(test_data, all_objects)
コード例 #17
0
def test_lambda_runtimes():
    """Deploy and invoke Lambdas across runtimes (Python, Java, and -- when
    Docker is available -- Node.js), asserting each returns status 200 and an
    empty JSON object."""

    lambda_client = aws_stack.connect_to_service('lambda')

    # deploy and invoke lambda - Python
    zip_file = testutil.create_lambda_archive(load_file(TEST_LAMBDA_PYTHON), get_content=True,
        libs=TEST_LAMBDA_LIBS, runtime=LAMBDA_RUNTIME_PYTHON27)
    # (the create_lambda_function return value was previously bound to an
    # unused `response` local; dropped)
    testutil.create_lambda_function(func_name=TEST_LAMBDA_NAME_PY,
        zip_file=zip_file, runtime=LAMBDA_RUNTIME_PYTHON27)
    result = lambda_client.invoke(FunctionName=TEST_LAMBDA_NAME_PY, Payload=b'{}')
    assert result['StatusCode'] == 200
    result_data = result['Payload'].read()
    assert to_str(result_data).strip() == '{}'

    # deploy and invoke lambda - Java; download the sample JAR on first run
    if not os.path.exists(TEST_LAMBDA_JAVA):
        mkdir(os.path.dirname(TEST_LAMBDA_JAVA))
        download(TEST_LAMBDA_JAR_URL, TEST_LAMBDA_JAVA)
    zip_file = testutil.create_zip_file(TEST_LAMBDA_JAVA, get_content=True)
    testutil.create_lambda_function(func_name=TEST_LAMBDA_NAME_JAVA, zip_file=zip_file,
        runtime=LAMBDA_RUNTIME_JAVA8, handler='cloud.localstack.sample.LambdaHandler')
    result = lambda_client.invoke(FunctionName=TEST_LAMBDA_NAME_JAVA, Payload=b'{}')
    assert result['StatusCode'] == 200
    result_data = result['Payload'].read()
    assert to_str(result_data).strip() == '{}'

    if use_docker():
        # deploy and invoke lambda - Node.js (Docker executor only)
        zip_file = testutil.create_zip_file(TEST_LAMBDA_NODEJS, get_content=True)
        testutil.create_lambda_function(func_name=TEST_LAMBDA_NAME_JS,
            zip_file=zip_file, handler='lambda_integration.handler', runtime=LAMBDA_RUNTIME_NODEJS)
        result = lambda_client.invoke(FunctionName=TEST_LAMBDA_NAME_JS, Payload=b'{}')
        assert result['StatusCode'] == 200
        result_data = result['Payload'].read()
        assert to_str(result_data).strip() == '{}'
コード例 #18
0
def test_kinesis_lambda_forward_chain(kinesis_client, s3_client, lambda_client,
                                      create_lambda_function):
    """End-to-end forwarding chain: a record on stream 1 is consumed by
    Lambda 1, forwarded to stream 2, consumed by Lambda 2, and asserted to
    appear among the S3 objects; all resources are cleaned up afterwards."""

    # Initialize the event-source UUIDs up front: if setup fails before they
    # are assigned, the finally block must not raise NameError and mask the
    # original exception.
    lambda_1_event_source_uuid = None
    lambda_2_event_source_uuid = None
    try:
        aws_stack.create_kinesis_stream(TEST_CHAIN_STREAM1_NAME, delete=True)
        aws_stack.create_kinesis_stream(TEST_CHAIN_STREAM2_NAME, delete=True)
        s3_client.create_bucket(Bucket=TEST_BUCKET_NAME)

        # deploy test lambdas connected to Kinesis streams
        zip_file = testutil.create_lambda_archive(
            load_file(TEST_LAMBDA_PYTHON),
            get_content=True,
            libs=TEST_LAMBDA_LIBS)
        lambda_1_resp = create_lambda_function(
            func_name=TEST_CHAIN_LAMBDA1_NAME,
            zip_file=zip_file,
            event_source_arn=get_event_source_arn(TEST_CHAIN_STREAM1_NAME),
            starting_position="TRIM_HORIZON",
        )
        lambda_1_event_source_uuid = lambda_1_resp[
            "CreateEventSourceMappingResponse"]["UUID"]
        lambda_2_resp = create_lambda_function(
            func_name=TEST_CHAIN_LAMBDA2_NAME,
            zip_file=zip_file,
            event_source_arn=get_event_source_arn(TEST_CHAIN_STREAM2_NAME),
            starting_position="TRIM_HORIZON",
        )
        lambda_2_event_source_uuid = lambda_2_resp[
            "CreateEventSourceMappingResponse"]["UUID"]

        # publish test record (with quote characters, to exercise escaping);
        # MSG_BODY_MESSAGE_TARGET presumably tells the test Lambda where to
        # forward the message -- TODO confirm
        test_data = {
            "test_data":
            "forward_chain_data_%s with 'quotes\\\"" % short_uid()
        }
        data = clone(test_data)
        data[lambda_integration.
             MSG_BODY_MESSAGE_TARGET] = "kinesis:%s" % TEST_CHAIN_STREAM2_NAME
        LOGGER.debug("put record")
        kinesis_client.put_record(
            Data=to_bytes(json.dumps(data)),
            PartitionKey="testId",
            StreamName=TEST_CHAIN_STREAM1_NAME,
        )

        def check_results():
            LOGGER.debug("check results")
            all_objects = testutil.list_all_s3_objects()
            testutil.assert_objects(test_data, all_objects)

        # check results
        retry(check_results, retries=10, sleep=3)
    finally:
        # clean up; only delete the mappings that were actually created
        kinesis_client.delete_stream(StreamName=TEST_CHAIN_STREAM1_NAME)
        kinesis_client.delete_stream(StreamName=TEST_CHAIN_STREAM2_NAME)
        if lambda_1_event_source_uuid:
            lambda_client.delete_event_source_mapping(
                UUID=lambda_1_event_source_uuid)
        if lambda_2_event_source_uuid:
            lambda_client.delete_event_source_mapping(
                UUID=lambda_2_event_source_uuid)
コード例 #19
0
 def create_lambda_function(self, fn_name):
     """Deploy the standard Python 2.7 test Lambda under the given name."""
     runtime = LAMBDA_RUNTIME_PYTHON27
     archive = testutil.create_lambda_archive(
         load_file(TEST_LAMBDA_PYTHON), get_content=True,
         libs=TEST_LAMBDA_LIBS, runtime=runtime)
     testutil.create_lambda_function(
         func_name=fn_name, zip_file=archive, runtime=runtime)
コード例 #20
0
 def create_function(cls, file, name, runtime=None, libs=None):
     """Create Lambda function *name* from handler file *file*.

     Falls back to the Python 2.7 runtime when *runtime* is not given.
     """
     selected_runtime = runtime if runtime else LAMBDA_RUNTIME_PYTHON27
     archive = testutil.create_lambda_archive(
         load_file(file), get_content=True,
         libs=libs, runtime=selected_runtime)
     testutil.create_lambda_function(
         func_name=name, zip_file=archive, runtime=selected_runtime)
コード例 #21
0
def test_kinesis_lambda_ddb_streams(env=ENV_DEV):
    """DynamoDB Streams integration: items written to a table with stream
    forwarding enabled must surface as records on the Kinesis stream, which a
    background KCL consumer collects into the module-level EVENTS list."""

    dynamodb = aws_stack.connect_to_resource('dynamodb', env=env)
    dynamodbstreams = aws_stack.connect_to_service('dynamodbstreams', env=env)
    kinesis = aws_stack.connect_to_service('kinesis', env=env)

    print('Creating stream...')
    aws_stack.create_kinesis_stream(TEST_STREAM_NAME)

    # subscribe to inbound Kinesis stream
    def process_records(records, shard_id):
        # accumulate everything received into the module-level EVENTS list
        EVENTS.extend(records)

    # start the KCL client process in the background
    kinesis_connector.listen_to_kinesis(TEST_STREAM_NAME, listener_func=process_records,
        wait_until_started=True)

    print("Kinesis consumer initialized.")

    # create table with stream forwarding config
    testutil.create_dynamodb_table(TEST_TABLE_NAME, partition_key=PARTITION_KEY,
        env=env, stream_view_type='NEW_AND_OLD_IMAGES')

    # list streams and make sure the table stream is there
    streams = dynamodbstreams.list_streams()
    event_source_arn = None
    for stream in streams['Streams']:
        if stream['TableName'] == TEST_TABLE_NAME:
            event_source_arn = stream['StreamArn']
    assert event_source_arn

    # deploy test lambda whose event source is the table's stream
    script = load_file(os.path.join(LOCALSTACK_ROOT_FOLDER, 'tests', 'lambdas', 'lambda_integration.py'))
    zip_file = testutil.create_lambda_archive(script, get_content=True)
    testutil.create_lambda_function(func_name=TEST_LAMBDA_NAME, zip_file=zip_file, event_source_arn=event_source_arn)

    # put items to table
    num_events = 10
    print('Putting %s items to table...' % num_events)
    table = dynamodb.Table(TEST_TABLE_NAME)
    for i in range(0, num_events):
        table.put_item(Item={
            PARTITION_KEY: 'testId123',
            'data': 'foobar123'
        })

    # NOTE(review): fixed sleep; results depend on the KCL consumer keeping up
    print("Waiting some time before finishing test.")
    time.sleep(5)

    print('DynamoDB updates retrieved via Kinesis (actual/expected): %s/%s' % (len(EVENTS), num_events))
    if len(EVENTS) != num_events:
        # dump running processes to help diagnose a stalled consumer
        print('ERROR receiving DynamoDB updates. Running processes:')
        print(run("ps aux | grep 'python\|java\|node'"))
    assert len(EVENTS) == num_events
コード例 #22
0
ファイル: test_lambda.py プロジェクト: ssmbcloud/localstack
    def test_invocation_with_qualifier(self):
        """Publish a Lambda version from an S3-hosted package and invoke it
        through its version qualifier; assert the echoed event, the context
        metadata, and the presence of log output."""
        lambda_name = 'test_lambda_%s' % short_uid()
        bucket_name = 'test_bucket_lambda2'
        bucket_key = 'test_lambda.zip'

        # upload zip file to S3
        zip_file = testutil.create_lambda_archive(
            load_file(TEST_LAMBDA_PYTHON),
            get_content=True,
            libs=TEST_LAMBDA_LIBS,
            runtime=LAMBDA_RUNTIME_PYTHON27
        )
        self.s3_client.create_bucket(Bucket=bucket_name)
        self.s3_client.upload_fileobj(
            BytesIO(zip_file), bucket_name, bucket_key)

        # create lambda function; Publish=True creates a numbered version
        response = self.lambda_client.create_function(
            FunctionName=lambda_name, Handler='handler.handler',
            Runtime=lambda_api.LAMBDA_RUNTIME_PYTHON27, Role='r1',
            Code={
                'S3Bucket': bucket_name,
                'S3Key': bucket_key
            },
            Publish=True
        )

        self.assertIn('Version', response)

        # invoke lambda function via its published version (Qualifier);
        # payload includes quote characters to exercise escaping
        data_before = b'{"foo": "bar with \'quotes\\""}'
        result = self.lambda_client.invoke(
            FunctionName=lambda_name,
            Payload=data_before,
            Qualifier=response['Version']
        )
        data_after = json.loads(result['Payload'].read())
        self.assertEqual(json.loads(to_str(data_before)), data_after['event'])

        # the test handler returns the invocation context alongside the event
        context = data_after['context']
        self.assertEqual(response['Version'], context['function_version'])
        self.assertEqual(lambda_name, context['function_name'])

        # assert that logs are present
        expected = ['Lambda log message - print function']
        if use_docker():
            # Note that during regular test execution, nosetests captures the output from
            # the logging module - hence we can only expect this when running in Docker
            expected.append('.*Lambda log message - logging module')
        self.check_lambda_logs(lambda_name, expected_lines=expected)

        # clean up
        testutil.delete_lambda_function(lambda_name)
コード例 #23
0
ファイル: test_lambda.py プロジェクト: lucianthorr/localstack
    def setUpClass(cls):
        """Create shared service clients and deploy the Python test Lambda
        once for the whole test class."""
        cls.lambda_client = aws_stack.connect_to_service('lambda')
        cls.s3_client = aws_stack.connect_to_service('s3')

        archive = testutil.create_lambda_archive(
            load_file(TEST_LAMBDA_PYTHON), get_content=True,
            libs=TEST_LAMBDA_LIBS, runtime=LAMBDA_RUNTIME_PYTHON27)
        testutil.create_lambda_function(
            func_name=TEST_LAMBDA_NAME_PY, zip_file=archive,
            runtime=LAMBDA_RUNTIME_PYTHON27)
コード例 #24
0
ファイル: test_lambda.py プロジェクト: bbc/localstack
def test_lambda_environment():
    """Deploy a Lambda with an env var and verify the invocation echoes it back."""
    lambda_client = aws_stack.connect_to_service('lambda')

    # package and deploy the environment-echo handler (runs without Docker)
    archive = testutil.create_lambda_archive(
        load_file(TEST_LAMBDA_ENV),
        get_content=True,
        libs=TEST_LAMBDA_LIBS,
        runtime=LAMBDA_RUNTIME_PYTHON27,
    )
    testutil.create_lambda_function(
        func_name=TEST_LAMBDA_NAME_ENV,
        zip_file=archive,
        runtime=LAMBDA_RUNTIME_PYTHON27,
        envvars={'Hello': 'World'},
    )

    # invoke and assert the configured environment comes back as the payload
    response = lambda_client.invoke(FunctionName=TEST_LAMBDA_NAME_ENV, Payload=b'{}')
    assert response['StatusCode'] == 200
    assert json.load(response['Payload']) == {'Hello': 'World'}
コード例 #25
0
def test_lambda_environment():
    """Check that env vars configured on a Lambda are returned by the handler."""
    client = aws_stack.connect_to_service('lambda')

    # deploy and invoke lambda without Docker
    archive = testutil.create_lambda_archive(
        load_file(TEST_LAMBDA_ENV), get_content=True,
        libs=TEST_LAMBDA_LIBS, runtime=LAMBDA_RUNTIME_PYTHON27)
    testutil.create_lambda_function(
        func_name=TEST_LAMBDA_NAME_ENV, zip_file=archive,
        runtime=LAMBDA_RUNTIME_PYTHON27, envvars={'Hello': 'World'})

    response = client.invoke(FunctionName=TEST_LAMBDA_NAME_ENV, Payload=b'{}')
    assert response['StatusCode'] == 200
    assert json.load(response['Payload']) == {'Hello': 'World'}
コード例 #26
0
ファイル: test_lambda.py プロジェクト: yamachu/localstack
    def test_handler_in_submodule(self):
        """A handler nested in a package (abc/def/main.py) must be invokable."""
        func_name = 'lambda-%s' % short_uid()
        # place the handler file inside a nested package within the archive
        archive = testutil.create_lambda_archive(
            load_file(TEST_LAMBDA_PYTHON),
            get_content=True,
            libs=TEST_LAMBDA_LIBS,
            runtime=LAMBDA_RUNTIME_PYTHON36,
            file_name='abc/def/main.py',
        )
        testutil.create_lambda_function(
            func_name=func_name,
            zip_file=archive,
            handler='abc.def.main.handler',
            runtime=LAMBDA_RUNTIME_PYTHON36,
        )

        # invoke the function and assert the echoed event matches the empty payload
        response = self.lambda_client.invoke(FunctionName=func_name, Payload=b'{}')
        payload = json.loads(response['Payload'].read())
        self.assertEqual(response['StatusCode'], 200)
        self.assertEqual(payload['event'], json.loads('{}'))
コード例 #27
0
def setup_and_tear_down():
    """Fixture: deploy the five test Lambdas, yield to the tests, then delete them."""
    env_archive = testutil.create_lambda_archive(load_file(TEST_LAMBDA_ENV),
                                                 get_content=True)
    echo_archive = testutil.create_lambda_archive(
        load_file(TEST_LAMBDA_PYTHON_ECHO), get_content=True)

    # (name, env vars) pairs for the env-based functions, in creation order
    env_functions = (
        (TEST_LAMBDA_NAME_1, {"Hello": TEST_RESULT_VALUE}),
        (TEST_LAMBDA_NAME_2, {"Hello": TEST_RESULT_VALUE}),
        (TEST_LAMBDA_NAME_3, {"Hello": "Replace Value"}),
        (TEST_LAMBDA_NAME_4, {"Hello": TEST_RESULT_VALUE}),
    )
    for name, envvars in env_functions:
        testutil.create_lambda_function(
            func_name=name,
            zip_file=env_archive,
            envvars=envvars,
        )
    # the fifth function uses the echo handler and no environment
    testutil.create_lambda_function(func_name=TEST_LAMBDA_NAME_5,
                                    zip_file=echo_archive)

    yield

    # teardown: remove every function created above
    for name in (TEST_LAMBDA_NAME_1, TEST_LAMBDA_NAME_2, TEST_LAMBDA_NAME_3,
                 TEST_LAMBDA_NAME_4, TEST_LAMBDA_NAME_5):
        testutil.delete_lambda_function(name=name)
コード例 #28
0
    def test_lambda_environment(self):
        """Environment variables passed at creation must be visible to the handler."""
        archive = testutil.create_lambda_archive(
            load_file(TEST_LAMBDA_ENV),
            get_content=True,
            libs=TEST_LAMBDA_LIBS,
            runtime=LAMBDA_RUNTIME_PYTHON27)
        testutil.create_lambda_function(
            func_name=TEST_LAMBDA_NAME_ENV,
            zip_file=archive,
            runtime=LAMBDA_RUNTIME_PYTHON27,
            envvars={'Hello': 'World'})

        # the env-echo handler returns its environment as the payload
        response = self.lambda_client.invoke(
            FunctionName=TEST_LAMBDA_NAME_ENV, Payload=b'{}')
        self.assertEqual(response['StatusCode'], 200)
        self.assertDictEqual(json.load(response['Payload']), {'Hello': 'World'})
コード例 #29
0
    def test_run_lambda(self):
        """A Python 3.6 Lambda must deploy and return HTTP 200 on invocation."""
        archive = testutil.create_lambda_archive(
            load_file(TEST_LAMBDA_PYTHON3),
            get_content=True,
            libs=TEST_LAMBDA_LIBS,
            runtime=LAMBDA_RUNTIME_PYTHON36)
        testutil.create_lambda_function(
            func_name=TEST_LAMBDA_NAME_PY3,
            zip_file=archive,
            runtime=LAMBDA_RUNTIME_PYTHON36)

        # invoke through a freshly-connected client and check the status code
        client = aws_stack.connect_to_service('lambda')
        response = client.invoke(FunctionName=TEST_LAMBDA_NAME_PY3, Payload=b'{}')
        self.assertEqual(response['StatusCode'], 200)
コード例 #30
0
    def test_dead_letter_queue_execution(self):
        """A failing Lambda triggered via SQS must forward the message to the DLQ."""
        lambda_client = aws_stack.connect_to_service('lambda')

        # create an SQS source queue whose redrive policy points at a DLQ
        dlq_name = 'test-%s' % short_uid()
        source_name = 'test-%s' % short_uid()
        dlq_url = self.client.create_queue(QueueName=dlq_name)['QueueUrl']
        dlq_arn = aws_stack.sqs_queue_arn(dlq_name)
        redrive = {'deadLetterTargetArn': dlq_arn, 'maxReceiveCount': 1}
        source_url = self.client.create_queue(
            QueueName=source_name,
            Attributes={'RedrivePolicy': json.dumps(redrive)})['QueueUrl']
        source_arn = aws_stack.sqs_queue_arn(source_name)

        # create the Lambda and wire the source queue to it
        lambda_name = 'test-%s' % short_uid()
        archive = testutil.create_lambda_archive(
            load_file(TEST_LAMBDA_PYTHON),
            get_content=True,
            libs=TEST_LAMBDA_LIBS,
            runtime=LAMBDA_RUNTIME_PYTHON36)
        testutil.create_lambda_function(
            func_name=lambda_name,
            zip_file=archive,
            runtime=LAMBDA_RUNTIME_PYTHON36)
        lambda_client.create_event_source_mapping(
            EventSourceArn=source_arn, FunctionName=lambda_name)

        # send a message that makes the Lambda raise, so SQS redrives it to the DLQ
        body = {lambda_integration.MSG_BODY_RAISE_ERROR_FLAG: 1}
        self.client.send_message(QueueUrl=source_url, MessageBody=json.dumps(body))

        # poll until the failed message (with error metadata) lands on the DLQ
        def receive_dlq():
            received = self.client.receive_message(
                QueueUrl=dlq_url, MessageAttributeNames=['All'])
            self.assertGreater(len(received['Messages']), 0)
            attrs = received['Messages'][0]['MessageAttributes']
            self.assertIn('RequestID', attrs)
            self.assertIn('ErrorCode', attrs)
            self.assertIn('ErrorMessage', attrs)

        retry(receive_dlq, retries=8, sleep=2)
コード例 #31
0
ファイル: test_lambda.py プロジェクト: ssmbcloud/localstack
    def test_destroy_idle_containers(self):
        """Verify the container-reuse executor reaps containers once they are idle."""
        # run these tests only for the "reuse containers" Lambda executor
        if not isinstance(lambda_api.LAMBDA_EXECUTOR,
                          lambda_executors.LambdaExecutorReuseContainers):
            return

        executor = lambda_api.LAMBDA_EXECUTOR
        func_name = 'test_destroy_idle_containers'
        func_arn = lambda_api.func_arn(func_name)

        # make sure existing containers are gone
        executor.destroy_existing_docker_containers()
        self.assertEqual(len(executor.get_all_container_names()), 0)

        # deploy and invoke lambda without Docker
        zip_file = testutil.create_lambda_archive(
            load_file(TEST_LAMBDA_ENV),
            get_content=True,
            libs=TEST_LAMBDA_LIBS,
            runtime=LAMBDA_RUNTIME_PYTHON27
        )
        testutil.create_lambda_function(
            func_name=func_name,
            zip_file=zip_file,
            runtime=LAMBDA_RUNTIME_PYTHON27,
            envvars={'Hello': 'World'}
        )

        # creating the function alone must not spawn a container yet
        self.assertEqual(len(executor.get_all_container_names()), 0)

        # the first invocation lazily starts exactly one container
        self.lambda_client.invoke(FunctionName=func_name, Payload=b'{}')
        self.assertEqual(len(executor.get_all_container_names()), 1)

        # try to destroy idle containers.
        # the container was just used, so it must survive the idle sweep
        executor.idle_container_destroyer()
        self.assertEqual(len(executor.get_all_container_names()), 1)

        # simulate an idle container
        # NOTE(review): time.time() is in seconds while the constant's name says
        # milliseconds — assumes function_invoke_times uses the same unit as the
        # constant; TODO confirm against lambda_executors
        executor.function_invoke_times[func_arn] = time.time() - lambda_executors.MAX_CONTAINER_IDLE_TIME_MS
        executor.idle_container_destroyer()
        self.assertEqual(len(executor.get_all_container_names()), 0)

        # clean up
        testutil.delete_lambda_function(func_name)
コード例 #32
0
    def test_kinesis_lambda_forward_chain(self):
        """Records must flow stream 1 -> Lambda 1 -> stream 2 -> Lambda 2 -> S3."""
        kinesis = aws_stack.connect_to_service("kinesis")
        s3 = aws_stack.connect_to_service("s3")

        aws_stack.create_kinesis_stream(TEST_CHAIN_STREAM1_NAME, delete=True)
        aws_stack.create_kinesis_stream(TEST_CHAIN_STREAM2_NAME, delete=True)
        s3.create_bucket(Bucket=TEST_BUCKET_NAME)

        # deploy the two chained Lambdas, each consuming one of the streams
        archive = testutil.create_lambda_archive(
            load_file(TEST_LAMBDA_PYTHON), get_content=True, libs=TEST_LAMBDA_LIBS
        )
        for func_name, stream_name in (
            (TEST_CHAIN_LAMBDA1_NAME, TEST_CHAIN_STREAM1_NAME),
            (TEST_CHAIN_LAMBDA2_NAME, TEST_CHAIN_STREAM2_NAME),
        ):
            testutil.create_lambda_function(
                func_name=func_name,
                zip_file=archive,
                event_source_arn=get_event_source_arn(stream_name),
            )

        # publish a record instructing Lambda 1 to forward it to stream 2
        test_data = {"test_data": "forward_chain_data_%s with 'quotes\\\"" % short_uid()}
        record = clone(test_data)
        record[lambda_integration.MSG_BODY_MESSAGE_TARGET] = "kinesis:%s" % TEST_CHAIN_STREAM2_NAME
        kinesis.put_record(
            Data=to_bytes(json.dumps(record)),
            PartitionKey="testId",
            StreamName=TEST_CHAIN_STREAM1_NAME,
        )

        # the original payload should eventually be persisted to S3 by Lambda 2
        def check_results():
            testutil.assert_objects(test_data, testutil.list_all_s3_objects())

        retry(check_results, retries=5, sleep=3)

        # clean up
        kinesis.delete_stream(StreamName=TEST_CHAIN_STREAM1_NAME)
        kinesis.delete_stream(StreamName=TEST_CHAIN_STREAM2_NAME)
コード例 #33
0
ファイル: test_sns.py プロジェクト: sfdc-ajith/localstack
    def test_redrive_policy_lambda_subscription(self):
        """SNS messages to a deleted Lambda endpoint must be redriven to the DLQ."""
        self.unsubscripe_all_from_sns()

        lambda_name = 'test-%s' % short_uid()
        lambda_arn = aws_stack.lambda_function_arn(lambda_name)

        # deploy a Lambda so the subscription endpoint exists at subscribe time
        zip_file = testutil.create_lambda_archive(
            load_file(TEST_LAMBDA_PYTHON),
            get_content=True,
            libs=TEST_LAMBDA_LIBS,
            runtime=LAMBDA_RUNTIME_PYTHON36,
        )
        testutil.create_lambda_function(func_name=lambda_name,
                                        zip_file=zip_file,
                                        runtime=LAMBDA_RUNTIME_PYTHON36)

        # subscribe the Lambda to the topic, then attach a redrive policy
        # pointing at the test DLQ
        subscription = self.sns_client.subscribe(TopicArn=self.topic_arn,
                                                 Protocol='lambda',
                                                 Endpoint=lambda_arn)

        self.sns_client.set_subscription_attributes(
            SubscriptionArn=subscription['SubscriptionArn'],
            AttributeName='RedrivePolicy',
            AttributeValue=json.dumps({
                'deadLetterTargetArn':
                aws_stack.sqs_queue_arn(TEST_QUEUE_DLQ_NAME)
            }))
        # deliberately delete the Lambda BEFORE publishing, so delivery fails
        # and the message is redriven to the DLQ
        testutil.delete_lambda_function(lambda_name)

        self.sns_client.publish(TopicArn=self.topic_arn,
                                Message=json.dumps(
                                    {'message': 'test_redrive_policy'}))

        # poll the DLQ until the redriven message shows up with the original body
        def receive_dlq():
            result = self.sqs_client.receive_message(
                QueueUrl=self.dlq_url, MessageAttributeNames=['All'])
            self.assertGreater(len(result['Messages']), 0)
            self.assertEqual(
                json.loads(
                    json.loads(result['Messages'][0]['Body'])['Message'][0])
                ['message'], 'test_redrive_policy')

        retry(receive_dlq, retries=10, sleep=2)
コード例 #34
0
ファイル: test_lambda.py プロジェクト: bbc/localstack
def test_destroy_idle_containers():
    """The reuse-containers executor must destroy a container once it has been idle."""
    # run these tests only for the "reuse containers" Lambda executor
    if not isinstance(lambda_api.LAMBDA_EXECUTOR, lambda_executors.LambdaExecutorReuseContainers):
        return

    executor = lambda_api.LAMBDA_EXECUTOR
    func_name = 'test_destroy_idle_containers'

    # create a new lambda
    lambda_client = aws_stack.connect_to_service('lambda')

    func_arn = lambda_api.func_arn(func_name)

    # make sure existing containers are gone
    executor.destroy_existing_docker_containers()
    assert len(executor.get_all_container_names()) == 0

    # deploy and invoke lambda without Docker
    zip_file = testutil.create_lambda_archive(load_file(TEST_LAMBDA_ENV), get_content=True,
                                              libs=TEST_LAMBDA_LIBS, runtime=LAMBDA_RUNTIME_PYTHON27)
    testutil.create_lambda_function(func_name=func_name,
                                    zip_file=zip_file, runtime=LAMBDA_RUNTIME_PYTHON27, envvars={'Hello': 'World'})

    # no container is started until the first invocation
    assert len(executor.get_all_container_names()) == 0

    lambda_client.invoke(FunctionName=func_name, Payload=b'{}')
    assert len(executor.get_all_container_names()) == 1

    # try to destroy idle containers.
    # the container was just used, so the idle sweep must keep it alive
    executor.idle_container_destroyer()
    assert len(executor.get_all_container_names()) == 1

    # simulate an idle container
    # NOTE(review): 610 presumably means "just past the max idle period"
    # (e.g. 600s plus a buffer) — confirm against lambda_executors' idle threshold
    executor.function_invoke_times[func_arn] = time.time() - 610
    executor.idle_container_destroyer()
    assert len(executor.get_all_container_names()) == 0
コード例 #35
0
ファイル: test_api_gateway.py プロジェクト: bbc/localstack
def test_api_gateway_lambda_proxy_integration():
    """API Gateway proxy integration must round-trip status, headers and path params."""
    # create the Lambda backend
    archive = testutil.create_lambda_archive(
        load_file(TEST_LAMBDA_PYTHON), get_content=True,
        libs=TEST_LAMBDA_LIBS, runtime=LAMBDA_RUNTIME_PYTHON27)
    testutil.create_lambda_function(
        func_name=TEST_LAMBDA_PROXY_BACKEND,
        zip_file=archive, runtime=LAMBDA_RUNTIME_PYTHON27)

    # create API Gateway and connect it to the Lambda proxy backend
    lambda_uri = aws_stack.lambda_function_arn(TEST_LAMBDA_PROXY_BACKEND)
    target_uri = 'arn:aws:apigateway:%s:lambda:path/2015-03-31/functions/%s/invocations' % (
        DEFAULT_REGION, lambda_uri)
    gateway = connect_api_gateway_to_http_with_lambda_proxy(
        'test_gateway2', target_uri, path=API_PATH_LAMBDA_PROXY_BACKEND)

    # make a test request to the gateway and check the proxied response
    path = API_PATH_LAMBDA_PROXY_BACKEND.replace('{test_param1}', 'foo1')
    url = INBOUND_GATEWAY_URL_PATTERN.format(
        api_id=gateway['id'], stage_name=TEST_STAGE_NAME, path=path)
    request_body = {'return_status_code': 203, 'return_headers': {'foo': 'bar123'}}
    response = requests.post(url, data=json.dumps(request_body))
    assert response.status_code == 203
    assert response.headers.get('foo') == 'bar123'
    parsed_body = json.loads(to_str(response.content))
    assert parsed_body.get('return_status_code') == 203
    assert parsed_body.get('return_headers') == {'foo': 'bar123'}
    assert parsed_body.get('pathParameters') == {'test_param1': 'foo1'}
コード例 #36
0
ファイル: test_integration.py プロジェクト: bbc/localstack
def test_kinesis_lambda_sns_ddb_streams():
    """End-to-end check of event flow from DynamoDB Streams, Kinesis and SNS into Lambdas.

    Sets up a KCL consumer on an inbound Kinesis stream, a DynamoDB table with
    stream forwarding, and an SNS topic; then generates events on each source
    and asserts the expected record counts/types and CloudWatch metrics.
    """
    ddb_lease_table_suffix = '-kclapp'
    dynamodb = aws_stack.connect_to_resource('dynamodb')
    dynamodb_service = aws_stack.connect_to_service('dynamodb')
    dynamodbstreams = aws_stack.connect_to_service('dynamodbstreams')
    kinesis = aws_stack.connect_to_service('kinesis')
    sns = aws_stack.connect_to_service('sns')

    LOGGER.info('Creating test streams...')
    # remove any stale KCL lease table from a previous run (best-effort)
    run_safe(lambda: dynamodb_service.delete_table(
        TableName=TEST_STREAM_NAME + ddb_lease_table_suffix), print_error=False)
    aws_stack.create_kinesis_stream(TEST_STREAM_NAME, delete=True)
    aws_stack.create_kinesis_stream(TEST_LAMBDA_SOURCE_STREAM_NAME)

    # subscribe to inbound Kinesis stream; collected records land in EVENTS
    def process_records(records, shard_id):
        EVENTS.extend(records)

    # start the KCL client process in the background
    kinesis_connector.listen_to_kinesis(TEST_STREAM_NAME, listener_func=process_records,
        wait_until_started=True, ddb_lease_table_suffix=ddb_lease_table_suffix)

    LOGGER.info('Kinesis consumer initialized.')

    # create table with stream forwarding config
    testutil.create_dynamodb_table(TEST_TABLE_NAME, partition_key=PARTITION_KEY,
        stream_view_type='NEW_AND_OLD_IMAGES')

    # list DDB streams and make sure the table stream is there
    streams = dynamodbstreams.list_streams()
    ddb_event_source_arn = None
    for stream in streams['Streams']:
        if stream['TableName'] == TEST_TABLE_NAME:
            ddb_event_source_arn = stream['StreamArn']
    assert ddb_event_source_arn

    # deploy test lambda connected to DynamoDB Stream
    zip_file = testutil.create_lambda_archive(load_file(TEST_LAMBDA_PYTHON), get_content=True,
        libs=TEST_LAMBDA_LIBS, runtime=LAMBDA_RUNTIME_PYTHON27)
    testutil.create_lambda_function(func_name=TEST_LAMBDA_NAME_DDB,
        zip_file=zip_file, event_source_arn=ddb_event_source_arn, runtime=LAMBDA_RUNTIME_PYTHON27)
    # make sure we cannot create Lambda with same name twice
    assert_raises(Exception, testutil.create_lambda_function, func_name=TEST_LAMBDA_NAME_DDB,
        zip_file=zip_file, event_source_arn=ddb_event_source_arn, runtime=LAMBDA_RUNTIME_PYTHON27)

    # deploy test lambda connected to Kinesis Stream
    kinesis_event_source_arn = kinesis.describe_stream(
        StreamName=TEST_LAMBDA_SOURCE_STREAM_NAME)['StreamDescription']['StreamARN']
    testutil.create_lambda_function(func_name=TEST_LAMBDA_NAME_STREAM,
        zip_file=zip_file, event_source_arn=kinesis_event_source_arn, runtime=LAMBDA_RUNTIME_PYTHON27)

    # set number of items to update/put to table
    num_events_ddb = 15
    num_put_new_items = 5
    num_put_existing_items = 2
    num_batch_items = 3
    num_updates_ddb = num_events_ddb - num_put_new_items - num_put_existing_items - num_batch_items

    LOGGER.info('Putting %s items to table...' % num_events_ddb)
    table = dynamodb.Table(TEST_TABLE_NAME)
    for i in range(0, num_put_new_items):
        table.put_item(Item={
            PARTITION_KEY: 'testId%s' % i,
            'data': 'foobar123'
        })
    # Put items with an already existing ID (fix https://github.com/localstack/localstack/issues/522)
    for i in range(0, num_put_existing_items):
        table.put_item(Item={
            PARTITION_KEY: 'testId%s' % i,
            'data': 'foobar123_put_existing'
        })

    # batch write some items containing non-ASCII characters
    dynamodb.batch_write_item(RequestItems={TEST_TABLE_NAME: [
        {'PutRequest': {'Item': {PARTITION_KEY: short_uid(), 'data': 'foobar123 ✓'}}},
        {'PutRequest': {'Item': {PARTITION_KEY: short_uid(), 'data': 'foobar123 £'}}},
        {'PutRequest': {'Item': {PARTITION_KEY: short_uid(), 'data': 'foobar123 ¢'}}}
    ]})
    # update some items, which also triggers notification events
    for i in range(0, num_updates_ddb):
        dynamodb_service.update_item(TableName=TEST_TABLE_NAME,
            Key={PARTITION_KEY: {'S': 'testId%s' % i}},
            AttributeUpdates={'data': {
                'Action': 'PUT',
                'Value': {'S': 'foobar123_updated'}
            }})

    # put items to stream
    num_events_kinesis = 10
    LOGGER.info('Putting %s items to stream...' % num_events_kinesis)
    kinesis.put_records(
        Records=[
            {
                'Data': '{}',
                'PartitionKey': 'testId%s' % i
            } for i in range(0, num_events_kinesis)
        ], StreamName=TEST_LAMBDA_SOURCE_STREAM_NAME
    )

    # put 1 item to stream that will trigger an error in the Lambda
    kinesis.put_record(Data='{"%s": 1}' % lambda_integration.MSG_BODY_RAISE_ERROR_FLAG,
        PartitionKey='testIderror', StreamName=TEST_LAMBDA_SOURCE_STREAM_NAME)

    # create SNS topic, connect it to the Lambda, publish test message
    num_events_sns = 3
    response = sns.create_topic(Name=TEST_TOPIC_NAME)
    sns.subscribe(TopicArn=response['TopicArn'], Protocol='lambda',
        Endpoint=aws_stack.lambda_function_arn(TEST_LAMBDA_NAME_STREAM))
    for i in range(0, num_events_sns):
        sns.publish(TopicArn=response['TopicArn'], Message='test message %s' % i)

    # get latest records
    latest = aws_stack.kinesis_get_latest_records(TEST_LAMBDA_SOURCE_STREAM_NAME,
        shard_id='shardId-000000000000', count=10)
    assert len(latest) == 10

    LOGGER.info('Waiting some time before finishing test.')
    time.sleep(2)

    # total events expected in EVENTS across all three sources
    num_events = num_events_ddb + num_events_kinesis + num_events_sns

    def check_events():
        if len(EVENTS) != num_events:
            LOGGER.warning(('DynamoDB and Kinesis updates retrieved ' +
                '(actual/expected): %s/%s') % (len(EVENTS), num_events))
        assert len(EVENTS) == num_events
        # decode the forwarded records and check the INSERT/MODIFY split
        event_items = [json.loads(base64.b64decode(e['data'])) for e in EVENTS]
        inserts = [e for e in event_items if e.get('__action_type') == 'INSERT']
        modifies = [e for e in event_items if e.get('__action_type') == 'MODIFY']
        assert len(inserts) == num_put_new_items + num_batch_items
        assert len(modifies) == num_put_existing_items + num_updates_ddb

    # this can take a long time in CI, make sure we give it enough time/retries
    retry(check_events, retries=7, sleep=3)

    # make sure the we have the right amount of INSERT/MODIFY event types

    # check cloudwatch notifications
    stats1 = get_lambda_metrics(TEST_LAMBDA_NAME_STREAM)
    assert len(stats1['Datapoints']) == 2 + num_events_sns
    stats2 = get_lambda_metrics(TEST_LAMBDA_NAME_STREAM, 'Errors')
    assert len(stats2['Datapoints']) == 1
    stats3 = get_lambda_metrics(TEST_LAMBDA_NAME_DDB)
    assert len(stats3['Datapoints']) == num_events_ddb
コード例 #37
0
ファイル: test_lambda.py プロジェクト: bbc/localstack
def test_lambda_runtimes():
    """Deploy and invoke Lambdas across runtimes (Python 2.7/3.6, Java 8, Node.js,
    .NET Core 2.0) and assert each returns the expected payload.

    Docker-only runtimes (Python 3.6, Node.js, .NET Core) are exercised only
    when use_docker() is true.
    """
    lambda_client = aws_stack.connect_to_service('lambda')

    # deploy and invoke lambda - Python 2.7
    zip_file = testutil.create_lambda_archive(load_file(TEST_LAMBDA_PYTHON), get_content=True,
        libs=TEST_LAMBDA_LIBS, runtime=LAMBDA_RUNTIME_PYTHON27)
    testutil.create_lambda_function(func_name=TEST_LAMBDA_NAME_PY,
        zip_file=zip_file, runtime=LAMBDA_RUNTIME_PYTHON27)
    result = lambda_client.invoke(FunctionName=TEST_LAMBDA_NAME_PY, Payload=b'{}')
    assert result['StatusCode'] == 200
    result_data = result['Payload'].read()
    assert to_str(result_data).strip() == '{}'

    if use_docker():
        # deploy and invoke lambda - Python 3.6
        zip_file = testutil.create_lambda_archive(load_file(TEST_LAMBDA_PYTHON3), get_content=True,
            libs=TEST_LAMBDA_LIBS, runtime=LAMBDA_RUNTIME_PYTHON36)
        testutil.create_lambda_function(func_name=TEST_LAMBDA_NAME_PY3,
            zip_file=zip_file, runtime=LAMBDA_RUNTIME_PYTHON36)
        result = lambda_client.invoke(FunctionName=TEST_LAMBDA_NAME_PY3, Payload=b'{}')
        assert result['StatusCode'] == 200
        result_data = result['Payload'].read()
        assert to_str(result_data).strip() == '{}'

    # deploy and invoke lambda - Java
    # download the prebuilt JAR on first run, then reuse the cached copy
    if not os.path.exists(TEST_LAMBDA_JAVA):
        mkdir(os.path.dirname(TEST_LAMBDA_JAVA))
        download(TEST_LAMBDA_JAR_URL, TEST_LAMBDA_JAVA)
    zip_file = testutil.create_zip_file(TEST_LAMBDA_JAVA, get_content=True)
    testutil.create_lambda_function(func_name=TEST_LAMBDA_NAME_JAVA, zip_file=zip_file,
        runtime=LAMBDA_RUNTIME_JAVA8, handler='cloud.localstack.sample.LambdaHandler')
    result = lambda_client.invoke(FunctionName=TEST_LAMBDA_NAME_JAVA, Payload=b'{}')
    assert result['StatusCode'] == 200
    result_data = result['Payload'].read()
    assert 'LinkedHashMap' in to_str(result_data)

    # test SNSEvent
    result = lambda_client.invoke(FunctionName=TEST_LAMBDA_NAME_JAVA, InvocationType='Event',
                                  Payload=b'{"Records": [{"Sns": {"Message": "{}"}}]}')
    assert result['StatusCode'] == 200
    result_data = result['Payload'].read()
    assert json.loads(to_str(result_data)) == {'async': 'True'}

    # test DDBEvent
    result = lambda_client.invoke(FunctionName=TEST_LAMBDA_NAME_JAVA, InvocationType='Event',
                                  Payload=b'{"Records": [{"dynamodb": {"Message": "{}"}}]}')
    assert result['StatusCode'] == 200
    result_data = result['Payload'].read()
    assert json.loads(to_str(result_data)) == {'async': 'True'}

    # test KinesisEvent
    result = lambda_client.invoke(FunctionName=TEST_LAMBDA_NAME_JAVA,
                                  Payload=b'{"Records": [{"Kinesis": {"Data": "data", "PartitionKey": "partition"}}]}')
    assert result['StatusCode'] == 200
    result_data = result['Payload'].read()
    assert 'KinesisEvent' in to_str(result_data)

    # deploy and invoke lambda - Java with stream handler
    # reuses the same JAR; only the handler class differs
    testutil.create_lambda_function(func_name=TEST_LAMBDA_NAME_JAVA_STREAM, zip_file=zip_file,
        runtime=LAMBDA_RUNTIME_JAVA8, handler='cloud.localstack.sample.LambdaStreamHandler')
    result = lambda_client.invoke(FunctionName=TEST_LAMBDA_NAME_JAVA_STREAM, Payload=b'{}')
    assert result['StatusCode'] == 200
    result_data = result['Payload'].read()
    assert to_str(result_data).strip() == '{}'

    # deploy and invoke lambda - Java with serializable input object
    testutil.create_lambda_function(func_name=TEST_LAMBDA_NAME_JAVA_SERIALIZABLE, zip_file=zip_file,
        runtime=LAMBDA_RUNTIME_JAVA8, handler='cloud.localstack.sample.SerializedInputLambdaHandler')
    result = lambda_client.invoke(FunctionName=TEST_LAMBDA_NAME_JAVA_SERIALIZABLE,
                                  Payload=b'{"bucket": "test_bucket", "key": "test_key"}')
    assert result['StatusCode'] == 200
    result_data = result['Payload'].read()
    assert json.loads(to_str(result_data)) == {'validated': True, 'bucket': 'test_bucket', 'key': 'test_key'}

    if use_docker():
        # deploy and invoke lambda - Node.js
        zip_file = testutil.create_zip_file(TEST_LAMBDA_NODEJS, get_content=True)
        testutil.create_lambda_function(func_name=TEST_LAMBDA_NAME_JS,
            zip_file=zip_file, handler='lambda_integration.handler', runtime=LAMBDA_RUNTIME_NODEJS)
        result = lambda_client.invoke(FunctionName=TEST_LAMBDA_NAME_JS, Payload=b'{}')
        assert result['StatusCode'] == 200
        result_data = result['Payload'].read()
        assert to_str(result_data).strip() == '{}'

        # deploy and invoke - .NET Core 2.0. Its already a zip
        zip_file = TEST_LAMBDA_DOTNETCORE2
        zip_file_content = None
        with open(zip_file, 'rb') as file_obj:
            zip_file_content = file_obj.read()
        testutil.create_lambda_function(func_name=TEST_LAMBDA_NAME_DOTNETCORE2, zip_file=zip_file_content,
            handler='DotNetCore2::DotNetCore2.Lambda.Function::SimpleFunctionHandler',
            runtime=LAMBDA_RUNTIME_DOTNETCORE2)
        result = lambda_client.invoke(FunctionName=TEST_LAMBDA_NAME_DOTNETCORE2, Payload=b'{}')
        assert result['StatusCode'] == 200
        result_data = result['Payload'].read()
        assert to_str(result_data).strip() == '{}'
コード例 #38
0
ファイル: test_lambda.py プロジェクト: bbc/localstack
def test_prime_and_destroy_containers():
    """Check container reuse: repeated invocations should hit one warm container
    (faster than the first call), and cleanup() should remove it."""
    # run these tests only for the "reuse containers" Lambda executor
    if not isinstance(lambda_api.LAMBDA_EXECUTOR, lambda_executors.LambdaExecutorReuseContainers):
        return

    executor = lambda_api.LAMBDA_EXECUTOR
    func_name = 'test_prime_and_destroy_containers'

    # create a new lambda
    lambda_client = aws_stack.connect_to_service('lambda')

    func_arn = lambda_api.func_arn(func_name)

    # make sure existing containers are gone
    executor.cleanup()
    assert len(executor.get_all_container_names()) == 0

    # deploy and invoke lambda without Docker
    zip_file = testutil.create_lambda_archive(load_file(TEST_LAMBDA_ENV), get_content=True,
                                              libs=TEST_LAMBDA_LIBS, runtime=LAMBDA_RUNTIME_PYTHON27)
    testutil.create_lambda_function(func_name=func_name, zip_file=zip_file,
                                    runtime=LAMBDA_RUNTIME_PYTHON27, envvars={'Hello': 'World'})

    # creating the function alone must not start a container or record an invoke
    assert len(executor.get_all_container_names()) == 0

    assert executor.function_invoke_times == {}

    # invoke a few times.
    durations = []
    num_iterations = 3

    for i in range(0, num_iterations + 1):
        prev_invoke_time = None
        if i > 0:
            prev_invoke_time = executor.function_invoke_times[func_arn]

        start_time = time.time()
        lambda_client.invoke(FunctionName=func_name, Payload=b'{}')
        duration = time.time() - start_time

        # exactly one container should be (re)used across all invocations
        assert len(executor.get_all_container_names()) == 1

        # ensure the last invoke time is being updated properly.
        if i > 0:
            assert executor.function_invoke_times[func_arn] > prev_invoke_time
        else:
            assert executor.function_invoke_times[func_arn] > 0

        durations.append(duration)

    # the first call would have created the container. subsequent calls would reuse and be faster.
    for i in range(1, num_iterations + 1):
        assert durations[i] < durations[0]

    # status 1 after invoking, 0 after cleanup — presumably "running"/"not running";
    # confirm against the executor's get_docker_container_status contract
    status = executor.get_docker_container_status(func_arn)
    assert status == 1

    executor.cleanup()
    status = executor.get_docker_container_status(func_arn)
    assert status == 0

    assert len(executor.get_all_container_names()) == 0