def test_event_source_mapping_with_sqs(self):
        """An SQS event source mapping invokes the Lambda once per message and
        consumes (deletes) the message from the queue after processing."""
        lambda_client = aws_stack.create_external_boto_client("lambda")
        sqs_client = aws_stack.create_external_boto_client("sqs")

        function_name = "lambda_func-{}".format(short_uid())
        queue_name_1 = "queue-{}-1".format(short_uid())

        testutil.create_lambda_function(
            handler_file=TEST_LAMBDA_PYTHON_ECHO,
            func_name=function_name,
            runtime=LAMBDA_RUNTIME_PYTHON36,
        )

        queue_url_1 = sqs_client.create_queue(
            QueueName=queue_name_1)["QueueUrl"]
        queue_arn_1 = aws_stack.sqs_queue_arn(queue_name_1)

        lambda_client.create_event_source_mapping(EventSourceArn=queue_arn_1,
                                                  FunctionName=function_name)

        sqs_client.send_message(QueueUrl=queue_url_1,
                                MessageBody=json.dumps({"foo": "bar"}))
        # wait for the async invocation to show up in the Lambda's logs
        events = retry(get_lambda_log_events,
                       sleep_before=3,
                       function_name=function_name)

        # lambda was invoked 1 time
        self.assertEqual(1, len(events[0]["Records"]))
        # the mapping should have removed the message from the queue
        rs = sqs_client.receive_message(QueueUrl=queue_url_1)
        self.assertIsNone(rs.get("Messages"))

        # clean up
        sqs_client.delete_queue(QueueUrl=queue_url_1)
        lambda_client.delete_function(FunctionName=function_name)
# Example 2
    def test_apigateway_deployed(self):
        """Verify the serverless deployment created the router Lambda and
        wired DELETE/POST/PUT on /foo/bar to it via API Gateway."""
        function_name = "sls-test-local-router"

        lambda_client = aws_stack.create_external_boto_client("lambda")

        resp = lambda_client.list_functions()
        function = [
            fn for fn in resp["Functions"]
            if fn["FunctionName"] == function_name
        ][0]
        self.assertEqual("handler.createHttpRouter", function["Handler"])

        apigw_client = aws_stack.create_external_boto_client("apigateway")
        apis = apigw_client.get_rest_apis()["items"]
        # presumably self.api_ids holds APIs that existed before deployment,
        # so exactly one new API remains — TODO confirm against setUp
        api_ids = [api["id"] for api in apis if api["id"] not in self.api_ids]
        self.assertEqual(1, len(api_ids))

        resources = apigw_client.get_resources(restApiId=api_ids[0])["items"]
        proxy_resources = [
            res for res in resources if res["path"] == "/foo/bar"
        ]
        self.assertEqual(1, len(proxy_resources))

        proxy_resource = proxy_resources[0]
        # each configured HTTP method must integrate with the router Lambda
        for method in ["DELETE", "POST", "PUT"]:
            self.assertIn(method, proxy_resource["resourceMethods"])
            resource_method = proxy_resource["resourceMethods"][method]
            self.assertIn(
                aws_stack.lambda_function_arn(function_name),
                resource_method["methodIntegration"]["uri"],
            )
# Example 3
    def setUpClass(cls):
        """Provision shared boto clients and the five Lambda fixtures."""
        cls.lambda_client = aws_stack.create_external_boto_client("lambda")
        cls.s3_client = aws_stack.create_external_boto_client("s3")
        cls.sfn_client = aws_stack.create_external_boto_client("stepfunctions")

        # two archives: env-echo handler and plain echo handler
        env_zip = testutil.create_lambda_archive(load_file(TEST_LAMBDA_ENV), get_content=True)
        echo_zip = testutil.create_lambda_archive(
            load_file(TEST_LAMBDA_PYTHON_ECHO), get_content=True
        )

        # Lambdas 1-4 share the env-echo archive; only #3 gets a different value
        env_functions = [
            (TEST_LAMBDA_NAME_1, TEST_RESULT_VALUE),
            (TEST_LAMBDA_NAME_2, TEST_RESULT_VALUE),
            (TEST_LAMBDA_NAME_3, "Replace Value"),
            (TEST_LAMBDA_NAME_4, TEST_RESULT_VALUE),
        ]
        for name, value in env_functions:
            testutil.create_lambda_function(
                func_name=name,
                zip_file=env_zip,
                envvars={"Hello": value},
            )
        # Lambda #5 uses the echo archive and no environment variables
        testutil.create_lambda_function(func_name=TEST_LAMBDA_NAME_5, zip_file=echo_zip)
    def test_deletion_event_source_mapping_with_dynamodb(self):
        """Deleting a DynamoDB table should also remove the event source
        mappings that reference it."""
        function_name = "lambda_func-{}".format(short_uid())
        ddb_table = "ddb_table-{}".format(short_uid())

        testutil.create_lambda_function(
            handler_file=TEST_LAMBDA_PYTHON_ECHO,
            func_name=function_name,
            runtime=LAMBDA_RUNTIME_PYTHON36,
        )

        table_arn = aws_stack.create_dynamodb_table(
            ddb_table, partition_key="id")["TableDescription"]["TableArn"]
        lambda_client = aws_stack.create_external_boto_client("lambda")

        lambda_client.create_event_source_mapping(FunctionName=function_name,
                                                  EventSourceArn=table_arn)

        dynamodb_client = aws_stack.create_external_boto_client("dynamodb")
        dynamodb_client.delete_table(TableName=ddb_table)

        # the mapping must be gone once its source table is deleted
        result = lambda_client.list_event_source_mappings(
            EventSourceArn=table_arn)
        self.assertEqual(0, len(result["EventSourceMappings"]))
        # clean up
        lambda_client.delete_function(FunctionName=function_name)
# Example 5
 def setUp(self):
     """Create the boto clients shared by the tests in this class."""
     client_specs = [
         ("events_client", "events"),
         ("iam_client", "iam"),
         ("sns_client", "sns"),
         ("sfn_client", "stepfunctions"),
         ("sqs_client", "sqs"),
     ]
     for attr_name, service_name in client_specs:
         setattr(self, attr_name,
                 aws_stack.create_external_boto_client(service_name))
# Example 6
    def test_trigger_event_on_ssm_change(self):
        """An EventBridge rule on 'Parameter Store Change' events forwards the
        event detail of a matching SSM parameter change to an SQS queue."""
        sqs = aws_stack.create_external_boto_client("sqs")
        ssm = aws_stack.create_external_boto_client("ssm")
        rule_name = "rule-{}".format(short_uid())
        target_id = "target-{}".format(short_uid())

        # create queue
        queue_name = "queue-{}".format(short_uid())
        queue_url = sqs.create_queue(QueueName=queue_name)["QueueUrl"]
        queue_arn = aws_stack.sqs_queue_arn(queue_name)

        # put rule listening on SSM changes
        ssm_prefix = "/test/local/"
        self.events_client.put_rule(
            Name=rule_name,
            EventPattern=json.dumps({
                "detail": {
                    "name": [{
                        "prefix": ssm_prefix
                    }],
                    "operation":
                    ["Create", "Update", "Delete", "LabelParameterVersion"],
                },
                "detail-type": ["Parameter Store Change"],
                "source": ["aws.ssm"],
            }),
            State="ENABLED",
            Description="Trigger on SSM parameter changes",
        )

        # put target; InputPath delivers only $.detail (name/operation) to SQS
        self.events_client.put_targets(
            Rule=rule_name,
            EventBusName=TEST_EVENT_BUS_NAME,
            Targets=[{
                "Id": target_id,
                "Arn": queue_arn,
                "InputPath": "$.detail"
            }],
        )

        # change SSM param to trigger event. Fix: the prefix already ends with
        # "/", so the previous f"{ssm_prefix}/test123" produced a double slash
        # ("/test/local//test123") — an invalid SSM parameter name that also
        # does not match the name asserted below.
        ssm.put_parameter(Name=f"{ssm_prefix}test123",
                          Value="value1",
                          Type="String")

        def assert_message():
            # raises (and is retried) until the expected message arrives
            resp = sqs.receive_message(QueueUrl=queue_url)
            result = resp.get("Messages")
            body = json.loads(result[0]["Body"])
            assert body == {
                "name": "/test/local/test123",
                "operation": "Create"
            }

        # assert that message has been received
        retry(assert_message, retries=7, sleep=0.3)

        # clean up
        self.cleanup(rule_name=rule_name, target_ids=target_id)
    def test_put_delivery_channel(self):
        """Create an AWS Config delivery channel backed by an S3 bucket and an
        SNS topic, then verify it via describe_delivery_channels."""
        iam_role_name = "role-{}".format(short_uid())
        iam_role_arn = self.create_iam_role(iam_role_name)

        # recorder is created first — presumably required before a delivery
        # channel can be registered (AWS Config contract); TODO confirm
        self.create_configuration_recorder(iam_role_arn)

        s3_client = aws_stack.create_external_boto_client("s3")
        test_bucket_name = f"test-bucket-{short_uid()}"
        s3_client.create_bucket(Bucket=test_bucket_name)

        sns_client = aws_stack.create_external_boto_client("sns")
        sns_topic_arn = sns_client.create_topic(
            Name="test-sns-topic")["TopicArn"]

        delivery_channel_name = "test-delivery-channel"
        self.config_service_client.put_delivery_channel(
            DeliveryChannel={
                "name": delivery_channel_name,
                "s3BucketName": test_bucket_name,
                "snsTopicARN": sns_topic_arn,
                "configSnapshotDeliveryProperties": {
                    "deliveryFrequency": "Twelve_Hours"
                },
            })

        delivery_channels = self.config_service_client.describe_delivery_channels(
        )["DeliveryChannels"]
        self.assertIn(test_bucket_name, delivery_channels[0]["s3BucketName"])
        self.assertIn(sns_topic_arn, delivery_channels[0]["snsTopicARN"])
        self.assertEqual(1, len(delivery_channels))

        # clean up channel and recorder. NOTE(review): the S3 bucket and SNS
        # topic created above are not removed here
        self.config_service_client.delete_delivery_channel(
            DeliveryChannelName=delivery_channel_name)
        self.config_service_client.delete_configuration_recorder(
            ConfigurationRecorderName=TEST_CONFIG_RECORDER_NAME)
# Example 8
    def test_create_resolver_endpoint(self):
        """Create an INBOUND Route53 Resolver endpoint on two existing subnets
        and verify the echoed request attributes."""
        ec2 = aws_stack.create_external_boto_client("ec2")
        resolver = aws_stack.create_external_boto_client("route53resolver")

        # getting list of existing (default) subnets
        subnets = ec2.describe_subnets()["Subnets"]
        subnet_ids = [s["SubnetId"] for s in subnets]
        # construct IPs within CIDR range (e.g. "10.0.0.0/24" -> "10.0.0.5")
        ips = [re.sub(r"(.*)\.[0-9]+/.+", r"\1.5", s["CidrBlock"]) for s in subnets]

        # NOTE(review): assumes at least two subnets exist — TODO confirm env
        groups = []
        addresses = [
            {"SubnetId": subnet_ids[0], "Ip": ips[0]},
            {"SubnetId": subnet_ids[1], "Ip": ips[1]},
        ]

        result = resolver.create_resolver_endpoint(
            CreatorRequestId="req123",
            SecurityGroupIds=groups,
            Direction="INBOUND",
            IpAddresses=addresses,
        )
        result = result.get("ResolverEndpoint")
        assert result
        assert result.get("CreatorRequestId") == "req123"
        assert result.get("Direction") == "INBOUND"
# Example 9
    def test_queue_handler_deployed(self):
        """Verify the serverless deployment created the queue-handler Lambda,
        its SQS event source mapping, and the queue's redrive policy."""
        function_name = "sls-test-local-queueHandler"
        queue_name = "sls-test-local-CreateQueue"

        lambda_client = aws_stack.create_external_boto_client("lambda")
        sqs_client = aws_stack.create_external_boto_client("sqs")

        resp = lambda_client.list_functions()
        function = [
            fn for fn in resp["Functions"]
            if fn["FunctionName"] == function_name
        ][0]
        self.assertEqual("handler.createQueue", function["Handler"])

        resp = lambda_client.list_event_source_mappings(
            FunctionName=function_name)
        events = resp["EventSourceMappings"]
        self.assertEqual(1, len(events))
        event_source_arn = events[0]["EventSourceArn"]

        # the single mapping must point at the deployed queue
        self.assertEqual(event_source_arn, aws_stack.sqs_queue_arn(queue_name))
        result = sqs_client.get_queue_attributes(
            QueueUrl=aws_stack.get_sqs_queue_url(queue_name),
            AttributeNames=[
                "RedrivePolicy",
            ],
        )
        # RedrivePolicy comes back as a JSON-encoded string attribute
        redrive_policy = json.loads(result["Attributes"]["RedrivePolicy"])
        self.assertEqual(3, redrive_policy["maxReceiveCount"])
# Example 10
    def test_kinesis_stream_handler_deployed(self):
        """Verify the Kinesis stream handler deployment: function, event
        source mapping, stream ARN, and an end-to-end consumer invocation."""
        function_name = "sls-test-local-kinesisStreamHandler"
        function_name2 = "sls-test-local-kinesisConsumerHandler"
        stream_name = "KinesisTestStream"

        lambda_client = aws_stack.create_external_boto_client("lambda")
        kinesis_client = aws_stack.create_external_boto_client("kinesis")

        resp = lambda_client.list_functions()
        function = [
            fn for fn in resp["Functions"]
            if fn["FunctionName"] == function_name
        ][0]
        self.assertEqual("handler.processKinesis", function["Handler"])

        resp = lambda_client.list_event_source_mappings(
            FunctionName=function_name)
        mappings = resp["EventSourceMappings"]
        self.assertEqual(1, len(mappings))
        event_source_arn = mappings[0]["EventSourceArn"]

        # the mapping's source must be the deployed stream
        resp = kinesis_client.describe_stream(StreamName=stream_name)
        self.assertEqual(event_source_arn,
                         resp["StreamDescription"]["StreamARN"])

        # assert that stream consumer is properly connected and Lambda gets invoked
        def assert_invocations():
            events = get_lambda_log_events(function_name2)
            self.assertEqual(1, len(events))

        kinesis_client.put_record(StreamName=stream_name,
                                  Data=b"test123",
                                  PartitionKey="key1")
        retry(assert_invocations, sleep=1, retries=5)
    def test_disabled_event_source_mapping_with_dynamodb(self):
        """After disabling a DynamoDB event source mapping, further table
        writes must no longer invoke the Lambda."""
        function_name = "lambda_func-{}".format(short_uid())
        ddb_table = "ddb_table-{}".format(short_uid())

        testutil.create_lambda_function(
            handler_file=TEST_LAMBDA_PYTHON_ECHO,
            func_name=function_name,
            runtime=LAMBDA_RUNTIME_PYTHON36,
        )

        table_arn = aws_stack.create_dynamodb_table(
            ddb_table, partition_key="id")["TableDescription"]["TableArn"]

        lambda_client = aws_stack.create_external_boto_client("lambda")

        rs = lambda_client.create_event_source_mapping(
            FunctionName=function_name, EventSourceArn=table_arn)
        # the mapping's UUID is needed for the disable call below
        uuid = rs["UUID"]

        dynamodb = aws_stack.connect_to_resource("dynamodb")
        table = dynamodb.Table(ddb_table)

        items = [
            {
                "id": short_uid(),
                "data": "data1"
            },
            {
                "id": short_uid(),
                "data": "data2"
            },
        ]

        table.put_item(Item=items[0])
        # NOTE(review): no retry here, unlike sibling tests — assumes the
        # invocation is already logged when the logs are fetched
        events = get_lambda_log_events(function_name)

        # lambda was invoked 1 time
        self.assertEqual(1, len(events[0]["Records"]))

        # disable event source mapping
        lambda_client.update_event_source_mapping(UUID=uuid, Enabled=False)

        table.put_item(Item=items[1])
        events = get_lambda_log_events(function_name)

        # lambda no longer invoked, still have 1 event
        self.assertEqual(1, len(events[0]["Records"]))

        # clean up
        dynamodb_client = aws_stack.create_external_boto_client("dynamodb")
        dynamodb_client.delete_table(TableName=ddb_table)

        lambda_client.delete_function(FunctionName=function_name)
# Example 12
    def test_dynamodb_stream_shard_iterator(self):
        """get_shard_iterator on a DynamoDB stream works for both the LATEST
        and AT_SEQUENCE_NUMBER iterator types."""
        def wait_for_stream_created(table_name):
            # block until the Kinesis stream backing the DDB stream is up
            stream_name = get_kinesis_stream_name(table_name)
            stream = KinesisStream(id=stream_name, num_shards=1)
            kinesis = aws_stack.create_external_boto_client(
                "kinesis", env=get_environment(None))
            stream.connect(kinesis)
            stream.wait_for()

        dynamodb = aws_stack.create_external_boto_client("dynamodb")
        ddbstreams = aws_stack.create_external_boto_client("dynamodbstreams")

        table_name = "table_with_stream-%s" % short_uid()
        table = dynamodb.create_table(
            TableName=table_name,
            KeySchema=[{
                "AttributeName": "id",
                "KeyType": "HASH"
            }],
            AttributeDefinitions=[{
                "AttributeName": "id",
                "AttributeType": "S"
            }],
            StreamSpecification={
                "StreamEnabled": True,
                "StreamViewType": "NEW_IMAGE",
            },
            ProvisionedThroughput={
                "ReadCapacityUnits": 5,
                "WriteCapacityUnits": 5
            },
        )

        wait_for_stream_created(table_name)

        stream_arn = table["TableDescription"]["LatestStreamArn"]
        result = ddbstreams.describe_stream(StreamArn=stream_arn)

        # LATEST iterator needs no sequence number
        response = ddbstreams.get_shard_iterator(
            StreamArn=stream_arn,
            ShardId=result["StreamDescription"]["Shards"][0]["ShardId"],
            ShardIteratorType="LATEST",
        )
        assert "ShardIterator" in response
        # AT_SEQUENCE_NUMBER starts from the shard's first sequence number
        response = ddbstreams.get_shard_iterator(
            StreamArn=stream_arn,
            ShardId=result["StreamDescription"]["Shards"][0]["ShardId"],
            ShardIteratorType="AT_SEQUENCE_NUMBER",
            SequenceNumber=result["StreamDescription"]["Shards"][0].get(
                "SequenceNumberRange").get("StartingSequenceNumber"),
        )
        assert "ShardIterator" in response
    def test_create_kinesis_event_source_mapping(self):
        """Kinesis-triggered Lambda: records put on the stream reach the
        function via the event source mapping, with the standard Kinesis
        event envelope fields on each record."""
        function_name = f"lambda_func-{short_uid()}"
        stream_name = f"test-foobar-{short_uid()}"

        testutil.create_lambda_function(
            handler_file=TEST_LAMBDA_PYTHON_ECHO,
            func_name=function_name,
            runtime=LAMBDA_RUNTIME_PYTHON36,
        )

        arn = aws_stack.kinesis_stream_arn(stream_name,
                                           account_id="000000000000")

        lambda_client = aws_stack.create_external_boto_client("lambda")
        lambda_client.create_event_source_mapping(EventSourceArn=arn,
                                                  FunctionName=function_name)

        def process_records(record):
            assert record

        aws_stack.create_kinesis_stream(stream_name, delete=True)
        kinesis_connector.listen_to_kinesis(
            stream_name=stream_name,
            listener_func=process_records,
            wait_until_started=True,
        )

        kinesis = aws_stack.create_external_boto_client("kinesis")
        stream_summary = kinesis.describe_stream_summary(
            StreamName=stream_name)
        self.assertEqual(
            1, stream_summary["StreamDescriptionSummary"]["OpenShardCount"])
        num_events_kinesis = 10
        kinesis.put_records(
            Records=[{
                "Data": "{}",
                "PartitionKey": "test_%s" % i
            } for i in range(0, num_events_kinesis)],
            StreamName=stream_name,
        )

        events = get_lambda_log_events(function_name)
        # fix: assert against num_events_kinesis instead of a duplicated
        # hard-coded 10, so the two values cannot drift apart
        self.assertEqual(num_events_kinesis, len(events[0]["Records"]))

        # every record carries the Kinesis event envelope fields
        record = events[0]["Records"][0]
        for field in (
            "eventID",
            "eventSourceARN",
            "eventSource",
            "eventVersion",
            "eventName",
            "invokeIdentityArn",
            "awsRegion",
            "kinesis",
        ):
            self.assertIn(field, record)
# Example 14
    def test_multi_region_api_gateway(self):
        """REST APIs are namespaced per region: creating an API in one region
        must not change listings in another. Also verifies a cross-region
        API Gateway -> SQS integration end to end."""
        gw_1 = aws_stack.create_external_boto_client("apigateway",
                                                     region_name=REGION1)
        gw_2 = aws_stack.create_external_boto_client("apigateway",
                                                     region_name=REGION2)
        gw_3 = aws_stack.create_external_boto_client("apigateway",
                                                     region_name=REGION3)
        sqs_1 = aws_stack.create_external_boto_client("sqs",
                                                      region_name=REGION1)
        # baseline counts so assertions are relative to pre-existing APIs
        len_1 = len(gw_1.get_rest_apis()["items"])
        len_2 = len(gw_2.get_rest_apis()["items"])

        # creating an API in region 1 must not leak into region 2
        api_name1 = "a-%s" % short_uid()
        gw_1.create_rest_api(name=api_name1)
        result1 = gw_1.get_rest_apis()["items"]
        self.assertEqual(len(result1), len_1 + 1)
        self.assertEqual(len(gw_2.get_rest_apis()["items"]), len_2)

        # ... and vice versa
        api_name2 = "a-%s" % short_uid()
        gw_2.create_rest_api(name=api_name2)
        result2 = gw_2.get_rest_apis()["items"]
        self.assertEqual(len(gw_1.get_rest_apis()["items"]), len_1 + 1)
        self.assertEqual(len(result2), len_2 + 1)

        # wire an API in region 3 to an SQS queue in region 1
        api_name3 = "a-%s" % short_uid()
        queue_name1 = "q-%s" % short_uid()
        # fix: dropped the unused assignment of create_queue's response,
        # which was immediately overwritten below
        sqs_1.create_queue(QueueName=queue_name1)
        queue_arn = aws_stack.sqs_queue_arn(queue_name1, region_name=REGION1)
        result = connect_api_gateway_to_sqs(api_name3,
                                            stage_name="test",
                                            queue_arn=queue_arn,
                                            path="/data",
                                            region_name=REGION3)
        api_id = result["id"]
        result = gw_3.get_rest_apis()["items"]
        self.assertEqual(result[-1]["name"], api_name3)

        # post message through the gateway and receive it from SQS
        url = self._gateway_request_url(api_id=api_id,
                                        stage_name="test",
                                        path="/data")
        test_data = {"foo": "bar"}
        result = requests.post(url, data=json.dumps(test_data))
        self.assertEqual(result.status_code, 200)
        messages = aws_stack.sqs_receive_message(queue_arn)["Messages"]
        self.assertEqual(len(messages), 1)
        # the SQS integration stores the payload base64-encoded
        self.assertEqual(
            json.loads(to_str(base64.b64decode(to_str(messages[0]["Body"])))),
            test_data)
# Example 15
    def test_dynamodb_stream_to_lambda(self):
        """A DynamoDB stream event source mapping delivers NEW_AND_OLD_IMAGES
        records to the Lambda when an item is written to the table."""
        table_name = "ddb-table-%s" % short_uid()
        function_name = "func-%s" % short_uid()
        partition_key = "SK"

        aws_stack.create_dynamodb_table(
            table_name=table_name,
            partition_key=partition_key,
            stream_view_type="NEW_AND_OLD_IMAGES",
        )
        table = self.dynamodb.Table(table_name)
        latest_stream_arn = table.latest_stream_arn

        testutil.create_lambda_function(
            handler_file=TEST_LAMBDA_PYTHON_ECHO,
            func_name=function_name,
            runtime=LAMBDA_RUNTIME_PYTHON36,
        )

        lambda_client = aws_stack.create_external_boto_client("lambda")
        lambda_client.create_event_source_mapping(
            EventSourceArn=latest_stream_arn, FunctionName=function_name)

        item = {"SK": short_uid(), "Name": "name-{}".format(short_uid())}

        table.put_item(Item=item)

        # wait until exactly one "Records" event shows up in the Lambda logs
        events = retry(
            check_expected_lambda_log_events_length,
            retries=3,
            sleep=1,
            function_name=function_name,
            expected_length=1,
            regex_filter=r"Records",
        )

        self.assertEqual(1, len(events))
        self.assertEqual(1, len(events[0]["Records"]))

        # the stream record must reflect the item written above
        dynamodb_event = events[0]["Records"][0]["dynamodb"]
        self.assertEqual("NEW_AND_OLD_IMAGES",
                         dynamodb_event["StreamViewType"])
        self.assertEqual({"SK": {"S": item["SK"]}}, dynamodb_event["Keys"])
        self.assertEqual({"S": item["Name"]},
                         dynamodb_event["NewImage"]["Name"])
        self.assertIn("SequenceNumber", dynamodb_event)

        # clean up
        dynamodb = aws_stack.create_external_boto_client("dynamodb")
        dynamodb.delete_table(TableName=table_name)
# Example 16
    def test_adding_fallback_function_name_in_headers(self):
        """Invoking a missing Lambda with LAMBDA_FALLBACK_URL pointing at a
        dynamodb:// URL records the attempted function name in that table."""
        lambda_client = aws_stack.create_external_boto_client("lambda")
        ddb_client = aws_stack.create_external_boto_client("dynamodb")

        fallback_table = "lambda-records"
        config.LAMBDA_FALLBACK_URL = f"dynamodb://{fallback_table}"

        lambda_client.invoke(
            FunctionName="non-existing-lambda",
            Payload=b"{}",
            InvocationType="RequestResponse",
        )

        # run_safe tolerates the scan failing (e.g. table not yet created)
        scan_result = run_safe(ddb_client.scan, TableName=fallback_table)
        recorded_name = scan_result["Items"][0]["function_name"]["S"]
        self.assertEqual("non-existing-lambda", recorded_name)
# Example 17
    def test_query_on_deleted_resource(self):
        """Querying a deleted DynamoDB table raises ResourceNotFoundException."""
        table_name = "ddb-table-%s" % short_uid()
        partition_key = "username"

        dynamodb = aws_stack.create_external_boto_client("dynamodb")
        aws_stack.create_dynamodb_table(table_name, partition_key)

        # both queries use the same condition and values
        key_condition = "{} = :username".format(partition_key)
        attr_values = {":username": {"S": "test"}}

        # the query succeeds while the table exists
        response = dynamodb.query(
            TableName=table_name,
            KeyConditionExpression=key_condition,
            ExpressionAttributeValues=attr_values,
        )
        assert response["ResponseMetadata"]["HTTPStatusCode"] == 200

        dynamodb.delete_table(TableName=table_name)

        # the identical query on the deleted table must raise
        with pytest.raises(Exception) as ctx:
            dynamodb.query(
                TableName=table_name,
                KeyConditionExpression=key_condition,
                ExpressionAttributeValues=attr_values,
            )
        assert ctx.match("ResourceNotFoundException")
# Example 18
 def wait_for_stream_created(table_name):
     """Block until the Kinesis stream backing *table_name*'s DDB stream is up."""
     kinesis_client = aws_stack.create_external_boto_client(
         "kinesis", env=get_environment(None))
     backing_stream = KinesisStream(
         id=get_kinesis_stream_name(table_name), num_shards=1)
     backing_stream.connect(kinesis_client)
     backing_stream.wait_for()
# Example 19
    def test_delete_table(self):
        """Create and delete a table, then verify that deleting it a second
        time raises ResourceNotFoundException."""
        table_name = "test-ddb-table-%s" % short_uid()
        dynamodb = aws_stack.create_external_boto_client("dynamodb")

        count_before = len(dynamodb.list_tables()["TableNames"])

        dynamodb.create_table(
            TableName=table_name,
            KeySchema=[{"AttributeName": "id", "KeyType": "HASH"}],
            AttributeDefinitions=[{"AttributeName": "id", "AttributeType": "S"}],
            ProvisionedThroughput={
                "ReadCapacityUnits": 1,
                "WriteCapacityUnits": 1,
            },
        )

        # TODO: fix assertion, to enable parallel test execution!
        names_after_create = dynamodb.list_tables()["TableNames"]
        assert count_before + 1 == len(names_after_create)
        assert table_name in names_after_create

        dynamodb.delete_table(TableName=table_name)

        names_after_delete = dynamodb.list_tables()["TableNames"]
        assert count_before == len(names_after_delete)

        # a second delete of the same table must fail
        with pytest.raises(Exception) as ctx:
            dynamodb.delete_table(TableName=table_name)
        assert ctx.match("ResourceNotFoundException")
    def test_kinesis_error_injection(self, monkeypatch, kinesis_client,
                                     wait_for_stream_ready):
        """KINESIS_ERROR_PROBABILITY=1.0 must make every put_records entry
        fail, while the default setting yields no failed records."""
        # client configured with retries, separate from the kinesis_client fixture
        kinesis = aws_stack.create_external_boto_client(
            "kinesis", config=self.retry_config())
        stream_name = f"stream-{short_uid()}"
        aws_stack.create_kinesis_stream(stream_name)
        wait_for_stream_ready(stream_name)

        try:
            records = [{
                "Data": "0",
                "ExplicitHashKey": "0",
                "PartitionKey": "0"
            }]

            # by default, no errors
            test_no_errors = kinesis.put_records(StreamName=stream_name,
                                                 Records=records)
            assert test_no_errors["FailedRecordCount"] == 0

            # with a probability of 1, always throw errors
            monkeypatch.setattr(config, "KINESIS_ERROR_PROBABILITY", 1.0)
            test_all_errors = kinesis.put_records(StreamName=stream_name,
                                                  Records=records)
            assert test_all_errors["FailedRecordCount"] == 1
        finally:
            # always remove the stream, even if an assertion failed
            kinesis_client.delete_stream(StreamName=stream_name)
# Example 21
    def _invoke_s3_via_edge(self, edge_url):
        """Exercise S3 through the given edge endpoint: SDK calls, a raw HTTP
        PUT bucket creation, and a form-encoded POST object upload."""
        client = aws_stack.create_external_boto_client("s3", endpoint_url=edge_url)
        bucket_name = "edge-%s" % short_uid()

        # create/head/delete a bucket via the SDK
        client.create_bucket(Bucket=bucket_name)
        result = client.head_bucket(Bucket=bucket_name)
        assert result["ResponseMetadata"]["HTTPStatusCode"] == 200
        client.delete_bucket(Bucket=bucket_name)

        # create a second bucket with a raw HTTP PUT against the edge URL
        bucket_name = "edge-%s" % short_uid()
        object_name = "testobject"
        bucket_url = "%s/%s" % (edge_url, bucket_name)
        result = requests.put(bucket_url, verify=False)
        assert result.status_code == 200
        result = client.head_bucket(Bucket=bucket_name)
        assert result["ResponseMetadata"]["HTTPStatusCode"] == 200
        # upload an object via browser-style form POST
        headers = {"Content-Type": "application/x-www-form-urlencoded"}
        result = requests.post(
            bucket_url,
            data="key=%s&file=file_content_123" % object_name,
            headers=headers,
            verify=False,
        )
        assert result.status_code == 204

        # raw PUT of another object under the bucket
        bucket_url = "%s/example" % bucket_url
        result = requests.put(bucket_url, data="hello", verify=False)
        assert result.status_code == 200

        # the form-POSTed content must round-trip through the SDK download
        result = io.BytesIO()
        client.download_fileobj(bucket_name, object_name, result)
        assert to_str(result.getvalue()) == "file_content_123"
# Example 22
    def test_reusable_delegation_sets(self):
        """Create, get, list, and delete Route53 reusable delegation sets;
        getting a deleted set must fail with NoSuchDelegationSet."""
        client = aws_stack.create_external_boto_client("route53")

        # snapshot pre-existing sets so the count assertion below is relative
        sets_before = client.list_reusable_delegation_sets().get("DelegationSets", [])

        call_ref_1 = "c-%s" % short_uid()
        result_1 = client.create_reusable_delegation_set(CallerReference=call_ref_1)[
            "DelegationSet"
        ]
        set_id_1 = result_1["Id"]

        call_ref_2 = "c-%s" % short_uid()
        result_2 = client.create_reusable_delegation_set(CallerReference=call_ref_2)[
            "DelegationSet"
        ]
        set_id_2 = result_2["Id"]

        result_1 = client.get_reusable_delegation_set(Id=set_id_1)
        assert result_1["ResponseMetadata"]["HTTPStatusCode"] == 200
        assert result_1["DelegationSet"]["Id"] == set_id_1

        result_1 = client.list_reusable_delegation_sets()
        assert result_1["ResponseMetadata"]["HTTPStatusCode"] == 200
        # TODO: assertion should be updated, to allow for parallel tests
        assert len(result_1["DelegationSets"]) == len(sets_before) + 2

        result_1 = client.delete_reusable_delegation_set(Id=set_id_1)
        assert result_1["ResponseMetadata"]["HTTPStatusCode"] == 200

        result_2 = client.delete_reusable_delegation_set(Id=set_id_2)
        assert result_2["ResponseMetadata"]["HTTPStatusCode"] == 200

        # fetching a deleted set must fail
        with pytest.raises(Exception) as ctx:
            client.get_reusable_delegation_set(Id=set_id_1)
        assert "NoSuchDelegationSet" in str(ctx.value)
# Example 23
    def test_apigateway(self):
        """Validate the REST API deployed by the Terraform config: resource
        count, paths, and integration types (unittest-assert variant)."""
        apigateway_client = aws_stack.create_external_boto_client("apigateway")
        rest_apis = apigateway_client.get_rest_apis()

        rest_id = None
        for rest_api in rest_apis["items"]:
            if rest_api["name"] == "test-tf-apigateway":
                rest_id = rest_api["id"]
                break

        self.assertTrue(rest_id)
        resources = apigateway_client.get_resources(restApiId=rest_id)["items"]

        # We always have 1 default root resource (with path "/")
        self.assertEqual(3, len(resources))

        # /mytestresource uses a MOCK integration
        res1 = [r for r in resources if r.get("pathPart") == "mytestresource"]
        self.assertTrue(res1)
        self.assertEqual("/mytestresource", res1[0]["path"])
        self.assertEqual(2, len(res1[0]["resourceMethods"]))
        self.assertEqual(
            "MOCK",
            res1[0]["resourceMethods"]["GET"]["methodIntegration"]["type"])

        # /mytestresource1 proxies to a Lambda (AWS_PROXY)
        res2 = [r for r in resources if r.get("pathPart") == "mytestresource1"]
        self.assertTrue(res2)
        self.assertEqual("/mytestresource1", res2[0]["path"])
        self.assertEqual(2, len(res2[0]["resourceMethods"]))
        self.assertEqual(
            "AWS_PROXY",
            res2[0]["resourceMethods"]["GET"]["methodIntegration"]["type"])
        self.assertTrue(
            res2[0]["resourceMethods"]["GET"]["methodIntegration"]["uri"])
# Example 24
    def test_apigateway(self):
        """Validate the REST API deployed by the Terraform config: resource
        count, paths, and integration types (pytest-assert variant)."""
        apigateway_client = aws_stack.create_external_boto_client("apigateway")

        # locate the API created by the terraform apply
        rest_id = None
        for api in apigateway_client.get_rest_apis()["items"]:
            if api["name"] == "test-tf-apigateway":
                rest_id = api["id"]
                break
        assert rest_id

        resources = apigateway_client.get_resources(restApiId=rest_id)["items"]
        # We always have 1 default root resource (with path "/")
        assert len(resources) == 3

        # /mytestresource uses a MOCK integration
        mock_candidates = [r for r in resources if r.get("pathPart") == "mytestresource"]
        assert mock_candidates
        mock_res = mock_candidates[0]
        assert mock_res["path"] == "/mytestresource"
        assert len(mock_res["resourceMethods"]) == 2
        assert mock_res["resourceMethods"]["GET"]["methodIntegration"]["type"] == "MOCK"

        # /mytestresource1 proxies to a Lambda (AWS_PROXY)
        proxy_candidates = [r for r in resources if r.get("pathPart") == "mytestresource1"]
        assert proxy_candidates
        proxy_res = proxy_candidates[0]
        assert proxy_res["path"] == "/mytestresource1"
        assert len(proxy_res["resourceMethods"]) == 2
        get_integration = proxy_res["resourceMethods"]["GET"]["methodIntegration"]
        assert get_integration["type"] == "AWS_PROXY"
        assert get_integration["uri"]
# ----- Example #25 -----
    def _invoke_s3_via_edge_multipart_form(self, edge_url):
        """Upload an object via a presigned multipart POST through the edge endpoint,
        then download it again and verify the payload round-trips intact."""
        s3 = aws_stack.create_external_boto_client("s3", endpoint_url=edge_url)

        bucket = "edge-%s" % short_uid()
        key = "testobject"
        payload = b"testdata"

        s3.create_bucket(Bucket=bucket)
        presigned = s3.generate_presigned_post(bucket, key)

        # POST the form fields from the presigned response plus the file payload.
        response = requests.post(
            presigned["url"],
            data=presigned["fields"],
            files={"file": payload},
            verify=False,
        )
        assert response.status_code == 204

        # Read the object back and compare with what was uploaded.
        buffer = io.BytesIO()
        s3.download_fileobj(bucket, key, buffer)
        assert to_str(buffer.getvalue()) == to_str(payload)

        # clean up
        s3.delete_object(Bucket=bucket, Key=key)
        s3.delete_bucket(Bucket=bucket)
# ----- Example #26 -----
    def test_python_lambda_subscribe_sns_topic(self, create_lambda_function):
        """Subscribe a Python Lambda to an SNS topic, publish a message, and
        verify exactly one SNS notification (with the subject) reaches the Lambda."""
        sns = aws_stack.create_external_boto_client("sns")
        func_name = f"{TEST_LAMBDA_FUNCTION_PREFIX}-{short_uid()}"

        create_lambda_function(
            func_name=func_name,
            handler_file=TEST_LAMBDA_PYTHON_ECHO,
            runtime=LAMBDA_RUNTIME_PYTHON36,
        )

        topic_arn = sns.create_topic(Name=TEST_SNS_TOPIC_NAME)["TopicArn"]

        # Wire the topic to the Lambda function.
        sns.subscribe(
            TopicArn=topic_arn,
            Protocol="lambda",
            Endpoint=lambda_api.func_arn(func_name),
        )

        subject = "[Subject] Test subject"
        message = "Hello world."
        sns.publish(TopicArn=topic_arn, Subject=subject, Message=message)

        # Poll the function logs until exactly one SNS record shows up.
        events = retry(
            check_expected_lambda_log_events_length,
            retries=3,
            sleep=1,
            function_name=func_name,
            expected_length=1,
            regex_filter="Records.*Sns",
        )
        notification = events[0]["Records"][0]["Sns"]

        assert "Subject" in notification
        assert subject == notification["Subject"]
# ----- Example #27 -----
    def test_deletion_event_source_mapping_with_dynamodb(
        self, create_lambda_function, lambda_client
    ):
        """Deleting a DynamoDB table should remove the event source mapping
        that was created against its stream ARN."""
        func_name = f"lambda_func-{short_uid()}"
        table_name = f"ddb_table-{short_uid()}"

        create_lambda_function(
            func_name=func_name,
            handler_file=TEST_LAMBDA_PYTHON_ECHO,
            runtime=LAMBDA_RUNTIME_PYTHON36,
        )

        table = aws_stack.create_dynamodb_table(table_name, partition_key="id")
        table_arn = table["TableDescription"]["TableArn"]

        lambda_client.create_event_source_mapping(
            FunctionName=func_name, EventSourceArn=table_arn
        )

        # Removing the table should cascade-delete the mapping.
        dynamodb = aws_stack.create_external_boto_client("dynamodb")
        dynamodb.delete_table(TableName=table_name)

        mappings = lambda_client.list_event_source_mappings(EventSourceArn=table_arn)
        assert 0 == len(mappings["EventSourceMappings"])
# ----- Example #28 -----
    def test_put_events_into_event_bus(self):
        """Events put on one custom bus should be forwarded to a second bus,
        whose target delivers them into an SQS queue.

        Flow: bus_1 --(rule/target: bus_2's ARN)--> bus_2 --(target)--> SQS queue.
        """
        queue_name = "queue-{}".format(short_uid())
        rule_name = "rule-{}".format(short_uid())
        target_id = "target-{}".format(short_uid())
        bus_name_1 = "bus1-{}".format(short_uid())
        bus_name_2 = "bus2-{}".format(short_uid())

        sqs_client = aws_stack.create_external_boto_client("sqs")
        queue_url = sqs_client.create_queue(QueueName=queue_name)["QueueUrl"]
        queue_arn = aws_stack.sqs_queue_arn(queue_name)

        self.events_client.create_event_bus(Name=bus_name_1)
        resp = self.events_client.create_event_bus(Name=bus_name_2)
        # Rule only exists on bus_1; it matches TEST_EVENT_PATTERN.
        self.events_client.put_rule(
            Name=rule_name,
            EventBusName=bus_name_1,
            EventPattern=json.dumps(TEST_EVENT_PATTERN),
        )

        # bus_1's target is bus_2 itself (event forwarding between buses).
        self.events_client.put_targets(
            Rule=rule_name,
            EventBusName=bus_name_1,
            Targets=[{
                "Id": target_id,
                "Arn": resp.get("EventBusArn")
            }],
        )
        # bus_2's target delivers matched events into the SQS queue.
        self.events_client.put_targets(
            Rule=rule_name,
            EventBusName=bus_name_2,
            Targets=[{
                "Id": target_id,
                "Arn": queue_arn
            }],
        )

        # Publish a single matching event onto bus_1.
        self.events_client.put_events(
            Entries=[{
                "EventBusName": bus_name_1,
                "Source": TEST_EVENT_PATTERN["Source"][0],
                "DetailType": TEST_EVENT_PATTERN["detail-type"][0],
                "Detail": json.dumps(TEST_EVENT_PATTERN["Detail"][0]),
            }])

        def get_message(queue_url):
            # Raises KeyError while no message has arrived, so retry() keeps polling.
            resp = sqs_client.receive_message(QueueUrl=queue_url)
            return resp["Messages"]

        messages = retry(get_message, retries=3, sleep=1, queue_url=queue_url)
        self.assertEqual(1, len(messages))

        # The event must survive forwarding across both buses unchanged.
        actual_event = json.loads(messages[0]["Body"])
        self.assertIsValidEvent(actual_event)
        self.assertEqual(TEST_EVENT_PATTERN["Detail"][0],
                         actual_event["detail"])

        # clean up
        self.cleanup(bus_name_1, rule_name, target_id)
        self.cleanup(bus_name_2)
        sqs_client.delete_queue(QueueUrl=queue_url)
# ----- Example #29 -----
    def test_record_lifecycle_data_integrity(self):
        """
        kinesis records should contain the same data from when they are sent to when they are received
        """
        kinesis = aws_stack.create_external_boto_client("kinesis")
        stream_name = "test-%s" % short_uid()
        # Cover unicode, long payloads, and the empty string.
        payloads = {"test", "ünicödé 统一码 💣💻🔥", "a" * 1000, ""}

        kinesis.create_stream(StreamName=stream_name, ShardCount=1)
        sleep(1.5)  # give the stream a moment to become active
        shard_iterator = self._get_shard_iterator(stream_name)

        for payload in payloads:
            kinesis.put_record(
                StreamName=stream_name,
                Data=payload,
                PartitionKey="1",
            )

        # All records must come back, each decoding to one of the sent payloads.
        records = kinesis.get_records(ShardIterator=shard_iterator).get("Records")
        self.assertEqual(len(payloads), len(records))
        for record in records:
            self.assertIn(record.get("Data").decode("utf-8"), payloads)
# ----- Example #30 -----
 def test_lambda(self):
     """The deployed Lambda's configuration matches the expected module constants."""
     client = aws_stack.create_external_boto_client("lambda")
     config = client.get_function(FunctionName=LAMBDA_NAME)["Configuration"]
     assert config["FunctionName"] == LAMBDA_NAME
     assert config["Handler"] == LAMBDA_HANDLER
     assert config["Runtime"] == LAMBDA_RUNTIME
     assert config["Role"] == LAMBDA_ROLE