    def test_referred_object_can_be_a_pseudo_parameter(self):
        # Setup
        name = "SomeRegion"
        ref = Ref(AWS_Region)
        stack = Stack(Resources={name: ref})

        dumper = create_refsafe_dumper(stack)

        # Exercise
        node = ref.as_yaml_node(dumper)

        # Verify
        assert node.value == "AWS::Region"

    def test_uses_logical_name_from_stack(self, name):
        # Setup
        data = SingleAttributeObject(one=42)
        stack = Stack(Resources={name: data})
        ref = Ref(data)

        dumper = create_refsafe_dumper(stack)

        # Exercise
        node = ref.as_yaml_node(dumper)

        # Verify
        assert node.value == name

    def test_invalid_stack_object_types_cannot_be_found(self, object_type):
        # Setup
        data = SingleAttributeObject(one=42)
        name = "Foo"
        stack = Stack()
        setattr(stack, object_type, {name: data})
        ref = Ref(data)

        dumper = create_refsafe_dumper(stack)

        # Exercise & Verify
        with pytest.raises(Exception):
            ref.as_yaml_node(dumper)

    def test_referred_object_can_be_a_plain_dict(self):
        # Setup
        data = dict(bar="hello")
        name = "Foo"
        stack = Stack(Resources={name: data})
        ref = Ref(data)

        dumper = create_refsafe_dumper(stack)

        # Exercise
        node = ref.as_yaml_node(dumper)

        # Verify
        assert node.value == name

    def test_all_valid_stack_object_types_can_be_found(self, object_type):
        # Setup
        data = SingleAttributeObject(one=42)
        name = "Foo"
        stack = Stack()
        stack[object_type][name] = data
        ref = Ref(data)

        dumper = create_refsafe_dumper(stack)

        # Exercise
        node = ref.as_yaml_node(dumper)

        # Verify
        assert node.value == name

    def test_hash_is_different_from_hash_of_referent(self):
        # Setup
        data = SingleAttributeObject(one=42)
        ref = Ref(data)

        # Verify
        assert hash(data) != hash(ref)

Example #7

def create_lambda_invalidation_stack(function: str,
                                     dependencies: List[SSMParameter],
                                     role) -> Stack:
    """Create CloudFormation resources to invalidate a single AWS Lambda Function.

    This is accomplished by adding a meaningless environment variable to the
    Function, which will force it to re-deploy into a new execution context
    (but without altering any behaviour).

    Parameters:
        dependencies: SSM Parameters that this Function uses
        function: CloudFormation reference to the Lambda Function
            (eg. an unversioned ARN, or the name)
        role: CloudFormation reference (eg. an ARN) to an IAM role
            that will be used to modify the Function.
    """
    # TODO make role optional, and create it on-the-fly if not provided
    # TODO find a way to share role and lambda between multiple calls in the same stack? can de-dupe/cache based on identity in the final stack
    # TODO get Lambda handler to have an internal timeout as well?

    stack = Stack(
        Description="Invalidate Lambda Function after parameter update")

    # Create an inline Lambda that can replace another Lambda's execution
    # context, since this isn't built-in CloudFormation functionality.
    stack.Resources[
        "ReplacementLambda"] = replace_lambda_context_lambda = Function.create_from_python_function(
            handler=replace_lambda_context_resource_handler, Role=role)

    # Set the Lambda Replacer's timeout to a fixed value. This should be
    # universal, so we don't let callers specify it.
    replace_lambda_context_lambda.Properties.Timeout = 20

    # Create a custom resource to replace the Lambda's execution context.
    #
    # We don't want this to happen until the parameters have
    # all been created, so we need to have the Replacer resource depend on
    # the parameters (either implicitly or via DependsOn). We also want the
    # replacement to only happen if the parameters have actually changed - this
    # can be done if we make the SSM Parameters be part of the resource
    # specification (both the key and the value).
    # TODO pull out common code here
    stack.Resources["Replacer"] = dict(
        Type="Custom::ReplaceLambdaContext",
        Properties=dict(
            ServiceToken=GetAtt(replace_lambda_context_lambda, "Arn"),
            FunctionName=function,
            IgnoredParameterNames=[Ref(p) for p in dependencies],
            IgnoredParameterKeys=[GetAtt(p, "Value") for p in dependencies],
        ),
    )

    # TODO consider creating a waiter anyway, so that the timeout is strictly reliable

    return stack
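
A minimal usage sketch for create_lambda_invalidation_stack, mirroring the test further down this page. The parameter name, function name and role ARN are placeholders, and the YAML export call is the same Stack.export("yaml") used in the integration tests below.

# Hedged usage sketch -- all names and ARNs here are made up.
parameter = SSMParameter(
    Properties=SSMParameterProperties(Name="example-parameter-name",
                                      Type="String",
                                      Value="example-value"))

invalidation_stack = create_lambda_invalidation_stack(
    function="example-function-name",
    dependencies=[parameter],
    role="arn:aws:iam::123456789012:role/example-role")

print(invalidation_stack.export("yaml"))
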
    def test_referred_object_that_is_a_string_is_rejected_immediately(self):
        # Setup
        data = SingleAttributeObject(one=42)
        name = "Foo"
        stack = Stack(Resources={name: data})

        # Exercise & Verify
        with pytest.raises(TypeError) as excinfo:
            _ = Ref(name)

        assert "directly create a Ref to a name" in str(excinfo.value)
    def test_region_can_be_a_ref_function(self):
        # Setup
        dumper = create_refsafe_dumper(Stack())
        region = AWS_Region
        func = GetAZs(Ref(region))

        # Exercise
        node = func.as_yaml_node(dumper)

        # Verify
        function_param = self._get_mapping_node_value(node, 0)
        assert function_param == str(region)

    def test_ref_is_not_equal_to_arbitrary_object(self, func):
        # Setup
        data = SingleAttributeObject(one=42)

        ref = Ref(data)
        other = func(data)

        # Verify
        assert ref != other
        assert other != ref
        assert not (ref == other)
        assert not (other == ref)
        assert hash(ref) != hash(other)

    def test_nested_function_forces_longform_name(self):
        # Setup
        dumper = create_refsafe_dumper(Stack())
        func = GetAZs(Ref(AWS_Region))

        # Exercise
        node = func.as_yaml_node(dumper)

        # Verify
        assert node.tag == dumper.DEFAULT_MAPPING_TAG
        assert len(node.value) == 1

        function_name = get_mapping_node_key(node, 0)
        assert function_name == "Fn::GetAZs"

    def test_nested_function_forces_longform_name(self):
        # Setup
        dumper, func = self._create_getatt_function("SomeResource", "Foo",
                                                    Ref(AWS_Region))

        # Exercise
        node = func.as_yaml_node(dumper)

        # Verify
        assert node.tag == dumper.DEFAULT_MAPPING_TAG
        assert len(node.value) == 1

        function_name = get_mapping_node_key(node, 0)
        assert function_name == "Fn::GetAtt"

Example #13

    def test_creates_stack_with_supplied_parameters(self):
        # Setup
        ssm_parameter = SSMParameter(
            Properties=SSMParameterProperties(Name="test-parameter-name",
                                              Type="String",
                                              Value="test-param-value"))

        cluster_name = "cluster-name"
        service = "service-name"
        dependencies = [ssm_parameter]
        role = "role-arn"

        # Exercise
        stack = create_ecs_service_invalidation_stack(
            cluster=cluster_name,
            service=service,
            dependencies=dependencies,
            restart_role=role,
            timeout=30,
        )

        # Verify Lambda Function
        functions = [
            r for r in stack.Resources.values() if isinstance(r, Function)
        ]
        assert (
            len(functions) == 1
        ), "There should be a Lambda Function resource"

        func = functions[0]
        assert re.search(r"restart.*ecs.*service", func.Properties.Handler,
                         re.I), "Lambda should restart an ECS service"
        assert func.Properties.Role == role

        # Verify Custom Resource
        custom_resources = [
            r for r in stack.Resources.values()
            if r["Type"].startswith("Custom::")
        ]
        assert (
            len(custom_resources) == 1
        ), "There should be a custom resource to restart the ECS service"

        restarter = custom_resources[0]
        assert restarter["Properties"]["ClusterArn"] == cluster_name
        assert restarter["Properties"]["ServiceArn"] == service

        # Verify dependencies for service restart
        dependent_values = _get_flattened_attributes(restarter)
        assert Ref(ssm_parameter) in dependent_values
        assert GetAtt(ssm_parameter, "Value") in dependent_values

Example #14

    def test_should_use_existing_igw_attached_to_vpc_in_stack(self):
        # Setup
        stack = Stack()
        stack.Resources["MyVPC"] = vpc = VPC()
        stack.Resources["MyGateway"] = igw = InternetGateway()
        stack.Resources[
            "MyGatewayAttachment"] = attachment = VPCGatewayAttachment(
                Properties=dict(InternetGatewayId=Ref(igw), VpcId=Ref(vpc)))

        # Exercise
        vpc.ensure_internet_gateway_exists(stack)

        # Verify
        self._verify_vpc_has_valid_internet_gateway(vpc)
        self._verify_resource_is_in_stack(vpc.internet_gateway,
                                          stack,
                                          unique=True)
        self._verify_resource_is_in_stack(vpc.internet_gateway_attachment,
                                          stack,
                                          unique=True)

        # Verify existing Internet Gateway is used
        assert vpc.internet_gateway is igw
        assert vpc.internet_gateway_attachment is attachment

Example #15

    def test_should_not_use_existing_non_igw_attached_to_vpc_in_stack(self):
        # Setup
        stack = Stack()
        stack.Resources["MyVPC"] = vpc = VPC()
        stack.Resources["MyGateway"] = vpn_gateway = VPNGateway(
            Properties=dict(Type="ipsec.1"))
        stack.Resources[
            "MyGatewayAttachment"] = attachment = VPCGatewayAttachment(
                Properties=dict(VpcId=Ref(vpc), VpnGatewayId=Ref(vpn_gateway)))

        # Exercise
        vpc.ensure_internet_gateway_exists(stack)

        # Verify
        self._verify_vpc_has_valid_internet_gateway(vpc)
        self._verify_resource_is_in_stack(vpc.internet_gateway,
                                          stack,
                                          unique=True)
        self._verify_resource_is_in_stack(vpc.internet_gateway_attachment,
                                          stack)

        # Verify existing Internet Gateway is not used
        assert attachment is not vpc.internet_gateway_attachment
        self._verify_resource_is_in_stack(attachment, stack)

    def test_nested_function_forces_longform_name(self):
        # TODO #37 do this with a Sub to be more realistic
        # Setup
        dumper = create_refsafe_dumper(Stack())
        func = Base64(Ref(AWS_StackName))

        # Exercise
        node = func.as_yaml_node(dumper)

        # Verify
        assert node.tag == dumper.DEFAULT_MAPPING_TAG
        assert len(node.value) == 1

        function_name = get_mapping_node_key(node, 0)
        assert function_name == "Fn::Base64"

Example #17

    def test_creates_stack_for_single_lambda(self):
        # TODO pull out some common helpers

        # Setup
        ssm_parameter = SSMParameter(
            Properties=SSMParameterProperties(Name="test-parameter-name",
                                              Type="String",
                                              Value="test-param-value"))

        function_name = "some-function-name"
        dependencies = [ssm_parameter]
        role = "role-arn"

        # Exercise
        stack = create_lambda_invalidation_stack(function=function_name,
                                                 dependencies=dependencies,
                                                 role=role)

        # Verify Lambda Function
        functions = [
            r for r in stack.Resources.values() if isinstance(r, Function)
        ]
        assert (
            len(functions) == 1
        ), "There should be a Lambda Function resource to perform the invalidation"

        func = functions[0]
        assert re.search(r"replace.*lambda.*context", func.Properties.Handler,
                         re.I), "Lambda should update an existing Lambda"
        assert func.Properties.Role == role

        # Verify Custom Resource
        custom_resources = [
            r for r in stack.Resources.values()
            if r["Type"].startswith("Custom::")
        ]
        assert (
            len(custom_resources) == 1
        ), "There should be a custom resource to update the target Lambda"

        updater = custom_resources[0]
        assert updater["Properties"]["FunctionName"] == function_name

        # Verify dependencies for Lambda update
        dependent_values = _get_flattened_attributes(updater)
        assert Ref(ssm_parameter) in dependent_values
        assert GetAtt(ssm_parameter, "Value") in dependent_values

    def test_stack_yaml_output(self):
        """An integration test, yay!"""
        # Setup
        data = SingleAttributeObject(one=42)
        stack = Stack(Resources=dict(Foo=data, Bar=Ref(data)))
        del stack.Metadata

        # Exercise
        output = stack.export("yaml")

        # Verify
        assert output == dedent("""
            ---
            AWSTemplateFormatVersion: '2010-09-09'
            Resources:
              Bar: !Ref Foo
              Foo:
                one: 42
            """)
    def test_yaml_output_with_nested_function(self):
        """Nested YAML functions can't both use the ! short form."""
        # Setup
        func = GetAZs(Ref(AWS_Region))
        data = SingleAttributeObject(one=func)
        stack = Stack(Resources=dict(SomeResource=data))
        del stack.Metadata

        # Exercise
        output = stack.export("yaml")

        # Verify
        assert output == dedent("""
            ---
            AWSTemplateFormatVersion: '2010-09-09'
            Resources:
              SomeResource:
                one:
                  Fn::GetAZs: !Ref AWS::Region
            """)
    def test_yaml_output_with_nested_function(self):
        """Nested YAML functions can't both use the ! short form."""
        # TODO #37 do this with a Sub to be more realistic

        # Setup
        func = Base64(Ref(AWS_StackName))
        data = SingleAttributeObject(one=func)
        stack = Stack(Resources=dict(SomeResource=data))
        del stack.Metadata

        # Exercise
        output = stack.export("yaml")

        # Verify
        assert output == dedent("""
            ---
            AWSTemplateFormatVersion: '2010-09-09'
            Resources:
              SomeResource:
                one:
                  Fn::Base64: !Ref AWS::StackName
            """)
    def test_yaml_output_with_nested_function(self):
        # Setup
        data = SingleAttributeObject(one=42)
        stack = Stack(Resources=dict(
            Foo=data, Bar=GetAtt(data, "ResourceAttrib1", Ref(AWS_Region))))
        del stack.Metadata

        # Exercise
        output = stack.export("yaml")

        # Verify
        assert output == dedent("""
            ---
            AWSTemplateFormatVersion: '2010-09-09'
            Resources:
              Bar:
                Fn::GetAtt:
                - Foo
                - ResourceAttrib1
                - !Ref AWS::Region
              Foo:
                one: 42
            """)

Example #22

def create_ecs_service_invalidation_stack(
    cluster,
    service,
    dependencies: List[SSMParameter],
    restart_role,
    timeout: int = 8 * 60,
) -> Stack:
    """Create CloudFormation resources to invalidate a single ECS service.

    This is accomplished by restarting the ECS service, which will force it to
    use the new parameters.

    Parameters:
        cluster: CloudFormation reference (eg. an ARN) to the cluster the service is in
        dependencies: SSM Parameters that this service uses
        service: CloudFormation reference (eg. an ARN) to the ECS service to invalidate
        restart_role: CloudFormation reference (eg. an ARN) to an IAM role
            that will be used to restart the ECS service.
        timeout: Number of seconds to wait for the ECS service to detect the
            new parameters successfully after restart. If we exceed this
            timeout, it is presumed that the updated parameters are broken,
            and the changes will be rolled back. The default is 8 minutes,
            which should be enough for most services.
    """
    # TODO make restart_role optional, and create it on-the-fly if not provided
    # TODO find a way to share role and lambda between multiple calls in the same stack? can de-dupe/cache based on identity in the final stack
    # TODO get Lambda handler to have an internal timeout as well?

    stack = Stack(Description="Invalidate ECS service after parameter update")

    # Create an inline Lambda that can restart an ECS service, since this
    # isn't built-in CloudFormation functionality.
    stack.Resources[
        "RestartLambda"] = restart_service_lambda = Function.create_from_python_function(
            handler=restart_ecs_service_resource_handler, Role=restart_role)

    # The Lambda timeout should be a bit longer than the restart timeout,
    # to give some leeway.
    restart_service_lambda.Properties.Timeout = timeout + 15

    # Create a custom resource to restart the ECS service.
    #
    # We don't want the service restart to happen until the parameters have
    # all been created, so we need to have the Restarter resource depend on
    # the parameters (either implicitly or via DependsOn). We also want the
    # restart to only happen if the parameters have actually changed - this
    # can be done if we make the SSM Parameters be part of the resource
    # specification (both the key and the value).
    stack.Resources["Restarter"] = dict(
        Type="Custom::RestartEcsService",
        Properties=dict(
            ServiceToken=GetAtt(restart_service_lambda, "Arn"),
            ClusterArn=cluster,
            ServiceArn=service,
            IgnoredParameterNames=[Ref(p) for p in dependencies],
            IgnoredParameterKeys=[GetAtt(p, "Value") for p in dependencies],
        ),
    )

    # TODO consider creating a waiter anyway, so that the timeout is strictly reliable

    return stack
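
A similar sketch for the ECS variant, focusing on how the timeout argument flows through: the inline restart Lambda's Timeout is padded by 15 seconds relative to the restart timeout, per the function body above. The cluster, service and role references are placeholders.

# Hedged sketch -- all references here are placeholders.
parameter = SSMParameter(
    Properties=SSMParameterProperties(Name="example-parameter-name",
                                      Type="String",
                                      Value="example-value"))

ecs_stack = create_ecs_service_invalidation_stack(
    cluster="example-cluster-arn",
    service="example-service-arn",
    dependencies=[parameter],
    restart_role="example-role-arn",
    timeout=120,
)

restart_lambda = next(r for r in ecs_stack.Resources.values()
                      if isinstance(r, Function))
assert restart_lambda.Properties.Timeout == 120 + 15
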

Example #23

    def ensure_internet_gateway_exists(self, stack: Stack):
        """Ensure there is an internet gateway attached to this VPC as part of the stack."""
        # If we have already attached a gateway to this VPC, then there is
        # nothing more to do
        if self._internet_gateway is not None:
            # Check that the internet gateway is in the desired stack
            if not [
                res for res in stack.Resources.values() if res is self._internet_gateway
            ]:
                raise RuntimeError("Existing InternetGateway is not in this stack")
            if not [
                res
                for res in stack.Resources.values()
                if res is self._internet_gateway_attachment
            ]:
                raise RuntimeError(
                    "Existing VPCGatewayAttachment for InternetGateway is not in this stack"
                )

            return

        # Look for an existing gateway attached to this VPC
        for res in stack.Resources.values():
            if (
                isinstance(res, VPCGatewayAttachment)
                and res.Properties.VpcId == Ref(self)
                and res.Properties.InternetGatewayId
            ):
                self._internet_gateway_attachment = res

                # Try to dodgily unwrap the internet gateway...
                gateway_ref = res.Properties.InternetGatewayId
                if not isinstance(gateway_ref, Ref):
                    raise RuntimeError("Can't deal with direct ID references!")
                if not isinstance(gateway_ref._data, InternetGateway):
                    raise RuntimeError(
                        "There's something weird attached to this VPC instead of an Internet Gateway"
                    )
                self._internet_gateway = gateway_ref._data

                return

        # Create an Internet Gateway
        self._internet_gateway = InternetGateway()
        if self.name:
            # Copy the Name of the VPC if one has already been set
            self._internet_gateway.name = self.name

        igw_stack_name = stack.get_logical_name(self, resources_only=True) + "IGW"
        if igw_stack_name in stack.Resources:
            raise RuntimeError(f"There's already a resource named {igw_stack_name}")
        stack.Resources[igw_stack_name] = self._internet_gateway

        # Attach the gateway to this VPC
        self._internet_gateway_attachment = VPCGatewayAttachment(
            Properties=VPCGatewayAttachmentProperties(
                InternetGatewayId=Ref(self._internet_gateway), VpcId=Ref(self)
            )
        )

        attachment_stack_name = igw_stack_name + "Attachment"
        if attachment_stack_name in stack.Resources:
            raise RuntimeError(
                f"There's already a resource named {attachment_stack_name}"
            )
        stack.Resources[attachment_stack_name] = self._internet_gateway_attachment
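
A sketch of the simplest path through ensure_internet_gateway_exists, where the stack contains no gateway yet. The VPC's logical name is arbitrary, and the assertions assume get_logical_name returns the key under which the VPC was stored, so the new resources follow the "IGW"/"Attachment" naming convention in the method body.

# Hedged sketch of the "create from scratch" path.
stack = Stack()
stack.Resources["AppVPC"] = vpc = VPC()

vpc.ensure_internet_gateway_exists(stack)

# A new InternetGateway and VPCGatewayAttachment should now be in the stack.
assert isinstance(stack.Resources["AppVPCIGW"], InternetGateway)
assert isinstance(stack.Resources["AppVPCIGWAttachment"], VPCGatewayAttachment)
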
class TestJoin:
    """Test behaviour/output of the Join function."""

    # Test Helpers
    # ------------

    def _verify_delimiter(self, node, delimiter):
        assert node.value[0].value == delimiter

    def _verify_values(self, node, values: list):
        actual_values = [n.value for n in node.value[1].value]
        assert actual_values == values

    # YAML Output
    # -----------

    def test_uses_abbreviated_tag(self):
        # Setup
        dumper = create_refsafe_dumper(None)
        func = Join(".", "foo", "bar")

        # Exercise
        node = func.as_yaml_node(dumper)

        # Verify
        assert node.tag == "!Join"

    def test_supplied_values_are_used_in_output(self):
        # Setup
        dumper = create_refsafe_dumper(None)
        expected_delimiter = "."
        expected_values = ["foo", "bar", "baz"]
        func = Join(expected_delimiter, *expected_values)

        # Exercise
        node = func.as_yaml_node(dumper)

        # Verify
        assert isinstance(node, SequenceNode)
        assert len(node.value) == 2
        self._verify_delimiter(node, expected_delimiter)
        self._verify_values(node, expected_values)

    def test_yaml_output_as_args(self):
        # Setup
        func = Join(".", "foo", "bar")
        data = SingleAttributeObject(one=func)

        # Exercise
        output = data.export("yaml")

        # Verify
        assert output == dedent("""
            ---
            one: !Join
            - .
            - - foo
              - bar
            """)

    def test_yaml_output_as_list_parameter(self):
        # Setup
        func = Join(".", ["foo", "bar"])
        data = SingleAttributeObject(one=func)

        # Exercise
        output = data.export("yaml")

        # Verify
        assert output == dedent("""
            ---
            one: !Join
            - .
            - - foo
              - bar
            """)

    # Delimiter
    # ---------

    @pytest.mark.parametrize("value", [None, Ref(AWS_StackName)])
    def test_delimiter_must_be_a_string(self, value):
        with pytest.raises(TypeError,
                           match=re.compile(r"delimiter.*string", re.I)):
            _ = Join(value, "foo", "bar")

    def test_delimiter_can_be_empty_string(self):
        # Setup
        dumper = create_refsafe_dumper(None)
        func = Join("", "foo", "bar")

        # Exercise
        node = func.as_yaml_node(dumper)

        # Verify
        self._verify_delimiter(node, "")

    def test_yaml_output_when_delimiter_is_empty(self):
        # Setup
        func = Join("", ["foo", "bar"])
        data = SingleAttributeObject(one=func)

        # Exercise
        output = data.export("yaml")

        # Verify
        assert output == dedent("""
            ---
            one: !Join
            - ''
            - - foo
              - bar
            """)

    def test_delimiter_can_have_more_than_one_character(self):
        # Setup
        dumper = create_refsafe_dumper(None)
        func = Join("::", "foo", "bar")

        # Exercise
        node = func.as_yaml_node(dumper)

        # Verify
        self._verify_delimiter(node, "::")

    def test_delimiter_should_not_be_huge(self):
        delimiter = "syzygy" * 10

        with pytest.raises(ValueError,
                           match=re.compile(r"delimiter.*(large|long)", re.I)):
            _ = Join(delimiter, "foo", "bar")

    # Input Values
    # ------------

    def test_input_values_can_be_a_single_list_parameter(self):
        # Setup
        dumper = create_refsafe_dumper(None)
        expected_delimiter = "."
        expected_values = ["foo", "bar", "baz"]
        func = Join(expected_delimiter, expected_values)

        # Exercise
        node = func.as_yaml_node(dumper)

        # Verify
        assert isinstance(node, SequenceNode)
        assert len(node.value) == 2
        self._verify_delimiter(node, expected_delimiter)
        self._verify_values(node, expected_values)

    def test_input_values_can_include_functions(self):
        # Setup
        dumper = create_refsafe_dumper(None)
        func = Join(".", [Base64("Something"), "foo", "bar"])

        # Exercise
        node = func.as_yaml_node(dumper)

        # Verify
        assert node.value[1].value[0].tag == "!Base64"
        self._verify_values(node, ["Something", "foo", "bar"])

    def test_input_values_must_contain_multiple_values(self):
        with pytest.raises(ValueError, match=r"values.*at least 2"):
            _ = Join(".", "foo")

    def test_input_values_as_list_must_contain_multiple_values(self):
        with pytest.raises(ValueError, match=r"values.*at least 2"):
            _ = Join(".", ["foo"])
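
Rounding out the Join examples, a sketch that nests a pseudo-parameter Ref inside the value list, analogous to the Base64 case in test_input_values_can_include_functions above. The exact YAML layout is not asserted here, and the resource name is arbitrary.

# Hedged sketch -- a Ref nested inside Join's values, dumped via a Stack.
func = Join("-", [Ref(AWS_StackName), "artifacts"])
data = SingleAttributeObject(one=func)
stack = Stack(Resources=dict(SomeResource=data))
del stack.Metadata
print(stack.export("yaml"))
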