Example #1
def _create_embedded_invalidations(appconfig: dict, stack: Stack):
    """Invalidate the cache in applications that use some of these parameters
    (by restarting the application), as specified by configuration embedded
    inline in the input file.
    """
    invalidatable_services = appconfig.get(".ssmash-config",
                                           {}).get("invalidations")
    if not invalidatable_services:
        return

    clean_config = dict(appconfig)
    clean_config.pop(".ssmash-config", None)
    invalidated_resources = _get_invalidated_resources(clean_config)

    for appname, appresources in invalidated_resources.items():
        invalidator = invalidatable_services.get(appname)
        if not invalidator:
            # TODO this error message is a bit fragile
            raise ValueError(
                f"Parameter {appresources[0].Properties.Name} invalidates service {appname}, but that service is not defined."
            )

        stack.merge_stack(
            invalidator.create_resources(appresources).with_prefixed_names(
                "Invalidate" + clean_logical_name(appname)))
Example #2
    def test_cannot_merge_if_sam_transform_version_is_different(self):
        source = Stack(Transform="123")
        target = Stack(Transform="456")

        # Exercise & Verify
        with pytest.raises(StackMergeError) as excinfo:
            target.merge_stack(source)

        assert "transform version" in str(excinfo.value).lower()
Example #3
    def test_cannot_merge_if_template_version_is_different(self):
        source = Stack(AWSTemplateFormatVersion="123")
        target = Stack(AWSTemplateFormatVersion="456")

        # Exercise & Verify
        with pytest.raises(StackMergeError) as excinfo:
            target.merge_stack(source)

        assert "template version" in str(excinfo.value).lower()
Example #4
    def test_does_not_copy_description(self):
        # Setup
        source = Stack(Description="Source Description")
        original_description = "Target Description"
        target = Stack(Description=original_description)

        # Exercise
        target.merge_stack(source)

        # Verify
        assert target.Description == original_description
Example #5
    def test_item_is_added_to_the_target_stack(self, stack_attribute, item):
        # Setup
        item_name = "SomeChildProperty"
        source = Stack()
        source[stack_attribute] = {item_name: item}
        target = Stack()

        # Exercise
        target.merge_stack(source)

        # Verify
        assert len(target[stack_attribute]) == 1
        assert target[stack_attribute][item_name] is item
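This test and several that follow (Examples #7, #9 and #10) receive stack_attribute and item as parameters. A plausible sketch of that parametrization, assuming pytest and reusing only the helper classes that appear elsewhere in these examples; the real suite may cover more attributes or construct the items differently.

import pytest

# Hypothetical parametrization: run each merge test once per mergeable Stack
# attribute, paired with an item that can be stored under that attribute.
@pytest.mark.parametrize(
    "stack_attribute, item",
    [
        ("Resources", SimpleResource()),
        ("Outputs", Output(Value="some-value")),
    ],
)
class TestMergeStackItems:
    def test_item_is_added_to_the_target_stack(self, stack_attribute, item):
        ...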
Example #6
    def test_does_not_copy_flying_circus_metadata(self):
        # Setup
        source = Stack()
        source.Metadata["FlyingCircus"]["version"] = "gamma-gamma"

        target = Stack()
        original_fc_data = target.Metadata["FlyingCircus"]

        # Exercise
        target.merge_stack(source)

        # Verify
        assert target.Metadata["FlyingCircus"] == original_fc_data
Example #7
    def test_item_is_not_removed_from_source_stack(self, stack_attribute,
                                                   item):
        # Setup
        item_name = "SomeChildProperty"
        source = Stack()
        source[stack_attribute] = {item_name: item}
        target = Stack()

        # Exercise
        target.merge_stack(source)

        # Verify
        assert len(source[stack_attribute]) == 1
        assert source[stack_attribute][item_name] is item
Example #8
    def test_cannot_merge_if_two_outputs_have_the_same_export_name(self):
        # Setup
        export_name = "SpecialExportedValue"

        source_output = Output(Value=123, Export={"Name": export_name})
        source = Stack(Outputs={"SourceOutput": source_output})

        target_output = Output(Value=987, Export={"Name": export_name})
        target = Stack(Outputs={"TargetOutput": target_output})

        # Exercise & Verify
        with pytest.raises(StackMergeError) as excinfo:
            target.merge_stack(source)

        assert "the target stack already has exports" in str(
            excinfo.value).lower()
Example #9
    def test_cannot_merge_if_logical_name_is_already_used_for_that_item_type(
            self, stack_attribute, item):
        # Setup
        item_name = "SomeChildProperty"
        source = Stack()
        source[stack_attribute] = {item_name: item}

        existing_item = copy(item)
        target = Stack()
        target[stack_attribute] = {item_name: existing_item}

        # Exercise & Verify
        with pytest.raises(StackMergeError) as excinfo:
            target.merge_stack(source)

        assert "in this stack already has an item with the logical name" in str(
            excinfo.value)
Example #10
    def test_does_not_clobber_existing_items_in_target_stack(
            self, stack_attribute, item):
        # Setup
        item_name = "SomeChildProperty"
        source = Stack()
        source[stack_attribute] = {item_name: item}

        existing_item = copy(item)
        existing_item_name = "SomeOldItem"
        target = Stack()
        target[stack_attribute] = {existing_item_name: existing_item}

        # Exercise
        target.merge_stack(source)

        # Verify
        assert len(target[stack_attribute]) == 2
        assert target[stack_attribute][item_name] is item
        assert target[stack_attribute][existing_item_name] is existing_item
Example #11
    def test_merge_returns_target_stack(self):
        # Setup
        source = Stack(Resources={"SomeResource": SimpleResource()})
        target = Stack()

        # Exercise
        result = target.merge_stack(source)

        # Verify
        assert target is result
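Because merge_stack returns the target stack, merges can be chained, which is how Example #12 below combines several scaling-policy stacks. A minimal illustration, reusing the SimpleResource helper from the test above:

combined = (
    Stack()
    .merge_stack(Stack(Resources={"First": SimpleResource()}))
    .merge_stack(Stack(Resources={"Second": SimpleResource()}))
)
assert len(combined.Resources) == 2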
Example #12
def autoscaling_group_by_cpu(low=20, high=80):
    """Create an auto-scaling group that scales based on it's CPU load."""
    # TODO this is more like a recipe than a basic service
    stack = Stack(
        # TODO generate description by auto-breaking the line with the (not-yet-existent) reflow function instead
        Description=dedent("""
            Deploy an auto-scaling group that scales based on lower and upper CPU usage
            thresholds.
            """),
    )

    launch_config = stack.Resources["LaunchConfiguration"] = LaunchConfiguration(
        Properties=dict(
            ImageId="ami-1a668878",  # Amazon Linux 2017.09.01 in ap-southeast-2
            InstanceType="t2.micro",  # TODO consider making this a lookup value
            # TODO KeyName would probably be helpful
        ),
    )

    asg = stack.Resources["AutoScalingGroup"] = AutoScalingGroup(
        Properties=dict(
            AvailabilityZones=fn.GetAZs(fn.Ref(AWS_Region)),
            LaunchConfigurationName=fn.Ref(launch_config),
            MinSize=1,
            MaxSize=3,
        ),
    )

    stack.merge_stack(
        simple_scaling_policy(
            cloudwatch.Alarms.high_cpu(threshold=high), fn.Ref(asg), downscale=False,
        ).with_prefixed_names("ScaleUp")
    ).merge_stack(
        simple_scaling_policy(
            cloudwatch.Alarms.low_cpu(threshold=low), fn.Ref(asg), downscale=True,
        ).with_prefixed_names("ScaleDown")
    )

    return stack
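A sketch of how this recipe might be used. The export("yaml") call is taken from flying-circus usage examples rather than guaranteed here, so treat the serialisation step as an assumption.

# Hypothetical usage: build the stack and print the CloudFormation template.
stack = autoscaling_group_by_cpu(low=30, high=70)
print(stack.export("yaml"))  # export("yaml") assumed from flying-circus examples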
Example #13
def _create_ssm_parameters(appconfig: dict, stack: Stack):
    """Create SSM parameters for every item in the application configuration"""
    stack.merge_stack(
        convert_hierarchy_to_ssm(appconfig).with_prefixed_names("SSMParam")
    )
Example #14
def _create_ssm_parameters(appconfig: dict, stack: Stack):
    """Create SSM parameters for every item in the application configuration"""
    clean_config = dict(appconfig)
    clean_config.pop(".ssmash-config", None)
    stack.merge_stack(
        convert_hierarchy_to_ssm(clean_config).with_prefixed_names("SSMParam"))
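Unlike Example #13, this variant shallow-copies the mapping before popping the ssmash metadata key, so the caller's dictionary is left untouched. A quick standalone check of that behaviour:

appconfig = {".ssmash-config": {"invalidations": {}}, "my-service": {"timeout": "30"}}
clean_config = dict(appconfig)            # shallow copy of the top-level mapping
clean_config.pop(".ssmash-config", None)

assert ".ssmash-config" in appconfig      # the caller's dict still has the key
assert ".ssmash-config" not in clean_config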