Example #1
0
    def test_should_dereference_cloudformation_imports(self):
        """Export names supplied via CLI become ImportValue references."""
        # Setup
        cluster_name = "some-cluster-export"
        service_name = "some-service-export"
        role_name = "some-role-export"
        extra = [
            "--cluster-import", cluster_name,
            "--service-import", service_name,
            "--role-import", role_name,
        ]

        # Exercise
        with Patchers.create_ecs_service_invalidation_stack() as mock_invalidation:
            result = self.run_script_with_invalidation_params(extra_args=extra)

        # Verify the script succeeded and each export was dereferenced
        assert result.exit_code == 0
        mock_invalidation.assert_called_with(
            cluster=ImportValue(cluster_name),
            service=ImportValue(service_name),
            dependencies=ANY,
            restart_role=ImportValue(role_name),
        )
    def test_imports_of_different_names_are_not_equal(self):
        """Imports referencing distinct exports must compare unequal."""
        # Setup
        first = ImportValue("exported-name-1")
        second = ImportValue("exported-name-2")

        # Verify both equality operators and the hash contract
        assert first != second
        assert not (first == second)
        assert hash(first) != hash(second)
    def test_imports_of_same_name_are_equal(self):
        """Imports built from the same export name behave as equal values."""
        # Setup
        export_name = "some-exported-name"
        first = ImportValue(export_name)
        second = ImportValue(export_name)

        # Verify equality, inequality and the hash contract all agree
        assert first == second
        assert not (first != second)
        assert hash(first) == hash(second)
    def test_supplied_export_name_is_used_in_output(self):
        """The YAML node's value should be the export name itself."""
        # Setup
        yaml_dumper = create_refsafe_dumper(None)
        name = "some_export_name"
        import_func = ImportValue(name)

        # Exercise
        yaml_node = import_func.as_yaml_node(yaml_dumper)

        # Verify
        assert yaml_node.value == name
    def test_uses_abbreviated_tag_for_yaml_scalar(self):
        """A plain import serialises as a short-form !ImportValue scalar."""
        # Setup
        yaml_dumper = create_refsafe_dumper(None)
        import_func = ImportValue("some_export_name")

        # Exercise
        yaml_node = import_func.as_yaml_node(yaml_dumper)

        # Verify the node is a scalar carrying the abbreviated tag
        assert isinstance(yaml_node, ScalarNode)
        assert yaml_node.tag == "!ImportValue"
    def test_nested_function_forces_longform_name(self):
        """When the argument is itself a function, the long form is used."""
        # Setup
        yaml_dumper = create_refsafe_dumper(Stack())
        import_func = ImportValue(Sub("${AWS::Region}-SharedLogBucket"))

        # Exercise
        yaml_node = import_func.as_yaml_node(yaml_dumper)

        # Verify: a single-entry mapping whose key is the long-form name
        assert yaml_node.tag == yaml_dumper.DEFAULT_MAPPING_TAG
        assert len(yaml_node.value) == 1

        key_name = get_mapping_node_key(yaml_node, 0)
        assert key_name == "Fn::ImportValue"
Example #7
0
def get_cfn_resource_from_options(
        option_name: str, arn: Optional[str],
        export_name: Optional[str]) -> Union[str, ImportValue]:
    """Get a CloudFormation resource from one of several ways to specify it.

    Exactly one of `arn` and `export_name` must be supplied (non-empty).

    Parameters:
        option_name: The name of the option, used in CLI error messages.
        arn: The physical name or ARN of the underlying resource.
        export_name: The name of a CloudFormation Export that can be
            dereferenced to access the resource.

    Returns:
        The ARN string as supplied, or an ImportValue wrapping the Export.

    Raises:
        ValueError: If the supplied data is inconsistent (both inputs
            given, or neither carries a usable value).
    """
    # Reject ambiguous input before anything else.
    if arn and export_name:
        raise ValueError(
            f"The {option_name} may not be specified using both a name/ARN and a CloudFormation Export."
        )

    # Guard-clause returns: whichever single option was supplied wins.
    if export_name:
        return ImportValue(export_name)
    if arn:
        return arn

    # Neither option carried a usable (non-empty) value.
    raise ValueError(
        f"The {option_name} must be specified using either a name/ARN, or a CloudFormation Export."
    )
    def test_hash_is_different_from_hash_of_export_name(self):
        """An ImportValue must not hash like its underlying string."""
        # Setup
        export_name = "some-exported-name"
        import_func = ImportValue(export_name)

        # Verify
        assert hash(export_name) != hash(import_func)
    def test_import_is_not_equal_to_arbitrary_object(self, constructor):
        """An import never equals a non-ImportValue built from the same name."""
        # Setup
        export_name = "some-exported-name"
        import_func = ImportValue(export_name)
        arbitrary = constructor(export_name)

        # Verify symmetry of both comparison operators, plus hashes
        assert import_func != arbitrary
        assert arbitrary != import_func
        assert not (import_func == arbitrary)
        assert not (arbitrary == import_func)
        assert hash(import_func) != hash(arbitrary)
Example #10
0
    def test_should_dereference_cloudformation_imports(self):
        """Export names supplied via CLI become ImportValue references."""
        # Setup
        function_name = "some-function-export"
        role_name = "some-role-export"
        extra = [
            "--function-import", function_name,
            "--role-import", role_name,
        ]

        # Exercise
        with Patchers.create_lambda_invalidation_stack() as mock_invalidation:
            result = self.run_script_with_invalidation_params(extra_args=extra)

        # Verify the script succeeded and each export was dereferenced
        assert result.exit_code == 0
        mock_invalidation.assert_called_with(
            function=ImportValue(function_name),
            dependencies=ANY,
            role=ImportValue(role_name),
        )
    def test_yaml_output(self):
        """A simple import exports using the short-form !ImportValue tag."""
        # Setup
        import_func = ImportValue("SomeStack-export_name")
        obj = SingleAttributeObject(one=import_func)

        # Exercise
        yaml_text = obj.export("yaml")

        # Verify
        expected = dedent("""
            ---
            one: !ImportValue SomeStack-export_name
            """)
        assert yaml_text == expected
    def test_variable_map_can_include_functions(self):
        """A Sub variable map may contain nested intrinsic functions."""
        # Setup
        yaml_dumper = create_refsafe_dumper(None)
        sub_func = Sub(
            "arn:aws:ec2:${AWS::Region}:${account}:vpc/${vpc}",
            account="123456789012",
            vpc=ImportValue("my-vpc-id"),
        )

        # Exercise
        yaml_node = sub_func.as_yaml_node(yaml_dumper)

        # Verify: dig out the node for the `vpc` variable. Each mapping
        # entry is a (key, value) tuple, so index 1 is the value node.
        variable_map_node = yaml_node.value[1]
        vpc_entry = variable_map_node.value[1]
        assert vpc_entry[1].tag == "!ImportValue"
    def test_yaml_output_with_nested_function(self):
        """Nested YAML functions can't both use the ! short form."""
        # Setup
        inner = Sub("${AWS::Region}-SharedLogBucket")
        obj = SingleAttributeObject(one=ImportValue(inner))
        stack = Stack(Resources=dict(SomeResource=obj))
        del stack.Metadata

        # Exercise
        yaml_text = stack.export("yaml")

        # Verify: the outer function falls back to the long Fn:: form
        expected = dedent("""
            ---
            AWSTemplateFormatVersion: '2010-09-09'
            Resources:
              SomeResource:
                one:
                  Fn::ImportValue: !Sub '${AWS::Region}-SharedLogBucket'
            """)
        assert yaml_text == expected