Ejemplo n.º 1
0
 def test_validate_s3(self):
     """Valid S3 property combinations must pass Code.validate()."""
     # S3Bucket + S3Key is the minimal valid S3 code location.
     Code(S3Bucket="bucket", S3Key="key").validate()
     # S3ObjectVersion is optional on top of bucket + key.
     Code(
         S3Bucket="bucket",
         S3Key="key",
         S3ObjectVersion="version"
     ).validate()
Ejemplo n.º 2
0
def lambda_function(**kwargs):
    """Build a troposphere Lambda Function, overriding defaults from kwargs.

    Recognised keys get special handling:
      * ``Layers``: each entry is passed through ``filter_layer()``.
      * ``S3Bucket`` / ``S3Key``: combined into a single ``Code`` property.
      * ``Role``: passed through ``filter_iamrole()``.
      * ``Environment``: accepts a plain dict (wrapped in ``Environment``)
        or an ``Environment`` instance.
    Any other key is set on the Function unchanged.

    :param kwargs: property overrides for the Function resource.
    :return: the configured ``Function`` resource.
    """
    function = Function('LambdaFunction',
                        Code=Code(S3Bucket='replace-me', S3Key='replace-me'),
                        Handler='function.lambda_handler',
                        MemorySize='256',
                        Timeout=30)
    for key in kwargs.keys():
        if key == 'Layers':
            # Normalise every layer reference via the project helper.
            layers = [filter_layer(layer) for layer in kwargs[key]]
            setattr(function, key, layers)
        elif key in ('S3Bucket', 'S3Key'):
            # Combine both keys into one Code property. Fall back to the
            # default placeholder when only one of them was supplied
            # (previously the missing counterpart raised KeyError).
            setattr(function, 'Code',
                    Code(S3Bucket=kwargs.get('S3Bucket', 'replace-me'),
                         S3Key=kwargs.get('S3Key', 'replace-me')))
        elif key == 'Role':
            setattr(function, 'Role', filter_iamrole(kwargs[key]))
        elif key == 'Environment':
            if isinstance(kwargs[key], dict):
                setattr(function, key, Environment(Variables=kwargs[key]))
            elif isinstance(kwargs[key], Environment):
                setattr(function, key, kwargs[key])
        else:
            setattr(function, key, kwargs[key])
    return function
def getLambda(name: str,
              src: Source,
              role: Role,
              stack: str,
              stage: str,
              env_vars: dict,
              config: dict) -> Function:
    """Create a Lambda Function from an S3 code location and an IAM role.

    :param name: base name; combined with *stage* for the function name.
    :param src: (bucket, key) pair locating the deployment package.
    :param role: IAM role whose Arn the function assumes.
    :param stack: stack identifier (unused here, kept for interface parity).
    :param stage: deployment stage suffix appended to the function name.
    :param env_vars: variables exposed to the function at runtime.
    :param config: must contain "Handler"; may override MemorySize/Timeout.
    """
    return Function(
        toAlphanum(name),
        FunctionName="".join([name, stage]),
        Handler=config["Handler"],
        Code=Code(S3Bucket=src[0], S3Key=src[1]),
        Role=GetAtt(role, "Arn"),
        Runtime="python3.6",
        Environment=Environment(Variables=env_vars),
        # Defaults mirror the originals: 128 MB memory, 60 s timeout.
        MemorySize=config.get("MemorySize", 128),
        Timeout=config.get("Timeout", 60),
    )
Ejemplo n.º 4
0
 def test_zip_file(self):
     """An inline ZipFile Code property must render to JSON without errors."""
     lambda_func = Function(
         "AMIIDLookup",
         Handler="index.handler",
         Role=GetAtt("LambdaExecutionRole", "Arn"),
         Code=Code(
             ZipFile=Join(
                 "",
                 [
                     "var response = require('cfn-response');",
                     "exports.handler = function(event, context) {",
                     "  var input = parseInt(event.ResourceProperties.Input);",
                     "  var responseData = {Value: input * 5};",
                     # NOTE: the next three literals have no separating
                     # commas, so implicit concatenation joins them into a
                     # single list element.
                     "  response.send("
                     "    event, context, response.SUCCESS, responseData"
                     "  );",
                     "};",
                 ],
             ),
         ),
         Runtime="nodejs",
         Timeout="25",
     )
     t = Template()
     t.add_resource(lambda_func)
     t.to_json()
Ejemplo n.º 5
0
def gen_postgis_function():
    """Build the PostGIS provisioner custom-resource Lambda function."""

    def private_subnet(index):
        # Pick the index-th entry of the comma-separated private-subnet
        # list exported by the network stack.
        return Select(
            index,
            Split(
                ",",
                ImportValue(
                    Sub("${NetworkName}-network-vpc-PrivateSubnets"))))

    return Function(
        "PostGisProvisionerFunction",
        Code=Code(
            S3Bucket=Ref("BucketName"),
            S3Key=Ref("BucketKey"),
        ),
        FunctionName=Sub("${AWS::StackName}-PostGisProvisioner"),
        Handler="postgis_provisioner.lambda_handler",
        Role=GetAtt("PostgisProvisionerExecutionRole", "Arn"),
        Timeout="60",
        Runtime="python3.6",
        VpcConfig=VPCConfig(
            SecurityGroupIds=[Ref("PostGisProvisionerSg")],
            SubnetIds=[private_subnet(0), private_subnet(1)],
        ),
    )
Ejemplo n.º 6
0
 def test_package_type_image(self):
     """PackageType "Image" with an ImageUri Code must validate cleanly."""
     Function(
         "TestFunction",
         Code=Code(ImageUri="something"),
         PackageType="Image",
         Role=GetAtt("LambdaExecutionRole", "Arn"),
     ).validate()
Ejemplo n.º 7
0
 def test_validate_zip_and_s3(self):
     """ZipFile is mutually exclusive with every S3 property combination."""
     # All non-empty subsets of the three S3 properties.
     s3_props = [
         {
             "S3Bucket": "bucket"
         },
         {
             "S3Key": "key"
         },
         {
             "S3ObjectVersion": "version"
         },
         {
             "S3Bucket": "bucket",
             "S3Key": "key"
         },
         {
             "S3Bucket": "bucket",
             "S3ObjectVersion": "version"
         },
         {
             "S3Key": "key",
             "S3ObjectVersion": "version"
         },
         {
             "S3Bucket": "bucket",
             "S3Key": "key",
             "S3ObjectVersion": "version"
         },
     ]
     for props in s3_props:
         with self.assertRaises(ValueError):
             Code(ZipFile="something", **props).validate()
    def __init__(self, prefix: str, lambda_under_deployment: Function) -> None:
        """
        Constructor.

        :param prefix: A prefix for deployment lambda resource names.
        :param lambda_under_deployment: An AWS Lambda function to execute deployments against.
        """
        # Execution role assumable by the Lambda service: allows logging,
        # updating any function's code, and full S3 access (presumably to
        # fetch the deployment artifact — confirm against the template code).
        self.lambda_role = Role(
            prefix + "DeploymentLambdaRole",
            Path="/",
            Policies=[Policy(
                PolicyName=prefix + "DeploymentLambdaRole",
                PolicyDocument={
                    "Version": "2012-10-17",
                    "Statement": [{
                        "Action": ["logs:*"],
                        "Resource": "arn:aws:logs:*:*:*",
                        "Effect": "Allow"
                    }, {
                        "Action": ["lambda:UpdateFunctionCode"],
                        "Resource": "*",
                        "Effect": "Allow"
                    }, {
                        "Action": ["s3:*"],
                        "Resource": "*",
                        "Effect": "Allow"
                    }]
                })],
            AssumeRolePolicyDocument={"Version": "2012-10-17", "Statement": [
                {
                    "Action": ["sts:AssumeRole"],
                    "Effect": "Allow",
                    "Principal": {
                        "Service": [
                            "lambda.amazonaws.com",
                        ]
                    }
                }
            ]},
        )

        # The deployment Lambda itself: code is inlined from a template file
        # and the target function's name is passed via environment variable.
        self.function = Function(
            prefix + "DeploymentLambda",
            Code=Code(ZipFile=self.__read_template()),
            Handler='index.handler',
            Role=GetAtt(self.lambda_role, "Arn"),
            Runtime='python3.6',
            MemorySize='128',
            FunctionName=prefix + 'DeploymentLambda',
            Timeout='10',
            Environment=Environment(
                Variables={
                    'LAMBDA_FUNCTION_NAME': Ref(lambda_under_deployment)
                }
            ),
            Description=(
                f'Deployment lambda which updates lambda under deployment function code '
                f'from an output from a ci/cd pipeline for {prefix.lower()}.'
            )
        )
Ejemplo n.º 9
0
 def _setup_firehose_custom_resource(self):
     """Create the role, policy, and Lambda backing the Firehose
     CloudFormation custom resource."""
     # Setup the FirehoseLambda CloudFormation Custom Resource
     # Execution role assumable only by the Lambda service.
     self.FirehoseLambdaCFExecRole = self.add_resource(
         Role(
             "FirehoseLambdaCFRole",
             AssumeRolePolicyDocument=Policy(
                 Version="2012-10-17",
                 Statement=[
                     Statement(Effect=Allow,
                               Action=[AssumeRole],
                               Principal=Principal("Service",
                                                   "lambda.amazonaws.com"))
                 ]),
             Path="/",
         ))
     # Grants CloudWatch Logs access plus Firehose stream lifecycle calls.
     self.FirehoseLambdaPolicy = self.add_resource(
         PolicyType("FirehoseCFPolicy",
                    PolicyName="FirehoseLambdaCFRole",
                    PolicyDocument=Policy(
                        Version="2012-10-17",
                        Statement=[
                            Statement(Effect=Allow,
                                      Action=[
                                          Action("logs", "CreateLogGroup"),
                                          Action("logs", "CreateLogStream"),
                                          Action("logs", "PutLogEvents"),
                                      ],
                                      Resource=["arn:aws:logs:*:*:*"]),
                            Statement(Effect=Allow,
                                      Action=[
                                          Action("firehose",
                                                 "CreateDeliveryStream"),
                                          Action("firehose",
                                                 "DeleteDeliveryStream"),
                                          Action("firehose",
                                                 "ListDeliveryStreams"),
                                          Action("firehose",
                                                 "DescribeDeliveryStream"),
                                          Action("firehose",
                                                 "UpdateDestination"),
                                      ],
                                      Resource=["*"])
                        ]),
                    Roles=[Ref(self.FirehoseLambdaCFExecRole)],
                    DependsOn="FirehoseLambdaCFRole"))
     # The custom-resource Lambda; code comes from a shared S3 bucket.
     # DependsOn ensures the policy is attached before first invocation.
     self.FirehoseCFCustomResource = self.add_resource(
         Function(
             "FirehoseCustomResource",
             Description=(
                 "Creates, updates, and deletes Firehose delivery streams"),
             Runtime="python2.7",
             Timeout=300,
             Handler="lambda_function.lambda_handler",
             Role=GetAtt(self.FirehoseLambdaCFExecRole, "Arn"),
             Code=Code(
                 S3Bucket="cloudformation-custom-resources",
                 S3Key="firehose_lambda.zip",
             ),
             DependsOn="FirehoseCFPolicy"))
Ejemplo n.º 10
0
 def test_package_type_invalid(self):
     """An unknown PackageType value must be rejected by validate()."""
     with self.assertRaises(ValueError):
         Function(
             "TestFunction",
             Code=Code(ImageUri="something"),
             PackageType="Invalid",
             Role=GetAtt("LambdaExecutionRole", "Arn"),
         ).validate()
Ejemplo n.º 11
0
def gen_lambda_function():
    """Build the CloudFormation-cleanup Lambda resource.

    Code is inlined from get_code(); memory and timeout are wired to the
    stack's Lambda* parameters.
    """
    return Function(
        "CloudformationCleanupFunction",
        Code=Code(ZipFile=Join("", get_code())),
        Handler="index.lambda_handler",
        Role=GetAtt("LambdaExecutionRole", "Arn"),
        Runtime="python3.6",
        MemorySize=Ref(parameters['LambdaMemorySize']),
        Timeout=Ref(parameters['LambdaTimeout']),
        Tags=gen_tags("CloudFormation-Cleaner"),
    )
Ejemplo n.º 12
0
 def test_check_zip_file(self):
     """check_zip_file accepts inline code up to 4096 chars and rejects longer."""
     positive_tests = [
         "a" * 4096,
         Join("", ["a" * 4096]),
         Join("", ["a", 10]),
         # Ref contributes unknown length and must not trip the check.
         Join("", ["a" * 4096, Ref("EmptyParameter")]),
         # 2047 + 2 (delimiter) + 2047 = 4096, exactly at the limit.
         Join("ab", ["a" * 2047, "a" * 2047]),
         GetAtt("foo", "bar"),
     ]
     for z in positive_tests:
         Code.check_zip_file(z)
     negative_tests = [
         "a" * 4097,
         Join("", ["a" * 4097]),
         Join("", ["a" * 4097, Ref("EmptyParameter")]),
         # 2047 + 3 + 2047 = 4097, one over the limit.
         Join("abc", ["a" * 2047, "a" * 2047]),
     ]
     for z in negative_tests:
         with self.assertRaises(ValueError):
             Code.check_zip_file(z)
Ejemplo n.º 13
0
 def test_validate_s3_missing_required(self):
     """Any S3 combination lacking S3Bucket or S3Key must fail validate()."""
     s3_props = [
         {"S3Bucket": "bucket"},
         {"S3Key": "key"},
         {"S3ObjectVersion": "version"},
         {"S3Bucket": "bucket", "S3ObjectVersion": "version"},
         {"S3Key": "key", "S3ObjectVersion": "version"},
     ]
     for props in s3_props:
         with self.assertRaises(ValueError):
             Code(**props).validate()
Ejemplo n.º 14
0
 def test_check_zip_file(self):
     """Inline code up to the 4096-character limit passes; anything longer raises."""
     accepted = [
         "a" * 4096,
         Join("", ["a" * 4096]),
         Join("", ["a", 10]),
         Join("", ["a" * 4096, Ref("EmptyParameter")]),
         Join("ab", ["a" * 2047, "a" * 2047]),
         GetAtt("foo", "bar"),
     ]
     for candidate in accepted:
         Code.check_zip_file(candidate)
     rejected = [
         "a" * 4097,
         Join("", ["a" * 4097]),
         Join("", ["a" * 4097, Ref("EmptyParameter")]),
         Join("abc", ["a" * 2047, "a" * 2047]),
     ]
     for candidate in rejected:
         with self.assertRaises(ValueError):
             Code.check_zip_file(candidate)
Ejemplo n.º 15
0
 def test_check_zip_file(self):
     """check_zip_file enforces the 4096-character inline-code limit."""
     positive_tests = [
         'a'*4096,
         Join('', ['a'*4096]),
         Join('', ['a', 10]),
         Join('', ['a'*4096, Ref('EmptyParameter')]),
         # 2047 + 2 (delimiter) + 2047 = 4096, exactly at the limit.
         Join('ab', ['a'*2047, 'a'*2047]),
         GetAtt('foo', 'bar'),
     ]
     for z in positive_tests:
         Code.check_zip_file(z)
     negative_tests = [
         'a'*4097,
         Join('', ['a'*4097]),
         Join('', ['a'*4097, Ref('EmptyParameter')]),
         # 2047 + 3 + 2047 = 4097, one over the limit.
         Join('abc', ['a'*2047, 'a'*2047]),
     ]
     for z in negative_tests:
         with self.assertRaises(ValueError):
             Code.check_zip_file(z)
Ejemplo n.º 16
0
    def build_function(self):
        """Expose Name/Role as parameters, build the Function from
        sceptre_user_data, and output its ARN."""
        template = self.template
        name_param = template.add_parameter(Parameter("Name", Type="String"))
        role_param = template.add_parameter(Parameter("Role", Type="String"))

        # sceptre_user_data carries the remaining Function properties;
        # its "Code" entry is a dict of Code keyword arguments.
        props = self.sceptre_user_data
        props["FunctionName"] = Ref(name_param)
        props["Role"] = Ref(role_param)
        props["Code"] = Code(**props["Code"])

        fn = template.add_resource(Function("Function", **props))
        template.add_output(Output("Arn", Value=GetAtt(fn, "Arn")))
Ejemplo n.º 17
0
Archivo: s3.py Proyecto: nektos/stacker
def _upload_file(s3_client, path, bucket, prefix=None, acl='private'):
    """Upload *path* to S3 under a content-hash key and return a Code object.

    The key embeds the file's hash, so an unchanged file that already exists
    in the bucket is not re-uploaded.

    :param s3_client: boto3 S3 client used for head/put calls.
    :param path: local path of the file to upload.
    :param bucket: destination bucket name.
    :param prefix: optional key prefix joined in front of the file name.
    :param acl: canned ACL applied to the uploaded object.
    :return: troposphere Code pointing at the uploaded object.
    """
    file_name, file_ext = os.path.splitext(os.path.basename(path))
    file_hash = _hash_file(path)
    # `prefix or ""` keeps the default prefix=None working (os.path.join
    # raised TypeError on None). NOTE(review): os.path.join uses the local
    # OS separator; on Windows this would produce backslashes in S3 keys.
    key = os.path.join(prefix or "", f"{file_name}-{file_hash}{file_ext}")

    if _head_object(s3_client, bucket, key):
        logger.debug('object %s already exists, not uploading', key)
    else:
        logger.info(f"upload '{path}' to s3://{bucket}/{key}")
        with open(path, 'rb') as f:
            s3_client.put_object(Bucket=bucket, Key=key, Body=f, ACL=acl)
    return Code(S3Bucket=bucket, S3Key=key)
Ejemplo n.º 18
0
 def _setup_s3writer_custom_resource(self):
     """Create the role, policy, and Lambda backing the S3Writer
     CloudFormation custom resource."""
     # Execution role assumable only by the Lambda service.
     self.S3WriterLambdaCFExecRole = self.add_resource(
         Role(
             "S3WriterLambdaCFRole",
             AssumeRolePolicyDocument=Policy(
                 Version="2012-10-17",
                 Statement=[
                     Statement(Effect=Allow,
                               Action=[AssumeRole],
                               Principal=Principal("Service",
                                                   "lambda.amazonaws.com"))
                 ]),
             Path="/",
         ))
     # Grants CloudWatch Logs access plus broad S3 object/bucket rights.
     self.S3WriterCFPolicy = self.add_resource(
         PolicyType("S3WriterCFPolicy",
                    PolicyName="S3WriterLambdaCFRole",
                    PolicyDocument=Policy(
                        Version="2012-10-17",
                        Statement=[
                            Statement(Effect=Allow,
                                      Action=[
                                          Action("logs", "CreateLogGroup"),
                                          Action("logs", "CreateLogStream"),
                                          Action("logs", "PutLogEvents"),
                                      ],
                                      Resource=["arn:aws:logs:*:*:*"]),
                            Statement(Effect=Allow,
                                      Action=[
                                          s3.DeleteObject,
                                          s3.ListBucket,
                                          s3.PutObject,
                                          s3.GetObject,
                                      ],
                                      Resource=["*"])
                        ]),
                    Roles=[Ref(self.S3WriterLambdaCFExecRole)],
                    DependsOn="S3WriterLambdaCFRole"))
     # The custom-resource Lambda; code comes from a shared S3 bucket.
     # DependsOn ensures the policy is attached before first invocation.
     self.S3WriterCFCustomResource = self.add_resource(
         Function(
             "S3WriterCustomResource",
             Description=(
                 "Creates, updates, and deletes S3 Files with custom content"
             ),
             Runtime="python2.7",
             Timeout=300,
             Handler="lambda_function.lambda_handler",
             Role=GetAtt(self.S3WriterLambdaCFExecRole, "Arn"),
             Code=Code(
                 S3Bucket="cloudformation-custom-resources",
                 S3Key="s3writer_lambda.zip",
             ),
             DependsOn="S3WriterCFPolicy"))
    def add_lambda(self, name, s3_bucket, role):
        """Add a versioned Lambda plus its parameters and an API Gateway
        invocation-URI output to the template.

        :param name: function name; also prefixes parameter/output names.
        :param s3_bucket: template parameter holding the code bucket.
        :param role: IAM role resource whose Arn the function assumes.
        """
        # S3 key and object version are supplied per deployment.
        s3_key = self.t.add_parameter(Parameter(
            f"{name}KeyS3",
            Description=f"S3 key for lambda function: {name}",
            Type="String"
        ))

        s3_version = self.t.add_parameter(Parameter(
            f"{name}ObjectVersionS3",
            Description=f"S3 object version ID for lambda function: {name}",
            Type="String"
        ))

        function = self.t.add_resource(Function(
            f"{name}Lambda",
            FunctionName=name,
            Handler="index.handler",
            Runtime="python3.6",
            Role=GetAtt(role, "Arn"),
            Code=Code(
                S3Bucket=Ref(s3_bucket),
                S3Key=Ref(s3_key),
                S3ObjectVersion=Ref(s3_version)
            )
        ))

        # NOTE(review): CustomLambdaVersion presumably publishes a Lambda
        # version tied to the S3 object version — confirm against the
        # custom-resource handler's implementation.
        version = self.t.add_resource(CustomLambdaVersion(
            f"{name}LambdaVersion",
            ServiceToken=GetAtt(self.custom_lambda_version_lambda, "Arn"),
            FunctionName=Ref(function),
            S3ObjectVersion=Ref(s3_version)
        ))

        # API Gateway integration URI pinned to the published version.
        uri = Join('', [
            'arn:aws:apigateway:',
            Ref('AWS::Region'),
            ':lambda:path/2015-03-31/functions/arn:aws:lambda:',
            Ref('AWS::Region'),
            ':',
            Ref('AWS::AccountId'),
            ':function:',
            Ref(function),
            ':',
            GetAtt(version, "Version"),
            '/invocations',
        ])

        self.t.add_output(Output(
            f"{name}LambdaURI",
            Value=uri,
            Description=f"{name}LambdaURI"
        ))
Ejemplo n.º 20
0
    def __init__(self, sceptre_user_data):
        """Build a template with a Lambda Function driven by sceptre user data."""
        template = Template()
        self.template = template

        name_param = template.add_parameter(Parameter("Name", Type="String"))
        role_param = template.add_parameter(Parameter("Role", Type="String"))

        # The user-data mapping supplies the remaining Function properties;
        # its "Code" entry is a dict of Code keyword arguments.
        sceptre_user_data["FunctionName"] = Ref(name_param)
        sceptre_user_data["Role"] = Ref(role_param)
        sceptre_user_data["Code"] = Code(**sceptre_user_data["Code"])

        fn = template.add_resource(Function("Function", **sceptre_user_data))
        template.add_output(Output("Arn", Value=GetAtt(fn, "Arn")))
    def add_lambda_function(self):
        """Add the Lambda Function described by sceptre_user_data.

        Code comes either from a local file ("CodeFilepath") or from S3
        ("CodeInS3"). An optional "Environment" dict and "InVpc" flag are
        translated into their troposphere property objects.

        Raises:
            ValueError: if FunctionProperties carries neither CodeFilepath
                nor CodeInS3.
        """
        if "CodeFilepath" in self.sceptre_user_data["FunctionProperties"]:
            file_path = self.sceptre_user_data["FunctionProperties"].pop(
                "CodeFilepath")
            code = Code(ZipFile=Join("", code_from_file(file_path)))
        elif "CodeInS3" in self.sceptre_user_data["FunctionProperties"]:
            s3_code = self.sceptre_user_data["FunctionProperties"].pop(
                "CodeInS3")
            code = Code(**s3_code)
        else:
            # Previously fell through and crashed later with a NameError
            # on `code`; fail fast with a clear message instead.
            raise ValueError(
                "FunctionProperties must include CodeFilepath or CodeInS3")
        function_kwargs = {
            "FunctionName": Ref(self.name),
            "Description": Ref(self.description),
            "Handler": Ref(self.handler),
            "Role": Ref(self.iam_role),
            "Runtime": Ref(self.runtime),
            "Code": code
        }
        function_kwargs.update(self.sceptre_user_data["FunctionProperties"])
        if "Environment" in function_kwargs:
            environment_kwargs = {
                "Variables": function_kwargs.pop("Environment")
            }
            environment = Environment(**environment_kwargs)
            function_kwargs.update({"Environment": environment})
        # (stray debug `print` expression removed here)
        if function_kwargs.pop("InVpc", False):
            vpc_config_kwargs = {
                "SecurityGroupIds": Ref(self.sg_ids),
                "SubnetIds": Ref(self.subnet_ids)
            }
            vpc_config = VPCConfig(**vpc_config_kwargs)
            function_kwargs.update({"VpcConfig": vpc_config})

        self.function = self.template.add_resource(
            Function("LambdaFunction", **function_kwargs))
        self.template.add_output(
            Output("LambdaArn",
                   Description="The ARN of the Lambda Function",
                   Value=GetAtt(self.function, "Arn")))
Ejemplo n.º 22
0
 def add_lambda_function(self):
     """Add the spot-instance-termination handler Lambda to the template.

     The handler source is inlined from a local file into the resource's
     ZipFile property.
     """
     # Use a context manager so the file handle is closed (the original
     # opened the file and never closed it).
     with open("templates/lambda_code/lambda_function.py", "r") as lambda_code:
         handler_source = lambda_code.read()
     self.lambda_function = self.template.add_resource(
         Function(
             "SpotTerminatingSceptre",
             FunctionName="Spot_Terminate_Sceptre",
             # "trigers" typo fixed in the resource description.
             Description=
             "Function that triggers when a spot instance is marked for termination due to outbid",
             Code=Code(ZipFile=handler_source),
             Handler="index.lambda_handler",
             Role=GetAtt("LambdaExecutionRolemARC", "Arn"),
             Runtime="python3.6",
         ))
Ejemplo n.º 23
0
def _upload_code(
    s3_conn: S3Client,
    bucket: str,
    prefix: str,
    name: str,
    contents: Union[bytes, str],
    content_hash: str,
    payload_acl: PayloadAclTypeDef,
) -> Code:
    """Upload a ZIP payload to S3 and return a Code object pointing at it.

    The object key is derived from the payload checksum, so a payload whose
    key already exists in the bucket is not uploaded again.

    Args:
        s3_conn: S3 connection to use for operations.
        bucket: name of the destination bucket.
        prefix: S3 prefix prepended to the constructed key name.
        name: desired Lambda function name, embedded in the key.
        contents: payload as bytes or text.
        content_hash: md5 hash of the contents, embedded in the key.
        payload_acl: canned S3 ACL applied to the uploaded payload.

    Returns:
        CloudFormation Lambda Code object for the payload's S3 location.

    Raises:
        botocore.exceptions.ClientError: any error from boto3 is passed
            through.

    """
    LOGGER.debug("ZIP hash: %s", content_hash)
    key = f"{prefix}lambda-{name}-{content_hash}.zip"

    if _head_object(s3_conn, bucket, key):
        # Checksum-addressed key already present: nothing to do.
        LOGGER.info("object already exists; not uploading: %s", key)
        return Code(S3Bucket=bucket, S3Key=key)

    LOGGER.info("uploading object: %s", key)
    body = contents if isinstance(contents, bytes) else contents.encode()
    s3_conn.put_object(
        Bucket=bucket,
        Key=key,
        Body=body,
        ContentType="application/zip",
        ACL=payload_acl,
    )
    return Code(S3Bucket=bucket, S3Key=key)
Ejemplo n.º 24
0
    def add_resources(self):
        """Add the runner-unregister Lambda and its execution role."""
        # Role assumable by Lambda; managed policies cover SSM and
        # Auto Scaling, the inline policy covers CloudWatch Logs.
        self.lambda_execution_role = self.template.add_resource(
            Role(
                "LambdaExecutionRole",
                Path="/",
                ManagedPolicyArns=[
                    "arn:aws:iam::aws:policy/AmazonSSMFullAccess",
                    "arn:aws:iam::aws:policy/AutoScalingFullAccess",
                ],
                Policies=[
                    Policy(
                        PolicyName="root",
                        PolicyDocument={
                            "Version":
                            "2012-10-17",
                            "Statement": [{
                                "Action": ["logs:*"],
                                "Resource": "arn:aws:logs:*:*:*",
                                "Effect": "Allow",
                            }],
                        },
                    )
                ],
                AssumeRolePolicyDocument={
                    "Version":
                    "2012-10-17",
                    "Statement": [{
                        "Action": ["sts:AssumeRole"],
                        "Effect": "Allow",
                        "Principal": {
                            "Service": ["lambda.amazonaws.com"]
                        },
                    }],
                },
            ))

        # Handler source is inlined from a local file into ZipFile.
        with open("lambda_functions/unregister_runner.py", "r") as f:
            self.runner_unregister_code = f.read()

        self.runner_unregister_function = self.template.add_resource(
            Function(
                "RunnerUnregisterFunction",
                Code=Code(ZipFile=self.runner_unregister_code),
                Handler=Ref(self.runner_lambda_handler),
                Role=GetAtt("LambdaExecutionRole", "Arn"),
                Runtime=Ref(self.runner_lambda_runtime),
                MemorySize="128",
                Timeout="30",
            ))
Ejemplo n.º 25
0
 def test_create_template(self):
     """The Function blueprint renders the expected template from variables."""
     blueprint = Function('test_aws_lambda_Function', self.ctx)
     blueprint.resolve_variables(
         [
             Variable(
                 "Code",
                 Code(S3Bucket="test_bucket", S3Key="code_key")
             ),
             Variable("Description", "Test function."),
             Variable("Environment", {"TEST_NAME": "test_value"}),
             Variable("Runtime", "python2.7"),
         ]
     )
     blueprint.create_template()
     # Compares the rendered template against the stored fixture.
     self.assertRenderedBlueprint(blueprint)
Ejemplo n.º 26
0
 def test_exclusive(self):
     """An S3-based Code property must render to JSON without errors."""
     lambda_func = Function(
         "AMIIDLookup",
         Handler="index.handler",
         Role=GetAtt("LambdaExecutionRole", "Arn"),
         Code=Code(
             S3Bucket="lambda-functions",
             S3Key="amilookup.zip",
         ),
         Runtime="nodejs",
         Timeout="25",
     )
     t = Template()
     t.add_resource(lambda_func)
     t.to_json()
Ejemplo n.º 27
0
 def test_package_type_zip(self):
     """PackageType "Zip" with an inline ZipFile Code must validate cleanly."""
     Function(
         "TestFunction",
         # Adjacent string literals without commas are implicitly
         # concatenated into single list elements.
         Code=Code(ZipFile=Join("", [
             "var response = require('cfn-response');",
             "exports.handler = function(event, context) {",
             "  var input = parseInt(event.ResourceProperties.Input);",
             "  var responseData = {Value: input * 5};", "  response.send("
             "    event, context, response.SUCCESS, responseData"
             "  );", "};"
         ]), ),
         Handler="index.handler",
         PackageType="Zip",
         Role=GetAtt("LambdaExecutionRole", "Arn"),
         Runtime="nodejs",
     ).validate()
Ejemplo n.º 28
0
 def setUp(self):
     """Prepare shared blueprint variables and a test context."""
     self.code = Code(S3Bucket="test_bucket", S3Key="code_key")
     # Baseline variable set reused across the test cases.
     self.common_variables = {
         "Code": self.code,
         "DeadLetterArn": "arn:aws:sqs:us-east-1:12345:dlq",
         "Description": "Test function.",
         "Environment": {
             "Env1": "Value1"
         },
         "Handler": "handler",
         "KmsKeyArn": "arn:aws:kms:us-east-1:12345:key",
         "MemorySize": 128,
         "Runtime": "python2.7",
         "Timeout": 3,
     }
     self.ctx = Context(config=Config({'namespace': 'test'}))
    def build_function(self):
        """Wire Name/Role parameters and S3-sourced Code into a Function."""
        name_param = self.template.add_parameter(
            Parameter("Name", Type="String"))
        role_param = self.template.add_parameter(
            Parameter("Role", Type="String"))

        props = self.sceptre_user_data
        props["FunctionName"] = Ref(name_param)
        props["Role"] = Ref(role_param)
        # The S3_* entries are consumed here and replaced by a single
        # Code property; the rest of the mapping passes straight through.
        bucket = props.pop("S3_Bucket")
        key = props.pop("S3_Key")
        version = props.pop("S3_Version")
        props["Code"] = Code(
            S3Bucket=bucket, S3Key=key, S3ObjectVersion=version)

        fn = self.template.add_resource(Function("Function", **props))
        self.template.add_output(Output("Arn", Value=GetAtt(fn, "Arn")))
Ejemplo n.º 30
0
 def create_lambda_function(self):
   """Create the execution role and the DDB-Streams→Elasticsearch Lambda."""
   self.create_lambda_role()
   t = self.template
   self.lambda_fn = t.add_resource(Function(
     "Function",
     Code=Code(S3Bucket="js-test-buckett", S3Key="lambda_code.zip"),
     Description="Function that streams data from DDB Streams to ElasticSearch",
     Environment=Environment(
       Variables={
         # FIXME: fill in the real endpoint. The original had an unquoted
         # placeholder (`ES ENDPOINT HERE`), which was a SyntaxError.
         "ES_ENDPOINT": "ES_ENDPOINT_HERE",
         "ES_INDEX": "movies",
         "ES_DOCTYPE": "movie"
     }),
     Handler="lambda_code.handler",
     Role=GetAtt(self.LambdaExecutionRole, "Arn"),
     Runtime="nodejs6.10"
   ))
Ejemplo n.º 31
0
    def test_troposphere(self, cfngin_context):
        """Test with troposphere object like returned from lambda hook."""
        bucket = 'test-bucket'
        s3_key = 'lambda_functions/my_function'
        # Store a troposphere Code object as hook data, then resolve its
        # attributes through the hook_data lookup.
        cfngin_context.set_hook_data(
            'lambda', {'my_function': Code(S3Bucket=bucket, S3Key=s3_key)})
        var_bucket = Variable('test', '${hook_data lambda.my_function::'
                              'load=troposphere,get=S3Bucket}',
                              variable_type='cfngin')
        var_key = Variable('test',
                           '${hook_data lambda.my_function::get=S3Key}',
                           variable_type='cfngin')
        var_bucket.resolve(cfngin_context)
        var_key.resolve(cfngin_context)

        assert var_bucket.value == bucket
        assert var_key.value == s3_key