def upload_lambda_functions(context, provider, **kwargs):
    """Builds Lambda payloads from user configuration and uploads them to S3.

    Constructs ZIP archives containing files matching specified patterns for
    each function, uploads the result to Amazon S3, then stores objects (of
    type :class:`troposphere.awslambda.Code`) in the context's hook data,
    ready to be referenced in blueprints.

    Configuration consists of some global options, and a dictionary of
    function specifications. In the specifications, each key indicates the
    name of the function (used for generating names for artifacts), and the
    value determines what files to include in the ZIP (see more details
    below).

    Payloads are uploaded to either a custom bucket or stacker's default
    bucket, with the key containing its checksum, to allow repeated uploads
    to be skipped in subsequent runs.

    The configuration settings are documented as keyword arguments below.

    Keyword Arguments:
        bucket (str, optional): Custom bucket to upload functions to.
            Omitting it will cause the default stacker bucket to be used.

        bucket_region (str, optional): The region in which the bucket should
            exist. If not given, the region will either be that of the
            global `stacker_bucket_region` setting, or else the region in
            use by the provider.

        prefix (str, optional): S3 key prefix to prepend to the uploaded
            zip name.

        follow_symlinks (bool, optional): Determines whether symlinks should
            be followed and included with the zip artifact. Default: False

        payload_acl (str, optional): The canned S3 object ACL to be applied
            to the uploaded payload. Default: private

        functions (dict): Configurations of desired payloads to build. Keys
            correspond to function names, used to derive key names for the
            payload. Each value should itself be a dictionary, with the
            following data:

            * path (str): Base directory of the Lambda function payload
              content. If it is not an absolute path, it will be considered
              relative to the directory containing the stacker configuration
              file in use.

              Files in this directory will be added to the payload ZIP,
              according to the include and exclude patterns. If no patterns
              are provided, all files in this directory (respecting default
              exclusions) will be used.

              Files are stored in the archive with path names relative to
              this directory. So, for example, all the files contained
              directly under this directory will be added to the root of
              the ZIP file.

            * include (str or list[str], optional): Pattern or list of
              patterns of files to include in the payload. If provided,
              only files that match these patterns will be included in the
              payload.

              Omitting it is equivalent to accepting all files that are not
              otherwise excluded.

            * exclude (str or list[str], optional): Pattern or list of
              patterns of files to exclude from the payload. If provided,
              any files that match will be ignored, regardless of whether
              they match an inclusion pattern.

              Commonly ignored files are already excluded by default, such
              as ``.git``, ``.svn``, ``__pycache__``, ``*.pyc``,
              ``.gitignore``, etc.

    Examples:
        .. Hook configuration.
        .. code-block:: yaml

            pre_build:
              - path: stacker.hooks.aws_lambda.upload_lambda_functions
                required: true
                enabled: true
                data_key: lambda
                args:
                  bucket: custom-bucket
                  follow_symlinks: true
                  prefix: cloudformation-custom-resources/
                  payload_acl: authenticated-read
                  functions:
                    MyFunction:
                      path: ./lambda_functions
                      include:
                        - '*.py'
                        - '*.txt'
                      exclude:
                        - '*.pyc'
                        - test/

        .. Blueprint usage
        .. code-block:: python

            from troposphere.awslambda import Function
            from stacker.blueprints.base import Blueprint

            class LambdaBlueprint(Blueprint):
                def create_template(self):
                    code = self.context.hook_data['lambda']['MyFunction']

                    self.template.add_resource(
                        Function(
                            'MyFunction',
                            Code=code,
                            Handler='my_function.handler',
                            Role='...',
                            Runtime='python2.7'
                        )
                    )
    """
    custom_bucket = kwargs.get('bucket')
    if not custom_bucket:
        bucket_name = context.bucket_name
        logger.info("lambda: using default bucket from stacker: %s",
                    bucket_name)
    else:
        bucket_name = custom_bucket
        logger.info("lambda: using custom bucket: %s", bucket_name)

    custom_bucket_region = kwargs.get("bucket_region")
    if not custom_bucket and custom_bucket_region:
        raise ValueError("Cannot specify `bucket_region` without specifying "
                         "`bucket`.")

    bucket_region = select_bucket_region(
        custom_bucket,
        custom_bucket_region,
        context.config.stacker_bucket_region,
        provider.region
    )

    # Check if we should walk / follow symlinks
    follow_symlinks = kwargs.get('follow_symlinks', False)
    if not isinstance(follow_symlinks, bool):
        raise ValueError('follow_symlinks option must be a boolean')

    # Check for S3 object acl. Valid values from:
    # https://docs.aws.amazon.com/AmazonS3/latest/dev/acl-overview.html#canned-acl
    payload_acl = kwargs.get('payload_acl', 'private')

    # Always use the global client for s3
    session = get_session(bucket_region)
    s3_client = session.client('s3')

    ensure_s3_bucket(s3_client, bucket_name, bucket_region)

    prefix = kwargs.get('prefix', '')

    results = {}
    for name, options in kwargs['functions'].items():
        results[name] = _upload_function(s3_client, bucket_name, prefix,
                                         name, options, follow_symlinks,
                                         payload_acl)

    return results
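# A minimal sketch of invoking the hook directly from Python rather than via
# the stacker config, assuming ``context`` and ``provider`` objects taken
# from an existing stacker run; the bucket, prefix, and function name below
# are placeholders for illustration only:
#
#   results = upload_lambda_functions(
#       context, provider,
#       bucket='custom-bucket',
#       prefix='cloudformation-custom-resources/',
#       functions={
#           'MyFunction': {
#               'path': './lambda_functions',
#               'include': ['*.py'],
#               'exclude': ['*.pyc'],
#           },
#       },
#   )
#   code = results['MyFunction']  # troposphere.awslambda.Code object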
def ensure_cfn_bucket(self):
    """Ensure the CloudFormation bucket used to store templates exists."""
    if self.bucket_name:
        ensure_s3_bucket(self.s3_conn, self.bucket_name,
                         self.bucket_region)