def main():
    """Deploy the hello-world Lambda behind an API Gateway and let the gateway invoke it."""
    # Lambda packaged from the local ./lambda directory.
    fn = lambda_.Function(
        "hello_world",
        runtime="python3.7",
        role=iam.lambda_role.arn,
        description="pulumi lambda hello world",
        handler="main.handler",
        code=pulumi.AssetArchive({".": pulumi.FileArchive("./lambda")}),
    )

    # REST API fronting the function.
    gateway = apigw.APIGateway("hello_world", fn).build()

    # Grant API Gateway permission to invoke the Lambda on any stage/route.
    lambda_.Permission(
        "hello_world",
        function=fn.name,
        action="lambda:InvokeFunction",
        principal="apigateway.amazonaws.com",
        source_arn=gateway.execution_arn.apply(lambda arn: f"{arn}/*/*"),
    )
def _deserialize_archive(prop: Dict[str, Any]) -> pulumi.Archive:
    """Reconstruct a pulumi Archive from its unmarshaled wire-format dict.

    The dict carries exactly one of three markers:
      * "assets" -> an AssetArchive whose values are themselves serialized
        assets/archives (deserialized recursively via _deserialize_property);
      * "path"   -> a FileArchive referencing a local path;
      * "uri"    -> a RemoteArchive referencing a URI.

    Raises:
        AssertionError: if an AssetArchive entry does not deserialize to an
            Asset/Archive, or if none of the markers is present.
    """
    if "assets" in prop:
        assets: Dict[str, Union[pulumi.Asset, pulumi.Archive]] = {}
        # Iterate items directly instead of key-then-index lookups.
        for key, raw in prop["assets"].items():
            a = _deserialize_property(raw)
            # Idiomatic single isinstance with a tuple of types.
            if not isinstance(a, (pulumi.Asset, pulumi.Archive)):
                raise AssertionError(
                    "Expected an AssetArchive's assets to be unmarshaled Asset or Archive objects"
                )
            assets[key] = a
        return pulumi.AssetArchive(assets)
    if "path" in prop:
        return pulumi.FileArchive(prop["path"])
    if "uri" in prop:
        return pulumi.RemoteArchive(prop["uri"])
    raise AssertionError(
        "Invalid archive encountered when unmarshaling resource property")
def __init__(self,
             name,
             scripts_bucket: s3.Bucket = None,
             scripts_version: str = None,
             datalake_bucket: s3.Bucket = None,
             dist_dir: str = None,
             tags: Dict[str, str] = None,
             opts=None):
    """Upload the ETL job distribution (dist files + packages) to the scripts bucket.

    Args:
        name: logical name for this component resource.
        scripts_bucket: bucket receiving the zipped distribution.
        scripts_version: version label baked into the upload prefix and tags.
        datalake_bucket: accepted for interface compatibility (not used here).
        dist_dir: local directory whose files are collected for upload.
        tags: base tags; 'hca:datalake_version' is added on top.
        opts: standard pulumi resource options.
    """
    super().__init__('hca:EtlJobDistribution', name, None, opts)
    self.dist_dir = dist_dir
    self.scripts_bucket = scripts_bucket
    # BUG FIX: the original condition was inverted
    # (`tags if tags is None else {}`), which discarded caller-supplied tags
    # and left None (crashing `.copy()` below) when tags were omitted.
    tags = tags if tags is not None else {}

    # upload scripts to working/ for versioning/archiving
    self.versioned_scripts_prefix = f"working/{scripts_version}/{pulumi.get_project()}/{pulumi.get_stack()}"
    # jobs will point to scripts/ to keep consistent paths
    self.scripts_prefix = f"scripts/{pulumi.get_project()}/{pulumi.get_stack()}"

    # identify all files in dist/ for upload (plus any packages)
    distfiles = [
        f for f in glob.glob(self.dist_dir + '/*') if os.path.isfile(f)
    ] + self.get_packages()
    print(f"found dist files to dump to s3 => {distfiles}")

    merged_tags = tags.copy()
    merged_tags.update({'hca:datalake_version': scripts_version})

    # Single zip object containing every dist file, keyed by basename,
    # stored under the versioned working/ prefix with a timestamped name.
    self.distribution_obj = s3.BucketObject(
        'archive',
        bucket=self.scripts_bucket,
        source=pulumi.AssetArchive(
            {os.path.basename(f): pulumi.FileAsset(f) for f in distfiles}),
        key=os.path.join(
            self.versioned_scripts_prefix,
            f"{pulumi.get_project()}_{pulumi.get_stack()}_{datetime.now().strftime('%Y%m%d%H%M%S')}_distribution.zip"
        ),
        tags=merged_tags,
        opts=pulumi.ResourceOptions(parent=self))
def create_lambda(depends_on_resource):
    """Provision a Lambda (role + layer + function) wired to *depends_on_resource*.

    The resource must expose a `service_uri` output, which is injected into
    the function as the M3DB_URI environment variable. Returns the Function.
    """
    # Execution role assumable by the Lambda service.
    lambda_role = aws.iam.Role(
        'lambdaRole',
        assume_role_policy="""{ "Version": "2012-10-17", "Statement": [ { "Action": "sts:AssumeRole", "Principal": {"Service": "lambda.amazonaws.com"}, "Effect": "Allow", "Sid": "" } ] }""")

    # Shared dependencies packaged as a layer from ./lambda_layer.
    lambda_layer = aws.lambda_.LayerVersion(
        "lambdaLayer",
        layer_name="lambda_layer_name",
        compatible_runtimes=["python3.8"],
        code=pulumi.FileArchive("lambda_layer"))

    env = {"variables": {"M3DB_URI": depends_on_resource.service_uri}}

    func = aws.lambda_.Function(
        resource_name='ServerlessFunction',
        role=lambda_role.arn,
        runtime="python3.8",
        handler="lambda_code.lambda_handler",
        environment=env,
        code=pulumi.AssetArchive({'.': pulumi.FileArchive('lambda_func')}),
        opts=pulumi.ResourceOptions(depends_on=[depends_on_resource]),
        layers=[lambda_layer.arn],
    )

    pulumi.export('lambda_name', func.name)
    return func
# Copyright 2016-2018, Pulumi Corporation. All rights reserved. import iam import pulumi from pulumi_aws import lambda_, sfn hello_world_fn = lambda_.Function( 'helloWorldFunction', role=iam.lambda_role.arn, runtime="python2.7", handler="hello_step.hello", code=pulumi.AssetArchive({'.': pulumi.FileArchive('./step_hello')})) state_defn = state_machine = sfn.StateMachine( 'stateMachine', role_arn=iam.sfn_role.arn, definition=hello_world_fn.arn.apply(lambda arn: """{ "Comment": "A Hello World example of the Amazon States Language using an AWS Lambda Function", "StartAt": "HelloWorld", "States": { "HelloWorld": { "Type": "Task", "Resource": "%s", "End": true } } }""" % arn)) pulumi.export('state_machine_arn', state_machine.id)
import pulumi
from pulumi import asset
from pulumi_gcp import storage, cloudfunctions

# Create a GCP resource (Storage Bucket) to hold the function's source.
bucket = storage.Bucket('my-bucket')

# Zip the local ./api directory and upload it to the bucket so Cloud
# Functions can fetch the source from GCS.
api_bucket_object = storage.BucketObject(
    'api-zip',
    bucket=bucket.name,
    source=pulumi.AssetArchive({'.': asset.FileArchive('./api')}))

# HTTP-triggered Cloud Function served from the uploaded archive.
# NOTE(review): trigger_http is passed the string 'true' rather than a
# boolean — confirm the provider coerces this as intended.
api_function = cloudfunctions.Function(
    'api-func',
    source_archive_bucket=bucket.name,
    source_archive_object=api_bucket_object.name,
    runtime='nodejs10',
    entry_point='handler',
    trigger_http='true',
    available_memory_mb=128)

# Allow unauthenticated invocation ('allUsers') of the function.
api_invoker = cloudfunctions.FunctionIamMember(
    'api-invoker',
    project=api_function.project,
    region=api_function.region,
    cloud_function=api_function.name,
    role='roles/cloudfunctions.invoker',
    member='allUsers')

# Expose the function's HTTPS URL as a stack output.
pulumi.export('endpoint', api_function.https_trigger_url)
"Version": "2012-10-17", "Statement": [{ "Action": ["logs:*", "cloudwatch:*"], "Resource": "*", "Effect": "Allow", }, { "Action": ["s3:*"], "Resource": model_bucket.arn.apply(lambda b: f"{b}/*"), "Effect": "Allow", }], }), ) lambda_func = lambda_.Function("classifier-fn", code=pulumi.AssetArchive({ ".": pulumi.FileArchive("./app"), }), role=role.arn, timeout=300, memory_size=512, runtime="python3.6", handler="app.lambda_handler", layers=["arn:aws:lambda:us-west-2:934676248949:layer:pytorchv1-py36:2"], environment={ "variables": { "MODEL_BUCKET": model_bucket.bucket, "MODEL_KEY": model_object.key, } } )
region = aws.config.region

# Stage name for the API Gateway deployment defined further below.
custom_stage_name = 'example'

##################
## Lambda Function
##################

# Create a Lambda function, using code from the `./hello_lambda` folder.
lambda_func = aws.lambda_.Function(
    "mylambda",
    role=iam.lambda_role.arn,
    runtime="python3.7",
    handler="hello.handler",
    code=pulumi.AssetArchive({'.': pulumi.FileArchive('./hello_lambda')}))

####################################################################
##
## API Gateway REST API (API Gateway V1 / original)
##    /{proxy+} - passes all requests through to the lambda function
##
####################################################################


# Create a single Swagger spec route handler for a Lambda function.
def swagger_route_handler(arn):
    return ({
        "x-amazon-apigateway-any-method": {
            "x-amazon-apigateway-integration": {
                "uri":
def __init__(self,
             name,
             datalake_bucket: s3.Bucket = None,
             datalake_raw_path: str = None,
             fileproc_bucket: s3.Bucket = None,
             managed_policy_arns: List[str] = None,
             package_dir: str = None,
             tags: Dict[str, str] = None,
             opts: pulumi.ResourceOptions = None):
    """Create the glue-notification Lambda with its IAM role and S3 invoke permission.

    Args:
        name: logical base name for all child resources.
        datalake_bucket: datalake raw bucket the Lambda writes into.
        datalake_raw_path: key prefix inside the datalake bucket.
        fileproc_bucket: bucket whose notifications invoke the Lambda.
        managed_policy_arns: optional managed policy ARNs attached to the role.
        package_dir: directory containing the pre-built Lambda package (build.py).
        tags: base tags; 'hca:dataclassification' is always added.
        opts: standard pulumi resource options.
    """
    super().__init__('hca:GlueNotificationLambda', name, None, opts)

    merged_tags = tags.copy() if tags else {}
    merged_tags.update({'hca:dataclassification': 'pii'})

    # Execution role assumable by the Lambda service.
    role = iam.Role(
        f"{name}-role",
        path="/lambda/",
        description="role for glue notification lambda",  # plain str: no placeholders
        assume_role_policy=json.dumps({
            "Version": "2012-10-17",
            "Statement": [{
                "Effect": "Allow",
                "Action": "sts:AssumeRole",
                "Principal": {
                    "Service": "lambda.amazonaws.com"
                }
            }]
        }),
        force_detach_policies=True,
        tags=merged_tags,
        opts=pulumi.ResourceOptions(parent=self))

    # attach managed policies
    if managed_policy_arns:
        for index, policy in enumerate(managed_policy_arns):
            iam.RolePolicyAttachment(
                f"{name}-attach-policy-{index}",
                policy_arn=policy,
                role=role,
                opts=pulumi.ResourceOptions(parent=self))

    # Inline policy over both buckets, resolved once both bucket names are known.
    fileprocpolicy = iam.RolePolicy(
        f"{name}-inline-policy",
        role=role,
        policy=pulumi.Output.all(datalake_bucket.bucket,
                                 fileproc_bucket.bucket).apply(
                                     lambda b: inline_policy(b[0], b[1])),
        opts=pulumi.ResourceOptions(parent=self))

    self.function = lambda_.Function(
        f"{name}-function",
        runtime='python3.6',
        description=
        'copy files from fileproc bucket to datalake raw bucket and trigger glue jobs',
        handler='glue_notification.main',
        environment={
            'variables': {
                # BUG FIX: previously passed the Bucket resource object itself;
                # environment values must be strings, so pass the bucket-name
                # Output (matching the Output.all(...) usage above).
                'S3_DATALAKE_BUCKET': datalake_bucket.bucket,
                'S3_RAW_PATH': datalake_raw_path,
                'PULUMI_STACK': pulumi.get_stack(),
                'PULUMI_PROJECT': pulumi.get_project()
            }
        },
        memory_size=256,
        timeout=60,
        code=pulumi.AssetArchive({
            # use lambda-glue-notification created with build.py
            '.': pulumi.FileArchive(package_dir),
        }),
        role=role.arn,
        tags=merged_tags,
        opts=pulumi.ResourceOptions(parent=self))

    # Allow S3 (the fileproc bucket) to invoke the function.
    lambda_.Permission(
        f"{name}-permission",
        action='lambda:InvokeFunction',
        principal='s3.amazonaws.com',
        function=self.function,
        source_arn=fileproc_bucket.arn,
        opts=pulumi.ResourceOptions(parent=self))
] }, "Resource": "arn:aws:sqs:%s:%s:ConnectOutboundCallData" % ( current_region.name, caller_ident.account_id, ) }] })) # Lambda lambda_exec_connect = aws.lambda_.Function( resource_name="exec_connect_contact_flow", description="Lambda for execute Connect contact flow", code=pulumi.AssetArchive( {".": pulumi.FileArchive("./source/exec_contact_flow")}), role=iam_role.arn, runtime="python3.7", handler="exec_contact_flow.lambda_handler", tags=label("tags"), environment={ "variables": { "SOURCE_PHONE_NUMBER": appconfig["source_phone_number"], "CONTACT_FLOW_ID": appconfig["contact_flow_id"], "INSTANCE_ID": appconfig["instance_id"], } }) lambda_send_sqs = aws.lambda_.Function( resource_name="send_message_to_sqs", description="Lambda for send message to SQS queue",
import pulumi
from pulumi_aws import lambda_, sfn
import iam
import json

# Each function below shares the same role and the same ./lambda archive;
# they differ only in the handler entry point.

# Handler: find_instance.lambda_handler
find_instance = lambda_.Function(
    "ce-find-instance",
    role=iam.lambda_role.arn,
    runtime="python3.7",
    handler="find_instance.lambda_handler",
    code=pulumi.AssetArchive({".": pulumi.FileArchive("./lambda")}),
)

# Handler: get_instance_status.lambda_handler
get_instance_status = lambda_.Function(
    "ce-get-instance-status",
    role=iam.lambda_role.arn,
    runtime="python3.7",
    handler="get_instance_status.lambda_handler",
    code=pulumi.AssetArchive({".": pulumi.FileArchive("./lambda")}),
)

# Handler: create_image.lambda_handler
create_image = lambda_.Function(
    "ce-create-image",
    role=iam.lambda_role.arn,
    runtime="python3.7",
    handler="create_image.lambda_handler",
    code=pulumi.AssetArchive({".": pulumi.FileArchive("./lambda")}),
)

get_image_status = lambda_.Function(
    "ce-get-image-status",
import pulumi
import pulumi_aws as aws
from iam import lambdaRole

config = pulumi.Config()

# Lambda that starts an Athena query; the AWS account id comes from stack
# config (required key "awsAccountId") and is exposed as an env var.
startQueryExecutionFunction = aws.lambda_.Function(
    resource_name="start-query-execution",
    code=pulumi.AssetArchive(
        {".": pulumi.FileArchive("lambda/start_query_execution")}),
    description="Starts the execution of an Amazon Athena query",
    environment=aws.lambda_.FunctionEnvironmentArgs(
        variables={"AWS_ACCOUNT_ID": config.require("awsAccountId")}),
    handler="start_query_execution.handler",
    name="start-query-execution",
    role=lambdaRole.arn,
    runtime="python3.8",
)

# Lambda that fetches information about a previously started Athena query.
# (No environment block: this function takes no configuration.)
getQueryExecutionFunction = aws.lambda_.Function(
    resource_name="get-query-execution",
    code=pulumi.AssetArchive(
        {".": pulumi.FileArchive("lambda/get_query_execution")}),
    description=
    "Gets information about the execution of an Amazon Athena query",
    handler="get_query_execution.handler",
    name="get-query-execution",
    role=lambdaRole.arn,
    runtime="python3.8",
)
"apigw", statement_id="AllowAPIGatewayInvoke", action="lambda:InvokeFunction", function=scan_fn, principal="apigateway.amazonaws.com", source_arn=scan_dep.execution_arn.apply(lambda x: f"{x}/*/*") ) # Lambda function for S3 trigger lambda_rekognition = lambda_.Function( resource_name='ImagesRekognition', role=iam.lambda_role.arn, runtime="python3.7", handler="lambda_rekognition.lambda_handler", code=pulumi.AssetArchive({ '.': pulumi.FileArchive('./lambda_rekognition') }), environment={"variables": {"DYNAMODB_TABLE": db.id}} ) # Give bucket permission to invoke Lambda lambda_event = lambda_.Permission( resource_name="lambda_img_event", action="lambda:InvokeFunction", principal="s3.amazonaws.com", source_arn=web_bucket.arn, function=lambda_rekognition.arn ) # Bucket notification that triggers Lambda on Put operation - For JPG bucket_notification = s3.BucketNotification(
"dynamodb:PutItem", "dynamodb:UpdateItem" ], "Effect": "Allow", "Sid": "dynamoAccess", "Resource": "*" }] }""") # Create a Lambda function, using code from the `./app` folder. lambda_func = aws.lambda_.Function( "mention-processing-lambda", role=iam.lambda_role.arn, runtime="python3.7", handler="mention_processing_lambda.webhook_handler", code=pulumi.AssetArchive({'.': pulumi.FileArchive('.')}), environment={ "variables": { 'SLACK_TOKEN': slack_token, 'SLACK_VERIFICATION_CODE': verification_token, # TODO: is this "apply" necessary? 'SUBSCRIPTIONS_TABLE_NAME': subscriptions_table.name.apply(lambda name: name), } }) ############################################# ## APIGateway RestAPI # Provide webhooks for slack to send events
bucket=site_bucket.id, source=pulumi.FileAsset("file.txt")) test_string_asset = aws.s3.BucketObject("testStringAsset", bucket=site_bucket.id, source=pulumi.StringAsset("<h1>File contents</h1>")) test_remote_asset = aws.s3.BucketObject("testRemoteAsset", bucket=site_bucket.id, source=pulumi.remoteAsset("https://pulumi.test")) test_file_archive = aws.s3.BucketObject("testFileArchive", bucket=site_bucket.id, source=pulumi.FileArchive("file.tar.gz")) test_remote_archive = aws.s3.BucketObject("testRemoteArchive", bucket=site_bucket.id, source=pulumi.RemoteArchive("https://pulumi.test/foo.tar.gz")) test_asset_archive = aws.s3.BucketObject("testAssetArchive", bucket=site_bucket.id, source=pulumi.AssetArchive({ "file.txt": pulumi.FileAsset("file.txt"), "string.txt": pulumi.StringAsset("<h1>File contents</h1>"), "remote.txt": pulumi.remoteAsset("https://pulumi.test"), "file.tar": pulumi.FileArchive("file.tar.gz"), "remote.tar": pulumi.RemoteArchive("https://pulumi.test/foo.tar.gz"), ".nestedDir": pulumi.AssetArchive({ "file.txt": pulumi.FileAsset("file.txt"), "string.txt": pulumi.StringAsset("<h1>File contents</h1>"), "remote.txt": pulumi.remoteAsset("https://pulumi.test"), "file.tar": pulumi.FileArchive("file.tar.gz"), "remote.tar": pulumi.RemoteArchive("https://pulumi.test/foo.tar.gz"), }), }))
def __init__(self,
             name,
             scripts_bucket: s3.Bucket = None,
             # BUG FIX: mutable default argument ([]) replaced with None;
             # the truthiness guard below handles both None and [].
             managed_policy_arns: List[str] = None,
             tags: Dict[str, str] = None,
             opts: pulumi.ResourceOptions = None):
    """Create the script-archive Lambda with its IAM role and S3 invoke permission.

    Args:
        name: logical base name for all child resources.
        scripts_bucket: bucket whose notifications invoke the Lambda and that
            the role's inline policy grants access to.
        managed_policy_arns: optional managed policy ARNs attached to the role.
        tags: base tags; 'hca:dataclassification' is always added.
        opts: standard pulumi resource options.
    """
    super().__init__('hca:ScriptArchiveLambda', name, None, opts)

    merged_tags = tags.copy() if tags else {}
    merged_tags.update({'hca:dataclassification': 'pii'})

    # Execution role assumable by the Lambda service.
    role = iam.Role(
        f"{name}-role",
        path="/lambda/",
        description="role for script archive lambda",  # plain str: no placeholders
        assume_role_policy=json.dumps({
            "Version": "2012-10-17",
            "Statement": [{
                "Effect": "Allow",
                "Action": "sts:AssumeRole",
                "Principal": {
                    "Service": "lambda.amazonaws.com"
                }
            }]
        }),
        force_detach_policies=True,
        tags=merged_tags,
        opts=pulumi.ResourceOptions(parent=self))

    # attach managed policies
    if managed_policy_arns:
        for index, policy in enumerate(managed_policy_arns):
            iam.RolePolicyAttachment(
                f"{name}-attach-policy-{index}",
                policy_arn=policy,
                role=role,
                opts=pulumi.ResourceOptions(parent=self))

    # Inline policy scoped to the scripts bucket, built once the name resolves.
    fileprocpolicy = iam.RolePolicy(
        f"{name}-inline-policy",
        role=role,
        policy=scripts_bucket.bucket.apply(inline_policy),
        opts=pulumi.ResourceOptions(parent=self))

    # Debug: show the resolved path of the single-file Lambda source.
    print(
        f"archive function => {os.path.abspath(os.path.join(os.getcwd(),'../../src/lambdas/scripts_archive.py'))}"
    )

    # NOTE(review): description looks copy-pasted from the glue-notification
    # lambda; left unchanged to avoid an unintended infrastructure diff.
    self.function = lambda_.Function(
        f"{name}-function",
        runtime='python3.6',
        description=
        'copy files from fileproc bucket to datalake raw bucket and trigger glue jobs',
        handler='index.main',
        memory_size=128,
        timeout=30,
        code=pulumi.AssetArchive({
            # NOTE use relative path from pulumi root; the single source file
            # is packaged as index.py to match the 'index.main' handler.
            'index.py':
            pulumi.FileAsset(
                os.path.abspath(
                    os.path.join(os.getcwd(),
                                 '../../src/lambdas/scripts_archive.py'))),
        }),
        role=role.arn,
        tags=merged_tags,
        opts=pulumi.ResourceOptions(parent=self))

    # Allow S3 (the scripts bucket) to invoke the function.
    lambda_.Permission(
        f"{name}-permission",
        action='lambda:InvokeFunction',
        principal='s3.amazonaws.com',
        function=self.function,
        source_arn=scripts_bucket.arn,
        opts=pulumi.ResourceOptions(parent=self))
"Action": [ "logs:CreateLogGroup", "logs:CreateLogStream", "logs:PutLogEvents" ], "Resource": "arn:aws:logs:*:*:*" }] }""") # Create a Lambda function to validate request authorization auth_lambda = aws.lambda_.Function( "auth-lambda", role=lambda_role.arn, runtime=aws.lambda_.Runtime.PYTHON3D8, code=pulumi.AssetArchive({ ".": pulumi.FileArchive("./authorizer"), }), handler="handler.handler", ) # Create a Lambda function to respond to HTTP requests hello_handler = aws.lambda_.Function( "hello-handler", role=lambda_role.arn, runtime=aws.lambda_.Runtime.PYTHON3D8, code=pulumi.AssetArchive({ ".": pulumi.FileArchive("./handler"), }), handler="handler.handler", )
"TO_PHONE_NUMBER": config.get("toPhoneNumber"), "FROM_PHONE_NUMBER": config.get("fromPhoneNumber"), } # We will store the source code to the Cloud Function in a Google Cloud Storage bucket. bucket = storage.Bucket("eta_demo_bucket") # The Cloud Function source code itself needs to be zipped up into an # archive, which we create using the pulumi.AssetArchive primitive. assets = {} for file in os.listdir(PATH_TO_SOURCE_CODE): location = os.path.join(PATH_TO_SOURCE_CODE, file) asset = pulumi.FileAsset(path=location) assets[file] = asset archive = pulumi.AssetArchive(assets=assets) # Create the single Cloud Storage object, which contains all of the function's # source code. ("main.py" and "requirements.txt".) source_archive_object = storage.BucketObject("eta_demo_object", name="main.py-%f" % time.time(), bucket=bucket.name, source=archive) # Create the Cloud Function, deploying the source we just uploaded to Google # Cloud Storage. fxn = cloudfunctions.Function("eta_demo_function", entry_point="get_demo", environment_variables=config_values, region="us-central1", runtime="python37",
}], }), ) # Read the config of whether to provision fixed concurrency for Lambda config = pulumi.Config() provisioned_concurrent_executions = config.get_float('provisionedConcurrency') # Create a Lambda function, using code from the `./app` folder. lambda_func = aws.lambda_.Function( "mylambda", opts=pulumi.ResourceOptions(depends_on=[policy]), runtime="dotnetcore3.1", code=pulumi.AssetArchive({ ".": pulumi.FileArchive(dotnet_application_publish_folder), }), timeout=300, handler=dotnet_application_entry_point, role=role.arn, publish=bool(provisioned_concurrent_executions), # Versioning required for provisioned concurrency environment={ "variables": { "COUNTER_TABLE": counter_table.name, }, }, ) if provisioned_concurrent_executions: concurrency = aws.lambda_.ProvisionedConcurrencyConfig(