def create_function_and_output_end_url(function_name, entry_point,
                                       runtime="python37", region=None):
    """Create a Cloud Function from the pre-built source archive and export its URL.

    Relies on module-level ``bucket``, ``archive``, ``config_values`` and
    ``project_name`` being defined elsewhere in this file.

    Args:
        function_name: Pulumi resource name for the function (also reused as
            the name of the uploaded source object).
        entry_point: Name of the Python handler inside the source archive.
        runtime: Cloud Functions runtime identifier. Defaults to "python37",
            matching the previously hard-coded value.
        region: Deployment region. When None, falls back to the ``region``
            environment variable and then "us-east1" — the original behavior.

    Returns:
        The function's HTTPS trigger URL (a ``pulumi.Output``).
    """
    if region is None:
        region = os.getenv("region", "us-east1")

    # The timestamp in the object name forces a fresh upload (and therefore
    # a redeploy of the function) on every `pulumi up`.
    source_archive_object = storage.BucketObject(
        function_name,
        name="main.py-%f" % time.time(),
        bucket=bucket.name,
        source=archive)

    # Create the Cloud Function, deploying the source we just uploaded to
    # Google Cloud Storage.
    fxn = cloudfunctions.Function(
        function_name,
        entry_point=entry_point,
        environment_variables=config_values,
        region=region,
        runtime=runtime,
        source_archive_bucket=bucket.name,
        source_archive_object=source_archive_object.name,
        trigger_http=True,
        available_memory_mb=256,
        project=project_name)

    # Export the DNS name of the bucket and the cloud function URL.
    pulumi.export("bucket_name", bucket.url)
    pulumi.export("fxn_url", fxn.https_trigger_url)
    return fxn.https_trigger_url
def __init__(self, name: str, args: FuncArgs, opts: ResourceOptions = None):
    """Provision a bucket, upload ./pythonfunction, deploy the function, and grant invoke access."""
    super().__init__('custom:resource:CloudFunction', name, {}, opts)

    # Storage bucket that will hold the zipped function source.
    self.bucket = storage.Bucket(
        f'{name}-bucket',
        labels=args.tags,
        opts=ResourceOptions(parent=self))

    # Archive of the local ./pythonfunction directory, uploaded to the bucket.
    self.bucket_object = storage.BucketObject(
        f'{name}-bucketobject',
        bucket=self.bucket.name,
        source=pulumi.FileArchive("./pythonfunction"),
        metadata=args.tags,
        opts=ResourceOptions(parent=self.bucket))

    # The Cloud Function itself, deployed from the archive uploaded above.
    self.function = cloudfunctions.Function(
        f'{name}-cloudfunction',
        description="Serverless Function in GCP via Pulumi",
        runtime=args.runtime,
        available_memory_mb=args.available_memory_mb,
        source_archive_bucket=self.bucket.name,
        source_archive_object=self.bucket_object.name,
        trigger_http=args.trigger_http,
        entry_point=args.entry_point,
        opts=ResourceOptions(parent=self))

    # IAM member entry allowing the configured principal to invoke the function.
    self.invoker = cloudfunctions.FunctionIamMember(
        f'{name}-functioniammember',
        cloud_function=self.function.name,
        role=args.role,
        member=args.member,
        opts=ResourceOptions(parent=self.function))

    self.register_outputs({})
bucket = storage.Bucket("eta_demo_bucket") # The Cloud Function source code itself needs to be zipped up into an # archive, which we create using the pulumi.AssetArchive primitive. assets = {} for file in os.listdir(PATH_TO_SOURCE_CODE): location = os.path.join(PATH_TO_SOURCE_CODE, file) asset = pulumi.FileAsset(path=location) assets[file] = asset archive = pulumi.AssetArchive(assets=assets) # Create the single Cloud Storage object, which contains all of the function's # source code. ("main.py" and "requirements.txt".) source_archive_object = storage.BucketObject("eta_demo_object", name="main.py-%f" % time.time(), bucket=bucket.name, source=archive) # Create the Cloud Function, deploying the source we just uploaded to Google # Cloud Storage. fxn = cloudfunctions.Function("eta_demo_function", entry_point="get_demo", environment_variables=config_values, region="us-central1", runtime="python37", source_archive_bucket=bucket.name, source_archive_object=source_archive_object.name, trigger_http=True) invoker = cloudfunctions.FunctionIamMember( "invoker",
    # NOTE(review): the opening of this storage.Bucket(...) call lies before
    # this excerpt; the lines below are its trailing arguments.
    ],
    force_destroy=True,
    uniform_bucket_level_access=True,
    website=storage.BucketWebsiteArgs(
        main_page_suffix="index.html",
        not_found_page="404.html",
    ))

# Set public access policy for the bucket
storage.BucketIAMBinding('my-bucket-IAMBinding',
    bucket=bucket,
    role="roles/storage.objectViewer",
    members=["allUsers"])

# Upload files to bucket.
# NOTE(review): the 'site/' replace assumes POSIX path separators; on Windows
# os.path.join yields backslashes and the prefix strip would not match —
# confirm this only runs on POSIX hosts.
for subdir, dirs, files in os.walk('site'):
    for file in files:
        local_path = os.path.join(subdir, file)
        remote_path = local_path.replace('site/', '')
        storage.BucketObject(remote_path,
            name=remote_path,
            bucket=bucket,
            content_type='text/html',
            source=pulumi.FileAsset(local_path))

# Export the DNS name of the bucket
pulumi.export('bucket_name', bucket.url)
pulumi.export(
    'bucket_endpoint',
    pulumi.Output.concat('http://storage.googleapis.com/', bucket.id))
from pulumi_gcp import storage, cloudfunctions
from pulumi import export, asset

# Bucket that holds the zipped Python function source.
bucket = storage.Bucket("bucket")

# Zip the local ./pythonfunc directory and upload it as a single object.
py_bucket_object = storage.BucketObject(
    "python-zip",
    bucket=bucket.name,
    source=asset.AssetArchive({".": asset.FileArchive("./pythonfunc")}))

# HTTP-triggered Cloud Function deployed from the uploaded archive.
py_function = cloudfunctions.Function(
    "python-func",
    source_archive_bucket=bucket.name,
    runtime="python37",
    source_archive_object=py_bucket_object.name,
    entry_point="handler",
    # BUG FIX: trigger_http is declared as bool in pulumi-gcp; the original
    # passed the string "true", which only worked via truthiness and fails
    # strict type checking.
    trigger_http=True,
    available_memory_mb=128,
)

# Allow unauthenticated invocation of the function.
py_invoker = cloudfunctions.FunctionIamMember(
    "py-invoker",
    project=py_function.project,
    region=py_function.region,
    cloud_function=py_function.name,
    role="roles/cloudfunctions.invoker",
    member="allUsers",
)

# Publicly reachable HTTPS endpoint of the function.
export("python_endpoint", py_function.https_trigger_url)
import pulumi
from pulumi import asset
from pulumi_gcp import storage, cloudfunctions

# Create a GCP resource (Storage Bucket) to hold the function source archive.
bucket = storage.Bucket('my-bucket')

# Zip the local ./api directory and upload it as the function source.
api_bucket_object = storage.BucketObject(
    'api-zip',
    bucket=bucket.name,
    source=pulumi.AssetArchive({'.': asset.FileArchive('./api')}))

# HTTP-triggered Node.js Cloud Function deployed from the uploaded archive.
api_function = cloudfunctions.Function(
    'api-func',
    source_archive_bucket=bucket.name,
    source_archive_object=api_bucket_object.name,
    runtime='nodejs10',
    entry_point='handler',
    # BUG FIX: trigger_http is declared as bool in pulumi-gcp; the original
    # passed the string 'true', which only worked via truthiness and fails
    # strict type checking.
    trigger_http=True,
    available_memory_mb=128)

# Allow unauthenticated invocation of the function.
api_invoker = cloudfunctions.FunctionIamMember(
    'api-invoker',
    project=api_function.project,
    region=api_function.region,
    cloud_function=api_function.name,
    role='roles/cloudfunctions.invoker',
    member='allUsers')

# Publicly reachable HTTPS endpoint of the function.
pulumi.export('endpoint', api_function.https_trigger_url)