def spawn_functions():
    """Create the shared source bucket and deploy all configured functions.

    Stores the bucket in the module-level ``bucket`` global so helper code can
    reference it, then prints the external URL of each deployed function.
    """
    global bucket
    # force_destroy=False: a stack teardown refuses to delete a non-empty bucket.
    bucket = storage.Bucket('dataorc-api-cloud-functions-new',
                            force_destroy=False)
    urls = create_functions_from_function_dictionary(function_dictionary)
    print(urls)
def __init__(self, name: str, args: FuncArgs, opts: ResourceOptions = None):
    """Provision a source bucket, upload the zipped source, create the Cloud
    Function, and grant invoke permission — all parented to this component.

    :param name: base name used to derive child resource names.
    :param args: FuncArgs carrying runtime/memory/trigger/entry-point/IAM settings.
    :param opts: standard Pulumi resource options.
    """
    super().__init__('custom:resource:CloudFunction', name, {}, opts)

    # Bucket that stores the zipped source archive for this function.
    self.bucket = storage.Bucket(
        f'{name}-bucket',
        labels=args.tags,
        opts=ResourceOptions(parent=self))

    # ./pythonfunction zipped and uploaded as a single object.
    self.bucket_object = storage.BucketObject(
        f'{name}-bucketobject',
        bucket=self.bucket.name,
        source=pulumi.FileArchive("./pythonfunction"),
        metadata=args.tags,
        opts=ResourceOptions(parent=self.bucket))

    # The Cloud Function itself, sourced from the archive above.
    self.function = cloudfunctions.Function(
        f'{name}-cloudfunction',
        description="Serverless Function in GCP via Pulumi",
        runtime=args.runtime,
        available_memory_mb=args.available_memory_mb,
        source_archive_bucket=self.bucket.name,
        source_archive_object=self.bucket_object.name,
        trigger_http=args.trigger_http,
        entry_point=args.entry_point,
        opts=ResourceOptions(parent=self))

    # IAM membership that lets args.member invoke the function in args.role.
    self.invoker = cloudfunctions.FunctionIamMember(
        f'{name}-functioniammember',
        cloud_function=self.function.name,
        role=args.role,
        member=args.member,
        opts=ResourceOptions(parent=self.function))

    self.register_outputs({})
# Target destination and travel time offset. "DESTINATION": config.get("destination"), "TRAVEL_OFFSET": config.get("travelOffset"), # Google Maps API key. "GOOGLE_MAPS_API_KEY": config.get("googleMapsApiKey"), # Twilio account for sending SMS messages. "TWILLIO_ACCESS_TOKEN": config.get("twillioAccessToken"), "TWILLIO_ACCOUNT_SID": config.get("twillioAccountSid"), "TO_PHONE_NUMBER": config.get("toPhoneNumber"), "FROM_PHONE_NUMBER": config.get("fromPhoneNumber"), } # We will store the source code to the Cloud Function in a Google Cloud Storage bucket. bucket = storage.Bucket("eta_demo_bucket") # The Cloud Function source code itself needs to be zipped up into an # archive, which we create using the pulumi.AssetArchive primitive. assets = {} for file in os.listdir(PATH_TO_SOURCE_CODE): location = os.path.join(PATH_TO_SOURCE_CODE, file) asset = pulumi.FileAsset(path=location) assets[file] = asset archive = pulumi.AssetArchive(assets=assets) # Create the single Cloud Storage object, which contains all of the function's # source code. ("main.py" and "requirements.txt".) source_archive_object = storage.BucketObject("eta_demo_object", name="main.py-%f" % time.time(),
# --- Fragment: public static-website bucket with CORS. ---
# NOTE(review): the trailing BucketIAMBinding call is truncated here and is
# completed outside this view.

import os
import pulumi
from pulumi_gcp import storage

# Create a GCP resource (Storage Bucket)
# CORS allows http://example.com to use all listed HTTP verbs; force_destroy
# lets `pulumi destroy` remove the bucket even when it still holds objects.
bucket = storage.Bucket(
    "static-website",
    cors=[
        storage.BucketCorArgs(
            max_age_seconds=3600,
            methods=[
                "GET",
                "HEAD",
                "PUT",
                "POST",
                "DELETE",
            ],
            origins=["http://example.com"],
            response_headers=["*"],
        )
    ],
    force_destroy=True,
    uniform_bucket_level_access=True,
    # Serve index.html at the root and 404.html for missing objects.
    website=storage.BucketWebsiteArgs(
        main_page_suffix="index.html",
        not_found_page="404.html",
    ))

# Set public access policy for the bucket
storage.BucketIAMBinding(
    'my-bucket-IAMBinding',
    bucket=bucket,
    role="roles/storage.objectViewer",
"""Deploy a public HTTP-triggered Python Cloud Function and export its URL."""
from pulumi_gcp import storage, cloudfunctions
from pulumi import export, asset

# Bucket that stores the zipped function source.
bucket = storage.Bucket("bucket")

# Zip the ./pythonfunc directory into a single archive object.
py_bucket_object = storage.BucketObject(
    "python-zip",
    bucket=bucket.name,
    source=asset.AssetArchive({".": asset.FileArchive("./pythonfunc")}))

# The HTTP-triggered Python 3.7 function served from the archive above.
py_function = cloudfunctions.Function(
    "python-func",
    source_archive_bucket=bucket.name,
    runtime="python37",
    source_archive_object=py_bucket_object.name,
    entry_point="handler",
    # BUG FIX: trigger_http is a bool parameter; the original passed the
    # string "true", relying on implicit coercion.
    trigger_http=True,
    available_memory_mb=128,
)

# Allow unauthenticated invocation by anyone.
py_invoker = cloudfunctions.FunctionIamMember(
    "py-invoker",
    project=py_function.project,
    region=py_function.region,
    cloud_function=py_function.name,
    role="roles/cloudfunctions.invoker",
    member="allUsers",
)

export("python_endpoint", py_function.https_trigger_url)
# Copyright 2016-2018, Pulumi Corporation. All rights reserved.
"""Minimal Pulumi program: one GCS bucket, exporting its auto-generated name."""
import pulumi
from pulumi_gcp import storage

# A single storage bucket; Pulumi appends a random suffix to 'bucket-py'.
bucket = storage.Bucket('bucket-py')

# Surface the physical bucket name as a stack output.
pulumi.export('bucket_name', bucket.name)
# --- Fragment: GCS static-website bucket fronted by Cloudflare DNS. ---
# NOTE(review): DOMAIN_NAME and TARGET_PROJECT are defined outside this view,
# and the trailing cloudflare.Record call is truncated here.

# Serving endpoint GCS uses for bucket-backed websites (CNAME target).
BUCKET_TARGET_URL = "c.storage.googleapis.com"
MAIN_SUFFIX = "index.html"
ERR_SUFFIX = "404.html"
#Cloudflare
ZONE_ID = "[YOUR-ZONE-ID]"
RULE_TARGET = "[YOUR-ROOT-URL] e.g. yourdomain.org/*"
#MUST END WITH "/$1"
FORWARD_TARGET = "[YOUR-SERVING-URL] e.g. www.yourdomain.org/$1"

#step 1. create the website bucket
web_bucket = storage.Bucket(
    "official-web",
    name=DOMAIN_NAME,  #this variable MUST be set as your domain name
    website=storage.BucketWebsiteArgs(main_page_suffix=MAIN_SUFFIX,
                                      not_found_page=ERR_SUFFIX),
    location="us-central1",
    project=TARGET_PROJECT,
    storage_class="REGIONAL")

# Default-ACL: every new object in the bucket becomes publicly readable.
access_ctl = storage.DefaultObjectAccessControl(
    "official-web-read",
    bucket=web_bucket.name,
    role="READER",
    entity="allUsers")

#step 2. add RECORD to DNS
# NOTE(review): the literal value duplicates BUCKET_TARGET_URL above — a
# follow-up could reference the constant instead.
www = cloudflare.Record(
    "www-sub",
    zone_id=ZONE_ID,
    name="www",
    value="c.storage.googleapis.com",
"""Deploy a public HTTP-triggered Node.js Cloud Function and export its URL."""
import pulumi
from pulumi import asset
from pulumi_gcp import storage, cloudfunctions

# Create a GCP resource (Storage Bucket) that stores the zipped source.
bucket = storage.Bucket('my-bucket')

# Zip the ./api directory into a single archive object.
api_bucket_object = storage.BucketObject(
    'api-zip',
    bucket=bucket.name,
    source=pulumi.AssetArchive({'.': asset.FileArchive('./api')}))

# The HTTP-triggered Node.js function served from the archive above.
api_function = cloudfunctions.Function(
    'api-func',
    source_archive_bucket=bucket.name,
    source_archive_object=api_bucket_object.name,
    runtime='nodejs10',
    entry_point='handler',
    # BUG FIX: trigger_http is a bool parameter; the original passed the
    # string 'true', relying on implicit coercion.
    trigger_http=True,
    available_memory_mb=128)

# Allow unauthenticated invocation by anyone.
api_invoker = cloudfunctions.FunctionIamMember(
    'api-invoker',
    project=api_function.project,
    region=api_function.region,
    cloud_function=api_function.name,
    role='roles/cloudfunctions.invoker',
    member='allUsers')

pulumi.export('endpoint', api_function.https_trigger_url)
# --- Fragment: static-website bucket with CDN-enabled load balancing. ---
# NOTE(review): the trailing ManagedSslCertificate call is truncated here and
# is completed outside this view.

import pulumi
from pulumi import ResourceOptions
from pulumi_gcp import compute
from pulumi_gcp import storage

# #SOME KNOWN VAR (You might implement a yml reader to keep this!)
# DOMAIN_NAME = "[USE-YOUR-DOMAIN-NAME]"

#step 1. create the website bucket
webbucket = storage.Bucket(
    "official-web",
    website=storage.BucketWebsiteArgs(main_page_suffix="index.html"),
    location='asia-east1')

# Default-ACL: every new object in the bucket becomes publicly readable.
accessctl = storage.DefaultObjectAccessControl(
    "official-web-read",
    bucket=webbucket.name,
    role="READER",
    entity="allUsers")

#step 2. create an external ip
access_addr = compute.GlobalAddress("addr-4-official-web")

#step 3. setup loadbalancer, ssl and CDN
# BackendBucket serves the bucket through the global LB with Cloud CDN on.
backend_instance = compute.BackendBucket(
    "backend-4-official-web",
    bucket_name=webbucket.name,
    enable_cdn=True)
dedicated_ssl = compute.ManagedSslCertificate(
    "ssl-4-official-web",
# creating stack name tag stackName = pulumi.get_stack() # common tags. need to pass in commonTags = { "project": projectName, "stack": stackName, } # Create a GCP resource (Storage Bucket) # With no getresourcename #bucket = storage.Bucket('shaht-my-bucket', labels=commonTags) # Expected output: gs://shaht-my-bucket-7477081 bucket = storage.Bucket(getResourceName(f"{myname}-bucket"), labels=commonTags) # Expected output: gs://gcp-reference-architecture-py-shaht-my-bucket-b89e42f #bucket = storage.Bucket(getResourceName(), labels=commonTags) # Expected output: gs://gcp-reference-architecture-py-1d70b6d #mynetwork = network.Vpc("shaht-vpc", network.VpcArgs(subnet_cidr_blocks=subnet_cidr_blocks,)) #mynetwork = network.Vpc(getResourceName(), network.VpcArgs(subnet_cidr_blocks=subnet_cidr_blocks,)) # creates vpc mynetwork = network.Vpc( getResourceName(f"{myname}-vpc"), network.VpcArgs(subnet_cidr_blocks=subnet_cidr_blocks, )) # creates postgres sql server in cloud mydatabase = postgres.Database( getResourceName(f"{myname}-database"),
"""A Google Cloud Python Pulumi program""" import pulumi from pulumi_gcp import organizations, projects, pubsub, serviceaccount, storage from pulumi_utils.projects import ephemeral_project nameprefix = "shaht" # Create a GCP resource (Storage Bucket) bucket = storage.Bucket(f'{nameprefix}-bucket') organization = organizations.get_organization(organization="248032716856") #Create an ephemeral project ephemeral_project = ephemeral_project.Project( #"mitch-ephemeral-project", f'{nameprefix}-ephemeral-project', ephemeral_project.ProjectArgs( #project_name="mitch-ephemeral-project", project_name=f'{nameprefix}-ephemeral-project', root_project_name="pulum-297218", organization_name="248032716856"), ) # Export the DNS name of the bucket pulumi.export('bucket_name', bucket.url) pulumi.export('org', organization) pulumi.export('proj', ephemeral_project)
def __init__(self, name: str, args: BucketWithNotificationArgs, opts: ResourceOptions = None):
    """Create a bucket, a Pub/Sub topic, and a storage notification so that
    object events on the bucket are published to the topic.

    Also grants the project's GCS service account permission to publish to
    the topic, and the Pub/Sub service agent a token-creator role.

    :param name: logical name for this component instance.
    :param args: BucketWithNotificationArgs with project and resource names.
    :param opts: standard Pulumi resource options, forwarded to children.
    """
    super().__init__("unopac:modules:BucketWithNotification", name, {}, opts)
    log.info(
        f"Trying to get project default service account for new project with {args.gcp_project}"
    )
    # Bucket whose object events will be forwarded to the topic.
    self.bucket = storage.Bucket(
        args.bucket_resource_name,
        project=args.gcp_project.project_id,
        opts=opts,
    )
    # The project's GCS service account, resolved lazily once the project id
    # is known; gcs_account is therefore a Pulumi Output.
    gcs_account = args.gcp_project.project_id.apply(
        lambda project_id: self._get_storage_project_service_account(
            project_id, opts))
    self.topic = pubsub.Topic(
        f"{args.bucket_resource_name}-{args.topic_resource_name_suffix}",
        project=args.gcp_project.project_id,
        opts=opts,
    )
    # Let the project's GCS service account publish notifications to the topic.
    self.gcs_default_project_service_account_topicbindingtopic_iambinding = (
        pubsub.TopicIAMBinding(
            f"{name}-default-project-service-account-topic-iam-binding",
            topic=self.topic.id,
            role="roles/pubsub.publisher",
            # BUG FIX: gcs_account is an Output, so interpolating
            # gcs_account.email_address into an f-string yields an Output
            # repr placeholder, not the email. Build the member string with
            # Output.concat, matching the IAMMember below.
            members=[
                Output.concat("serviceAccount:", gcs_account.email_address)
            ],
            opts=opts,
        ))
    # Allow the Pub/Sub service agent to mint service-account tokens.
    self.pubsub_accountcreator_policy_binding = projects.IAMMember(
        resource_name=
        "project-service-account-pubsub-serviceAccount-tokenCreator",
        project=args.gcp_project.project_id,
        member=Output.concat(
            "serviceAccount:service-",
            args.gcp_project.number,
            "@gcp-sa-pubsub.iam.gserviceaccount.com",
        ),
        role="roles/iam.serviceAccountTokenCreator",
    )
    # Wire the bucket to the topic for create/metadata-update events.
    self.notification = storage.Notification(
        f"{args.bucket_resource_name}-notification",
        bucket=self.bucket.name,
        payload_format="JSON_API_V1",
        topic=self.topic.id,
        event_types=[
            "OBJECT_FINALIZE",
            "OBJECT_METADATA_UPDATE",
        ],
        custom_attributes={
            "new-attribute": "new-attribute-value",
        },
        opts=opts,
    )
"""A Python Pulumi program""" import pulumi from pulumi_gcp import storage # Create a GCP resource (Storage Bucket) bucket = storage.Bucket('my-bucket-123-123', labels={'environment': "dev"}) # Export the DNS name of the bucket pulumi.export('bucket_name', bucket.url) from pulumi import Config, export, get_project, get_stack, Output, ResourceOptions from pulumi_gcp.config import project, zone from pulumi_gcp.container import Cluster, ClusterMasterAuthArgs, ClusterNodeConfigArgs from pulumi_kubernetes import Provider from pulumi_kubernetes.apps.v1 import Deployment, DeploymentSpecArgs from pulumi_kubernetes.core.v1 import ContainerArgs, PodSpecArgs, PodTemplateSpecArgs, Service, ServicePortArgs, ServiceSpecArgs from pulumi_kubernetes.meta.v1 import LabelSelectorArgs, ObjectMetaArgs from pulumi_kubernetes.rbac.v1 import ClusterRole, ClusterRoleBinding from pulumi_kubernetes.core.v1 import ServiceAccount from pulumi_random import RandomPassword import pulumi_gcp # Read in some configurable settings for our cluster: config = Config('.env') # nodeCount is the number of cluster nodes to provision. Defaults to 3 if unspecified. NODE_COUNT = config.get_int('node_count') or 3 # nodeMachineType is the machine type to use for cluster nodes. Defaults to n1-standard-1 if unspecified. # See https://cloud.google.com/compute/docs/machine-types for more details on available machine types.
"""A Google Cloud Python Pulumi program""" import pulumi from pulumi_gcp import storage # Create a GCP resource (Storage Bucket) bucket = storage.Bucket('my-bucket', location="US") # Export the DNS name of the bucket pulumi.export('bucket_name', bucket.url)