def create_cloudbuild(client, req_body):
    """Create a cloudbuild from req_body and wait for the operation to complete.

    Args:
        client: versioned cloudbuild client used for the API calls.
        req_body: dict payload for the cloudbuild ``create`` call.

    Raises:
        HttpError: propagated unchanged from the create call (the original
            wrapped it in try/except only to re-raise it).
    """
    defaultProject = get_default_project()
    defaultLocation = "global"
    resp = client.execute(
        "create",
        parent_key="projectId",
        parent_schema=defaultProject,
        params={
            "body": req_body,
            "parent": f"projects/{defaultProject}/locations/{defaultLocation}",
        },
    )
    log.info("creating cloudbuild")

    cloudbuild_config = GConfig().cloudbuild or {}
    timeout_seconds = cloudbuild_config.get("timeout", "600s")
    # A valid timeout is a digit string ending in "s" (e.g. "600s"). The
    # previous check (`"s" not in timeout_seconds`) accepted malformed values
    # such as "s600" and then crashed on int(""); be strict instead.
    if not timeout_seconds.endswith("s") or not timeout_seconds[:-1].isdigit():
        log.info(
            "Not a valid timeout. Needs to be a duration that ends in 's'. Defaulting to 600s"
        )
        timeout = 600
    else:
        timeout = int(timeout_seconds[:-1])
    client.wait_for_operation(resp["name"], calls="operations", timeout=timeout)
def __init__(
    self,
    function_name="goblet",
    backend="cloudfunction",
    local="local",
    cors=None,
    client_versions=None,
):
    """Initialize the app: resolve function name, client versions, and local mode.

    Args:
        function_name: default app name; overridden by GConfig().function_name.
        backend: deployment backend (e.g. "cloudfunction", "cloudrun").
        local: attribute name exported on the main module for local runs.
        cors: CORS settings forwarded to the parent class.
        client_versions: per-service API version overrides.
    """
    self.function_name = GConfig().function_name or function_name
    # Copy before updating: the original called .update() directly on the
    # shared module-level DEFAULT_CLIENT_VERSIONS dict, so one instance's
    # overrides leaked into every later instance.
    self.client_versions = DEFAULT_CLIENT_VERSIONS.copy()
    self.client_versions.update(client_versions or {})
    super(Goblet, self).__init__(
        function_name=self.function_name,
        backend=backend,
        cors=cors,
        client_versions=self.client_versions,
    )
    self.log = logging.getLogger(__name__)
    self.headers = {}
    self.g = G()

    # Setup Local
    module_name = GConfig().main_file or "main"
    module_name = module_name.replace(".py", "")
    if local and sys.modules.get(module_name):
        self.log = logging.getLogger("werkzeug")

        def local_func(request):
            return self(request)

        setattr(sys.modules[module_name], local, local_func)
def local(local_arg, stage):
    """
    Requires the local argument to be set in the Goblet class. The default is local. For example in this case you would use local_function

    Goblet("test_function",local="local_function")
    """
    if stage:
        os.environ["STAGE"] = stage
    # Run the app through functions-framework, pointed at the configured
    # main file and the user's chosen local entrypoint.
    source = GConfig().main_file or "main.py"
    command = [
        "functions-framework",
        f"--target={local_arg}",
        "--debug",
        f"--source={source}",
    ]
    try:
        subprocess.check_output(command)
    except subprocess.CalledProcessError:
        click.echo(
            "Incorrect argument. Make sure you set the local param in your Goblet class and that it matches the arg used in goblet local"
        )
def _deploy(self, sourceUrl=None, entrypoint=None, config={}):
    """Deploy one cloudfunction per registered storage trigger.

    Args:
        sourceUrl: upload URL of the zipped source; required.
        entrypoint: cloudfunction entrypoint name.
        config: unused here (kept for interface compatibility); the global
            GConfig is read instead.
    """
    if not self.resources or not sourceUrl:
        return
    log.info("deploying storage functions......")
    # NOTE: the `config` parameter is intentionally shadowed by the global
    # config here, matching the sibling storage deployer.
    config = GConfig()
    user_configs = config.cloudfunction or {}
    for bucket in self.resources:
        req_body = {
            # Function names may not contain dots (e.g. "finalize" events are fine,
            # but bucket names with dots are sanitized).
            "name": f"{self.cloudfunction}-storage-{bucket['name']}-{bucket['event_type']}".replace(
                ".", "-"
            ),
            "description": config.description or "created by goblet",
            "entryPoint": entrypoint,
            "sourceUploadUrl": sourceUrl,
            "eventTrigger": {
                "eventType": f"google.storage.object.{bucket['event_type']}",
                "resource": f"projects/{get_default_project()}/buckets/{bucket['bucket']}",
            },
            "runtime": config.runtime or "python37",
            **user_configs,
        }
        create_cloudfunction(self.versioned_clients.cloudfunctions, req_body)
def deploy(project, location, stage, skip_function, only_function, config, force):
    """
    You can set the project and location using environment variable GOOGLE_PROJECT and GOOGLE_LOCATION

    Note: Allowed GOOGLE_LOCATION values for API GATEWAY are: asia-east1, europe-west1, us-eastl1 and us-central1.

    Note: Make sure api-gateway, cloudfunctions, and storage are enabled in your project
    """
    try:
        _project = project or get_default_project()
        if not _project:
            click.echo(
                "Project not found. Set --project flag or add to gcloud by using gcloud config set project PROJECT"
            )
            # Bail out: continuing would assign None to GOOGLE_PROJECT,
            # which raises TypeError instead of this friendly message.
            return
        os.environ["GOOGLE_PROJECT"] = _project
        os.environ["GOOGLE_LOCATION"] = location
        if stage:
            os.environ["STAGE"] = stage
        if config:
            config = json.loads(config)
        app = get_goblet_app(GConfig().main_file or "main.py")
        # Forward the user's --force flag; the original hard-coded
        # force=False, silently ignoring --force.
        app.deploy(skip_function, only_function, config=config, force=force)
    except FileNotFoundError as not_found:
        click.echo(
            f"Missing {not_found.filename}. Make sure you are in the correct directory and this file exists"
        )
def create_cloudfunction(client, req_body, config=None):
    """Create a cloudfunction from req_body, patching it if it already exists."""
    function_name = req_body["name"].split("/")[-1]
    try:
        resp = client.execute("create", parent_key="location", params={"body": req_body})
        log.info(f"creating cloudfunction {function_name}")
    except HttpError as e:
        if e.resp.status != 409:
            raise
        # 409 Conflict: the function already exists, so update it in place.
        log.info(f"updating cloudfunction {function_name}")
        resp = client.execute(
            "patch",
            parent_key="name",
            parent_schema=req_body["name"],
            params={"body": req_body},
        )
    client.wait_for_operation(resp["name"], calls="operations")

    # Set IAM Bindings
    config = GConfig(config=config)
    if config.bindings:
        log.info(f"adding IAM bindings for cloudfunction {function_name}")
        policy_bindings = {"policy": {"bindings": config.bindings}}
        resp = client.execute(
            "setIamPolicy",
            parent_key="resource",
            parent_schema=req_body["name"],
            params={"body": policy_bindings},
        )
def openapi(cloudfunction, stage, version):
    """
    You can find the generated openapi spec in /.goblet folder. The cloudfunction argument sets the correct x-google-backend address in the openapi spec.
    """
    if stage:
        os.environ["STAGE"] = stage
    try:
        app = get_goblet_app(GConfig().main_file or "main.py")
        app.handlers["route"].generate_openapi_spec(cloudfunction)
    except FileNotFoundError as not_found:
        click.echo(
            f"Missing {not_found.filename}. Make sure you are in the correct directory and this file exists"
        )
        # Without an app there is no spec to convert; returning here avoids
        # the NameError on `app` the original hit when version was requested.
        return
    if version:
        # Convert the generated swagger 2.0 spec to openapi 3 via swagger.io.
        with open(f"{get_g_dir()}/{app.function_name}_openapi_spec.yml", "r") as f:
            data = f.read()
        headers = {
            "accept": "application/yaml",
            "Content-Type": "application/yaml",
        }
        response = requests.post(
            "https://converter.swagger.io/api/convert", headers=headers, data=data
        )
        with open(f"{get_g_dir()}/{app.function_name}_openapi_spec_3.yml", "w") as f:
            f.write(response.text)
def _deploy(self, sourceUrl=None, entrypoint=None, config=None):
    """Deploy cloud scheduler jobs against the backend's http endpoint.

    Args:
        sourceUrl: unused by this handler (interface compatibility).
        entrypoint: unused by this handler (interface compatibility).
        config: optional config dict merged into GConfig. Default changed
            from the mutable ``{}`` to None (shared-mutable-default pitfall);
            GConfig accepts both, as create_cloudfunction shows.

    NOTE(review): only the "cloudfunction" and "cloudrun" backends set
    `target`/`service_account`; any other backend would hit a NameError
    below — confirm these are the only backends used with scheduler.
    """
    if not self.resources:
        return
    if self.backend == "cloudfunction":
        resp = self.versioned_clients.cloudfunctions.execute(
            "get", parent_key="name", parent_schema=self.cloudfunction
        )
        if not resp:
            raise ValueError(f"Function {self.cloudfunction} not found")
        target = resp["httpsTrigger"]["url"]
        service_account = resp["serviceAccountEmail"]
    if self.backend == "cloudrun":
        target = get_cloudrun_url(self.versioned_clients.run, self.name)
        config = GConfig(config=config)
        if config.cloudrun and config.cloudrun.get("service-account"):
            service_account = config.cloudrun.get("service-account")
        elif config.scheduler and config.scheduler.get("serviceAccount"):
            service_account = config.scheduler.get("serviceAccount")
        else:
            raise ValueError(
                "Service account not found in cloudrun. You can set `serviceAccount` field in config.json under `scheduler`"
            )
    log.info("deploying scheduled jobs......")
    for job_name, job in self.resources.items():
        job["job_json"]["httpTarget"]["uri"] = target
        job["job_json"]["httpTarget"]["oidcToken"][
            "serviceAccountEmail"
        ] = service_account
        self.deploy_job(job_name, job["job_json"])
def test_get_item(self):
    """GConfig exposes config sections as attributes; missing keys are falsy."""
    config = GConfig(test_config)
    expected_cloudfunction = {"environmentVariables": {"key": "value"}}
    assert config.cloudfunction == expected_cloudfunction
    assert not config.not_exists
def _deploy_trigger(self, topic_name, source=None, entrypoint=None):
    """Deploy a pubsub-triggered cloudfunction (v1 or v2) for the given topic.

    Args:
        topic_name: pubsub topic to subscribe the function to.
        source: upload info (v1 uses ``uploadUrl``, v2 ``storageSource``).
        entrypoint: cloudfunction entrypoint name.

    Raises:
        ValueError: for an unsupported cloudfunctions client version. The
            original ended with a bare ``raise`` outside any except block,
            which itself raises RuntimeError("No active exception...").
    """
    function_name = f"{self.cloudfunction}-topic-{topic_name}"
    log.info(f"deploying topic function {function_name}......")
    config = GConfig()
    user_configs = config.cloudfunction or {}
    if self.versioned_clients.cloudfunctions.version == "v1":
        req_body = {
            "name": function_name,
            "description": config.description or "created by goblet",
            "entryPoint": entrypoint,
            "sourceUploadUrl": source["uploadUrl"],
            "eventTrigger": {
                "eventType": "providers/cloud.pubsub/eventTypes/topic.publish",
                "resource": f"projects/{get_default_project()}/topics/{topic_name}",
            },
            "runtime": get_function_runtime(
                self.versioned_clients.cloudfunctions, config
            ),
            **user_configs,
        }
        create_cloudfunctionv1(
            self.versioned_clients.cloudfunctions, {"body": req_body}
        )
    elif self.versioned_clients.cloudfunctions.version.startswith("v2"):
        params = {
            "body": {
                "name": function_name,
                "environment": "GEN_2",
                "description": config.description or "created by goblet",
                "buildConfig": {
                    "runtime": get_function_runtime(
                        self.versioned_clients.cloudfunctions, config
                    ),
                    "entryPoint": entrypoint,
                    "source": {"storageSource": source["storageSource"]},
                },
                "eventTrigger": {
                    "eventType": "google.cloud.pubsub.topic.v1.messagePublished",
                    "pubsubTopic": f"projects/{get_default_project()}/topics/{topic_name}",
                },
                **user_configs,
            },
            "functionId": function_name.split("/")[-1],
        }
        create_cloudfunctionv2(self.versioned_clients.cloudfunctions, params)
    else:
        raise ValueError(
            f"Unsupported cloudfunctions version {self.versioned_clients.cloudfunctions.version}"
        )
def test_deadline(self):
    """get_timeout honors backend-specific timeouts, api_gateway deadline wins, default is 15."""
    cloudrun_gw = ApiGateway("test", backend="cloudrun")
    assert cloudrun_gw.get_timeout(GConfig({"cloudrun_revision": {"timeout": 300}})) == 300
    assert cloudrun_gw.get_timeout(GConfig()) == 15

    cloudfunction_gw = ApiGateway("test", backend="cloudfunction")
    assert cloudfunction_gw.get_timeout(GConfig({"cloudfunction": {"timeout": 300}})) == 300
    assert cloudfunction_gw.get_timeout(GConfig()) == 15

    v2_gw = ApiGateway("test", backend="cloudfunctionv2")
    v2_config = GConfig(
        {"cloudfunction": {"serviceConfig": {"timeoutSeconds": 300}}}
    )
    assert v2_gw.get_timeout(v2_config) == 300
    assert v2_gw.get_timeout(GConfig()) == 15

    # An explicit api_gateway deadline overrides the backend timeout.
    deadline_gw = ApiGateway("test", backend="cloudfunction")
    deadline_config = GConfig(
        {
            "cloudfunction": {"timeout": 300},
            "api_gateway": {"deadline": 200},
        }
    )
    assert deadline_gw.get_timeout(deadline_config) == 200
def create_cloudrun(self, client, config=None):
    """Deploy the service to cloudrun via the gcloud CLI.

    Args:
        client: run client used for the setIamPolicy call.
        config: optional config dict merged into GConfig. Default changed
            from the mutable ``{}`` to None (shared-mutable-default
            pitfall); GConfig accepts both.
    """
    config = GConfig(config=config)
    cloudrun_configs = config.cloudrun or {}
    # NOTE(review): this defaults to --no-allow-unauthenticated whenever the
    # user did not set it explicitly OR set allow-unauthenticated — confirm
    # the second condition is intended.
    if not cloudrun_configs.get("no-allow-unauthenticated") or cloudrun_configs.get(
        "allow-unauthenticated"
    ):
        cloudrun_configs["no-allow-unauthenticated"] = None
    cloudrun_options = []
    for k, v in cloudrun_configs.items():
        # A None value emits a bare flag (e.g. --no-allow-unauthenticated).
        cloudrun_options.append(f"--{k}")
        if v:
            cloudrun_options.append(v)
    base_command = [
        "gcloud",
        "run",
        "deploy",
        self.name,
        "--project",
        get_default_project(),
        "--region",
        get_default_location(),
        "--source",
        get_dir(),
        "--command",
        "functions-framework,--target=goblet_entrypoint",
        "--port",
        "8080",
    ]
    base_command.extend(cloudrun_options)
    try:
        if not os.path.exists(get_dir() + "/Dockerfile") and not os.path.exists(
            get_dir() + "/Procfile"
        ):
            log.info(
                "No Dockerfile or Procfile found for cloudrun backend. Writing default Dockerfile"
            )
            write_dockerfile()
        subprocess.check_output(base_command, env=os.environ)
    except subprocess.CalledProcessError:
        log.error(
            "Error during cloudrun deployment while running the following command"
        )
        log.error((" ").join(base_command))
        sys.exit(1)

    # Set IAM Bindings
    if config.bindings:
        log.info(f"adding IAM bindings for cloudrun {self.name}")
        policy_bindings = {"policy": {"bindings": config.bindings}}
        client.execute(
            "setIamPolicy",
            parent_key="resource",
            parent_schema=self.run_name,
            params={"body": policy_bindings},
        )
def _deploy(self, source=None, entrypoint=None, config=None):
    """Deploy one cloudfunction (v1 or v2) per registered storage trigger.

    Args:
        source: upload info (v1 uses ``uploadUrl``, v2 ``storageSource``); required.
        entrypoint: cloudfunction entrypoint name.
        config: unused here (kept for interface compatibility); the global
            GConfig is read instead. Default changed from the mutable ``{}``
            to None (shared-mutable-default pitfall).

    Raises:
        ValueError: for an unsupported cloudfunctions client version. The
            original ended with a bare ``raise`` outside any except block,
            which itself raises RuntimeError("No active exception...").
    """
    client = self.versioned_clients.cloudfunctions
    if not self.resources or not source:
        return
    log.info("deploying storage functions......")
    config = GConfig()
    user_configs = config.cloudfunction or {}
    for bucket in self.resources:
        # Function names may not contain dots; sanitize the generated name.
        function_name = f"{self.cloudfunction}-storage-{bucket['name']}-{bucket['event_type']}".replace(
            ".", "-"
        )
        if client.version == "v1":
            req_body = {
                "name": function_name,
                "description": config.description or "created by goblet",
                "entryPoint": entrypoint,
                "sourceUploadUrl": source["uploadUrl"],
                "eventTrigger": {
                    "eventType": f"google.storage.object.{bucket['event_type']}",
                    "resource": f"projects/{get_default_project()}/buckets/{bucket['bucket']}",
                },
                "runtime": get_function_runtime(client, config),
                **user_configs,
            }
            create_cloudfunctionv1(client, {"body": req_body})
        elif client.version.startswith("v2"):
            params = {
                "body": {
                    "name": function_name,
                    "environment": "GEN_2",
                    "description": config.description or "created by goblet",
                    "buildConfig": {
                        "runtime": get_function_runtime(client, config),
                        "entryPoint": entrypoint,
                        "source": {"storageSource": source["storageSource"]},
                    },
                    "eventTrigger": {
                        "eventType": f"google.cloud.storage.object.v1.{bucket['event_type']}",
                        "eventFilters": [
                            {
                                "attribute": "bucket",
                                "value": bucket["bucket"],
                            }
                        ],
                    },
                    **user_configs,
                },
                "functionId": function_name.split("/")[-1],
            }
            create_cloudfunctionv2(client, params)
        else:
            raise ValueError(
                f"Unsupported cloudfunctions version {client.version}"
            )
def generate_openapi_spec(self, cloudfunction):
    """Write the api gateway openapi spec for this handler's routes to .goblet."""
    config = GConfig()
    spec = OpenApiSpec(
        self.name,
        cloudfunction,
        security_definitions=config.securityDefinitions,
        security=config.security,
    )
    spec.add_apigateway_routes(self.resources)
    spec_path = f"{get_g_dir()}/{self.name}_openapi_spec.yml"
    with open(spec_path, "w") as spec_file:
        spec.write(spec_file)
def zip(self):
    """Zips requirements.txt, python files and any additional files based on config.customFiles"""
    config = GConfig()
    self.zip_file("requirements.txt")
    if config.main_file:
        # Ship the configured entrypoint file as main.py inside the archive.
        self.zip_file(config.main_file, "main.py")
    # Copy before appending: the original appended "*.py" to the list object
    # stored in the config, mutating shared state on every call.
    include = list(config.customFiles or [])
    include.append("*.py")
    with warnings.catch_warnings():
        # zip_directory can emit duplicate-name warnings; they are benign here.
        warnings.simplefilter("ignore")
        self.zip_directory(get_dir() + "/*", include=include)
def package(stage):
    """generates the goblet zipped package in .goblet folder"""
    if stage:
        os.environ["STAGE"] = stage
    try:
        goblet_app = get_goblet_app(GConfig().main_file or "main.py")
        goblet_app.package()
    except FileNotFoundError as not_found:
        click.echo(
            f"Missing {not_found.filename}. Make sure you are in the correct directory and this file exists"
        )
def generate_openapi_spec(self, cloudfunction):
    """Write the api gateway openapi spec (with backend deadline) to .goblet."""
    config = GConfig()
    # Deadline comes from the backend-specific timeout configuration.
    deadline = self.get_timeout(config)
    spec = OpenApiSpec(
        self.name,
        cloudfunction,
        security_definitions=config.securityDefinitions,
        security=config.security,
        marshmallow_attribute_function=self.marshmallow_attribute_function,
        deadline=deadline,
    )
    spec.add_apigateway_routes(self.resources)
    spec_path = f"{get_g_dir()}/{self.name}_openapi_spec.yml"
    with open(spec_path, "w") as spec_file:
        spec.write(spec_file)
def create_function(self, client, url, entrypoint, config=None):
    """Creates http cloudfunction.

    Args:
        client: cloudfunctions client.
        url: source upload URL for the zipped code.
        entrypoint: cloudfunction entrypoint name.
        config: optional config dict merged into GConfig. Default changed
            from the mutable ``{}`` to None, matching create_cloudfunction;
            GConfig accepts both.
    """
    config = GConfig(config=config)
    user_configs = config.cloudfunction or {}
    req_body = {
        "name": self.func_name,
        "description": config.description or "created by goblet",
        "entryPoint": entrypoint,
        "sourceUploadUrl": url,
        "httpsTrigger": {},
        "runtime": "python37",
        **user_configs,
    }
    create_cloudfunction(client, req_body, config=config.config)
def __init__(self, config=None, versioned_clients=None, name="goblet"):
    """Hold cloudrun revision/service/container config for a deployment.

    Args:
        config: optional config dict merged into GConfig. Default changed
            from the mutable ``{}`` to None (shared-mutable-default
            pitfall); GConfig accepts both.
        versioned_clients: clients used to issue the revision API calls.
        name: service name.
    """
    self.versioned_clients = versioned_clients
    config = GConfig(config=config)
    self.cloudrun_configs = config.cloudrun or {}
    self.cloudrun_revision = config.cloudrun_revision or {}
    self.cloudrun_container = config.cloudrun_container or {}
    # Default command runs functions-framework against goblet's entrypoint.
    self.cloudrun_container["command"] = self.cloudrun_container.get("command") or [
        "functions-framework",
        "--target=goblet_entrypoint",
    ]
    self.req_body = {}
    self.latestArtifact = ""
    self.name = name
def _deploy(self, sourceUrl=None, entrypoint=None, config=None):
    """Create eventarc triggers that route events to the cloudrun service.

    Args:
        sourceUrl: unused by this handler (interface compatibility).
        entrypoint: unused by this handler (interface compatibility).
        config: optional config dict merged into GConfig. Default changed
            from the mutable ``{}`` to None (shared-mutable-default
            pitfall); GConfig accepts both.

    Raises:
        ValueError: when no service account is configured.
    """
    if not self.resources:
        return
    gconfig = GConfig(config=config)
    # Service account resolution order: eventarc config, then cloudrun config.
    if gconfig.eventarc and gconfig.eventarc.get("serviceAccount"):
        service_account = gconfig.eventarc.get("serviceAccount")
    elif gconfig.cloudrun and gconfig.cloudrun.get("service-account"):
        service_account = gconfig.cloudrun.get("service-account")
    else:
        raise ValueError(
            "Service account not found for cloudrun or eventarc. You can set `serviceAccount` field in config.json under `eventarc`"
        )
    log.info("deploying eventarc triggers......")
    for trigger in self.resources:
        topic = {}
        if trigger.get("topic"):
            # Pubsub-backed triggers carry an explicit transport topic.
            topic = {
                "transport": {
                    "pubsub": {
                        "topic": f"projects/{get_default_project()}/topics/{trigger.get('topic')}"
                    }
                }
            }
        req_body = {
            "name": f"projects/{get_default_project()}/locations/{trigger['region']}/triggers/{trigger['trigger_name']}",
            "eventFilters": trigger["event_filters"],
            "serviceAccount": service_account,
            "destination": {
                "cloudRun": {
                    "service": self.name,
                    "region": get_default_location(),
                    "path": f"/x-goblet-eventarc-triggers/{trigger['trigger_name']}",
                }
            },
            **topic,
        }
        create_eventarc_trigger(
            self.versioned_clients.eventarc,
            trigger["trigger_name"],
            trigger["region"],
            req_body,
        )
def openapi(cloudfunction, stage):
    """
    You can find the generated openapi spec in /.goblet folder. The cloudfunction argument sets the correct x-google-backend address in the openapi spec.
    """
    if stage:
        os.environ["STAGE"] = stage
    try:
        goblet_app = get_goblet_app(GConfig().main_file or "main.py")
        goblet_app.handlers["route"].generate_openapi_spec(cloudfunction)
    except FileNotFoundError as not_found:
        click.echo(
            f"Missing {not_found.filename}. Make sure you are in the correct directory and this file exists"
        )
def create(stage):
    """create a new stage in config.json"""
    config = GConfig()
    if config.stages and stage in config.stages:
        return click.echo(f"stage {stage} already exists")
    app = get_goblet_app(GConfig().main_file or "main.py")
    function_name = f"{app.function_name}-{stage}"
    stage_entry = {"function_name": function_name}
    # Create the stages mapping on first use, otherwise add to it.
    if config.stages:
        config.stages[stage] = stage_entry
    else:
        config.stages = {stage: stage_entry}
    config.write()
    click.echo(
        f"stage {stage} created in config.json with function name {function_name}"
    )
def create_build(self, client, source=None, name="goblet", config=None):
    """Submit a cloudbuild that builds and pushes the service container image.

    Args:
        client: cloudbuild client.
        source: dict with a storageSource pointing at the uploaded source.
        name: service name used for the default artifact registry path.
        config: optional config dict; when truthy it replaces self.config.
            Default changed from the mutable ``{}`` to None — both are
            falsy, so behavior is unchanged.
    """
    if config:
        self.config = GConfig(config=config)
    build_configs = self.config.cloudbuild or {}
    registry = (
        build_configs.get("artifact_registry")
        or f"{get_default_location()}-docker.pkg.dev/{get_default_project()}/cloud-run-source-deploy/{name}"
    )
    # artifact_registry is goblet-only config; strip it before the API call.
    build_configs.pop("artifact_registry", None)

    # A custom service account requires an explicit logs bucket; without one,
    # fall back to cloud logging only so the build does not fail.
    if build_configs.get("serviceAccount") and not build_configs.get("logsBucket"):
        build_options = build_configs.get("options", {})
        if not build_options.get("logging"):
            build_options["logging"] = "CLOUD_LOGGING_ONLY"
        build_configs["options"] = build_options
        self.log.info(
            "service account given but no logging bucket so defaulting to cloud logging only"
        )
    req_body = {
        "source": {"storageSource": source["storageSource"]},
        "steps": [
            {
                "name": "gcr.io/cloud-builders/docker",
                "args": ["build", "-t", registry, "."],
            }
        ],
        "images": [registry],
        **build_configs,
    }
    create_cloudbuild(client, req_body)

    # Set IAM Bindings
    if self.config.bindings:
        self.log.info(f"adding IAM bindings for cloudrun {self.name}")
        policy_bindings = {"policy": {"bindings": self.config.bindings}}
        client.run.execute(
            "setIamPolicy",
            parent_key="resource",
            parent_schema=self.run_name,
            params={"body": policy_bindings},
        )
def deploy(self, force=False, config=None):
    """Upload the zipped source to GCS and deploy the http cloudfunction.

    Returns the uploaded source object, or None when nothing changed.
    """
    # An explicit config overrides the backend's stored config.
    config = GConfig(config=config) if config else self.config
    put_headers = {"content-type": "application/zip"}
    source, changes = self._gcs_upload(self.client, put_headers, force=force)
    if not changes:
        return None
    if self.app.is_http():
        client, params = self._get_upload_params(source, config=config)
        create_cloudfunctionv2(client, params, config=config)
    return source
def deploy(self, force=False, config=None):
    """Deploy the cloudrun backend: upload source, run cloudbuild, release a revision.

    Args:
        force: when True, re-upload the source even if it is unchanged.
        config: optional config dict; when truthy it replaces self.config.

    Returns:
        The uploaded source object, or None when the source is unchanged.
    """
    versioned_clients = VersionedClients(self.app.client_versions)
    if config:
        self.config = GConfig(config=config)
    put_headers = {
        "content-type": "application/zip",
    }
    # Cloudrun builds from a container; write a default Dockerfile when the
    # user supplied neither a Dockerfile nor a Procfile.
    if not os.path.exists(get_dir(
    ) + "/Dockerfile") and not os.path.exists(get_dir() + "/Procfile"):
        self.log.info(
            "No Dockerfile or Procfile found for cloudrun backend. Writing default Dockerfile"
        )
        write_dockerfile()
    self._zip_file("Dockerfile")
    source, changes = self._gcs_upload(
        self.client,
        put_headers,
        upload_client=versioned_clients.run_uploader,
        force=force,
    )
    if not changes:
        return None
    # NOTE(review): the raw `config` argument (possibly None) is forwarded
    # below rather than self.config — confirm this is intentional.
    self.create_build(versioned_clients.cloudbuild, source, self.name, config)
    serviceRevision = RevisionSpec(config, versioned_clients, self.name)
    serviceRevision.deployRevision()

    # Set IAM Bindings
    if self.config.bindings:
        self.log.info(f"adding IAM bindings for cloudrun {self.name}")
        policy_bindings = {"policy": {"bindings": self.config.bindings}}
        self.client.execute(
            "setIamPolicy",
            parent_key="resource",
            parent_schema=self.run_name,
            params={"body": policy_bindings},
        )
    return source
def _deploy_subscription(self, topic_name, topic, config=None):
    """Create a pubsub push subscription targeting the backend's url.

    Args:
        topic_name: topic to subscribe to.
        topic: handler resource dict (project, filter, config).
        config: optional config dict merged into GConfig. Default changed
            from the mutable ``{}`` to None; GConfig accepts both.

    Raises:
        ValueError: when no service account can be resolved.
    """
    sub_name = f"{self.name}-{topic_name}"
    log.info(f"deploying pubsub subscription {sub_name}......")
    if self.backend == "cloudrun":
        push_url = get_cloudrun_url(self.versioned_clients.run, self.name)
    else:
        push_url = get_cloudfunction_url(
            self.versioned_clients.cloudfunctions, self.name
        )

    gconfig = GConfig(config=config)
    # Service account resolution: pubsub config first, then backend config.
    if gconfig.pubsub and gconfig.pubsub.get("serviceAccountEmail"):
        service_account = gconfig.pubsub.get("serviceAccountEmail")
    elif (
        self.backend == "cloudrun"
        and gconfig.cloudrun
        and gconfig.cloudrun.get("service-account")
    ):
        service_account = gconfig.cloudrun.get("service-account")
    elif (
        self.backend.startswith("cloudfunction")
        and gconfig.cloudfunction
        and gconfig.cloudfunction.get("serviceAccountEmail")
    ):
        # Read from the cloudfunction config. The original guarded on
        # gconfig.cloudfunction but then read gconfig.pubsub — an
        # AttributeError when pubsub is unset, and dead code otherwise
        # (the first branch already covered pubsub.serviceAccountEmail).
        service_account = gconfig.cloudfunction.get("serviceAccountEmail")
    else:
        raise ValueError(
            "Service account not found in cloudrun or cloudfunction. You can set `serviceAccountEmail` field in config.json under `pubsub`"
        )
    req_body = {
        "name": sub_name,
        "topic": f"projects/{topic['project']}/topics/{topic_name}",
        "filter": topic["filter"] or "",
        # Exactly-once delivery is incompatible with a push endpoint here.
        "pushConfig": {}
        if topic["config"].get("enableExactlyOnceDelivery", None)
        else {
            "pushEndpoint": push_url,
            "oidcToken": {
                "serviceAccountEmail": service_account,
                "audience": push_url,
            },
        },
        **topic["config"],
    }
    create_pubsub_subscription(
        client=self.versioned_clients.pubsub,
        sub_name=sub_name,
        req_body=req_body,
    )
def _deploy_trigger(self, topic_name, sourceUrl=None, entrypoint=None):
    """Deploy a pubsub-triggered cloudfunction for the given topic."""
    function_name = f"{self.cloudfunction}-topic-{topic_name}"
    log.info(f"deploying topic function {function_name}......")
    config = GConfig()
    user_configs = config.cloudfunction or {}
    event_trigger = {
        "eventType": "providers/cloud.pubsub/eventTypes/topic.publish",
        "resource": f"projects/{get_default_project()}/topics/{topic_name}",
    }
    req_body = {
        "name": function_name,
        "description": config.description or "created by goblet",
        "entryPoint": entrypoint,
        "sourceUploadUrl": sourceUrl,
        "eventTrigger": event_trigger,
        "runtime": config.runtime or "python37",
        **user_configs,
    }
    create_cloudfunction(self.versioned_clients.cloudfunctions, req_body)
def __init__(self, app, client, func_path, config=None):
    """Common backend state: zip archive, config, and zip include/exclude rules.

    Args:
        app: the Goblet app instance.
        client: API client for this backend.
        func_path: path used when deploying the function.
        config: optional config dict merged into GConfig. Default changed
            from the mutable ``{}`` to None (shared-mutable-default
            pitfall); GConfig accepts both.
    """
    self.app = app
    self.name = app.function_name
    self.log = logging.getLogger("goblet.backend")
    self.log.setLevel(logging.INFO)
    self.zip_path = get_g_dir() + f"/{self.name}.zip"
    self.zipf = self._create_zip()
    self.config = GConfig(config=config)
    # specifies which files to be zipped
    custom_files = self.config.custom_files or {}
    include = ["*.py"]
    exclude = ["build", "docs", "examples", "test", "tests", "venv"]
    include.extend(custom_files.get("include", []))
    exclude.extend(custom_files.get("exclude", []))
    self.zip_config = {"include": include, "exclude": exclude}
    self.func_path = func_path
    self.client = client
def destroy(project, location, stage, all):
    """
    Deletes all resources in gcp that are defined the current deployment

    The --all flag removes cloudfunction artifacts in cloud storage as well
    """
    try:
        _project = project or get_default_project()
        if not _project:
            click.echo(
                "Project not found. Set --project flag or add to gcloud by using gcloud config set project PROJECT"
            )
            # Bail out: continuing would assign None to GOOGLE_PROJECT,
            # which raises TypeError instead of this friendly message.
            return
        os.environ["GOOGLE_PROJECT"] = _project
        os.environ["GOOGLE_LOCATION"] = location
        if stage:
            os.environ["STAGE"] = stage
        app = get_goblet_app(GConfig().main_file or "main.py")
        app.destroy(all)
    except FileNotFoundError as not_found:
        click.echo(
            f"Missing {not_found.filename}. Make sure you are in the correct directory and this file exists"
        )
def sync(project, location, stage, dryrun):
    """
    Syncs resources that are deployed with current app configuration. This command will delete resources based on naming
    convention that are no longer in the app configuration.

    Use --dryrun flag to see what resources are flagged as being deleted.
    """
    try:
        _project = project or get_default_project()
        if not _project:
            click.echo(
                "Project not found. Set --project flag or add to gcloud by using gcloud config set project PROJECT"
            )
            # Bail out: continuing would assign None to GOOGLE_PROJECT,
            # which raises TypeError instead of this friendly message.
            return
        os.environ["GOOGLE_PROJECT"] = _project
        os.environ["GOOGLE_LOCATION"] = location
        if stage:
            os.environ["STAGE"] = stage
        app = get_goblet_app(GConfig().main_file or "main.py")
        app.sync(dryrun)
    except FileNotFoundError as not_found:
        click.echo(
            f"Missing {not_found.filename}. Make sure you are in the correct directory and this file exists"
        )