def prepare_functions_bundle(function_code_path, tmpdir, pcml_lib_root):
  """Copy the pcml library plus the code under `function_code_path` into a
  deployable bundle directory under `tmpdir`."""

  # Recursive copy pcml_root/pcml into tmpdir
  tf.gfile.MakeDirs(os.path.join(tmpdir, "lib"))
  tmp_pcml_path = os.path.join(tmpdir, "lib", "pcml")
  run_and_output(["cp", "-r", pcml_lib_root, tmp_pcml_path])
  run_and_output(["ls", tmp_pcml_path])

  # Replace __init__.py with an empty one
  tmp_init = os.path.join(tmp_pcml_path, "__init__.py")
  with tf.gfile.Open(tmp_init, "w") as f:
    f.write("")

  # Copy in function code
  source_function_code_path = os.path.join(pcml_lib_root, function_code_path)
  tmp_lib_path = os.path.join(tmpdir, "lib")

  def _allow_filename(filename):
    if filename.endswith(".py") or filename.endswith(".txt"):
      return True
    if filename == "Dockerfile":
      return True
    return False

  for filename in tf.gfile.ListDirectory(source_function_code_path):
    if _allow_filename(filename):
      source_path = os.path.join(source_function_code_path, filename)
      target_path = os.path.join(tmp_lib_path, filename)
      tf.gfile.Copy(source_path, target_path)

  return tmp_lib_path
def update_service(function_name,
                   image_uri,
                   region,
                   memory="2Gi",
                   concurrency=40,
                   timeout="10m"):
  """Update a Cloud Run service given a container image."""
  run_and_output([
      "gcloud", "beta", "run", "deploy", "--platform", "managed", "--region",
      region, function_name, "--image", image_uri, "--memory", memory,
      "--timeout", timeout, "--concurrency", str(concurrency)
  ])
def maybe_add_pubsub_token_creator_policy(project_id):
  """Grant the project's Pub/Sub service agent the Token Creator role."""
  project_number = get_project_number(project_id)
  pubsub_sa = "service-{}".format(project_number)
  pubsub_sa += "@gcp-sa-pubsub.iam.gserviceaccount.com"
  member_arg = "--member=serviceAccount:{}".format(pubsub_sa)
  role_arg = "--role=roles/iam.serviceAccountTokenCreator"
  run_and_output([
      "gcloud", "projects", "add-iam-policy-binding", project_number,
      member_arg, role_arg
  ])
def e2e_test_function(function_name,
                      trigger_message,
                      trigger_topic,
                      project,
                      service_account,
                      region,
                      staging,
                      deploy_fn,
                      expect_string,
                      wait_seconds=60):
  """Deploy a function, publish a trigger message to its topic, and assert
  that `expect_string` appears in the function's logs."""

  deploy_fn(project_id=project,
            service_account=service_account,
            region=region,
            staging_root=staging)

  start_time = datetime.datetime.now().isoformat()

  publisher_client = pubsub_v1.PublisherClient()
  topic_path = publisher_client.topic_path(project, trigger_topic)
  data = json.dumps(trigger_message.__dict__).encode('utf-8')
  publisher_client.publish(topic_path, data=data).result()

  # Wait for function execution to complete, including cold start
  time.sleep(wait_seconds)

  # Check logs after a delay
  logs = run_and_output([
      'gcloud', 'alpha', 'functions', 'logs', 'read', function_name,
      '--start-time', start_time, '--project', project
  ])

  tf.logging.info("fn logs: {}".format(logs))

  assert expect_string in logs
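def _example_e2e_test_usage():
  """Illustrative sketch of the interface `e2e_test_function` expects.

  `deploy_fn` must accept the keyword arguments project_id, service_account,
  region, and staging_root; `trigger_message` only needs a JSON-serializable
  `__dict__`. Every name and value below (project, bucket, topic, function
  name, expected log string) is hypothetical.
  """

  class ExampleMessage(object):

    def __init__(self, target):
      self.target = target

  def deploy_example_fn(project_id, service_account, region, staging_root):
    # Stage a bundle and deploy a topic-triggered function from it.
    source = stage_functions_bundle(staging_root, "functions/example")
    deploy_topic_responder(function_name="example-fn",
                           trigger_topic="example-trigger",
                           project_id=project_id,
                           service_account=service_account,
                           source=source,
                           region=region)

  e2e_test_function(
      function_name="example-fn",
      trigger_message=ExampleMessage(target="hello"),
      trigger_topic="example-trigger",
      project="my-project",
      service_account="example-fn@my-project.iam.gserviceaccount.com",
      region="us-central1",
      staging="gs://my-bucket/staging",
      deploy_fn=deploy_example_fn,
      expect_string="Handled message")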
def configure_invoker_sa(service_name, project, region):
  """Create an invoker service account (if needed) and grant it run.invoker
  on the given Cloud Run service."""
  service_account_name = "{}-invoker".format(service_name)
  service_account_email = maybe_create_service_account(service_account_name,
                                                       project)
  member_arg = "--member=serviceAccount:{}".format(service_account_email)
  role_arg = "--role=roles/run.invoker"
  run_and_output([
      "gcloud", "beta", "run", "services", "add-iam-policy-binding",
      "--platform", "managed", "--region", region, service_name, member_arg,
      role_arg
  ])
  return service_account_email
def stage_functions_bundle(gcs_staging_path, function_code_path):
  """Zip the pcml library plus function code and stage the archive to GCS."""

  pcml_lib_root = os.path.join(get_pcml_root(), "pcml")

  with TemporaryDirectory() as tmpdir:

    tmp_lib_path = prepare_functions_bundle(function_code_path, tmpdir,
                                            pcml_lib_root)

    local_zip_filename = "bundle.zip"
    local_zip_path = os.path.join(tmpdir, local_zip_filename)
    remote_zip_path = os.path.join(
        gcs_staging_path, "{}-{}".format(_timestamp(), local_zip_filename))

    # Create zip
    os.chdir(tmp_lib_path)
    run_and_output(["zip", "-r", local_zip_path, "./"])

    tf.gfile.Copy(local_zip_path, remote_zip_path, overwrite=True)

    return remote_zip_path
def get_domain_for_cloudrun_service(service_name, region):
  """Return the domain of a Cloud Run service, parsed from the output of
  `gcloud beta run services describe`."""
  out = run_and_output([
      "gcloud", "beta", "run", "services", "describe", service_name,
      "--platform", "managed", "--region", region
  ])
  domain = None
  for line in out.split("\n"):
    if "domain" in line:
      domain = line.split("domain:")[1].split(" ")[1]
  return domain
def maybe_create_subscription_for_service(service_name, service_account_email,
                                          service_url, project, region,
                                          topic_name):
  """Create a push subscription (if absent) that delivers messages published
  to `topic_name` to `service_url`, authenticated as `service_account_email`."""

  subscriptions = list_subscriptions_in_project(project)

  # Create a unique subscription ID
  subscription_name = "{}-{}".format(service_name, topic_name)

  subscriber = pubsub_v1.SubscriberClient()
  subscription_path = subscriber.subscription_path(project, subscription_name)

  publisher = pubsub_v1.PublisherClient()
  topic_path = publisher.topic_path(project, topic_name)

  if subscription_path not in subscriptions:
    run_and_output([
        "gcloud", "beta", "pubsub", "subscriptions", "create",
        subscription_path, "--topic", topic_path,
        "--push-endpoint={}".format(service_url),
        "--push-auth-service-account={}".format(service_account_email)
    ])
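def _example_cloudrun_pubsub_wiring():
  """Illustrative sketch of combining the helpers above to route a Pub/Sub
  topic to a Cloud Run service via an authenticated push subscription. The
  project, region, service, and topic names are hypothetical."""

  project = "my-project"
  region = "us-central1"
  service_name = "example-service"
  topic_name = "example-topic"

  # Allow the Pub/Sub service agent to mint auth tokens for push delivery.
  maybe_add_pubsub_token_creator_policy(project)

  # Create an invoker service account and grant it run.invoker on the service.
  invoker_email = configure_invoker_sa(service_name, project, region)

  # Look up the service URL to use as the push endpoint.
  service_url = get_domain_for_cloudrun_service(service_name, region)

  # Create the push subscription if it doesn't already exist.
  maybe_create_subscription_for_service(service_name, invoker_email,
                                        service_url, project, region,
                                        topic_name)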
def deploy_firestore_responder(function_name,
                               event_type,
                               project_id,
                               collection,
                               document_path,
                               service_account=None,
                               source=None,
                               runtime="python37",
                               region="us-central1",
                               memory="256MB",
                               timeout="60s"):
  """Convenience wrapper for deployment of a Firestore responder function.

  Notes:
    * Service account defaults to {project name}@appspot.gserviceaccount.com
  """

  _validate_runtime(runtime)

  event_type_longhand = _lookup_firestore_event_type(event_type)

  triggering_resource = "projects/{}/databases/(default)/".format(project_id)
  triggering_resource += "documents/{}/{}".format(collection, document_path)

  msg = "Function {} will trigger on {} ".format(function_name,
                                                 event_type_longhand)
  msg += "in response to triggering resource {}.".format(triggering_resource)
  tf.logging.info(msg)

  cmd = [
      "gcloud", "functions", "deploy", function_name, "--trigger-event",
      event_type_longhand, "--trigger-resource", triggering_resource,
      "--runtime", runtime, "--memory", memory, "--timeout", timeout
  ]

  if source:
    cmd.extend(["--source", source])

  if service_account:
    cmd.extend(["--service-account", service_account])

  if region:
    cmd.extend(["--region", region])

  return run_and_output(cmd)
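def _example_firestore_responder_deploy():
  """Illustrative sketch of calling deploy_firestore_responder.

  The project, bucket, collection, and function names are hypothetical; the
  `{userId}` wildcard in document_path matches any document in the collection.
  Assumes `_lookup_firestore_event_type` accepts a shorthand like "create".
  """
  source = stage_functions_bundle("gs://my-bucket/staging",
                                  "functions/example")
  deploy_firestore_responder(function_name="on-user-create",
                             event_type="create",
                             project_id="my-project",
                             collection="users",
                             document_path="{userId}",
                             source=source,
                             region="us-central1")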
def deploy_topic_responder(function_name,
                           trigger_topic,
                           project_id,
                           service_account=None,
                           source=None,
                           runtime="python37",
                           region="us-central1",
                           create_topic=True,
                           create_done_topic=True,
                           memory="256MB",
                           timeout="60s",
                           max_instances=1000):
  """Convenience wrapper for deployment of a Pub/Sub topic responder function.

  Notes:
    * Optionally creates the trigger topic and a companion
      "<trigger_topic>-done" topic for downstream notifications.
  """

  _validate_runtime(runtime)

  msg = "Function {} will be triggered by topic {}.".format(
      function_name, trigger_topic)
  tf.logging.info(msg)

  if create_topic:
    _create_topic(project_id, trigger_topic)

  if create_done_topic:
    _create_topic(project_id, trigger_topic + "-done")

  cmd = [
      "gcloud", "functions", "deploy", function_name, "--trigger-topic",
      trigger_topic, "--runtime", runtime, "--memory", memory, "--timeout",
      timeout, "--max-instances", str(max_instances)
  ]

  if source:
    cmd.extend(["--source", source])

  if service_account:
    cmd.extend(["--service-account", service_account])

  if region:
    cmd.extend(["--region", region])

  return run_and_output(cmd)
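def _example_topic_responder_deploy():
  """Illustrative sketch of calling deploy_topic_responder.

  The project, bucket, topic, and function names are hypothetical. The staged
  bundle zip is passed as the function source, and the helper also creates an
  "example-trigger-done" topic because create_done_topic defaults to True.
  """
  source = stage_functions_bundle("gs://my-bucket/staging",
                                  "functions/example")
  deploy_topic_responder(function_name="example-responder",
                         trigger_topic="example-trigger",
                         project_id="my-project",
                         source=source,
                         memory="512MB",
                         timeout="120s",
                         max_instances=100)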
def test_cmd_runs(self):
  """Test that we can run a command."""
  _ = cmd_utils.run_and_output(["ls", "/tmp"])