def _ApplyEnvVarsArgsToFunction(function, args):
  """Computes and applies the env-var update implied by the CLI args.

  Merges the env-var flags into the function's current environment
  variables; when the merge actually changes anything, the new set is
  written back onto the function message.

  Args:
    function: CloudFunction message to be checked and filled with env vars
      based on the flags.
    args: all CLI args.

  Returns:
    updated_fields: update mask containing the list of fields that are
      considered for updating based on the CLI args and existing variables.
  """
  current_env_vars = env_vars_api_util.GetEnvVarsAsDict(
      function.environmentVariables)
  flag_values = map_util.GetMapFlagsFromArgs('env-vars', args)
  merged_env_vars = map_util.ApplyMapFlags(current_env_vars, **flag_values)
  if merged_env_vars == current_env_vars:
    # Nothing changed; leave the message untouched and report no fields.
    return []
  env_vars_value_class = (
      api_util.GetApiMessagesModule().CloudFunction.EnvironmentVariablesValue)
  function.environmentVariables = env_vars_api_util.DictToEnvVarsProperty(
      env_vars_value_class, merged_env_vars)
  return ['environmentVariables']
def SetFunctionLabels(function, update_labels, remove_labels, clear_labels):
  """Set the labels on a function based on args.

  Args:
    function: the function to set the labels on.
    update_labels: a dict of <label-name>-<label-value> pairs for the labels
      to be updated, from --update-labels.
    remove_labels: a list of the labels to be removed, from --remove-labels.
    clear_labels: a bool representing whether or not to clear all labels,
      from --clear-labels.

  Returns:
    A bool indicating whether or not any labels were updated on the function.
  """
  # Copy before inserting the forced 'deployment-tool' label so the caller's
  # dict (typically parsed flag values) is not mutated as a side effect.
  labels_to_update = dict(update_labels) if update_labels else {}
  labels_to_update['deployment-tool'] = 'cli-gcloud'
  labels_diff = args_labels_util.Diff(additions=labels_to_update,
                                      subtractions=remove_labels,
                                      clear=clear_labels)
  messages = api_util.GetApiMessagesModule()
  labels_update = labels_diff.Apply(messages.CloudFunction.LabelsValue,
                                    function.labels)
  if labels_update.needs_update:
    function.labels = labels_update.labels
    return True
  return False
def SetFunctionSourceProps(function,
                           function_ref,
                           source_arg,
                           stage_bucket,
                           ignore_file=None,
                           kms_key=None):
  """Add sources to function.

  Args:
    function: The function to add a source to.
    function_ref: The reference to the function.
    source_arg: Location of source code to deploy.
    stage_bucket: The name of the Google Cloud Storage bucket where source
      code will be stored.
    ignore_file: custom ignore_file name. Override .gcloudignore file to
      customize files to be skipped.
    kms_key: KMS key configured for the function.

  Returns:
    A list of fields on the function that have been changed.

  Raises:
    FunctionsError: If the kms_key doesn't exist or GCF P4SA lacks
      permissions.
  """
  # Exactly one of these three source fields is re-populated below.
  function.sourceArchiveUrl = None
  function.sourceRepository = None
  function.sourceUploadUrl = None
  messages = api_util.GetApiMessagesModule()
  # Default to the current directory. This single expression replaces the
  # previous redundant pair of checks (`if source_arg is None` immediately
  # followed by `source_arg or '.'`) and also covers an empty string.
  source_arg = source_arg or '.'
  if source_arg.startswith('gs://'):
    if not source_arg.endswith('.zip'):
      # Users may have .zip archives with unusual names, and we don't want to
      # prevent those from being deployed; the deployment should go through so
      # just warn here.
      log.warning(
          '[{}] does not end with extension `.zip`. '
          'The `--source` argument must designate the zipped source archive '
          'when providing a Google Cloud Storage URI.'.format(source_arg))
    function.sourceArchiveUrl = source_arg
    return ['sourceArchiveUrl']
  elif source_arg.startswith('https://'):
    function.sourceRepository = messages.SourceRepository(
        url=_AddDefaultBranch(source_arg))
    return ['sourceRepository']
  # Local source: zip it up (honoring .gcloudignore) and upload, either to
  # the user-provided stage bucket or via a generated signed upload URL.
  with file_utils.TemporaryDirectory() as tmp_dir:
    zip_file = _CreateSourcesZipFile(tmp_dir, source_arg, ignore_file)
    service = api_util.GetApiClientInstance().projects_locations_functions
    upload_url = UploadFile(zip_file, stage_bucket, messages, service,
                            function_ref, kms_key)
    if upload_url.startswith('gs://'):
      function.sourceArchiveUrl = upload_url
      return ['sourceArchiveUrl']
    else:
      function.sourceUploadUrl = upload_url
      return ['sourceUploadUrl']
def Run(args):
  """List Google Cloud Functions."""
  api_client = util.GetApiClientInstance()
  api_messages = util.GetApiMessagesModule()
  current_project = properties.VALUES.core.project.GetOrFail()
  return YieldFromLocations(args.regions, current_project, args.limit,
                            api_messages, api_client)
def _ApplySecretsArgsToFunction(function, args):
  """Populates cloud function message with secrets payload if applicable.

  It compares the CLI args with the existing secrets configuration to compute
  the effective secrets configuration.

  Args:
    function: Cloud function message to be checked and populated.
    args: All CLI arguments.

  Returns:
    updated_fields: update mask containing the list of fields to be updated.
  """
  if not secrets_config.IsArgsSpecified(args):
    return []
  existing_secrets = secrets_util.GetSecretsAsDict(
      function.secretEnvironmentVariables, function.secretVolumes)
  requested_secrets = {}
  try:
    requested_secrets = secrets_config.ApplyFlags(
        existing_secrets, args, _GetProject(),
        project_util.GetProjectNumber(_GetProject()))
  except ArgumentTypeError as error:
    # Surface flag-parsing failures as the functions-specific error type.
    exceptions.reraise(function_exceptions.FunctionsError(error))
  if requested_secrets:
    _LogSecretsPermissionMessage(_GetProject(), function.serviceAccountEmail)
  old_env_secrets, old_volume_secrets = secrets_config.SplitSecretsDict(
      existing_secrets)
  new_env_secrets, new_volume_secrets = secrets_config.SplitSecretsDict(
      requested_secrets)
  fields_to_update = []
  if new_env_secrets != old_env_secrets:
    function.secretEnvironmentVariables = secrets_util.SecretEnvVarsToMessages(
        new_env_secrets, api_util.GetApiMessagesModule())
    fields_to_update.append('secretEnvironmentVariables')
  if new_volume_secrets != old_volume_secrets:
    function.secretVolumes = secrets_util.SecretVolumesToMessages(
        new_volume_secrets, api_util.GetApiMessagesModule())
    fields_to_update.append('secretVolumes')
  return fields_to_update
def Run(self, args):
  """Lists functions, defaulting to the '-' wildcard location."""
  api_client = util.GetApiClientInstance()
  api_messages = util.GetApiMessagesModule()
  # '-' asks the API to list across all locations when none were given.
  locations = args.regions or ['-']
  current_project = properties.VALUES.core.project.GetOrFail()
  return self._YieldFromLocations(locations, current_project, args.limit,
                                  api_messages, api_client)
def SecretVolumesToMessages(secret_volumes_dict, project):
  """Converts secrets from dict to cloud function SecretVolume message list.

  Args:
    secret_volumes_dict: Secrets volumes configuration dict. Prefers a sorted
      ordered dict for consistency.
    project: Project id of project that hosts the secret.

  Returns:
    A list of cloud function SecretVolume message.
  """
  messages = util.GetApiMessagesModule()
  # Group the per-file entries under their mount path. Keys look like
  # 'mount_path:file_path', values like 'secret:version'.
  grouped_by_mount = collections.defaultdict(list)
  for volume_key, volume_value in six.iteritems(secret_volumes_dict):
    key_parts = volume_key.split(':')
    value_parts = volume_value.split(':')
    grouped_by_mount[key_parts[0]].append({
        'path': key_parts[1],
        'secret': value_parts[0],
        'version': value_parts[1],
    })
  # Sort by mount path so the emitted messages have a deterministic order.
  grouped_by_mount = collections.OrderedDict(
      sorted(six.iteritems(grouped_by_mount)))
  volume_messages = []
  for mount_path, entries in six.iteritems(grouped_by_mount):
    version_messages = [
        messages.SecretVersion(path=entry['path'], version=entry['version'])
        for entry in entries
    ]
    volume_messages.append(
        messages.SecretVolume(
            mountPath=mount_path,
            projectId=project,
            # All entries under one mount share a secret; take the first.
            secret=entries[0]['secret'],
            versions=version_messages))
  return volume_messages
def CreateEventTrigger(trigger_provider, trigger_event, trigger_resource):
  """Create event trigger message.

  Args:
    trigger_provider: str, trigger provider label.
    trigger_event: str, trigger event label.
    trigger_resource: str, trigger resource name.

  Returns:
    A EventTrigger protobuf message.
  """
  messages = api_util.GetApiMessagesModule()
  # Unadvertised providers pass the resource through untouched; otherwise
  # the resource is normalized into its relative name.
  if trigger_provider == triggers.UNADVERTISED_PROVIDER_LABEL:
    resource_name = trigger_resource
  else:
    resource_name = ConvertTriggerArgsToRelativeName(
        trigger_provider, trigger_event, trigger_resource)
  event_trigger = messages.EventTrigger()
  event_trigger.eventType = trigger_event
  event_trigger.resource = resource_name
  return event_trigger
def Run(args, release_track):
  """List Google Cloud Functions."""
  project = properties.VALUES.core.project.GetOrFail()
  limit = args.limit
  v2_client = api_util.GetClientInstance(release_track=release_track)
  v2_messages = api_util.GetMessagesModule(release_track=release_track)
  list_v2_generator = _YieldFromLocations(args.regions, project, limit,
                                          v2_messages, v2_client)
  # Currently GCF v2 exists in staging so users of GCF v2 have in their config
  # the api_endpoint_overrides of cloudfunctions.
  # To list GCF v1 resources use _OverrideEndpointOverrides to forcibly
  # overwrite the user config's override with the original v1 endpoint.
  with _OverrideEndpointOverrides('cloudfunctions',
                                  'https://cloudfunctions.googleapis.com/'):
    v1_client = api_v1_util.GetApiClientInstance()
    v1_messages = api_v1_util.GetApiMessagesModule()
    list_v1_generator = command.YieldFromLocations(args.regions, project,
                                                  limit, v1_messages,
                                                  v1_client)
  # Chain lazily so v2 results stream first, followed by v1 results.
  return itertools.chain(list_v2_generator, list_v1_generator)
def SecretEnvVarsToMessages(secret_env_vars_dict, project):
  """Converts secrets from dict to cloud function SecretEnvVar message list.

  Args:
    secret_env_vars_dict: Secret environment variables configuration dict.
      Prefers a sorted ordered dict for consistency.
    project: Project id of project that hosts the secret.

  Returns:
    A list of cloud function SecretEnvVar message.
  """
  messages = util.GetApiMessagesModule()
  env_var_messages = []
  # Each value is formatted as 'secret:version'.
  for env_var_key, env_var_value in six.iteritems(secret_env_vars_dict):
    value_parts = env_var_value.split(':')
    env_var_messages.append(
        messages.SecretEnvVar(
            key=env_var_key,
            projectId=project,
            secret=value_parts[0],
            version=value_parts[1]))
  return env_var_messages
def Run(args, track=None, enable_runtime=True, enable_build_worker_pool=False, enable_security_level=False):
  """Run a function deployment with the given args.

  Creates the function when it does not exist yet, otherwise patches only
  the fields affected by the supplied flags, then waits on the resulting
  long-running operation while managing the allUsers invoker IAM binding.

  Args:
    args: argparse.Namespace, the parsed deploy-command arguments.
    track: release track, forwarded to api_util.GetApiMessagesModule.
    enable_runtime: bool, whether the --runtime flag is supported.
    enable_build_worker_pool: bool, whether build worker pool flags are
      supported.
    enable_security_level: bool, whether the --security-level flag is
      supported.

  Returns:
    The deployed CloudFunction, re-fetched after the operation completes,
    or None when there was nothing to update.
  """
  # Check for labels that start with `deployment`, which is not allowed.
  labels_util.CheckNoDeploymentLabels('--remove-labels', args.remove_labels)
  labels_util.CheckNoDeploymentLabels('--update-labels', args.update_labels)
  # Check that exactly one trigger type is specified properly.
  trigger_util.ValidateTriggerArgs(args.trigger_event, args.trigger_resource,
                                   args.IsSpecified('retry'),
                                   args.IsSpecified('trigger_http'))
  trigger_params = trigger_util.GetTriggerEventParams(args.trigger_http,
                                                      args.trigger_bucket,
                                                      args.trigger_topic,
                                                      args.trigger_event,
                                                      args.trigger_resource)
  function_ref = args.CONCEPTS.name.Parse()
  function_url = function_ref.RelativeName()
  messages = api_util.GetApiMessagesModule(track)
  # Get an existing function or create a new one.
  function = api_util.GetFunction(function_url)
  is_new_function = function is None
  # Remember pre-existing trigger/connector state before it is overwritten,
  # so later validation can tell what the deployed result will have.
  had_vpc_connector = bool(
      function.vpcConnector) if not is_new_function else False
  had_http_trigger = bool(
      function.httpsTrigger) if not is_new_function else False
  if is_new_function:
    trigger_util.CheckTriggerSpecified(args)
    function = messages.CloudFunction()
    function.name = function_url
  elif trigger_params:
    # If the new deployment would implicitly change the trigger_event type
    # raise error
    trigger_util.CheckLegacyTriggerUpdate(function.eventTrigger,
                                          trigger_params['trigger_event'])

  # Keep track of which fields are updated in the case of patching.
  updated_fields = []

  # Populate function properties based on args.
  if args.entry_point:
    function.entryPoint = args.entry_point
    updated_fields.append('entryPoint')
  if args.timeout:
    function.timeout = '{}s'.format(args.timeout)
    updated_fields.append('timeout')
  if args.memory:
    function.availableMemoryMb = utils.BytesToMb(args.memory)
    updated_fields.append('availableMemoryMb')
  if args.service_account:
    function.serviceAccountEmail = args.service_account
    updated_fields.append('serviceAccountEmail')
  if (args.IsSpecified('max_instances') or
      args.IsSpecified('clear_max_instances')):
    # --clear-max-instances wins and resets the limit to 0 (unlimited).
    max_instances = 0 if args.clear_max_instances else args.max_instances
    function.maxInstances = max_instances
    updated_fields.append('maxInstances')
  if enable_runtime:
    if args.IsSpecified('runtime'):
      function.runtime = args.runtime
      updated_fields.append('runtime')
      if args.runtime in ['nodejs6', 'nodejs8']:
        log.warning(
            ('The {version} runtime is deprecated on Cloud Functions. '
             'Please migrate to Node.js 10 '
             '(--runtime=nodejs10). '
             'See https://cloud.google.com/functions/docs/migrating/'
             'nodejs-runtimes').format(
                 version='Node.js 6' if args.runtime == 'nodejs6' else
                 'Node.js 8'))
    elif is_new_function:
      raise exceptions.RequiredArgumentException(
          'runtime', 'Flag `--runtime` is required for new functions.')
  if args.vpc_connector or args.clear_vpc_connector:
    # Empty string clears the connector on the server side.
    function.vpcConnector = ('' if args.clear_vpc_connector else
                             args.vpc_connector)
    updated_fields.append('vpcConnector')
  if args.IsSpecified('egress_settings'):
    # Egress settings only make sense when the deployed function will
    # actually have a VPC connector (existing and not cleared, or newly set).
    will_have_vpc_connector = ((had_vpc_connector and
                                not args.clear_vpc_connector) or
                               args.vpc_connector)
    if not will_have_vpc_connector:
      raise exceptions.RequiredArgumentException(
          'vpc-connector', 'Flag `--vpc-connector` is '
          'required for setting `egress-settings`.')
    egress_settings_enum = arg_utils.ChoiceEnumMapper(
        arg_name='egress_settings',
        message_enum=function.VpcConnectorEgressSettingsValueValuesEnum,
        custom_mappings=flags.EGRESS_SETTINGS_MAPPING).GetEnumForChoice(
            args.egress_settings)
    function.vpcConnectorEgressSettings = egress_settings_enum
    updated_fields.append('vpcConnectorEgressSettings')
  if args.IsSpecified('ingress_settings'):
    ingress_settings_enum = arg_utils.ChoiceEnumMapper(
        arg_name='ingress_settings',
        message_enum=function.IngressSettingsValueValuesEnum,
        custom_mappings=flags.INGRESS_SETTINGS_MAPPING).GetEnumForChoice(
            args.ingress_settings)
    function.ingressSettings = ingress_settings_enum
    updated_fields.append('ingressSettings')
  if enable_build_worker_pool:
    if args.build_worker_pool or args.clear_build_worker_pool:
      # Empty string clears the worker pool on the server side.
      function.buildWorkerPool = ('' if args.clear_build_worker_pool else
                                  args.build_worker_pool)
      updated_fields.append('buildWorkerPool')

  # Populate trigger properties of function based on trigger args.
  # Setting one trigger kind clears the other; both fields go in the mask.
  if args.trigger_http:
    function.httpsTrigger = messages.HttpsTrigger()
    function.eventTrigger = None
    updated_fields.extend(['eventTrigger', 'httpsTrigger'])
  if trigger_params:
    function.eventTrigger = trigger_util.CreateEventTrigger(**trigger_params)
    function.httpsTrigger = None
    updated_fields.extend(['eventTrigger', 'httpsTrigger'])
  if args.IsSpecified('retry'):
    updated_fields.append('eventTrigger.failurePolicy')
    if args.retry:
      function.eventTrigger.failurePolicy = messages.FailurePolicy()
      function.eventTrigger.failurePolicy.retry = messages.Retry()
    else:
      function.eventTrigger.failurePolicy = None
  elif function.eventTrigger:
    function.eventTrigger.failurePolicy = None
  if enable_security_level:
    if args.IsSpecified('security_level'):
      # Security level applies only to HTTP-triggered functions (existing
      # HTTP trigger kept, or one being added in this deploy).
      will_have_http_trigger = had_http_trigger or args.trigger_http
      if not will_have_http_trigger:
        raise exceptions.RequiredArgumentException(
            'trigger-http',
            'Flag `--trigger-http` is required for setting `security-level`.'
        )
      security_level_enum = arg_utils.ChoiceEnumMapper(
          arg_name='security_level',
          message_enum=function.httpsTrigger.SecurityLevelValueValuesEnum,
          custom_mappings=flags.SECURITY_LEVEL_MAPPING).GetEnumForChoice(
              args.security_level)
      function.httpsTrigger.securityLevel = security_level_enum
      updated_fields.append('httpsTrigger.securityLevel')

  # Populate source properties of function based on source args.
  # Only add source to function if it's explicitly provided, a new function,
  # using a stage bucket, or a deploy of an existing function that previously
  # used local source.
  if (args.source or args.stage_bucket or is_new_function or
      function.sourceUploadUrl):
    updated_fields.extend(
        source_util.SetFunctionSourceProps(function, function_ref, args.source,
                                           args.stage_bucket,
                                           args.ignore_file))
  # Apply label args to function
  if labels_util.SetFunctionLabels(function, args.update_labels,
                                   args.remove_labels, args.clear_labels):
    updated_fields.append('labels')
  # Apply build environment variables args to function
  updated_fields.extend(_ApplyBuildEnvVarsArgsToFunction(function, args))
  # Apply environment variables args to function
  updated_fields.extend(_ApplyEnvVarsArgsToFunction(function, args))

  ensure_all_users_invoke = flags.ShouldEnsureAllUsersInvoke(args)
  deny_all_users_invoke = flags.ShouldDenyAllUsersInvoke(args)

  if is_new_function:
    # For a brand-new HTTP function with no explicit IAM choice, prompt the
    # user (defaulting to No) if they are even allowed to add the binding.
    if (function.httpsTrigger and not ensure_all_users_invoke and
        not deny_all_users_invoke and
        api_util.CanAddFunctionIamPolicyBinding(_GetProject())):
      ensure_all_users_invoke = console_io.PromptContinue(
          prompt_string=(
              'Allow unauthenticated invocations of new function [{}]?'.format(
                  args.NAME)),
          default=False)
    op = api_util.CreateFunction(function,
                                 function_ref.Parent().RelativeName())
    if (function.httpsTrigger and not ensure_all_users_invoke and
        not deny_all_users_invoke):
      template = ('Function created with limited-access IAM policy. '
                  'To enable unauthorized access consider "%s"')
      log.warning(template % _CreateBindPolicyCommand(args.NAME, args.region))
      deny_all_users_invoke = True
  elif updated_fields:
    op = api_util.PatchFunction(function, updated_fields)
  else:
    op = None  # Nothing to wait for
    if not ensure_all_users_invoke and not deny_all_users_invoke:
      log.status.Print('Nothing to update.')
      return

  stop_trying_perm_set = [False]

  # The server asynchronously sets allUsers invoker permissions some time
  # after we create the function. That means, to remove it, we need do so
  # after the server adds it. We can remove this mess after the default
  # changes.
  # TODO(b/130604453): Remove the "remove" path, only bother adding. Remove
  # the logic from the polling loop. Remove the ability to add logic like
  # this to the polling loop.
  # Because of the DRS policy restrictions, private-by-default behavior is
  # not guaranteed for all projects and we need this hack until IAM deny is
  # implemented and all projects have private-by-default.
  def TryToSetInvokerPermission():
    """Try to make the invoker permission be what we said it should.

    This is for executing in the polling loop, and will stop trying as soon
    as it succeeds at making a change.
    """
    if stop_trying_perm_set[0]:
      return
    try:
      if ensure_all_users_invoke:
        api_util.AddFunctionIamPolicyBinding(function.name)
        stop_trying_perm_set[0] = True
      elif deny_all_users_invoke:
        # Keep retrying until the binding shows up and is removed.
        stop_trying_perm_set[0] = (
            api_util.RemoveFunctionIamPolicyBindingIfFound(function.name))
    except exceptions.HttpException:
      stop_trying_perm_set[0] = True
      log.warning('Setting IAM policy failed, try "%s"' %
                  _CreateBindPolicyCommand(args.NAME, args.region))

  log_stackdriver_url = [True]

  def TryToLogStackdriverURL(op):
    """Logs stackdriver URL.

    This is for executing in the polling loop, and will stop trying as soon
    as it succeeds at making a change.

    Args:
      op: the operation
    """
    if log_stackdriver_url[0] and op.metadata:
      metadata = encoding.PyValueToMessage(
          messages.OperationMetadataV1,
          encoding.MessageToPyValue(op.metadata))
      if metadata.buildId:
        sd_info_template = '\nFor Cloud Build Stackdriver Logs, visit: %s'
        log.status.Print(sd_info_template %
                         _CreateStackdriverURLforBuildLogs(
                             metadata.buildId, _GetProject()))
        log_stackdriver_url[0] = False

  if op:
    try_set_invoker = None
    if function.httpsTrigger:
      try_set_invoker = TryToSetInvokerPermission
    api_util.WaitForFunctionUpdateOperation(
        op,
        try_set_invoker=try_set_invoker,
        on_every_poll=[TryToLogStackdriverURL])
  return api_util.GetFunction(function.name)
def _BuildRequest(self):
  """Builds the locations list request scoped to the current project."""
  messages = util.GetApiMessagesModule()
  project_id = properties.VALUES.core.project.GetOrFail()
  return messages.CloudfunctionsProjectsLocationsListRequest(
      name='projects/' + project_id)