def Run(self, args):
  """Runs the command.

  Args:
    args: argparse.Namespace, An object that contains the values for the
      arguments specified in the .Args() method.

  Returns:
    A response object returned by rpc call Validate.
  """
  project_id = properties.VALUES.core.project.GetOrFail()
  api_holder = base_classes.ComputeApiHolder(self.ReleaseTrack())
  compute_client = api_holder.client

  # --load-balancing-scheme only applies to global URL maps.
  if args.region is not None and args.load_balancing_scheme:
    raise exceptions.InvalidArgumentException(
        '--load-balancing-scheme',
        'Cannot specify load balancing scheme for regional URL maps.')

  # Read the UrlMap to be verified from the source file (stdin when omitted).
  source_data = console_io.ReadFromFileOrStdin(args.source or '-', binary=False)
  try:
    url_map = export_util.Import(
        message_type=compute_client.messages.UrlMap,
        stream=source_data,
        schema_path=_GetSchemaPath(self.ReleaseTrack()))
  except yaml_validator.ValidationError as e:
    raise compute_exceptions.ValidationError(str(e))

  # Dispatch the UrlMap.validate request to the appropriate collection.
  if args.region is not None:
    return _SendRegionalRequest(compute_client, project_id, args.region,
                                url_map)
  return _SendGlobalRequest(compute_client, project_id, url_map,
                            args.load_balancing_scheme)
def ParseTemplate(template_file, params=None, params_from_file=None):
  """Parse and apply params into a template file.

  Args:
    template_file: The path to the file to open and parse.
    params: a dict of param-name -> param-value
    params_from_file: a dict of param-name -> param-file

  Returns:
    The parsed template dict

  Raises:
    yaml.Error: When the template file cannot be read or parsed.
    ArgumentError: If any params are not provided.
    ValidationError: if the YAML file is invalid.
  """
  params = params or {}
  params_from_file = params_from_file or {}

  # Merge inline params with file-sourced params, rejecting duplicate keys.
  joined_params = dict(params)
  for key, file_path in params_from_file.items():
    if key in joined_params:
      raise exceptions.DuplicateError('Duplicate param key: ' + key)
    try:
      joined_params[key] = files.ReadFileContents(file_path)
    except files.Error as e:
      # BUG FIX: files.Error is a plain Exception, not an OSError, so it has
      # no `strerror` attribute; formatting `e.strerror` raised
      # AttributeError and masked the real read failure. Format the
      # exception itself instead.
      raise exceptions.ArgumentError(
          'Could not load param key "{0}" from file "{1}": {2}'.format(
              key, file_path, e))

  # The document root must be a mapping containing a 'template' entry.
  template = yaml.load_path(template_file)
  if not isinstance(template, dict) or 'template' not in template:
    raise exceptions.ValidationError(
        'Invalid template format. Root must be a mapping with single '
        '"template" value')

  (template, missing_params,
   used_params) = ReplaceTemplateParams(template, joined_params)
  if missing_params:
    raise exceptions.ArgumentError(
        'Some parameters were present in the template but not provided on '
        'the command line: ' + ', '.join(sorted(missing_params)))

  # Reject params the caller supplied but the template never referenced.
  unused_params = set(joined_params) - used_params
  if unused_params:
    raise exceptions.ArgumentError(
        'Some parameters were specified on the command line but not referenced '
        'in the template: ' + ', '.join(sorted(unused_params)))
  return template
def Run(self, args):
  """Imports an SSL policy from YAML, creating or patching as needed."""
  holder = base_classes.ComputeApiHolder(self.ReleaseTrack())
  helper = ssl_policies_utils.SslPolicyHelper(holder)
  client = holder.client

  ssl_policy_ref = self.SSL_POLICY_ARG.ResolveAsResource(
      args,
      holder.resources,
      scope_lister=compute_flags.GetDefaultScopeLister(holder.client),
      default_scope=compute_scope.ScopeEnum.GLOBAL)

  # Parse the YAML payload from the source file (stdin when omitted).
  source_data = console_io.ReadFromFileOrStdin(args.source or '-', binary=False)
  try:
    ssl_policy = export_util.Import(
        message_type=client.messages.SslPolicy,
        stream=source_data,
        schema_path=self.GetSchemaPath())
  except yaml_validator.ValidationError as e:
    raise compute_exceptions.ValidationError(str(e))

  # Fetch the current policy; a 404 means we create instead of patch.
  try:
    existing_policy = helper.Describe(ssl_policy_ref)
  except apitools_exceptions.HttpError as error:
    if error.status_code != 404:
      raise error
    operation_ref = helper.Create(ssl_policy_ref, ssl_policy)
    return helper.WaitForOperation(ssl_policy_ref, operation_ref,
                                   'Creating SSL policy')

  # No change, do not send requests to server.
  if existing_policy == ssl_policy:
    return

  console_io.PromptContinue(
      message=('SSL Policy [{0}] will be overwritten.').format(
          ssl_policy_ref.Name()),
      cancel_on_no=True)

  # Carry over the server-managed id and fingerprint fields. These two
  # fields are manually removed from the schema files.
  ssl_policy.id = existing_policy.id
  ssl_policy.fingerprint = existing_policy.fingerprint

  operation_ref = helper.Patch(ssl_policy_ref, ssl_policy, False)
  return helper.WaitForOperation(ssl_policy_ref, operation_ref,
                                 'Updating SSL policy')
def Run(self, args):
  """Imports a target gRPC proxy from YAML, creating or patching it."""
  holder = base_classes.ComputeApiHolder(self.ReleaseTrack())
  client = holder.client

  proxy_ref = self.TARGET_GRPC_PROXY_ARG.ResolveAsResource(
      args, holder.resources, default_scope=compute_scope.ScopeEnum.GLOBAL)

  # Parse the YAML payload from the source file (stdin when omitted).
  source_data = console_io.ReadFromFileOrStdin(args.source or '-', binary=False)
  try:
    imported_proxy = export_util.Import(
        message_type=client.messages.TargetGrpcProxy,
        stream=source_data,
        schema_path=self.GetSchemaPath())
  except yaml_validator.ValidationError as e:
    raise compute_exceptions.ValidationError(str(e))

  # Fetch the current proxy; a 404 means it does not exist and must be
  # created from scratch.
  try:
    existing_proxy = _Describe(holder, proxy_ref)
  except apitools_exceptions.HttpError as error:
    if error.status_code != 404:
      raise error
    return _Create(holder, imported_proxy, proxy_ref)

  # No change, do not send requests to server.
  if existing_proxy == imported_proxy:
    return

  console_io.PromptContinue(
      message=('Target Grpc Proxy [{0}] will be overwritten.').format(
          proxy_ref.Name()),
      cancel_on_no=True)

  # Carry over the server-managed id and fingerprint fields. These two
  # fields are manually removed from the schema files.
  imported_proxy.id = existing_proxy.id
  imported_proxy.fingerprint = existing_proxy.fingerprint
  return _Patch(client, proxy_ref, imported_proxy)
def Run(self, args):
  """Imports a backend service from YAML, creating or patching it."""
  holder = base_classes.ComputeApiHolder(self.ReleaseTrack())
  client = holder.client
  resources = holder.resources

  backend_service_ref = (
      flags.GLOBAL_REGIONAL_BACKEND_SERVICE_ARG.ResolveAsResource(
          args,
          resources,
          scope_lister=compute_flags.GetDefaultScopeLister(client)))

  # Parse the YAML payload from the source file (stdin when omitted).
  source_data = console_io.ReadFromFileOrStdin(args.source or '-', binary=False)
  try:
    backend_service = export_util.Import(
        message_type=client.messages.BackendService,
        stream=source_data,
        schema_path=self.GetSchemaPath())
  except yaml_validator.ValidationError as e:
    raise exceptions.ValidationError(str(e))

  # Fetch the current service; a 404 means this import creates a new one.
  try:
    existing_service = backend_services_utils.SendGetRequest(
        client, backend_service_ref)
  except apitools_exceptions.HttpError as error:
    if error.status_code != 404:
      raise error
    return self.SendInsertRequest(client, resources, backend_service_ref,
                                  backend_service)

  # No change, do not send requests to server.
  if existing_service == backend_service:
    return

  console_io.PromptContinue(
      message=('Backend Service [{0}] will be overwritten.').format(
          backend_service_ref.Name()),
      cancel_on_no=True)

  # Carry over the server-managed id and fingerprint fields. These two
  # fields are manually removed from the schema files.
  backend_service.id = existing_service.id
  backend_service.fingerprint = existing_service.fingerprint

  # Unspecified fields are assumed to be cleared.
  cleared_fields = [
      field for field in ('securitySettings', 'localityLbPolicy',
                          'circuitBreakers', 'consistentHash',
                          'outlierDetection', 'customRequestHeaders',
                          'customResponseHeaders')
      if not getattr(backend_service, field)
  ]
  cdn_policy = backend_service.cdnPolicy
  if cdn_policy:
    # Clear individual cdnPolicy sub-fields so the rest of the policy is
    # kept: None for scalar settings, empty list for the repeated ones.
    for subfield, is_cleared in (
        ('defaultTtl', cdn_policy.defaultTtl is None),
        ('clientTtl', cdn_policy.clientTtl is None),
        ('maxTtl', cdn_policy.maxTtl is None),
        ('negativeCachingPolicy', not cdn_policy.negativeCachingPolicy),
        ('bypassCacheOnRequestHeaders',
         not cdn_policy.bypassCacheOnRequestHeaders),
        ('serveWhileStale', cdn_policy.serveWhileStale is None),
        ('requestCoalescing', cdn_policy.requestCoalescing is None)):
      if is_cleared:
        cleared_fields.append('cdnPolicy.' + subfield)
  else:
    cleared_fields.append('cdnPolicy')

  with client.apitools_client.IncludeFields(cleared_fields):
    return self.SendPatchRequest(client, resources, backend_service_ref,
                                 backend_service)
def _Run(args, holder, target_http_proxy_arg, release_track):
  """Issues requests necessary to import target HTTP proxies."""
  client = holder.client
  resources = holder.resources

  proxy_ref = target_http_proxy_arg.ResolveAsResource(
      args,
      holder.resources,
      default_scope=compute_scope.ScopeEnum.GLOBAL,
      scope_lister=compute_flags.GetDefaultScopeLister(client))

  # Parse the YAML payload from the source file (stdin when omitted).
  source_data = console_io.ReadFromFileOrStdin(args.source or '-', binary=False)
  try:
    imported_proxy = export_util.Import(
        message_type=client.messages.TargetHttpProxy,
        stream=source_data,
        schema_path=_GetSchemaPath(release_track))
  except yaml_validator.ValidationError as e:
    raise compute_exceptions.ValidationError(str(e))

  # Fetch the current proxy; a 404 means this import creates a new one.
  try:
    existing_proxy = target_http_proxies_utils.SendGetRequest(
        client, proxy_ref)
  except apitools_exceptions.HttpError as error:
    if error.status_code != 404:
      raise error
    return _SendInsertRequest(client, resources, proxy_ref, imported_proxy)

  # No change, do not send requests to server.
  if existing_proxy == imported_proxy:
    return

  console_io.PromptContinue(
      message=('Target Http Proxy [{0}] will be overwritten.').format(
          proxy_ref.Name()),
      cancel_on_no=True)

  # Carry over the server-managed id and fingerprint fields. These two
  # fields are manually removed from the schema files.
  imported_proxy.id = existing_proxy.id
  imported_proxy.fingerprint = existing_proxy.fingerprint

  # Unspecified fields are assumed to be cleared.
  cleared_fields = []
  if imported_proxy.description is None:
    cleared_fields.append('description')
  # The REST API will reject requests without the UrlMap. However, we want to
  # avoid doing partial validations in the client and rely on server side
  # behavior.
  if imported_proxy.urlMap is None:
    cleared_fields.append('urlMap')
  # proxyBind only exists outside the GA track.
  if release_track != base.ReleaseTrack.GA:
    if imported_proxy.proxyBind is None:
      cleared_fields.append('proxyBind')

  with client.apitools_client.IncludeFields(cleared_fields):
    return _SendPatchRequest(client, resources, proxy_ref, imported_proxy)
def _Run(args, holder, target_https_proxy_arg, release_track):
  """Issues requests necessary to import target HTTPS proxies."""
  client = holder.client
  resources = holder.resources

  proxy_ref = target_https_proxy_arg.ResolveAsResource(
      args,
      holder.resources,
      default_scope=compute_scope.ScopeEnum.GLOBAL,
      scope_lister=compute_flags.GetDefaultScopeLister(client))

  # Parse the YAML payload from the source file (stdin when omitted).
  source_data = console_io.ReadFromFileOrStdin(args.source or '-', binary=False)
  try:
    imported_proxy = export_util.Import(
        message_type=client.messages.TargetHttpsProxy,
        stream=source_data,
        schema_path=_GetSchemaPath(release_track))
  except yaml_validator.ValidationError as e:
    raise compute_exceptions.ValidationError(str(e))

  # Fetch the current proxy; a 404 means this import creates a new one.
  try:
    existing_proxy = target_https_proxies_utils.SendGetRequest(
        client, proxy_ref)
  except apitools_exceptions.HttpError as error:
    if error.status_code != 404:
      raise error
    return _SendInsertRequest(client, resources, proxy_ref, imported_proxy)

  # No change, do not send requests to server.
  if existing_proxy == imported_proxy:
    return

  console_io.PromptContinue(
      message=('Target Https Proxy [{0}] will be overwritten.').format(
          proxy_ref.Name()),
      cancel_on_no=True)

  # Carry over the server-managed id and fingerprint fields. These two
  # fields are manually removed from the schema files; fingerprint is only
  # present on some API versions.
  imported_proxy.id = existing_proxy.id
  if hasattr(existing_proxy, 'fingerprint'):
    imported_proxy.fingerprint = existing_proxy.fingerprint

  # Unspecified fields are assumed to be cleared.
  cleared_fields = []
  if imported_proxy.description is None:
    cleared_fields.append('description')
  if imported_proxy.serverTlsPolicy is None:
    cleared_fields.append('serverTlsPolicy')
  if imported_proxy.authorizationPolicy is None:
    cleared_fields.append('authorizationPolicy')
  # certificateMap and httpFilters only exist on some API versions.
  if (hasattr(imported_proxy, 'certificateMap') and
      imported_proxy.certificateMap is None):
    cleared_fields.append('certificateMap')
  if (hasattr(imported_proxy, 'httpFilters') and
      not imported_proxy.httpFilters):
    cleared_fields.append('httpFilters')
  if imported_proxy.proxyBind is None:
    cleared_fields.append('proxyBind')
  if imported_proxy.quicOverride is None:
    cleared_fields.append('quicOverride')
  if not imported_proxy.sslCertificates:
    cleared_fields.append('sslCertificates')
  if imported_proxy.sslPolicy is None:
    cleared_fields.append('sslPolicy')
  if imported_proxy.urlMap is None:
    cleared_fields.append('urlMap')

  with client.apitools_client.IncludeFields(cleared_fields):
    return _SendPatchRequest(client, resources, proxy_ref, imported_proxy)
def _Run(args, holder, url_map_arg, release_track):
  """Issues requests necessary to import URL maps."""
  client = holder.client
  resources = holder.resources

  url_map_ref = url_map_arg.ResolveAsResource(
      args,
      resources,
      default_scope=compute_scope.ScopeEnum.GLOBAL,
      scope_lister=compute_flags.GetDefaultScopeLister(client))

  # Parse the YAML payload from the source file (stdin when omitted).
  source_data = console_io.ReadFromFileOrStdin(args.source or '-', binary=False)
  try:
    url_map = export_util.Import(
        message_type=client.messages.UrlMap,
        stream=source_data,
        schema_path=_GetSchemaPath(release_track))
  except yaml_validator.ValidationError as e:
    raise compute_exceptions.ValidationError(str(e))

  if url_map.name != url_map_ref.Name():
    # Replace warning and raise error after 10/01/2021
    log.warning(
        'The name of the Url Map must match the value of the ' +
        '\'name\' attribute in the YAML file. Future versions of ' +
        'gcloud will fail with an error.')

  # Fetch the current URL map; a 404 means this import creates a new one.
  try:
    existing_url_map = url_maps_utils.SendGetRequest(client, url_map_ref)
  except apitools_exceptions.HttpError as error:
    if error.status_code != 404:
      raise error
    return _SendInsertRequest(client, resources, url_map_ref, url_map)

  # No change, do not send requests to server.
  if existing_url_map == url_map:
    return

  console_io.PromptContinue(
      message=('Url Map [{0}] will be overwritten.').format(
          url_map_ref.Name()),
      cancel_on_no=True)

  # Populate id and fingerprint fields when YAML files don't contain them.
  if not url_map.id:
    url_map.id = existing_url_map.id
  if url_map.fingerprint:
    # Replace warning and raise error after 10/01/2021
    log.warning(
        'An up-to-date fingerprint must be provided to ' +
        'update the Url Map. Future versions of gcloud will fail ' +
        'with an error \'412 conditionNotMet\'')
  # NOTE(review): the fingerprint is always refreshed from the server copy
  # during the transition period announced above — confirm before changing.
  url_map.fingerprint = existing_url_map.fingerprint

  # Unspecified fields are assumed to be cleared.
  # TODO(b/182287403) Replace with proto reflection and update scenario tests.
  cleared_fields = []
  for simple_field in ('description', 'hostRules', 'pathMatchers', 'tests',
                       'defaultService'):
    if not getattr(url_map, simple_field):
      cleared_fields.append(simple_field)
  if not url_map.defaultRouteAction:
    cleared_fields.append('defaultRouteAction')
  else:
    cleared_fields.extend(
        _GetClearedFieldsForRoutAction(url_map.defaultRouteAction,
                                       'defaultRouteAction.'))
  if not url_map.defaultUrlRedirect:
    cleared_fields.append('defaultUrlRedirect')
  else:
    cleared_fields.extend(
        _GetClearedFieldsForUrlRedirect(url_map.defaultUrlRedirect,
                                        'defaultUrlRedirect.'))
  if not url_map.headerAction:
    cleared_fields.append('headerAction')
  else:
    cleared_fields.extend(
        _GetClearedFieldsForHeaderAction(url_map.headerAction,
                                         'headerAction.'))

  with client.apitools_client.IncludeFields(cleared_fields):
    return _SendPatchRequest(client, resources, url_map_ref, url_map)