def DetectType(ref, args, request):
  """Detects the Entry type from the parsed args and stores it in the request.

  Args:
    ref: The entry resource reference (unused).
    args: The parsed args namespace.
    request: The update entry request.

  Returns:
    Request with proper type.
  """
  del ref  # Unused.
  messages = entries_v1.EntriesClient().messages
  entry_type_enum = messages.GoogleCloudDatacatalogV1Entry.TypeValueValuesEnum
  if args.IsSpecified('kafka_cluster_bootstrap_servers'):
    arg_utils.SetFieldInMessage(
        request, 'googleCloudDatacatalogV1Entry.type', entry_type_enum.CLUSTER)
  if args.IsSpecified('kafka_topic'):
    # Note: if both flags are given, the topic wins (last write).
    arg_utils.SetFieldInMessage(
        request, 'googleCloudDatacatalogV1Entry.type',
        entry_type_enum.DATA_STREAM)
  return request
def Search(self, query, include_gcp_public_datasets, include_organization_ids,
           include_project_ids, order_by, page_size, limit):
  """Parses search args into the request and yields paged search results."""
  request = self.messages.GoogleCloudDatacatalogV1beta1SearchCatalogRequest(
      query=query,
      orderBy=order_by,
  )
  # Only truthy scope restrictions are written into the request.
  scope_values = (
      ('scope.includeGcpPublicDatasets', include_gcp_public_datasets),
      ('scope.includeOrgIds', include_organization_ids),
      ('scope.includeProjectIds', include_project_ids),
  )
  for field_path, value in scope_values:
    if value:
      arg_utils.SetFieldInMessage(request, field_path, value)
  return list_pager.YieldFromList(
      self.service,
      request,
      batch_size=page_size,
      limit=limit,
      method='Search',
      field='results',
      batch_size_attribute='pageSize',
  )
def CreateRequest(self, namespace, static_fields=None,
                  resource_method_params=None):
  """Generates the request object for the method call from the parsed args.

  Args:
    namespace: The argparse namespace.
    static_fields: {str, value}, A mapping of API field name to value to
      insert into the message. This is a convenient way to insert extra data
      while the request is being constructed for fields that don't have
      corresponding arguments.
    resource_method_params: {str: str}, A mapping of API method parameter
      name to resource ref attribute name when the API method uses
      non-standard names.

  Returns:
    The apitools message to be sent to the method.
  """
  static_fields = static_fields or {}
  resource_method_params = resource_method_params or {}
  message_type = self.method.GetRequestType()
  message = message_type()

  # Insert static fields into message.
  # BUG FIX: dict.iteritems() does not exist on Python 3; use six.iteritems,
  # which is already this file's convention elsewhere.
  for field_path, value in six.iteritems(static_fields):
    field = arg_utils.GetFieldFromMessage(message_type, field_path)
    arg_utils.SetFieldInMessage(message, field_path,
                                arg_utils.ConvertValue(field, value))

  # Parse api Fields into message.
  self._ParseArguments(message, namespace)

  ref = self._ParseResourceArg(namespace)
  if not ref:
    return message

  # This only happens for non-list methods where the API method params don't
  # match the resource parameters (basically only create methods). In this
  # case, we re-parse the resource as its parent collection (to fill in the
  # API parameters, and we insert the name of the resource itself into the
  # correct position in the body of the request method.
  if (self.method.resource_argument_collection.detailed_params !=
      self.method.request_collection.detailed_params):
    # Sets the name of the resource in the message object body.
    arg_utils.SetFieldInMessage(message,
                                self.resource_arg_info[-1].api_field,
                                ref.Name())
    # Create a reference for the parent resource to put in the API params.
    ref = ref.Parent(
        parent_collection=self.method.request_collection.full_name)

  # For each method path field, get the value from the resource reference.
  relative_name = ref.RelativeName()
  for p in self.method.params:
    value = getattr(ref, resource_method_params.get(p, p), relative_name)
    arg_utils.SetFieldInMessage(message, p, value)
  return message
def ParseUpdateTagArgsIntoRequest(self, args, request):
  """Parses tag-templates update args into the request."""
  template_ref = args.CONCEPTS.tag_template.Parse()
  # Point the tag at its template, then attach the fields parsed from the
  # user-provided tag file.
  arg_utils.SetFieldInMessage(request, 'googleCloudDatacatalogV1Tag.template',
                              template_ref.RelativeName())
  parsed_fields = self._ProcessTagFromFile(template_ref, args.tag_file)
  arg_utils.SetFieldInMessage(request, 'googleCloudDatacatalogV1Tag.fields',
                              parsed_fields)
  return request
def _SetRunOptionInRequest(run_option, run_schedule, request, messages):
  """Returns request with the run option set."""
  if run_option == 'manual':
    # Manual runs map to a one-off ad-hoc run message.
    arg_utils.SetFieldInMessage(
        request, 'googleCloudDatacatalogV1alpha3Crawler.config.adHocRun',
        messages.GoogleCloudDatacatalogV1alpha3AdhocRun())
  elif run_option == 'scheduled':
    # Convert the user-facing schedule choice into the API enum value.
    enum_type = (messages.GoogleCloudDatacatalogV1alpha3ScheduledRun
                 .ScheduledRunOptionValueValuesEnum)
    arg_utils.SetFieldInMessage(
        request,
        'googleCloudDatacatalogV1alpha3Crawler.config.scheduledRun.scheduledRunOption',
        arg_utils.ChoiceToEnum(run_schedule, enum_type))
  return request
def ExpectJobResult(self, result_type):
  """Registers the mocked result call for a job of result_type.

  Args:
    result_type: str, one of the keys of self._result_data (e.g. 'QUERY',
      'COPY', 'LOAD', or another job type).

  Returns:
    The mocked result message the registered call will respond with.
  """
  result_svc, result_req_params = self._result_data[result_type]
  result_request_type = result_svc.GetRequestType('Get')
  result_response_type = result_svc.GetResponseType('Get')

  if result_type == 'QUERY':
    # Query jobs fetch results via GetQueryResults rather than Get.
    result_request_type = result_svc.GetRequestType('GetQueryResults')
    result_response_type = result_svc.GetResponseType('GetQueryResults')
    request = result_request_type(**result_req_params)
    request.maxResults = 1000
    job_ref = self.messages.JobReference(**result_req_params)
    result = result_response_type(jobReference=job_ref)
    result_svc.GetQueryResults.Expect(request=request, response=result)
    return result

  if result_type in ('COPY', 'LOAD'):
    table_ref = self.messages.TableReference(**result_req_params)
    result = result_response_type(tableReference=table_ref)
    result_svc.Get.Expect(
        request=result_request_type(**result_req_params), response=result)
  else:
    job_ref = self.messages.JobReference(**result_req_params)
    result = result_response_type(jobReference=job_ref)
    arg_utils.SetFieldInMessage(result, 'configuration.jobType', 'OTHER')
    result.status = self._done
  return result
def MergeGcsFilePatterns(ref, args, request):
  """Merges user-provided GCS file patterns with existing patterns.

  Args:
    ref: The entry resource reference (unused).
    args: The parsed args namespace.
    request: The update entry request.

  Returns:
    Request with merged GCS file pattern.
  """
  if not _IsChangeFilePatternSpecified(args):
    return request
  del ref  # Unused.

  entry_ref = args.CONCEPTS.entry.Parse()
  # Start from the entry's current patterns so add/remove are true merges.
  current = entries.EntriesClient().Get(entry_ref).gcsFilesetSpec.filePatterns
  patterns = current or []
  if args.IsSpecified('clear_file_patterns'):
    patterns = []
  if args.IsSpecified('remove_file_patterns'):
    removals = set(args.remove_file_patterns)
    patterns = [p for p in patterns if p not in removals]
  if args.IsSpecified('add_file_patterns'):
    patterns += args.add_file_patterns

  arg_utils.SetFieldInMessage(
      request,
      'googleCloudDatacatalogV1beta1Entry.gcsFilesetSpec.filePatterns',
      patterns)
  request.updateMask += ',gcsFilesetSpec.filePatterns'
  return request
def ExpectedRequest(self):
  """Builds the GetIamPolicy request the mocked client should expect."""
  # Strip the 'organizations/' prefix to get the bare organization id.
  org_id = self.TEST_ORGANIZATION.name[len('organizations/'):]
  request = self.messages.CloudresourcemanagerOrganizationsGetIamPolicyRequest(
      organizationsId=org_id)
  arg_utils.SetFieldInMessage(
      request, 'getIamPolicyRequest.options.requestedPolicyVersion',
      iam_util.MAX_LIBRARY_IAM_SUPPORTED_VERSION)
  return request
def BuildListQuery(self, parameter_info, aggregations=None,
                   parent_translator=None):
  """Builds a list request to list values for the given argument.

  Args:
    parameter_info: the runtime ResourceParameterInfo object.
    aggregations: a list of _RuntimeParameter objects.
    parent_translator: a ParentTranslator object if needed.

  Returns:
    The apitools request, or None when no list method is configured.
  """
  method = self.method
  if method is None:
    return None
  message = method.GetRequestType()()
  # Seed the request with statically configured parameters first.
  for field, value in six.iteritems(self._static_params):
    arg_utils.SetFieldInMessage(message, field, value)
  parent = self.GetParent(parameter_info, aggregations=aggregations,
                          parent_translator=parent_translator)
  if not parent:
    # Without a parent we cannot fill resource params; return what we have.
    return message
  resource_method_params = (
      parent_translator.ResourceMethodParams(message)
      if parent_translator else {})
  arg_utils.ParseResourceIntoMessage(
      parent, method, message,
      resource_method_params=resource_method_params)
  return message
def UpdateCloudRunWithEventingEnabled(self):
  """Updates operator's cloud run resource spec.eventing.enabled to true.

  Returns:
    The patched CloudRun resource returned by the operator API.
  """
  messages = self._operator_client.MESSAGES_MODULE
  cloud_run_message = messages.CloudRun()
  arg_utils.SetFieldInMessage(cloud_run_message, 'spec.eventing.enabled', True)

  # We need to specify a special content-type for k8s to accept our PATCH.
  # However, this appears to only be settable at the client level, not at
  # the request level. So we'll update the client for our request, and then
  # set it back to the old value afterwards.
  # FIX: removed the dead `old_additional_headers = {}` assignment that was
  # immediately overwritten by the read below.
  old_additional_headers = self._operator_client.additional_http_headers
  additional_headers = old_additional_headers.copy()
  additional_headers['content-type'] = 'application/merge-patch+json'
  try:
    self._operator_client.additional_http_headers = additional_headers
    request = messages.AnthoseventsNamespacesCloudrunsPatchRequest(
        name=_CLOUD_RUN_RELATIVE_NAME, cloudRun=cloud_run_message)
    response = self._operator_client.namespaces_cloudruns.Patch(request)
  finally:
    # Always restore the client's original headers, even if Patch raises.
    self._operator_client.additional_http_headers = old_additional_headers
  return response
def ParseMessageFromDict(data, mapping, message, additional_fields=None):
  """Recursively generates the request message and any sub-messages.

  Args:
    data: {string: string}, A YAML like object containing the message data.
    mapping: {string: ApitoolsToKrmFieldDescriptor}, A mapping from message
      field names to mapping descriptors.
    message: The apitools class for the message.
    additional_fields: {string: object}, Additional fields to set in the
      message that are not mapped from data. Including calculated fields and
      static values.

  Returns:
    The instantiated apitools Message with all fields populated from data.

  Raises:
    InvalidDataError: If mapped fields do not exist in data.
  """
  output_message = _MapDictToApiToolsMessage(data, mapping, message)
  # Overlay any extra (unmapped) fields on top of the mapped message.
  for field_path, value in six.iteritems(additional_fields or {}):
    arg_utils.SetFieldInMessage(output_message, field_path, value)
  return output_message
def ParseBundleSpecsFlagsForUpdate(ref, args, request, crawler):
  """Python hook that parses the bundle spec args into the update request.

  Args:
    ref: The crawler resource reference (unused).
    args: The parsed args namespace.
    request: The update crawler request.
    crawler: CachedResult, The cached crawler result.

  Returns:
    Request with bundling specs set appropriately.
  """
  del ref  # Unused.
  if not _IsChangeBundleSpecsSpecified(args):
    return request

  # Merge user changes into the crawler's existing bundle specs.
  specs = crawler.Get().config.bundleSpecs or []
  if args.IsSpecified('clear_bundle_specs'):
    specs = []
  if args.IsSpecified('remove_bundle_specs'):
    removals = set(args.remove_bundle_specs)
    specs = [s for s in specs if s not in removals]
  if args.IsSpecified('add_bundle_specs'):
    specs += args.add_bundle_specs

  arg_utils.SetFieldInMessage(
      request, 'googleCloudDatacatalogV1alpha3Crawler.config.bundleSpecs',
      specs)
  return request
def Patch(self, device_ref, blocked=None, credentials=None, metadata=None,
          auth_method=None, log_level=None):
  """Updates a Device.

  Any fields not specified will not be updated; at least one field must be
  specified.

  Args:
    device_ref: a Resource reference to a
      cloudiot.projects.locations.registries.devices resource.
    blocked: bool, whether the device to create should have connections
      blocked or not.
    credentials: List of DeviceCredential or None. If given, update the
      credentials for the device.
    metadata: MetadataValue, the metadata message for the device.
    auth_method: GatewayAuthMethodValueValuesEnum, auth method to update on
      a gateway device.
    log_level: LogLevelValueValuesEnum, the default logging verbosity for
      the device.

  Returns:
    Device: the updated device.

  Raises:
    NoFieldsSpecifiedError: if no fields were specified.
  """
  device = self.messages.Device()
  settings = (
      _DeviceUpdateSetting('blocked', 'blocked', blocked),
      _DeviceUpdateSetting('credentials', 'credentials', credentials),
      _DeviceUpdateSetting('metadata', 'metadata', metadata),
      _DeviceUpdateSetting('gatewayConfig.gatewayAuthMethod',
                           'gatewayConfig.gatewayAuthMethod', auth_method),
      _DeviceUpdateSetting('logLevel', 'logLevel', log_level),
  )
  update_mask = []
  for setting in settings:
    # None means "not supplied"; falsy values like False are real updates.
    if setting.value is None:
      continue
    arg_utils.SetFieldInMessage(device, setting.field_name, setting.value)
    update_mask.append(setting.update_mask)
  if not update_mask:
    raise NoFieldsSpecifiedError('Must specify at least one field to update.')

  patch_req_type = (
      self.messages.CloudiotProjectsLocationsRegistriesDevicesPatchRequest)
  patch_req = patch_req_type(
      device=device,
      name=device_ref.RelativeName(),
      updateMask=','.join(update_mask))
  return self._service.Patch(patch_req)
def Parse(arg_value):
  """Inner method that argparse actually calls.

  Converts the raw dict-typed flag value into a list of key/value messages.
  """
  parsed = arg_dict(arg_value)
  result_messages = []
  # Sort by key so the generated message order is deterministic.
  for key, value in sorted(six.iteritems(parsed)):
    instance = message()
    arg_utils.SetFieldInMessage(
        instance, self.key_spec.api_field,
        arg_utils.ConvertValue(
            key_field, key, choices=self.key_spec.ChoiceMap()))
    arg_utils.SetFieldInMessage(
        instance, self.value_spec.api_field,
        arg_utils.ConvertValue(
            value_field, value, choices=self.value_spec.ChoiceMap()))
    result_messages.append(instance)
  return result_messages
def ConstructServiceBindingServiceNameFromArgs(unused_ref, args, request):
  """Builds the Service Directory service name and sets it on the request."""
  # Assemble the fully-qualified Service Directory resource path.
  sd_service_name = '/'.join([
      'projects', properties.VALUES.core.project.Get(),
      'locations', args.service_directory_region,
      'namespaces', args.service_directory_namespace,
      'services', args.service_directory_service,
  ])
  arg_utils.SetFieldInMessage(request, 'serviceBinding.service',
                              sd_service_name)
  return request
def _ParseLabelsIntoUpdateMessage(message, args, api_field):
  """Applies the labels diff from args onto message.

  Returns:
    True if the message's labels field was updated, False otherwise.
  """
  diff = labels_util.Diff.FromUpdateArgs(args)
  existing_labels = _RetrieveFieldValueFromMessage(message, api_field)
  label_cls = _GetLabelsClass(message, api_field)
  update_result = diff.Apply(label_cls, existing_labels)
  if update_result.needs_update:
    arg_utils.SetFieldInMessage(message, api_field, update_result.labels)
    return True
  return False
def ParseCreateTagTemplateArgsIntoRequest(self, args, request):
  """Parses tag-templates create args into the request."""
  # Convert each --field flag into its API representation.
  parsed_fields = [self._ParseField(field) for field in args.field]
  arg_utils.SetFieldInMessage(
      request, 'googleCloudDatacatalogV1TagTemplate.fields',
      self.messages.GoogleCloudDatacatalogV1TagTemplate.FieldsValue(
          additionalProperties=parsed_fields,
      ))
  return request
def ParsePhysicalSchema(ref, args, request):
  """Parses physical schema from file after obtaining information about its type.

  Args:
    ref: The entry resource reference (unused).
    args: The parsed args namespace.
    request: The update entry request.

  Returns:
    Request with the physical schema set.

  Raises:
    InvalidPhysicalSchemaError: if physical schema type is unknown
  """
  if not args.IsSpecified('physical_schema_type'):
    return request
  del ref  # Unused.

  client = entries_v1.EntriesClient()
  messages = client.messages

  if args.IsSpecified('physical_schema_file'):
    schema_abs_path = path.expanduser(args.physical_schema_file)
    schema_text = files.ReadFileContents(schema_abs_path)
  else:
    schema_text = ''

  schema_type = args.physical_schema_type
  if schema_type == 'avro':
    schema = messages.GoogleCloudDatacatalogV1PhysicalSchemaAvroSchema()
    schema.text = schema_text
  elif schema_type == 'thrift':
    schema = messages.GoogleCloudDatacatalogV1PhysicalSchemaThriftSchema()
    schema.text = schema_text
  elif schema_type == 'protobuf':
    schema = messages.GoogleCloudDatacatalogV1PhysicalSchemaProtobufSchema()
    schema.text = schema_text
  elif schema_type == 'parquet':
    # Parquet and ORC take no schema text in this API surface.
    schema = messages.GoogleCloudDatacatalogV1PhysicalSchemaParquetSchema()
  elif schema_type == 'orc':
    schema = messages.GoogleCloudDatacatalogV1PhysicalSchemaOrcSchema()
  else:
    # BUG FIX: the old implicitly-concatenated literals rendered as
    # "...protobuf,parquet, orc" (missing space after the comma).
    raise InvalidPhysicalSchemaError(
        'Unknown physical schema type. Must be one of: avro, thrift, '
        'protobuf, parquet, orc')

  arg_utils.SetFieldInMessage(
      request,
      'googleCloudDatacatalogV1Entry.schema.physicalSchema.' + schema_type,
      schema)
  return request
def _ParseLabelsIntoUpdateMessage(message, args, api_field):
  """Find diff between existing labels and args, set labels into the message."""
  diff = labels_util.Diff.FromUpdateArgs(args)
  if not diff.MayHaveUpdates():
    # No 'labels' arguments were specified; leave the message untouched.
    return False
  existing = _RetrieveFieldValueFromMessage(message, api_field)
  update_result = diff.Apply(_GetLabelsClass(message, api_field), existing)
  if update_result.needs_update:
    arg_utils.SetFieldInMessage(message, api_field, update_result.labels)
  # Label args were given, so report True even if they were a no-op diff.
  return True
def ParseUpdateTagTemplateFieldArgsIntoRequest(self, args, request):
  """Parses tag-templates fields update args into the request."""
  if args.IsSpecified('add_enum_values'):
    enum_values = [self._MakeEnumValue(v) for v in args.add_enum_values]
  else:
    enum_values = []
  field_type = self.messages.GoogleCloudDatacatalogV1FieldType(
      enumType=self.messages.GoogleCloudDatacatalogV1FieldTypeEnumType(
          allowedValues=enum_values,
      ))
  arg_utils.SetFieldInMessage(
      request, 'googleCloudDatacatalogV1TagTemplateField.type', field_type)
  return request
def Run(self, args):
  """Inserts the table copy job and reports or waits for its completion.

  Args:
    args: The parsed args namespace.

  Returns:
    The inserted job (when returning early) or the completed job result.
  """
  job_id = hooks.JobIdProcessor(args.job_id)
  requests_type = hooks.GetApiMessage('BigqueryJobsInsertRequest')
  request = requests_type()
  project = args.project or properties.VALUES.core.project.Get(required=True)
  request.projectId = project
  request = hooks.ProcessTableCopyConfiguration(None, args, request)
  request = hooks.ProcessTableCopyOverwrite(None, args, request)
  arg_utils.SetFieldInMessage(request, 'job.jobReference.jobId', job_id)
  arg_utils.SetFieldInMessage(request, 'job.jobReference.projectId', project)

  client = hooks.GetApiClient()
  job_service = client.jobs
  job = client.jobs.Insert(request)

  source_ref = args.CONCEPTS.source.Parse()
  destination_ref = args.CONCEPTS.destination.Parse()
  copy_message = 'Copying {0}:{1} to {2}:{3}.'.format(
      source_ref.Parent().Name(), source_ref.Name(),
      destination_ref.Parent().Name(), destination_ref.Name())

  # BUG FIX: `args.async` is a SyntaxError on Python 3 ('async' became a
  # reserved keyword in 3.7); read the flag via getattr instead.
  # NOTE(review): returning the un-awaited job when --async is NOT set looks
  # inverted (commands usually wait unless --async) — confirm intended
  # semantics with the flag definition.
  if not getattr(args, 'async'):
    log.CreatedResource(job.id, kind='Job', details=copy_message)
    return job

  result_service = client.tables
  poller = command_utils.BqJobPoller(job_service, result_service)
  job_ref = resources.REGISTRY.Parse(
      job.jobReference.jobId,
      params={'projectId': project},
      collection='bigquery.jobs')
  result = waiter.WaitFor(
      poller=poller, operation_ref=job_ref, message=copy_message)
  log.status.Print('Copied {0}:{1} to {2}:{3}.'.format(
      source_ref.Parent().Name(), source_ref.Name(),
      destination_ref.Parent().Name(), destination_ref.Name()))
  return result
def testUpdateCloudRunWithEventingEnabled(self):
  """Verifies the merge-PATCH request sent to enable eventing."""
  cloud_run_message = self.operator_messages.CloudRun()
  arg_utils.SetFieldInMessage(cloud_run_message, 'spec.eventing.enabled', True)
  expected_update_request = (
      self.operator_messages.AnthoseventsNamespacesCloudrunsPatchRequest(
          name=anthosevents_operations._CLOUD_RUN_RELATIVE_NAME,
          cloudRun=cloud_run_message))
  self.mock_operator_client.namespaces_cloudruns.Patch.Expect(
      expected_update_request, cloud_run_message)
  # The client under test saves and restores these headers around the call.
  self.mock_operator_client.additional_http_headers = {}
  self.anthosevents_client.UpdateCloudRunWithEventingEnabled()
def _SetScopeInRequest(crawl_scope, buckets, request):
  """Returns request with the crawl scope set."""
  messages = crawlers.CrawlersClient().messages
  config_prefix = 'googleCloudDatacatalogV1alpha3Crawler.config.'
  if crawl_scope == 'bucket' and buckets is not None:
    arg_utils.SetFieldInMessage(
        request, config_prefix + 'bucketScope.buckets', buckets)
  elif crawl_scope == 'project':
    arg_utils.SetFieldInMessage(
        request, config_prefix + 'projectScope',
        messages.GoogleCloudDatacatalogV1alpha3ParentProjectScope())
  elif crawl_scope == 'organization':
    arg_utils.SetFieldInMessage(
        request, config_prefix + 'organizationScope',
        messages.GoogleCloudDatacatalogV1alpha3ParentOrganizationScope())
  return request
def Process(ref, args, request):
  """The implementation of Process for the hook.

  Ensures 'version' is always included in the request's update mask.

  Args:
    ref: The resource reference (unused).
    args: The parsed args namespace (unused).
    request: The update request whose mask is adjusted.

  Returns:
    The request with 'version' guaranteed to be in the update mask.
  """
  del ref, args  # Unused.
  update_mask = arg_utils.GetFieldValueFromMessage(request, update_mask_path)
  # BUG FIX: the old code evaluated `'version' not in update_mask` before its
  # None check, raising TypeError when the mask was unset, and produced a
  # leading comma (',version') when the mask was an empty string.
  if not update_mask:
    update_mask = 'version'
  elif 'version' not in update_mask:
    update_mask += ',version'
  arg_utils.SetFieldInMessage(request, update_mask_path, update_mask)
  return request
def Parse(arg_value):
  """Inner method that argparse actually calls.

  Builds a single message instance from the parsed dict-typed flag value.
  """
  parsed = arg_dict(arg_value)
  instance = message()
  for spec in self.fields:
    field = arg_utils.GetFieldFromMessage(message, spec.api_field)
    converted = arg_utils.ConvertValue(
        field,
        parsed.get(spec.arg_name),
        choices=Choice.ToChoiceMap(spec.choices))
    arg_utils.SetFieldInMessage(instance, spec.api_field, converted)
  return instance
def _SetScopeInRequest(crawl_scope, buckets, request, messages):
  """Returns request with the crawl scope set.

  Raises:
    InvalidCrawlScopeError: if a bucket scope is requested with no buckets.
  """
  config_prefix = 'googleCloudDatacatalogV1alpha3Crawler.config.'
  if crawl_scope == 'bucket':
    if not buckets:
      raise InvalidCrawlScopeError(
          'At least one bucket must be included in the crawl scope of a '
          'bucket-scoped crawler.')
    arg_utils.SetFieldInMessage(
        request, config_prefix + 'bucketScope.buckets', buckets)
  elif crawl_scope == 'project':
    arg_utils.SetFieldInMessage(
        request, config_prefix + 'projectScope',
        messages.GoogleCloudDatacatalogV1alpha3ParentProjectScope())
  elif crawl_scope == 'organization':
    arg_utils.SetFieldInMessage(
        request, config_prefix + 'organizationScope',
        messages.GoogleCloudDatacatalogV1alpha3ParentOrganizationScope())
  return request
def testListSourceCustomResourceDefinitions(self):
  """Test the list source CRDs api call."""
  # Expect a List call filtered (via label selector) to source CRDs only.
  expected_request = (
      self.crd_messages.AnthoseventsCustomresourcedefinitionsListRequest(
          parent=self._NamespaceRef(project='fake-project').RelativeName(),
          labelSelector='duck.knative.dev/source=true'))
  crds = [
      self.crd_messages.CustomResourceDefinition(apiVersion='1')
      for _ in range(5)
  ]
  # Give every CRD an unrecognized kind and a single 'v1' version.
  for crd in crds:
    arg_utils.SetFieldInMessage(crd, 'spec.names.kind', 'UnknownSourceKind')
    custom_resource_definition_versions = [
        self.crd_messages.CustomResourceDefinitionVersion(
            name='v1',
            schema=self.crd_messages.CustomResourceValidation(
                openAPIV3Schema=self._SourceSchemaProperties(None, None)))
    ]
    crd.spec.versions = custom_resource_definition_versions
  # Only the first CRD gets a recognized source kind; the client is expected
  # to filter out the rest.
  arg_utils.SetFieldInMessage(crds[0], 'spec.names.kind', 'CloudPubSubSource')
  expected_response = self.crd_messages.ListCustomResourceDefinitionsResponse(
      items=crds)
  self.mock_crd_client.customresourcedefinitions.List.Expect(
      expected_request, expected_response)
  source_crds = self.anthosevents_client.ListSourceCustomResourceDefinitions()
  self.assertEqual(1, len(source_crds))
  self.assertEqual(source_crds[0].source_kind, 'CloudPubSubSource')
  self.assertEqual(source_crds[0].source_version, 'v1')
def testListSourceCustomResourceDefinitions(self):
  """Test the list source CRDs api call."""
  expected_request = (
      self.crd_messages.RunCustomresourcedefinitionsListRequest(
          parent=self._NamespaceRef(project='fake-project').RelativeName(),
          labelSelector='duck.knative.dev/source=true'))
  # Five CRDs, all initially of an unrecognized source kind.
  crds = [
      self.crd_messages.CustomResourceDefinition(apiVersion='1')
      for _ in range(5)
  ]
  for crd in crds:
    arg_utils.SetFieldInMessage(crd, 'spec.names.kind', 'UnknownSourceKind')
  # Exactly one CRD has a recognized source kind; only it should survive.
  arg_utils.SetFieldInMessage(crds[0], 'spec.names.kind', 'CloudPubSubSource')
  expected_response = self.crd_messages.ListCustomResourceDefinitionsResponse(
      items=crds)
  self.mock_crd_client.customresourcedefinitions.List.Expect(
      expected_request, expected_response)
  source_crds = self.eventflow_client.ListSourceCustomResourceDefinitions()
  self.assertEqual(1, len(source_crds))
  self.assertEqual(source_crds[0].source_kind, 'CloudPubSubSource')
def UpdateNamespaceWithLabels(self, namespace_ref, labels):
  """Updates an existing namespace with the labels provided.

  If a label already exists, this will replace that label with the value
  provided. This is akin to specifying --overwrite with kubectl.

  Args:
    namespace_ref: googlecloudsdk.core.resources.Resource, namespace
      resource. Note that this should be of the collection
      "run.api.v1.namespaces" and *not* "run.namespaces".
    labels: map[str, str] of label keys and values to patch.

  Returns:
    Namespace that was patched.
  """
  messages = self._core_client.MESSAGES_MODULE
  namespace = messages.Namespace()
  arg_utils.SetFieldInMessage(namespace, _METADATA_LABELS_FIELD, labels)
  # Default so the finally-branch restore below is safe even if reading the
  # client's current headers fails.
  old_additional_headers = {}
  try:
    # We need to specify a special content-type for k8s to accept our PATCH.
    # However, this appears to only be settable at the client level, not at
    # the request level. So we'll update the client for our request, and the
    # set it back to the old value afterwards.
    old_additional_headers = self._core_client.additional_http_headers
    additional_headers = old_additional_headers.copy()
    additional_headers['content-type'] = 'application/merge-patch+json'
    self._core_client.additional_http_headers = additional_headers
  except AttributeError:
    # TODO(b/150229881): Remove this try/except block and below.
    # The mocked test client does not have an additional_http_headers attr
    # So we won't be able to test this part.
    pass
  with metrics.RecordDuration(metric_names.UPDATE_NAMESPACE):
    try:
      request = messages.RunApiV1NamespacesPatchRequest(
          name=namespace_ref.RelativeName(),
          namespace=namespace,
          updateMask=_METADATA_LABELS_FIELD)
      response = self._core_client.api_v1_namespaces.Patch(request)
    finally:
      # Restore the client's original headers whether or not Patch succeeded.
      try:
        self._core_client.additional_http_headers = old_additional_headers
      except AttributeError:
        # The mocked test client does not have an additional_http_headers attr
        pass
  return response
def _ParseArguments(self, message, namespace):
  """Parse all the arguments from the namespace into the message object.

  Args:
    message: A constructed apitools message object to inject the value into.
    namespace: The argparse namespace.
  """
  request_type = self.method.GetRequestType()
  for attributes in self.arg_info:
    value = arg_utils.GetFromNamespace(namespace, attributes.arg_name)
    if value is None:
      # Flags the user did not provide are left unset on the message.
      continue
    field = arg_utils.GetFieldFromMessage(request_type, attributes.api_field)
    converted = arg_utils.ConvertValue(field, value, attributes)
    arg_utils.SetFieldInMessage(message, attributes.api_field, converted)