def _PossiblyPromote(self, all_services, new_version):
    """Promotes the new version to default (if specified by the user).

    Args:
      all_services: dict of service ID to service_util.Service objects
        corresponding to all pre-existing services (used to determine how to
        promote this version to receive all traffic, if applicable).
      new_version: version_util.Version describing where to deploy the service.

    Raises:
      VersionPromotionError: if the version could not be successfully promoted.
    """
    if self.deploy_options.promote:
        try:
            version_util.PromoteVersion(
                all_services, new_version, self.api_client,
                self.deploy_options.stop_previous_version)
        except apitools_exceptions.HttpError as err:
            err_str = str(core_api_exceptions.HttpException(err))
            raise VersionPromotionError(err_str)
    elif self.deploy_options.stop_previous_version:
        log.info('Not stopping previous version because new version was '
                 'not promoted.')
def Cancel(job_id, project_id=None, region_id=None):
    """Cancels a job by calling the Jobs.Update method.

    Args:
      job_id: Identifies a single job.
      project_id: The project which owns the job.
      region_id: The regional endpoint where the job lives.

    Returns:
      (Job)
    """
    project_id = project_id or GetProject()
    region_id = region_id or DATAFLOW_API_DEFAULT_REGION
    job = GetMessagesModule().Job(
        requestedState=(GetMessagesModule().Job.RequestedStateValueValuesEnum
                        .JOB_STATE_CANCELLED))
    request = GetMessagesModule().DataflowProjectsLocationsJobsUpdateRequest(
        jobId=job_id, location=region_id, projectId=project_id, job=job)
    try:
        return Jobs.GetService().Update(request)
    except apitools_exceptions.HttpError as error:
        raise exceptions.HttpException(error)
def List(job_id=None, project_id=None, region_id=None):
    """Calls the Dataflow Snapshots.List method.

    Args:
      job_id: If specified, only snapshots associated with the job will be
        returned.
      project_id: The project that owns the snapshot.
      region_id: The regional endpoint of the snapshot.

    Returns:
      (ListSnapshotsResponse)
    """
    project_id = project_id or GetProject()
    # TODO(b/139889563): Remove default when args region is changed to required
    region_id = region_id or DATAFLOW_API_DEFAULT_REGION
    request = GetMessagesModule().DataflowProjectsLocationsSnapshotsListRequest(
        jobId=job_id, location=region_id, projectId=project_id)
    try:
        return Snapshots.GetService().List(request)
    except apitools_exceptions.HttpError as error:
        raise exceptions.HttpException(error)
def Get(job_id, project_id=None, region_id=None, start_time=None):
    """Calls the Dataflow Metrics.Get method.

    Args:
      job_id: The job to get messages for.
      project_id: The project which owns the job.
      region_id: The regional endpoint of the job.
      start_time: Return only metric data that has changed since this time.
        Default is to return all information about all metrics for the job.

    Returns:
      (MetricUpdate)
    """
    project_id = project_id or GetProject()
    region_id = region_id or DATAFLOW_API_DEFAULT_REGION
    request = GetMessagesModule().DataflowProjectsLocationsJobsGetMetricsRequest(
        jobId=job_id,
        location=region_id,
        projectId=project_id,
        startTime=start_time)
    try:
        return Metrics.GetService().GetMetrics(request)
    except apitools_exceptions.HttpError as error:
        raise exceptions.HttpException(error)
def GetApiEnablementInfo(exception):
    """Returns the API Enablement info or None if prompting is not necessary.

    Args:
      exception (apitools_exceptions.HttpError): Exception if an error
        occurred.

    Returns:
      tuple[str]: The project, service token, exception tuple to be used for
        prompting to enable the API.

    Raises:
      api_exceptions.HttpException: If gcloud should not prompt to enable the
        API.
    """
    parsed_error = api_exceptions.HttpException(exception)
    if (parsed_error.payload.status_code !=
            API_ENABLEMENT_ERROR_EXPECTED_STATUS_CODE):
        return None

    enablement_info = api_enablement.GetApiEnablementInfo(
        parsed_error.payload.status_message)
    if enablement_info:
        return enablement_info + (parsed_error,)
    return None
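# Usage sketch (not from the original source): how a caller might consume the
# tuple returned by GetApiEnablementInfo above. The handler name is
# hypothetical; only the (project, service, parsed_error) shape is taken from
# the docstring and the `enablement_info + (parsed_error,)` return value.
def _MaybeReportDisabledApi(exception):
    enablement_info = GetApiEnablementInfo(exception)
    if enablement_info is None:
        return  # Not an API-enablement error; the caller handles it as usual.
    project, service, parsed_error = enablement_info
    # A real handler would prompt to enable the API and retry; this sketch
    # only logs what was detected.
    print('API [%s] is not enabled on project [%s]: %s'
          % (service, project, parsed_error))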
def List(page_size, page_token, filter_rule):
    """Calls the Procurement Consumer FreeTrials.List method.

    Args:
      page_size: Max size of records to be retrieved in page.
      page_token: Token to specify page in list.
      filter_rule: The filter that can be used to limit the result.

    Returns:
      List of Free Trials and next page token if applicable.
    """
    project_name = 'projects/%s' % properties.VALUES.core.project.GetOrFail()
    request = GetMessagesModule(
    ).CloudcommerceconsumerprocurementProjectsFreeTrialsListRequest(
        parent=project_name,
        pageSize=page_size,
        pageToken=page_token,
        filter=filter_rule)
    try:
        return FreeTrials.GetService().List(request)
    except apitools_exceptions.HttpError as error:
        raise exceptions.HttpException(error)
def Drain(job_id, project_id=None, region_id=None):
    """Drains a job by calling the Jobs.Update method.

    Args:
      job_id: Identifies a single job.
      project_id: The project which owns the job.
      region_id: The regional endpoint where the job lives.

    Returns:
      (Job)
    """
    project_id = project_id or GetProject()
    # TODO(b/139889563): Remove default when args region is changed to required
    region_id = region_id or DATAFLOW_API_DEFAULT_REGION
    job = GetMessagesModule().Job(
        requestedState=(GetMessagesModule().Job.RequestedStateValueValuesEnum
                        .JOB_STATE_DRAINED))
    request = GetMessagesModule().DataflowProjectsLocationsJobsUpdateRequest(
        jobId=job_id, location=region_id, projectId=project_id, job=job)
    try:
        return Jobs.GetService().Update(request)
    except apitools_exceptions.HttpError as error:
        raise exceptions.HttpException(error)
def BatchRequests(self, requests, errors_to_collect=None):
    """Issues batch request for given set of requests.

    Args:
      requests: list(tuple(service, method, payload)), where service is
        apitools.base.py.base_api.BaseApiService, method is str, method name,
        e.g. 'Get', 'CreateInstance', payload is a subclass of
        apitools.base.protorpclite.messages.Message.
      errors_to_collect: list, output only, can be None, contains instances of
        api_exceptions.HttpException for each request with exception.

    Returns:
      list of responses, matching list of requests. Some responses can be
        errors.
    """
    batch_request = batch.BatchApiRequest(batch_url=self._batch_url)
    for service, method, request in requests:
        batch_request.Add(service, method, request)

    payloads = batch_request.Execute(
        self._client.http, max_batch_size=_BATCH_SIZE_LIMIT)

    responses = []
    errors = errors_to_collect if errors_to_collect is not None else []

    for payload in payloads:
        if payload.is_error:
            if isinstance(payload.exception, apitools_exceptions.HttpError):
                errors.append(api_exceptions.HttpException(payload.exception))
            else:
                errors.append(Error(payload.exception.message))
        responses.append(payload.response)

    return responses
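# Usage sketch (assumption, not from the original source): issuing a small
# batch through BatchRequests above and separating successes from failures.
# `adapter`, `service`, and `request_messages` are hypothetical stand-ins for
# a real client adapter, an apitools service, and request payloads; per the
# docstring, failed slots in the response list may be empty.
def _GetManyWithBatch(adapter, service, request_messages):
    requests = [(service, 'Get', message) for message in request_messages]
    errors = []
    responses = adapter.BatchRequests(requests, errors_to_collect=errors)
    successes = [response for response in responses if response is not None]
    return successes, errors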
def Run(self, args): """This is what gets called when the user runs this command. Args: args: an argparse namespace. All the arguments that were provided to this command invocation. """ sink_ref = self.context['sink_reference'] if args.log: sink_description = 'log sink [%s] from [%s]' % ( sink_ref.sinksId, sink_ref.logsId) elif args.service: sink_description = 'log-service sink [%s] from [%s]' % ( sink_ref.sinksId, sink_ref.logServicesId) else: sink_description = 'project sink [%s]' % sink_ref.sinksId if not console_io.PromptContinue('Really delete %s?' % sink_description): raise calliope_exceptions.ToolException('action canceled by user') try: if args.log: self.DeleteLogSink() elif args.service: self.DeleteLogServiceSink() else: self.DeleteProjectSink() log.DeletedResource(sink_ref) except apitools_exceptions.HttpError as error: project_sink = not args.log and not args.service # Suggest the user to add --log or --log-service flag. if project_sink and exceptions.HttpException( error).payload.status_code == 404: log.status.Print(('Project sink was not found. ' 'Did you forget to add --log or --log-service flag?')) raise error
def MakeGetAssetsHistoryHttpRequests(args, api_version=DEFAULT_API_VERSION):
    """Manually make the get assets history request."""
    http_client = http.Http()
    query_params = [('assetNames', asset_name)
                    for asset_name in args.asset_names or []]
    query_params.extend([
        ('contentType', ContentTypeTranslation(args.content_type)),
        ('readTimeWindow.startTime', times.FormatDateTime(args.start_time)),
    ])
    if args.IsSpecified('end_time'):
        query_params.extend([
            ('readTimeWindow.endTime', times.FormatDateTime(args.end_time))
        ])
    parent = asset_utils.GetParentName(args.organization, args.project)
    url_base = '{0}/{1}/{2}:{3}'.format(BASE_URL, api_version, parent,
                                        'batchGetAssetsHistory')
    url_query = six.moves.urllib.parse.urlencode(query_params)
    url = '?'.join([url_base, url_query])
    response, raw_content = http_client.request(uri=url, headers=_HEADERS)

    content = core_encoding.Decode(raw_content)

    if response['status'] != '200':
        http_error = api_exceptions.HttpError(response, content, url)
        raise exceptions.HttpException(http_error)

    response_message_class = GetMessages(
        api_version).BatchGetAssetsHistoryResponse
    try:
        history_response = encoding.JsonToMessage(response_message_class,
                                                  content)
    except ValueError as e:
        err_msg = ('Failed receiving proper response from server, cannot '
                   'parse received assets. Error details: ' + str(e))
        raise MessageDecodeError(err_msg)

    for asset in history_response.assets:
        yield asset
def FetchResourcesAndOutputs(client, messages, project, deployment_name):
    """Returns a ResourcesAndOutputs object for a deployment."""
    try:
        # Fetch a list of the previewed or updated resources.
        response = client.resources.List(
            messages.DeploymentmanagerResourcesListRequest(
                project=project,
                deployment=deployment_name,
            ))
        if response.resources:
            resources = LimitResourcesToDisplay(response.resources)
        else:
            resources = []

        deployment_response = client.deployments.Get(
            messages.DeploymentmanagerDeploymentsGetRequest(
                project=project,
                deployment=deployment_name,
            ))

        outputs = []
        manifest = ExtractManifestName(deployment_response)

        if manifest:
            manifest_response = client.manifests.Get(
                messages.DeploymentmanagerManifestsGetRequest(
                    project=project,
                    deployment=deployment_name,
                    manifest=manifest,
                ))
            if manifest_response.layout:
                outputs = FlattenLayoutOutputs(manifest_response.layout)

        # TODO(b/36049939): Pagination b/28298504
        return ResourcesAndOutputs(resources, outputs)
    except apitools_exceptions.HttpError as error:
        raise api_exceptions.HttpException(error, HTTP_ERROR_FORMAT)
def IsServiceEnabled(project_id, service_name):
    """Return true if the service is enabled.

    Args:
      project_id: The ID of the project we want to query.
      service_name: The name of the service.

    Raises:
      services_util.ListServicesPermissionDeniedException: if a 403 or 404
        error is returned by the List request.
      api_lib_exceptions.HttpException: Another miscellaneous error with the
        listing service.

    Returns:
      True if the service is enabled, False otherwise.
    """
    client = services_util.GetClientInstance()

    # Get the list of enabled services.
    request = services_util.GetEnabledListRequest(project_id)
    try:
        for service in list_pager.YieldFromList(
                client.services,
                request,
                batch_size_attribute='pageSize',
                field='services'):
            # If the service is present in the list of enabled services,
            # return True; otherwise fall through and return False below.
            if service.serviceName.lower() == service_name.lower():
                return True
    except exceptions.HttpError as e:
        if e.status_code in [403, 404]:
            msg = json.loads(e.content).get('error', {}).get('message', '')
            raise services_util.ListServicesPermissionDeniedException(msg)
        raise api_lib_exceptions.HttpException(e)
    return False
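# Usage sketch (assumption, not part of the original source): guarding a call
# on service enablement using IsServiceEnabled above. The helper name and the
# project/service values are illustrative only.
def _RequireServiceEnabled(project_id, service_name):
    if not IsServiceEnabled(project_id, service_name):
        raise RuntimeError('Service [%s] is not enabled on project [%s].'
                           % (service_name, project_id))

# Example: _RequireServiceEnabled('my-project', 'compute.googleapis.com')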
def Create(environment_ref, flags, is_composer_v1):
    """Calls the Composer Environments.Create method.

    Args:
      environment_ref: Resource, the Composer environment resource to create.
      flags: CreateEnvironmentFlags, the flags provided for environment
        creation.
      is_composer_v1: boolean representing if creation request is for
        Composer 1.*.* image versions.

    Returns:
      Operation: the operation corresponding to the creation of the
        environment.
    """
    messages = api_util.GetMessagesModule(release_track=flags.release_track)
    # Builds the environment message and attaches the configuration.
    environment = messages.Environment(name=environment_ref.RelativeName())
    environment.config = _CreateConfig(messages, flags, is_composer_v1)

    if flags.labels:
        environment.labels = api_util.DictToMessage(
            flags.labels, messages.Environment.LabelsValue)
    try:
        return GetService(release_track=flags.release_track).Create(
            api_util.GetMessagesModule(release_track=flags.release_track)
            .ComposerProjectsLocationsEnvironmentsCreateRequest(
                environment=environment,
                parent=environment_ref.Parent().RelativeName()))
    except apitools_exceptions.HttpForbiddenError as e:
        raise exceptions.HttpException(
            e,
            error_format=(
                'Creation operation failed because of lack of proper '
                'permissions. Please refer to '
                'https://cloud.google.com/composer/docs/how-to/managing/creating '
                'and the Composer Creation Troubleshooting pages to resolve '
                'this issue.'))
def Create(provider_id, product_external_name):
    """Calls the Procurement Consumer FreeTrials.Create method.

    Args:
      provider_id: Id of the provider.
      product_external_name: Name of the product.

    Returns:
      (Operation)
    """
    project_name = 'projects/%s' % properties.VALUES.core.project.GetOrFail()
    provider_name = 'providers/%s' % provider_id
    free_trial = GetMessagesModule(
    ).GoogleCloudCommerceConsumerProcurementV1alpha1FreeTrial(
        provider=provider_name, productExternalName=product_external_name)
    request = GetMessagesModule(
    ).CloudcommerceconsumerprocurementProjectsFreeTrialsCreateRequest(
        parent=project_name,
        googleCloudCommerceConsumerProcurementV1alpha1FreeTrial=free_trial)
    try:
        return FreeTrials.GetService().Create(request)
    except apitools_exceptions.HttpError as error:
        raise exceptions.HttpException(error)
def _Run(args, legacy_output=False):
    """Deletes one or more topics."""
    client = topics.TopicsClient()

    failed = []
    for topic_ref in args.CONCEPTS.topic.Parse():
        try:
            result = client.Delete(topic_ref)
        except api_ex.HttpError as error:
            exc = exceptions.HttpException(error)
            log.DeletedResource(topic_ref.RelativeName(), kind='topic',
                                failed=exc.payload.status_message)
            failed.append(topic_ref.topicsId)
            continue

        topic = client.messages.Topic(name=topic_ref.RelativeName())
        if legacy_output:
            result = util.TopicDisplayDict(topic)
        log.DeletedResource(topic_ref.RelativeName(), kind='topic')
        yield result

    if failed:
        raise util.RequestsFailedError(failed, 'delete')
def Snapshot(job_id, project_id=None, region_id=None, ttl='604800s'):
    """Takes a snapshot of a job via the Jobs.Snapshot method.

    Args:
      job_id: Identifies a single job.
      project_id: The project which owns the job.
      region_id: The regional endpoint where the job lives.
      ttl: The ttl for the snapshot.

    Returns:
      (Snapshot)
    """
    project_id = project_id or GetProject()
    region_id = region_id or DATAFLOW_API_DEFAULT_REGION
    request = GetMessagesModule().DataflowProjectsLocationsJobsSnapshotRequest(
        jobId=job_id,
        location=region_id,
        projectId=project_id,
        snapshotJobRequest=GetMessagesModule().SnapshotJobRequest(
            location=region_id, ttl=ttl))
    try:
        return Jobs.GetService().Snapshot(request)
    except apitools_exceptions.HttpError as error:
        raise exceptions.HttpException(error)
def testHttpExceptionErrorFormatV2ContentVsPayload(self):
    err = http_error.MakeDetailedHttpError(
        400,
        url='https://mock.googleapis.com/v1/projects/your-stuff/junk/mine',
        content={
            'details': [
                {
                    '@type': 'type.googleapis.com/google.rpc.Quote',
                    'detail': "We're in a tight spot.",
                },
                {
                    '@type': 'type.googleapis.com/google.rpc.Quip',
                    'detail': "You're gonna need a bigger boat.",
                },
            ],
        },
        details=http_error.ExampleErrorDetails(),
    )
    exc = exceptions.HttpException(
        err,
        'Error [{status_code}] {status_message}'
        '{error.details.detail?\nerror.details.detail:\n{?}}'
        '{.details.detail?\n.details.detail:\n{?}}'
        '{details.detail?\ndetails.detail:\n{?}}')
    self.assertEqual(
        """\
Error [400] Invalid request.
error.details.detail:
- '[ORIGINAL ERROR] error_type::error: Error details.\\n\
And then more details.'
.details.detail:
- We're in a tight spot.
- You're gonna need a bigger boat.
details.detail:
- '[ORIGINAL ERROR] error_type::error: Error details.\\n\
And then more details.'""", exc.message)
def TagDataAsset(data_asset, annotation, load_from, remove):
    """Tag a data asset with annotation or remove existing annotation.

    Args:
      data_asset: list of data asset resource names.
      annotation: full annotation <taxonomy::annotation>.
      load_from: path to a file with (dataasset,annotation) pairs.
      remove: if true, remove existing annotation instead.

    Raises:
      exceptions.HttpException: on unknown errors.

    Returns:
      It always returns 0 if no exceptions are raised.
    """
    if load_from:
        # TODO(b/32858676): Implement load from file.
        raise NotImplementedError()
    if remove:
        # TODO(b/32858676): Implement annotation removal.
        raise NotImplementedError()

    try:
        for data_asset_path in data_asset:
            tagging.Apply(data_asset_path, annotation)
    except apitools_exceptions.HttpError as e:
        exc = exceptions.HttpException(e)
        if exc.payload.status_code == 404:
            # Status-code-specific error message.
            exc.error_format = '{api_name}: {resource_name} not found.'
        else:
            # Override the default error message.
            exc.error_format = 'Unknown error. Status code {status_code}.'
        raise exc

    return 0
def ReraiseError(err, klass):
    """Transform and re-raise error helper."""
    core_exceptions.reraise(klass(api_lib_exceptions.HttpException(err)))
def testHttpException404Message(self):
    err = http_error.MakeHttpError(404)
    exc = exceptions.HttpException(err)
    self.assertEqual(
        'Resource not found API reason: Resource not found.',
        exc.payload.message)
def CopyFileToGCS(self, local_path, target_obj_ref):
    """Upload a file to the GCS results bucket using the storage API.

    Args:
      local_path: str, the path of the file to upload. File must be on the
        local filesystem.
      target_obj_ref: storage_util.ObjectReference, the path of the file on
        GCS.

    Returns:
      Object, the storage object that was copied to.

    Raises:
      BucketNotFoundError if the user-specified bucket does not exist.
      UploadError if the file upload is not successful.
      exceptions.BadFileException if the uploaded file size does not match the
        size of the local file.
    """
    file_size = _GetFileSize(local_path)
    src_obj = self.messages.Object(size=file_size)
    mime_type = _GetMimetype(local_path)

    chunksize = self._GetChunkSize()
    upload = transfer.Upload.FromFile(
        local_path, mime_type=mime_type, chunksize=chunksize)
    insert_req = self.messages.StorageObjectsInsertRequest(
        bucket=target_obj_ref.bucket,
        name=target_obj_ref.object,
        object=src_obj)

    gcs_path = '{bucket}/{target_path}'.format(
        bucket=target_obj_ref.bucket, target_path=target_obj_ref.object)
    log.info('Uploading [{local_file}] to [{gcs}]'.format(
        local_file=local_path, gcs=gcs_path))
    try:
        response = self.client.objects.Insert(insert_req, upload=upload)
    except api_exceptions.HttpNotFoundError:
        raise BucketNotFoundError(
            'Could not upload file: [{bucket}] bucket does not exist.'.format(
                bucket=target_obj_ref.bucket))
    except api_exceptions.HttpError as err:
        log.debug('Could not upload file [{local_file}] to [{gcs}]: {e}'.format(
            local_file=local_path, gcs=gcs_path, e=http_exc.HttpException(err)))
        raise UploadError(
            '{code} Could not upload file [{local_file}] to [{gcs}]: {message}'
            .format(code=err.status_code,
                    local_file=local_path,
                    gcs=gcs_path,
                    message=http_exc.HttpException(
                        err, error_format='{status_message}')))

    if response.size != file_size:
        log.debug('Response size: {0} bytes, but local file is {1} bytes.'.format(
            response.size, file_size))
        raise exceptions.BadFileException(
            'Cloud storage upload failure. Uploaded file does not match local '
            'file: {0}. Please retry.'.format(local_path))

    return response
def CreateJobFromFlexTemplate(template_args=None):
    """Call the create job from flex template APIs.

    Args:
      template_args: Arguments for create template.

    Returns:
      (Job)
    """
    params_list = Templates.__ConvertDictArguments(
        template_args.parameters, Templates.FLEX_TEMPLATE_PARAMETERS_VALUE)
    transform_mapping_list = Templates.__ConvertDictArguments(
        template_args.transform_name_mappings,
        Templates.FLEX_TEMPLATE_TRANSFORM_NAME_MAPPING_VALUE)
    transform_mappings = None
    streaming_update = None
    if template_args.streaming_update:
        streaming_update = template_args.streaming_update
        if transform_mapping_list:
            transform_mappings = (
                Templates.FLEX_TEMPLATE_TRANSFORM_NAME_MAPPING_VALUE(
                    additionalProperties=transform_mapping_list))

    user_labels_list = Templates.__ConvertDictArguments(
        template_args.additional_user_labels,
        Templates.FLEX_TEMPLATE_USER_LABELS_VALUE)

    # TODO(b/139889563): Remove default when args region is changed to required
    region_id = template_args.region_id or DATAFLOW_API_DEFAULT_REGION

    ip_private = Templates.IP_CONFIGURATION_ENUM_VALUE.WORKER_IP_PRIVATE
    ip_configuration = ip_private if template_args.disable_public_ips else None

    flexrs_goal = None
    if template_args.flexrs_goal:
        if template_args.flexrs_goal == 'SPEED_OPTIMIZED':
            flexrs_goal = Templates.FLEXRS_GOAL_ENUM_VALUE.FLEXRS_SPEED_OPTIMIZED
        elif template_args.flexrs_goal == 'COST_OPTIMIZED':
            flexrs_goal = Templates.FLEXRS_GOAL_ENUM_VALUE.FLEXRS_COST_OPTIMIZED

    body = Templates.LAUNCH_FLEX_TEMPLATE_REQUEST(
        launchParameter=Templates.FLEX_TEMPLATE_PARAMETER(
            jobName=template_args.job_name,
            containerSpecGcsPath=template_args.gcs_location,
            environment=Templates.FLEX_TEMPLATE_ENVIRONMENT(
                serviceAccountEmail=template_args.service_account_email,
                maxWorkers=template_args.max_workers,
                numWorkers=template_args.num_workers,
                network=template_args.network,
                subnetwork=template_args.subnetwork,
                machineType=template_args.worker_machine_type,
                tempLocation=template_args.staging_location,
                kmsKeyName=template_args.kms_key_name,
                ipConfiguration=ip_configuration,
                workerRegion=template_args.worker_region,
                workerZone=template_args.worker_zone,
                enableStreamingEngine=template_args.enable_streaming_engine,
                flexrsGoal=flexrs_goal,
                additionalExperiments=(template_args.additional_experiments
                                       if template_args.additional_experiments
                                       else []),
                additionalUserLabels=(
                    Templates.FLEX_TEMPLATE_USER_LABELS_VALUE(
                        additionalProperties=user_labels_list)
                    if user_labels_list else None)),
            update=streaming_update,
            transformNameMappings=transform_mappings,
            parameters=(Templates.FLEX_TEMPLATE_PARAMETERS_VALUE(
                additionalProperties=params_list) if params_list else None)))

    request = GetMessagesModule(
    ).DataflowProjectsLocationsFlexTemplatesLaunchRequest(
        projectId=template_args.project_id or GetProject(),
        location=region_id,
        launchFlexTemplateRequest=body)
    try:
        return Templates.GetFlexTemplateService().Launch(request)
    except apitools_exceptions.HttpError as error:
        raise exceptions.HttpException(error)
def Run(self, args): """Run 'deployments delete'. Args: args: argparse.Namespace, The arguments that this command was invoked with. Returns: If --async=true, returns Operation to poll. Else, returns boolean indicating whether insert operation succeeded. Raises: HttpException: An http error response was received while executing api request. """ prompt_message = ('The following deployments will be deleted:\n- ' + '\n- '.join(args.deployment_name)) if not args.quiet: if not console_io.PromptContinue(message=prompt_message, default=False): raise exceptions.OperationError('Deletion aborted by user.') operations = [] errors = [] for deployment_name in args.deployment_name: deployment_ref = self.resources.Parse( deployment_name, params={'project': properties.VALUES.core.project.GetOrFail}, collection='deploymentmanager.deployments') try: operation = self.client.deployments.Delete( self.messages.DeploymentmanagerDeploymentsDeleteRequest( project=dm_base.GetProject(), deployment=deployment_ref.deployment, deletePolicy=(Delete._delete_policy_flag_map. GetEnumForChoice(args.delete_policy)), ) ) if args.async: operations.append(operation) else: op_name = operation.name try: # TODO(b/62720778): Refactor to use waiter.CloudOperationPoller operation = dm_write.WaitForOperation( self.client, self.messages, op_name, 'delete', dm_base.GetProject(), timeout=OPERATION_TIMEOUT) dm_util.LogOperationStatus(operation, 'Delete') except exceptions.OperationError as e: errors.append(exceptions.OperationError( u'Delete operation {0} failed.\n{1}'.format(op_name, e))) completed_operation = self.client.operations.Get( self.messages.DeploymentmanagerOperationsGetRequest( project=dm_base.GetProject(), operation=op_name, ) ) operations.append(completed_operation) except apitools_exceptions.HttpError as error: errors.append(api_exceptions.HttpException( error, dm_api_util.HTTP_ERROR_FORMAT)) if errors: raise core_exceptions.MultiError(errors) return operations
def testHttpException500Message(self):
    err = http_error.MakeHttpError(500)
    exc = exceptions.HttpException(err)
    self.assertEqual(
        'Internal server error API reason: Internal server error.',
        exc.payload.message)
def Run(self, args): """Run 'deployments delete'. Args: args: argparse.Namespace, The arguments that this command was invoked with. Returns: If --async=true, returns Operation to poll. Else, returns boolean indicating whether insert operation succeeded. Raises: HttpException: An http error response was received while executing api request. """ prompt_message = ('The following deployments will be deleted:\n- ' + '\n- '.join(args.deployment_name)) if not args.quiet: if not console_io.PromptContinue(message=prompt_message, default=False): raise exceptions.OperationError('Deletion aborted by user.') operations = [] for deployment_name in args.deployment_name: try: operation = dm_base.GetClient().deployments.Delete( dm_base.GetMessages( ).DeploymentmanagerDeploymentsDeleteRequest( project=dm_base.GetProject(), deployment=deployment_name, deletePolicy=(dm_base.GetMessages( ).DeploymentmanagerDeploymentsDeleteRequest. DeletePolicyValueValuesEnum( args.delete_policy)), )) except apitools_exceptions.HttpError as error: raise api_exceptions.HttpException( error, dm_v2_util.HTTP_ERROR_FORMAT) if args. async: operations.append(operation) else: op_name = operation.name try: dm_write.WaitForOperation(op_name, 'delete', dm_base.GetProject(), timeout=OPERATION_TIMEOUT) log.status.Print('Delete operation ' + op_name + ' completed successfully.') except exceptions.OperationError as e: log.error(u'Delete operation {0} failed.\n{1}'.format( op_name, e)) except apitools_exceptions.HttpError as error: raise api_exceptions.HttpException( error, dm_v2_util.HTTP_ERROR_FORMAT) try: completed_operation = dm_base.GetClient().operations.Get( dm_base.GetMessages( ).DeploymentmanagerOperationsGetRequest( project=dm_base.GetProject(), operation=op_name, )) except apitools_exceptions.HttpError as error: raise api_exceptions.HttpException( error, dm_v2_util.HTTP_ERROR_FORMAT) operations.append(completed_operation) return operations
def Create(project_id=None,
           region_id=None,
           gcs_location=None,
           staging_location=None,
           parameters=None,
           job_name=None,
           service_account_email=None,
           zone=None,
           max_workers=None,
           num_workers=None,
           worker_machine_type=None,
           network=None,
           subnetwork=None,
           dataflow_kms_key=None):
    """Calls the Dataflow Templates.CreateFromJob method.

    Args:
      project_id: The project which owns the job.
      region_id: The regional endpoint where the job lives.
      gcs_location: The location of the template.
      staging_location: The location to stage temporary files.
      parameters: Parameters to pass to the template.
      job_name: The name to assign to the job.
      service_account_email: The service account to run the workers as.
      zone: The zone to run the workers in.
      max_workers: The maximum number of workers to run.
      num_workers: The initial number of workers to use.
      worker_machine_type: The type of machine to use for workers.
      network: The network for launching instances to run your pipeline.
      subnetwork: The subnetwork for launching instances to run your pipeline.
      dataflow_kms_key: The Cloud KMS key to protect the job resources.

    Returns:
      (Job)
    """
    params_list = []
    for k, v in six.iteritems(parameters) if parameters else {}:
        params_list.append(
            Templates.PARAMETERS_VALUE.AdditionalProperty(key=k, value=v))
    region_id = region_id or DATAFLOW_API_DEFAULT_REGION
    body = Templates.CREATE_REQUEST(
        gcsPath=gcs_location,
        jobName=job_name,
        location=region_id,
        environment=GetMessagesModule().RuntimeEnvironment(
            serviceAccountEmail=service_account_email,
            zone=zone,
            maxWorkers=max_workers,
            numWorkers=num_workers,
            network=network,
            subnetwork=subnetwork,
            machineType=worker_machine_type,
            tempLocation=staging_location,
            kmsKeyName=dataflow_kms_key),
        parameters=Templates.PARAMETERS_VALUE(additionalProperties=params_list)
        if parameters else None)
    request = GetMessagesModule().DataflowProjectsLocationsTemplatesCreateRequest(
        projectId=project_id or GetProject(),
        location=region_id,
        createJobFromTemplateRequest=body)
    try:
        return Templates.GetService().Create(request)
    except apitools_exceptions.HttpError as error:
        raise exceptions.HttpException(error)
def testHttpException400FormatAllUtf8(self):
    err = http_error.MakeHttpError(
        400,
        url='https://mock.googleapis.com/v1/projects/your-stuff/junk/mine',
        content={
            'stuff': [
                'Ṳᾔḯ¢◎ⅾℯ',
            ],
            'debugInfo': {
                'stackTrace': [
                    'file-1:line-1: Ṳᾔḯ¢◎ⅾℯ call-1',
                    'file-2:line-2: Ṳᾔḯ¢◎ⅾℯ call-2',
                ],
                'message': [
                    'Memory fault: Ṳᾔḯ¢◎ⅾℯ dumped',
                ],
            },
        },
        message='A Ṳᾔḯ¢◎ⅾℯ error somewhere. Try and find it.',
        reason='A Ṳᾔḯ¢◎ⅾℯ error somewhere. Find and try it.',
    )
    exc = exceptions.HttpException(err, self._ERROR_FORMAT_ALL)
    # The expected value contains escaped unicode values for the JSON field
    # values because whoever generated the JSON data represented unicode
    # characters as C-style escapes. Other field values are unicode strings
    # containing valid unicode characters, so escape representation is not
    # needed. The raw JSON values are checked in this test because they are
    # always preserved in the payload. That way the caller can debug
    # dump/decode/parse bugs using the original raw encoding.
    expected = """api_name: <mock>
api_version: <v1>
content: <debugInfo:
  message:
  - 'Memory fault: Ṳᾔḯ¢◎ⅾℯ dumped'
  stackTrace:
  - 'file-1:line-1: Ṳᾔḯ¢◎ⅾℯ call-1'
  - 'file-2:line-2: Ṳᾔḯ¢◎ⅾℯ call-2'
error:
  code: '400'
  errors:
  - domain: global
    message: A Ṳᾔḯ¢◎ⅾℯ error somewhere. Try and find it.
    reason: A Ṳᾔḯ¢◎ⅾℯ error somewhere. Find and try it.
  message: A Ṳᾔḯ¢◎ⅾℯ error somewhere. Try and find it.
location: mock-location
status: INVALID_ARGUMENT
stuff:
- Ṳᾔḯ¢◎ⅾℯ>
error_info: <code: '400'
errors:
- domain: global
  message: A Ṳᾔḯ¢◎ⅾℯ error somewhere. Try and find it.
  reason: A Ṳᾔḯ¢◎ⅾℯ error somewhere. Find and try it.
message: A Ṳᾔḯ¢◎ⅾℯ error somewhere. Try and find it.>
instance_name: <>
message: <A Ṳᾔḯ¢◎ⅾℯ error somewhere. Find and try it: A Ṳᾔḯ¢◎ⅾℯ error somewhere. Try and find it.>
resource_name: <>
status_code: <400>
status_description: <A Ṳᾔḯ¢◎ⅾℯ error somewhere. Find and try it.>
status_message: <A Ṳᾔḯ¢◎ⅾℯ error somewhere. Try and find it.>
url: <https://mock.googleapis.com/v1/projects/your-stuff/junk/mine>
"""
    actual = six.text_type(exc)
    self.maxDiff = None  # pylint:disable=invalid-name
    self.assertEqual(expected, actual)
def testHttpExceptionErrorFormatV2AggregateWithMissingDescription(self):
    err = http_error.MakeDetailedHttpError(
        400,
        url='https://mock.googleapis.com/v1/projects/your-stuff/junk/mine',
        content={
            'error': {
                'code': 400,
                'message': 'Precondition check failed.',
                'status': 'FAILED_PRECONDITION',
                'details': [
                    {
                        '@type': 'type.googleapis.com/google.rpc.violations',
                        'violations': [
                            {
                                'type': 'type.googleapis.com/google.rpc.lien',
                                'subject': 'liens/123-456-abc',
                            },
                            {
                                'type': 'type.googleapis.com/google.rpc.lien',
                                'subject': 'liens/123-456-abc',
                                'description': 'Remove the lien [1.2].',
                            },
                        ],
                    },
                    {
                        '@type': 'type.googleapis.com/google.rpc.violations',
                        'violations': [
                            {
                                'type': 'type.googleapis.com/google.rpc.lien',
                                'subject': 'liens/123-456-xyz',
                                'description': 'Remove the lien [2.1].',
                            },
                            {
                                'type': 'type.googleapis.com/google.rpc.lien',
                                'subject': 'liens/123-456-abc',
                            },
                        ],
                    },
                ],
            },
        },
        details=http_error.ExampleErrorDetails(),
    )
    exc = exceptions.HttpException(
        err,
        'Error [{status_code}] {status_message}'
        '{details.violations.description?\n{?}}')
    self.assertEqual("""\
Error [400] Precondition check failed.
- - Remove the lien [1.2].
- - Remove the lien [2.1].""", exc.message)

    exc = exceptions.HttpException(err)
    self.assertEqual("""\
Invalid request API reason: Precondition check failed.
- '@type': type.googleapis.com/google.rpc.violations
  violations:
  - subject: liens/123-456-abc
    type: type.googleapis.com/google.rpc.lien
  - description: Remove the lien [1.2].
    subject: liens/123-456-abc
    type: type.googleapis.com/google.rpc.lien
- '@type': type.googleapis.com/google.rpc.violations
  violations:
  - description: Remove the lien [2.1].
    subject: liens/123-456-xyz
    type: type.googleapis.com/google.rpc.lien
  - subject: liens/123-456-abc
    type: type.googleapis.com/google.rpc.lien""", exc.message)
def testHttpException666Message(self):
    err = http_error.MakeHttpError(666)
    exc = exceptions.HttpException(err)
    self.assertEqual(
        'HTTPError 666',
        exc.payload.message)
def testHttpException504Message(self):
    err = http_error.MakeHttpError(504)
    exc = exceptions.HttpException(err)
    self.assertEqual(
        'Deadline exceeded API reason: Deadline exceeded.',
        exc.payload.message)