def undeploy_model(
    self,
    request: endpoint_service.UndeployModelRequest = None,
    *,
    endpoint: str = None,
    deployed_model_id: str = None,
    traffic_split: Sequence[
        endpoint_service.UndeployModelRequest.TrafficSplitEntry
    ] = None,
    retry: retries.Retry = gapic_v1.method.DEFAULT,
    timeout: float = None,
    metadata: Sequence[Tuple[str, str]] = (),
) -> gac_operation.Operation:
    r"""Undeploys a Model from an Endpoint, removing a DeployedModel
    from it, and freeing all resources it's using.

    Args:
        request (google.cloud.aiplatform_v1beta1.types.UndeployModelRequest):
            The request object. Request message for
            [EndpointService.UndeployModel][google.cloud.aiplatform.v1beta1.EndpointService.UndeployModel].
        endpoint (str):
            Required. The name of the Endpoint resource from which
            to undeploy a Model. Format:
            ``projects/{project}/locations/{location}/endpoints/{endpoint}``

            This corresponds to the ``endpoint`` field
            on the ``request`` instance; if ``request`` is provided, this
            should not be set.
        deployed_model_id (str):
            Required. The ID of the DeployedModel
            to be undeployed from the Endpoint.

            This corresponds to the ``deployed_model_id`` field
            on the ``request`` instance; if ``request`` is provided, this
            should not be set.
        traffic_split (Sequence[google.cloud.aiplatform_v1beta1.types.UndeployModelRequest.TrafficSplitEntry]):
            If this field is provided, then the Endpoint's
            [traffic_split][google.cloud.aiplatform.v1beta1.Endpoint.traffic_split]
            will be overwritten with it. If last DeployedModel is
            being undeployed from the Endpoint, the
            [Endpoint.traffic_split] will always end up empty when
            this call returns. A DeployedModel will be successfully
            undeployed only if it doesn't have any traffic assigned
            to it when this method executes, or if this field
            unassigns any traffic to it.

            This corresponds to the ``traffic_split`` field
            on the ``request`` instance; if ``request`` is provided, this
            should not be set.
        retry (google.api_core.retry.Retry): Designation of what errors, if any,
            should be retried.
        timeout (float): The timeout for this request.
        metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with
            the request as metadata.

    Returns:
        google.api_core.operation.Operation:
            An object representing a long-running operation.

            The result type for the operation will be
            :class:`google.cloud.aiplatform_v1beta1.types.UndeployModelResponse`
            Response message for
            [EndpointService.UndeployModel][google.cloud.aiplatform.v1beta1.EndpointService.UndeployModel].

    Raises:
        ValueError: If ``request`` is provided together with any of the
            flattened field arguments (``endpoint``, ``deployed_model_id``,
            ``traffic_split``).
    """
    # Create or coerce a protobuf request object.
    # Sanity check: If we got a request object, we should *not* have
    # gotten any keyword arguments that map to the request.
    has_flattened_params = any([endpoint, deployed_model_id, traffic_split])
    if request is not None and has_flattened_params:
        raise ValueError(
            "If the `request` argument is set, then none of "
            "the individual field arguments should be set."
        )

    # Minor optimization to avoid making a copy if the user passes
    # in a endpoint_service.UndeployModelRequest.
    # There's no risk of modifying the input as we've already verified
    # there are no flattened fields.
    if not isinstance(request, endpoint_service.UndeployModelRequest):
        request = endpoint_service.UndeployModelRequest(request)

    # If we have keyword arguments corresponding to fields on the
    # request, apply these.
    if endpoint is not None:
        request.endpoint = endpoint
    if deployed_model_id is not None:
        request.deployed_model_id = deployed_model_id
    if traffic_split:
        # traffic_split is a proto map field; proto-plus does not allow
        # direct assignment to map fields, so merge the entries in place
        # instead. This also matches how the async client handles it.
        request.traffic_split.update(traffic_split)

    # Wrap the RPC method; this adds retry and timeout information,
    # and friendly error handling.
    rpc = self._transport._wrapped_methods[self._transport.undeploy_model]

    # Certain fields should be provided within the metadata header;
    # add these here. The routing header lets the backend route the
    # request to the correct regional endpoint service.
    metadata = tuple(metadata) + (
        gapic_v1.routing_header.to_grpc_metadata((("endpoint", request.endpoint),)),
    )

    # Send the request.
    response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

    # Wrap the raw operation in an operation future so callers can
    # block on .result() and get a typed UndeployModelResponse back.
    response = gac_operation.from_gapic(
        response,
        self._transport.operations_client,
        endpoint_service.UndeployModelResponse,
        metadata_type=endpoint_service.UndeployModelOperationMetadata,
    )

    # Done; return the response.
    return response
async def undeploy_model(
    self,
    request: Union[endpoint_service.UndeployModelRequest, dict] = None,
    *,
    endpoint: str = None,
    deployed_model_id: str = None,
    traffic_split: Sequence[
        endpoint_service.UndeployModelRequest.TrafficSplitEntry
    ] = None,
    retry: OptionalRetry = gapic_v1.method.DEFAULT,
    timeout: float = None,
    metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
    r"""Undeploys a Model from an Endpoint, removing a DeployedModel
    from it, and freeing all resources it's using.

    .. code-block:: python

        from google.cloud import aiplatform_v1beta1

        async def sample_undeploy_model():
            # Create a client
            client = aiplatform_v1beta1.EndpointServiceAsyncClient()

            # Initialize request argument(s)
            request = aiplatform_v1beta1.UndeployModelRequest(
                endpoint="endpoint_value",
                deployed_model_id="deployed_model_id_value",
            )

            # Make the request
            operation = await client.undeploy_model(request=request)

            print("Waiting for operation to complete...")

            response = await operation.result()

            # Handle the response
            print(response)

    Args:
        request (Union[google.cloud.aiplatform_v1beta1.types.UndeployModelRequest, dict]):
            The request object. Request message for
            [EndpointService.UndeployModel][google.cloud.aiplatform.v1beta1.EndpointService.UndeployModel].
        endpoint (:class:`str`):
            Required. The name of the Endpoint resource from which
            to undeploy a Model. Format:
            ``projects/{project}/locations/{location}/endpoints/{endpoint}``

            This corresponds to the ``endpoint`` field
            on the ``request`` instance; if ``request`` is provided, this
            should not be set.
        deployed_model_id (:class:`str`):
            Required. The ID of the DeployedModel
            to be undeployed from the Endpoint.

            This corresponds to the ``deployed_model_id`` field
            on the ``request`` instance; if ``request`` is provided, this
            should not be set.
        traffic_split (:class:`Sequence[google.cloud.aiplatform_v1beta1.types.UndeployModelRequest.TrafficSplitEntry]`):
            If this field is provided, then the Endpoint's
            [traffic_split][google.cloud.aiplatform.v1beta1.Endpoint.traffic_split]
            will be overwritten with it. If last DeployedModel is
            being undeployed from the Endpoint, the
            [Endpoint.traffic_split] will always end up empty when
            this call returns. A DeployedModel will be successfully
            undeployed only if it doesn't have any traffic assigned
            to it when this method executes, or if this field
            unassigns any traffic to it.

            This corresponds to the ``traffic_split`` field
            on the ``request`` instance; if ``request`` is provided, this
            should not be set.
        retry (google.api_core.retry.Retry): Designation of what errors, if any,
            should be retried.
        timeout (float): The timeout for this request.
        metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with
            the request as metadata.

    Returns:
        google.api_core.operation_async.AsyncOperation:
            An object representing a long-running operation.

            The result type for the operation will be
            :class:`google.cloud.aiplatform_v1beta1.types.UndeployModelResponse`
            Response message for
            [EndpointService.UndeployModel][google.cloud.aiplatform.v1beta1.EndpointService.UndeployModel].

    Raises:
        ValueError: If ``request`` is provided together with any of the
            flattened field arguments (``endpoint``, ``deployed_model_id``,
            ``traffic_split``).
    """
    # Create or coerce a protobuf request object.
    # Quick check: If we got a request object, we should *not* have
    # gotten any keyword arguments that map to the request.
    has_flattened_params = any([endpoint, deployed_model_id, traffic_split])
    if request is not None and has_flattened_params:
        raise ValueError(
            "If the `request` argument is set, then none of "
            "the individual field arguments should be set."
        )

    # Minor optimization to avoid making a copy if the user passes
    # in a endpoint_service.UndeployModelRequest (mirrors the sync
    # client). There's no risk of modifying the input as we've already
    # verified there are no flattened fields.
    if not isinstance(request, endpoint_service.UndeployModelRequest):
        request = endpoint_service.UndeployModelRequest(request)

    # If we have keyword arguments corresponding to fields on the
    # request, apply these.
    if endpoint is not None:
        request.endpoint = endpoint
    if deployed_model_id is not None:
        request.deployed_model_id = deployed_model_id
    if traffic_split:
        # traffic_split is a proto map field; merge the entries in place
        # since map fields do not support direct assignment.
        request.traffic_split.update(traffic_split)

    # Wrap the RPC method; this adds retry and timeout information,
    # and friendly error handling.
    rpc = gapic_v1.method_async.wrap_method(
        self._client._transport.undeploy_model,
        default_timeout=5.0,
        client_info=DEFAULT_CLIENT_INFO,
    )

    # Certain fields should be provided within the metadata header;
    # add these here. The routing header lets the backend route the
    # request to the correct regional endpoint service.
    metadata = tuple(metadata) + (
        gapic_v1.routing_header.to_grpc_metadata((("endpoint", request.endpoint),)),
    )

    # Send the request.
    response = await rpc(
        request,
        retry=retry,
        timeout=timeout,
        metadata=metadata,
    )

    # Wrap the raw operation in an operation future so callers can
    # await .result() and get a typed UndeployModelResponse back.
    response = operation_async.from_gapic(
        response,
        self._client._transport.operations_client,
        endpoint_service.UndeployModelResponse,
        metadata_type=endpoint_service.UndeployModelOperationMetadata,
    )

    # Done; return the response.
    return response