def add(self, deployment_pb):
    """Create a new AWS Lambda deployment described by ``deployment_pb``.

    Resolves a default AWS region when none was configured, verifies the
    required tooling (SAM CLI, docker) is available, fetches the target bento
    record and delegates the actual provisioning to ``self._add``.  Any
    ``BentoMLException`` is folded into an error ``ApplyDeploymentResponse``.
    """
    try:
        spec = deployment_pb.spec
        lambda_config = spec.aws_lambda_operator_config
        # Fall back to the locally configured AWS region when unset.
        lambda_config.region = lambda_config.region or get_default_aws_region()
        if not lambda_config.region:
            raise InvalidArgument('AWS region is missing')

        ensure_sam_available_or_raise()
        ensure_docker_available_or_raise()

        get_bento_request = GetBentoRequest(
            bento_name=spec.bento_name, bento_version=spec.bento_version
        )
        bento_pb = self.yatai_service.GetBento(get_bento_request)
        bento_uri = bento_pb.bento.uri
        # Only local and S3 bundle storage is supported by this operator.
        if bento_uri.type not in (BentoUri.LOCAL, BentoUri.S3):
            raise BentoMLException(
                'BentoML currently not support {} repository'.format(
                    BentoUri.StorageType.Name(bento_uri.type)
                )
            )
        return self._add(deployment_pb, bento_pb, bento_uri.uri)
    except BentoMLException as error:
        deployment_pb.state.state = DeploymentState.ERROR
        deployment_pb.state.error_message = f'Error: {str(error)}'
        return ApplyDeploymentResponse(
            status=error.status_proto, deployment=deployment_pb
        )
def apply(self, deployment_pb, yatai_service, prev_deployment=None):
    """Create or update a SageMaker deployment for ``deployment_pb``.

    Validates that docker is available and a SageMaker config is present,
    fetches the target bento and delegates to ``self._apply``.  Errors are
    converted into an ``ApplyDeploymentResponse`` carrying the status.
    """
    try:
        ensure_docker_available_or_raise()
        deployment_spec = deployment_pb.spec
        sagemaker_config = deployment_spec.sagemaker_operator_config
        if sagemaker_config is None:
            raise BentoMLDeploymentException('Sagemaker configuration is missing.')
        bento_pb = yatai_service.GetBento(
            GetBentoRequest(
                bento_name=deployment_spec.bento_name,
                bento_version=deployment_spec.bento_version,
            )
        )
        if bento_pb.bento.uri.type not in (BentoUri.LOCAL, BentoUri.S3):
            raise BentoMLException(
                'BentoML currently not support {} repository'.format(
                    # FIX: report the human-readable storage type name instead
                    # of the raw enum integer — consistent with the other
                    # deployment operators in this file.
                    BentoUri.StorageType.Name(bento_pb.bento.uri.type)
                )
            )
        return self._apply(
            deployment_pb,
            bento_pb,
            yatai_service,
            bento_pb.bento.uri.uri,
            prev_deployment,
        )
    except BentoMLException as error:
        return ApplyDeploymentResponse(status=exception_to_return_status(error))
def add(self, deployment_pb):
    """Create a new SageMaker deployment.

    Checks docker availability and the presence of a SageMaker configuration,
    retrieves the target bento record and hands off to ``self._add``.  On
    failure the deployment proto's state is marked ERROR and an error
    ``ApplyDeploymentResponse`` is returned.
    """
    try:
        ensure_docker_available_or_raise()
        spec = deployment_pb.spec
        if spec.sagemaker_operator_config is None:
            raise YataiDeploymentException('Sagemaker configuration is missing.')

        get_bento_request = GetBentoRequest(
            bento_name=spec.bento_name, bento_version=spec.bento_version
        )
        bento_pb = self.yatai_service.GetBento(get_bento_request)
        bento_uri = bento_pb.bento.uri
        # Only local and S3 bundle storage is supported by this operator.
        if bento_uri.type not in (BentoUri.LOCAL, BentoUri.S3):
            raise BentoMLException(
                'BentoML currently not support {} repository'.format(
                    BentoUri.StorageType.Name(bento_uri.type)
                )
            )
        return self._add(deployment_pb, bento_pb, bento_uri.uri)
    except BentoMLException as error:
        deployment_pb.state.state = DeploymentState.ERROR
        deployment_pb.state.error_message = (
            f'Error creating SageMaker deployment: {str(error)}'
        )
        return ApplyDeploymentResponse(
            status=error.status_proto, deployment=deployment_pb
        )
def update(self, deployment_pb, previous_deployment):
    """Update an existing AWS Lambda deployment to a (possibly new) bento.

    Verifies SAM CLI and docker availability, fetches the bento record and
    delegates to ``self._update``.  On failure the deployment proto's state is
    marked ERROR and an error ``ApplyDeploymentResponse`` is returned.
    """
    try:
        ensure_sam_available_or_raise()
        ensure_docker_available_or_raise()
        deployment_spec = deployment_pb.spec
        bento_pb = self.yatai_service.GetBento(
            GetBentoRequest(
                bento_name=deployment_spec.bento_name,
                bento_version=deployment_spec.bento_version,
            )
        )
        if bento_pb.bento.uri.type not in (BentoUri.LOCAL, BentoUri.S3):
            raise BentoMLException(
                'BentoML currently not support {} repository'.format(
                    BentoUri.StorageType.Name(bento_pb.bento.uri.type)
                )
            )
        return self._update(
            deployment_pb, previous_deployment, bento_pb, bento_pb.bento.uri.uri
        )
    except BentoMLException as error:
        deployment_pb.state.state = DeploymentState.ERROR
        deployment_pb.state.error_message = f'Error: {str(error)}'
        # FIX: use ``error.status_proto`` (not the bare ``status_code``) and
        # the ``deployment=`` keyword — ``ApplyDeploymentResponse`` has no
        # ``deployment_pb`` field; sibling operators (see ``add``) use the
        # same shape.
        return ApplyDeploymentResponse(
            status=error.status_proto, deployment=deployment_pb
        )
def delete(self, deployment_pb, yatai_service=None):
    """Tear down an AWS Lambda deployment via ``serverless remove``.

    Refuses to delete unless the deployment is currently RUNNING.  The
    serverless project config is regenerated in a temp directory so the
    ``remove`` command targets the right CloudFormation stack.
    """
    try:
        current_state = self.describe(deployment_pb, yatai_service).state
        if current_state.state != DeploymentState.RUNNING:
            message = (
                'Failed to delete, no active deployment {name}. '
                'The current state is {state}'.format(
                    name=deployment_pb.name,
                    state=DeploymentState.State.Name(current_state.state),
                )
            )
            return DeleteDeploymentResponse(status=Status.ABORTED(message))

        spec = deployment_pb.spec
        lambda_config = spec.aws_lambda_operator_config
        bento_pb = yatai_service.GetBento(
            GetBentoRequest(
                bento_name=spec.bento_name, bento_version=spec.bento_version
            )
        )
        metadata = bento_pb.bento.bento_service_metadata
        # We are not validating api_name, because for delete, you don't
        # need them.
        if lambda_config.api_name:
            api_names = [lambda_config.api_name]
        else:
            api_names = [api.name for api in metadata.apis]

        with TempDirectory() as serverless_project_dir:
            generate_aws_lambda_serverless_config(
                metadata.env.python_version,
                deployment_pb.name,
                api_names,
                serverless_project_dir,
                lambda_config.region,
                # BentoML deployment namespace is mapping to serverless `stage`
                # concept
                stage=deployment_pb.namespace,
            )
            response = call_serverless_command(['remove'], serverless_project_dir)
            stack_name = '{name}-{namespace}'.format(
                name=deployment_pb.name, namespace=deployment_pb.namespace
            )
            # Map the serverless CLI output onto a gRPC status.
            if "Serverless: Stack removal finished..." in response:
                status = Status.OK()
            elif "Stack '{}' does not exist".format(stack_name) in response:
                status = Status.NOT_FOUND(
                    'Deployment {} not found'.format(stack_name)
                )
            else:
                status = Status.ABORTED()
            return DeleteDeploymentResponse(status=status)
    except BentoMLException as error:
        return DeleteDeploymentResponse(status=exception_to_return_status(error))
def describe(self, deployment_pb, yatai_service=None):
    """Report the status of an AWS Lambda deployment from CloudFormation.

    Looks up the stack's ``ServiceEndpoint`` output and builds per-API
    endpoint URLs.  CloudFormation lookup errors are returned as an INTERNAL
    status with an ERROR deployment state.
    """
    try:
        spec = deployment_pb.spec
        lambda_config = spec.aws_lambda_operator_config
        info_json = {'endpoints': []}
        bento_pb = yatai_service.GetBento(
            GetBentoRequest(
                bento_name=spec.bento_name, bento_version=spec.bento_version
            )
        )
        metadata = bento_pb.bento.bento_service_metadata
        if lambda_config.api_name:
            api_names = [lambda_config.api_name]
        else:
            api_names = [api.name for api in metadata.apis]

        try:
            stack_result = boto3.client('cloudformation').describe_stacks(
                StackName='{name}-{ns}'.format(
                    ns=deployment_pb.namespace, name=deployment_pb.name
                )
            )
            outputs = stack_result.get('Stacks')[0]['Outputs']
        except Exception as error:
            failed_state = DeploymentState(
                state=DeploymentState.ERROR, error_message=str(error)
            )
            failed_state.timestamp.GetCurrentTime()
            return DescribeDeploymentResponse(
                status=Status.INTERNAL(str(error)), state=failed_state
            )

        # Find the service's base URL among the stack outputs, if present.
        base_url = next(
            (
                output['OutputValue']
                for output in outputs
                if output['OutputKey'] == 'ServiceEndpoint'
            ),
            '',
        )
        if base_url:
            info_json['endpoints'] = [
                base_url + '/' + api_name for api_name in api_names
            ]
        state = DeploymentState(
            state=DeploymentState.RUNNING, info_json=json.dumps(info_json)
        )
        state.timestamp.GetCurrentTime()
        return DescribeDeploymentResponse(status=Status.OK(), state=state)
    except BentoMLException as error:
        return DescribeDeploymentResponse(status=exception_to_return_status(error))
def apply(self, deployment_pb, yatai_service, prev_deployment=None):
    """Deploy a bento as a Google Cloud Function via the serverless CLI.

    Scaffolds a ``google-python`` serverless project in a temp directory,
    generates the function entry point and config, runs ``serverless deploy``
    and then describes the result to populate the returned deployment state.
    Raises are caught and converted into an error ``ApplyDeploymentResponse``.
    """
    try:
        deployment_spec = deployment_pb.spec
        gcp_config = deployment_spec.gcp_function_operator_config
        bento_pb = yatai_service.GetBento(
            GetBentoRequest(
                bento_name=deployment_spec.bento_name,
                bento_version=deployment_spec.bento_version,
            )
        )
        # This operator requires the bundle on local disk to scaffold the
        # serverless project from it.
        if bento_pb.bento.uri.type != BentoUri.LOCAL:
            raise BentoMLException(
                'BentoML currently only support local repository'
            )
        else:
            bento_path = bento_pb.bento.uri.uri
        bento_service_metadata = bento_pb.bento.bento_service_metadata
        # Deploy either the single configured API or every API in the bundle.
        api_names = (
            [gcp_config.api_name]
            if gcp_config.api_name
            else [api.name for api in bento_service_metadata.apis]
        )
        ensure_deploy_api_name_exists_in_bento(
            [api.name for api in bento_service_metadata.apis], api_names
        )
        with TempDirectory() as serverless_project_dir:
            init_serverless_project_dir(
                serverless_project_dir,
                bento_path,
                deployment_pb.name,
                deployment_spec.bento_name,
                'google-python',
            )
            generate_gcp_function_main_py(
                deployment_spec.bento_name, api_names, serverless_project_dir
            )
            generate_gcp_function_serverless_config(
                deployment_pb.name,
                api_names,
                serverless_project_dir,
                gcp_config.region,
                # BentoML namespace is mapping to serverless stage.
                stage=deployment_pb.namespace,
            )
            call_serverless_command(["deploy"], serverless_project_dir)
        # Describe the freshly deployed function so the response carries the
        # live state rather than the requested one.
        res_deployment_pb = Deployment(state=DeploymentState())
        res_deployment_pb.CopyFrom(deployment_pb)
        state = self.describe(res_deployment_pb, yatai_service).state
        res_deployment_pb.state.CopyFrom(state)
        return ApplyDeploymentResponse(
            status=Status.OK(), deployment=res_deployment_pb
        )
    except BentoMLException as error:
        return ApplyDeploymentResponse(status=exception_to_return_status(error))
def delete(self, deployment_pb, yatai_service=None):
    """Remove a deployed Google Cloud Function via ``serverless remove``.

    Only RUNNING deployments may be deleted; otherwise an ABORTED status is
    returned.  The serverless config is regenerated in a temp directory so
    the ``remove`` command targets the correct stack/stage.
    """
    try:
        current_state = self.describe(deployment_pb, yatai_service).state
        if current_state.state != DeploymentState.RUNNING:
            message = (
                'Failed to delete, no active deployment {name}. '
                'The current state is {state}'.format(
                    name=deployment_pb.name,
                    state=DeploymentState.State.Name(current_state.state),
                )
            )
            return DeleteDeploymentResponse(status=Status.ABORTED(message))

        spec = deployment_pb.spec
        gcp_config = spec.gcp_function_operator_config
        bento_pb = yatai_service.GetBento(
            GetBentoRequest(
                bento_name=spec.bento_name, bento_version=spec.bento_version
            )
        )
        metadata = bento_pb.bento.bento_service_metadata
        if gcp_config.api_name:
            api_names = [gcp_config.api_name]
        else:
            api_names = [api.name for api in metadata.apis]

        with TempDirectory() as serverless_project_dir:
            generate_gcp_function_serverless_config(
                deployment_pb.name,
                api_names,
                serverless_project_dir,
                gcp_config.region,
                # BentoML namespace is mapping to serverless stage.
                stage=deployment_pb.namespace,
            )
            try:
                response = call_serverless_command(
                    ['remove'], serverless_project_dir
                )
                if "Serverless: Stack removal finished..." in response:
                    status = Status.OK()
                else:
                    status = Status.ABORTED()
            except BentoMLException as e:
                status = Status.INTERNAL(str(e))
        return DeleteDeploymentResponse(status=status)
    except BentoMLException as error:
        return DeleteDeploymentResponse(status=exception_to_return_status(error))
def describe(self, deployment_pb, yatai_service=None):
    """Query the live state of a Google Cloud Function deployment.

    Regenerates the serverless config in a temp directory and runs
    ``serverless info``; a successful parse yields a RUNNING state, any
    CLI failure yields an ERROR state carrying the message.
    """
    try:
        spec = deployment_pb.spec
        gcp_config = spec.gcp_function_operator_config
        bento_pb = yatai_service.GetBento(
            GetBentoRequest(
                bento_name=spec.bento_name, bento_version=spec.bento_version
            )
        )
        metadata = bento_pb.bento.bento_service_metadata
        if gcp_config.api_name:
            api_names = [gcp_config.api_name]
        else:
            api_names = [api.name for api in metadata.apis]

        with TempDirectory() as serverless_project_dir:
            generate_gcp_function_serverless_config(
                deployment_pb.name,
                api_names,
                serverless_project_dir,
                gcp_config.region,
                # BentoML namespace is mapping to serverless stage.
                stage=deployment_pb.namespace,
            )
            try:
                response = call_serverless_command(
                    ["info"], serverless_project_dir
                )
                info_json = parse_serverless_info_response_to_json_string(
                    response
                )
                state = DeploymentState(
                    state=DeploymentState.RUNNING, info_json=info_json
                )
            except BentoMLException as e:
                state = DeploymentState(
                    state=DeploymentState.ERROR, error_message=str(e)
                )
            state.timestamp.GetCurrentTime()
        return DescribeDeploymentResponse(status=Status.OK(), state=state)
    except BentoMLException as error:
        return DescribeDeploymentResponse(status=exception_to_return_status(error))
def _upload_bento_service(self, saved_bento_path):
    """Register a saved BentoService bundle with the YataiService and upload it.

    Fails fast when the same name:version is already registered, then asks the
    service to allocate a storage URI (local path or pre-signed S3 POST) and
    copies/uploads the bundle there.

    Returns the repository URI the bundle was saved to.
    Raises BentoMLException on duplicate registration, service errors, upload
    failure, or an unsupported storage type.
    """
    bento_service_metadata = load_bento_service_metadata(saved_bento_path)
    # Check whether this name:version is already registered.
    get_bento_response = self.yatai_service.GetBento(
        GetBentoRequest(
            bento_name=bento_service_metadata.name,
            bento_version=bento_service_metadata.version,
        )
    )
    if get_bento_response.status.status_code == status_pb2.Status.OK:
        raise BentoMLException(
            "BentoService bundle {}:{} already registered in repository. Reset "
            "BentoService version with BentoService#set_version or bypass BentoML's"
            " model registry feature with BentoService#save_to_dir".format(
                bento_service_metadata.name, bento_service_metadata.version
            )
        )
    elif get_bento_response.status.status_code != status_pb2.Status.NOT_FOUND:
        # Anything other than NOT_FOUND means the lookup itself failed.
        # NOTE(review): `Status.Name` here — elsewhere in this file `Status`
        # is the response-status helper (Status.OK() etc.); confirm it exposes
        # a `Name` enum accessor, otherwise this should be
        # `status_pb2.Status.Name`.
        raise BentoMLException(
            'Failed accessing YataiService. {error_code}:'
            '{error_message}'.format(
                error_code=Status.Name(get_bento_response.status.status_code),
                error_message=get_bento_response.status.error_message,
            )
        )
    # Ask the service to register the bundle and allocate a storage URI.
    request = AddBentoRequest(
        bento_name=bento_service_metadata.name,
        bento_version=bento_service_metadata.version,
    )
    response = self.yatai_service.AddBento(request)
    if response.status.status_code != status_pb2.Status.OK:
        raise BentoMLException(
            "Error adding BentoService bundle to repository: {}:{}".format(
                Status.Name(response.status.status_code),
                response.status.error_message,
            )
        )
    if response.uri.type == BentoUri.LOCAL:
        if os.path.exists(response.uri.uri):
            # due to copytree dst must not already exist
            shutil.rmtree(response.uri.uri)
        shutil.copytree(saved_bento_path, response.uri.uri)
        self._update_bento_upload_progress(bento_service_metadata)
        logger.info(
            "BentoService bundle '%s:%s' saved to: %s",
            bento_service_metadata.name,
            bento_service_metadata.version,
            response.uri.uri,
        )
        # Return URI to saved bento in repository storage
        return response.uri.uri
    elif response.uri.type == BentoUri.S3:
        self._update_bento_upload_progress(
            bento_service_metadata, UploadStatus.UPLOADING, 0
        )
        # Tar the bundle directory into an in-memory gzip archive and POST it
        # to the pre-signed S3 URL supplied in ``additional_fields``.
        fileobj = io.BytesIO()
        with tarfile.open(mode="w:gz", fileobj=fileobj) as tar:
            tar.add(saved_bento_path, arcname=bento_service_metadata.name)
        fileobj.seek(0, 0)
        files = {
            'file': ('dummy', fileobj)
        }  # dummy file name because file name
        # has been generated when getting the pre-signed signature.
        data = json.loads(response.uri.additional_fields)
        uri = data.pop('url')
        http_response = requests.post(uri, data=data, files=files)
        # S3 pre-signed POST returns 204 No Content on success.
        if http_response.status_code != 204:
            self._update_bento_upload_progress(
                bento_service_metadata, UploadStatus.ERROR
            )
            raise BentoMLException(
                f"Error saving BentoService bundle to S3. "
                f"{http_response.status_code}: {http_response.text}"
            )
        self._update_bento_upload_progress(bento_service_metadata)
        logger.info(
            "Successfully saved BentoService bundle '%s:%s' to S3: %s",
            bento_service_metadata.name,
            bento_service_metadata.version,
            response.uri.uri,
        )
        return response.uri.uri
    else:
        raise BentoMLException(
            f"Error saving Bento to target repository, URI type {response.uri.type}"
            f" at {response.uri.uri} not supported"
        )
def get(self, bento_name, bento_version=None):
    """Fetch a bento record from the YataiService.

    When ``bento_version`` is omitted the service decides which version to
    return.
    """
    request = GetBentoRequest(
        bento_name=bento_name, bento_version=bento_version
    )
    return self.yatai_service.GetBento(request)
def apply(self, deployment_pb, yatai_service, prev_deployment=None):
    """Create or update a full SageMaker deployment for the target bento.

    Builds and pushes the bento's docker image to ECR, then creates the
    SageMaker model, endpoint configuration and endpoint (or updates the
    endpoint when ``prev_deployment`` is given).  On a partial failure the
    resources created earlier in the sequence are rolled back before an error
    response is returned.
    """
    try:
        ensure_docker_available_or_raise()
        deployment_spec = deployment_pb.spec
        sagemaker_config = deployment_spec.sagemaker_operator_config
        if sagemaker_config is None:
            raise BentoMLDeploymentException('Sagemaker configuration is missing.')
        bento_pb = yatai_service.GetBento(
            GetBentoRequest(
                bento_name=deployment_spec.bento_name,
                bento_version=deployment_spec.bento_version,
            )
        )
        # The docker build needs the bundle on local disk.
        if bento_pb.bento.uri.type != BentoUri.LOCAL:
            raise BentoMLException(
                'BentoML currently only support local repository'
            )
        else:
            bento_path = bento_pb.bento.uri.uri
        ensure_deploy_api_name_exists_in_bento(
            [api.name for api in bento_pb.bento.bento_service_metadata.apis],
            [sagemaker_config.api_name],
        )

        sagemaker_client = boto3.client('sagemaker', sagemaker_config.region)

        with TempDirectory() as temp_dir:
            # FIX: was ``os.path.jon`` — an AttributeError at runtime.
            sagemaker_project_dir = os.path.join(
                temp_dir, deployment_spec.bento_name
            )
            init_sagemaker_project(sagemaker_project_dir, bento_path)
            ecr_image_path = create_push_docker_image_to_ecr(
                deployment_spec.bento_name,
                deployment_spec.bento_version,
                sagemaker_project_dir,
            )

        execution_role_arn = get_arn_role_from_current_aws_user()
        model_name = create_sagemaker_model_name(
            deployment_spec.bento_name, deployment_spec.bento_version
        )
        sagemaker_model_info = {
            "ModelName": model_name,
            "PrimaryContainer": {
                "ContainerHostname": model_name,
                "Image": ecr_image_path,
                "Environment": {
                    "API_NAME": sagemaker_config.api_name,
                    "BENTO_SERVER_TIMEOUT": config().get(
                        'apiserver', 'default_timeout'
                    ),
                    "BENTO_SERVER_WORKERS": config().get(
                        'apiserver', 'default_gunicorn_workers_count'
                    ),
                },
            },
            "ExecutionRoleArn": execution_role_arn,
        }
        logger.info("Creating sagemaker model %s", model_name)
        try:
            create_model_response = sagemaker_client.create_model(
                **sagemaker_model_info
            )
            logger.debug("AWS create model response: %s", create_model_response)
        except ClientError as e:
            status = _parse_aws_client_exception_or_raise(e)
            # FIX: previously assigned a *tuple* ('msg %s', value) to
            # error_message; interpolate into a single string instead.
            status.error_message = (
                'Failed to create model for SageMaker Deployment: %s'
                % status.error_message
            )
            return ApplyDeploymentResponse(status=status, deployment=deployment_pb)

        production_variants = [
            {
                "VariantName": generate_aws_compatible_string(
                    deployment_spec.bento_name
                ),
                "ModelName": model_name,
                "InitialInstanceCount": sagemaker_config.instance_count,
                "InstanceType": sagemaker_config.instance_type,
            }
        ]
        endpoint_config_name = create_sagemaker_endpoint_config_name(
            deployment_spec.bento_name, deployment_spec.bento_version
        )
        logger.info(
            "Creating Sagemaker endpoint %s configuration", endpoint_config_name
        )
        try:
            create_config_response = sagemaker_client.create_endpoint_config(
                EndpointConfigName=endpoint_config_name,
                ProductionVariants=production_variants,
            )
            logger.debug(
                "AWS create endpoint config response: %s", create_config_response
            )
        except ClientError as e:
            # create endpoint config failed, will remove previously created model
            cleanup_model_error = _cleanup_sagemaker_model(
                sagemaker_client,
                deployment_spec.bento_name,
                deployment_spec.bento_version,
            )
            if cleanup_model_error:
                # FIX: tuple assignment -> formatted string (see above).
                cleanup_model_error.error_message = (
                    'Failed to clean up model after unsuccessfully '
                    'create endpoint config: %s'
                    % cleanup_model_error.error_message
                )
                return ApplyDeploymentResponse(
                    status=cleanup_model_error, deployment=deployment_pb
                )
            status = _parse_aws_client_exception_or_raise(e)
            status.error_message = (
                'Failed to create endpoint config for SageMaker deployment: %s'
                % status.error_message
            )
            return ApplyDeploymentResponse(status=status, deployment=deployment_pb)

        endpoint_name = generate_aws_compatible_string(
            deployment_pb.namespace + '-' + deployment_spec.bento_name
        )
        try:
            if prev_deployment:
                # Re-point the existing endpoint at the new configuration.
                logger.debug("Updating sagemaker endpoint %s", endpoint_name)
                update_endpoint_response = sagemaker_client.update_endpoint(
                    EndpointName=endpoint_name,
                    EndpointConfigName=endpoint_config_name,
                )
                logger.debug(
                    "AWS update endpoint response: %s", update_endpoint_response
                )
            else:
                logger.debug("Creating sagemaker endpoint %s", endpoint_name)
                create_endpoint_response = sagemaker_client.create_endpoint(
                    EndpointName=endpoint_name,
                    EndpointConfigName=endpoint_config_name,
                )
                logger.debug(
                    "AWS create endpoint response: %s", create_endpoint_response
                )
        except ClientError as e:
            # create/update endpoint failed, will remove previously created config
            # and then remove the model
            cleanup_endpoint_config_error = _cleanup_sagemaker_endpoint_config(
                client=sagemaker_client,
                name=deployment_spec.bento_name,
                version=deployment_spec.bento_version,
            )
            if cleanup_endpoint_config_error:
                cleanup_endpoint_config_error.error_message = (
                    'Failed to clean up endpoint config after unsuccessfully '
                    'apply SageMaker deployment: %s'
                    % cleanup_endpoint_config_error.error_message
                )
                return ApplyDeploymentResponse(
                    status=cleanup_endpoint_config_error, deployment=deployment_pb
                )
            cleanup_model_error = _cleanup_sagemaker_model(
                client=sagemaker_client,
                name=deployment_spec.bento_name,
                version=deployment_spec.bento_version,
            )
            if cleanup_model_error:
                cleanup_model_error.error_message = (
                    'Failed to clean up model after unsuccessfully '
                    'apply SageMaker deployment: %s'
                    % cleanup_model_error.error_message
                )
                return ApplyDeploymentResponse(
                    status=cleanup_model_error, deployment=deployment_pb
                )
            status = _parse_aws_client_exception_or_raise(e)
            status.error_message = (
                'Failed to apply SageMaker deployment: %s' % status.error_message
            )
            return ApplyDeploymentResponse(status=status, deployment=deployment_pb)

        res_deployment_pb = Deployment(state=DeploymentState())
        res_deployment_pb.CopyFrom(deployment_pb)
        return ApplyDeploymentResponse(
            status=Status.OK(), deployment=res_deployment_pb
        )
    except BentoMLException as error:
        return ApplyDeploymentResponse(status=exception_to_return_status(error))
def describe(self, deployment_pb):
    """Describe an AWS Lambda deployment from its CloudFormation stack.

    Maps the stack status onto a DeploymentState: CREATE/UPDATE_COMPLETE ->
    RUNNING (with endpoint/bucket info), failed statuses -> FAILED, anything
    else -> PENDING.  CloudFormation lookup errors yield an ERROR state with
    an INTERNAL status.
    """
    try:
        deployment_spec = deployment_pb.spec
        lambda_deployment_config = deployment_spec.aws_lambda_operator_config
        # Fall back to the locally configured default AWS region when unset.
        lambda_deployment_config.region = (
            lambda_deployment_config.region or get_default_aws_region()
        )
        if not lambda_deployment_config.region:
            raise InvalidArgument('AWS region is missing')
        bento_pb = self.yatai_service.GetBento(
            GetBentoRequest(
                bento_name=deployment_spec.bento_name,
                bento_version=deployment_spec.bento_version,
            )
        )
        bento_service_metadata = bento_pb.bento.bento_service_metadata
        # Endpoints are reported for the single configured API or for every
        # API in the bundle.
        api_names = (
            [lambda_deployment_config.api_name]
            if lambda_deployment_config.api_name
            else [api.name for api in bento_service_metadata.apis]
        )
        try:
            cf_client = boto3.client(
                'cloudformation', lambda_deployment_config.region
            )
            # NOTE(review): the stack is addressed as '<namespace>-<name>'
            # here while the older describe in this file uses
            # '<name>-<namespace>' — confirm which matches the stack created
            # by apply/add.
            cloud_formation_stack_result = cf_client.describe_stacks(
                StackName='{ns}-{name}'.format(
                    ns=deployment_pb.namespace, name=deployment_pb.name
                )
            )
            stack_result = cloud_formation_stack_result.get('Stacks')[0]
            # https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/\
            # using-cfn-describing-stacks.html
            success_status = ['CREATE_COMPLETE', 'UPDATE_COMPLETE']
            if stack_result['StackStatus'] in success_status:
                if stack_result.get('Outputs'):
                    outputs = stack_result['Outputs']
                else:
                    # A "successful" stack with no Outputs is unusable —
                    # report it as an error rather than guessing endpoints.
                    return DescribeDeploymentResponse(
                        status=Status.ABORTED(
                            '"Outputs" field is not present'
                        ),
                        state=DeploymentState(
                            state=DeploymentState.ERROR,
                            error_message='"Outputs" field is not present',
                        ),
                    )
            elif stack_result[
                'StackStatus'
            ] in FAILED_CLOUDFORMATION_STACK_STATUS:
                state = DeploymentState(state=DeploymentState.FAILED)
                state.timestamp.GetCurrentTime()
                return DescribeDeploymentResponse(
                    status=Status.OK(), state=state
                )
            else:
                # Stack still transitioning (e.g. CREATE_IN_PROGRESS).
                state = DeploymentState(state=DeploymentState.PENDING)
                state.timestamp.GetCurrentTime()
                return DescribeDeploymentResponse(
                    status=Status.OK(), state=state
                )
        except Exception as error:  # pylint: disable=broad-except
            state = DeploymentState(
                state=DeploymentState.ERROR, error_message=str(error)
            )
            state.timestamp.GetCurrentTime()
            return DescribeDeploymentResponse(
                status=Status.INTERNAL(str(error)), state=state
            )
        # Only the successful-with-Outputs path reaches here; index the
        # outputs by key and extract endpoint / bucket info.
        outputs = {o['OutputKey']: o['OutputValue'] for o in outputs}
        info_json = {}
        if 'EndpointUrl' in outputs:
            info_json['endpoints'] = [
                outputs['EndpointUrl'] + '/' + api_name
                for api_name in api_names
            ]
        if 'S3Bucket' in outputs:
            info_json['s3_bucket'] = outputs['S3Bucket']
        state = DeploymentState(
            state=DeploymentState.RUNNING, info_json=json.dumps(info_json)
        )
        state.timestamp.GetCurrentTime()
        return DescribeDeploymentResponse(status=Status.OK(), state=state)
    except BentoMLException as error:
        return DescribeDeploymentResponse(status=error.status_proto)
def apply(self, deployment_pb, yatai_service, prev_deployment=None):
    """Deploy a bento to AWS Lambda via the serverless framework.

    Scaffolds a serverless project from the local bundle, generates the
    Lambda handler and config, installs the python-requirements plugin and
    runs ``serverless deploy``; the returned deployment state is populated by
    a follow-up ``describe`` call.  Errors are converted into an error
    ``ApplyDeploymentResponse``.
    """
    try:
        ensure_docker_available_or_raise()
        deployment_spec = deployment_pb.spec
        aws_config = deployment_spec.aws_lambda_operator_config
        bento_pb = yatai_service.GetBento(
            GetBentoRequest(
                bento_name=deployment_spec.bento_name,
                bento_version=deployment_spec.bento_version,
            )
        )
        # The serverless project is scaffolded from the bundle on local disk.
        if bento_pb.bento.uri.type != BentoUri.LOCAL:
            raise BentoMLException(
                'BentoML currently only support local repository'
            )
        else:
            bento_path = bento_pb.bento.uri.uri
        bento_service_metadata = bento_pb.bento.bento_service_metadata

        # Pick the serverless runtime template matching the bundle's Python
        # major version.
        template = 'aws-python3'
        if version.parse(bento_service_metadata.env.python_version) < version.parse(
            '3.0.0'
        ):
            template = 'aws-python'
        # Deploy either the single configured API or every API in the bundle.
        api_names = (
            [aws_config.api_name]
            if aws_config.api_name
            else [api.name for api in bento_service_metadata.apis]
        )
        ensure_deploy_api_name_exists_in_bento(
            [api.name for api in bento_service_metadata.apis], api_names
        )
        with TempDirectory() as serverless_project_dir:
            init_serverless_project_dir(
                serverless_project_dir,
                bento_path,
                deployment_pb.name,
                deployment_spec.bento_name,
                template,
            )
            generate_aws_lambda_handler_py(
                deployment_spec.bento_name, api_names, serverless_project_dir
            )
            generate_aws_lambda_serverless_config(
                bento_service_metadata.env.python_version,
                deployment_pb.name,
                api_names,
                serverless_project_dir,
                aws_config.region,
                # BentoML deployment namespace is mapping to serverless `stage`
                # concept
                stage=deployment_pb.namespace,
            )
            logger.info(
                'Installing additional packages: serverless-python-requirements'
            )
            install_serverless_plugin(
                "serverless-python-requirements", serverless_project_dir
            )
            logger.info('Deploying to AWS Lambda')
            call_serverless_command(["deploy"], serverless_project_dir)
        # Describe the freshly deployed stack so the response carries the
        # live state rather than the requested one.
        res_deployment_pb = Deployment(state=DeploymentState())
        res_deployment_pb.CopyFrom(deployment_pb)
        state = self.describe(res_deployment_pb, yatai_service).state
        res_deployment_pb.state.CopyFrom(state)
        return ApplyDeploymentResponse(
            status=Status.OK(), deployment=res_deployment_pb
        )
    except BentoMLException as error:
        return ApplyDeploymentResponse(status=exception_to_return_status(error))