def save(bento_service, base_path=None, version=None, labels=None):
    """
    Save and register the given BentoService via BentoML's built-in model
    management system. BentoML by default keeps track of all the SavedBundle's
    files and metadata in local file system under the $BENTOML_HOME(~/bentoml)
    directory. Users can also configure BentoML to save their BentoService to a
    shared Database and cloud object storage such as AWS S3.

    :param bento_service: target BentoService instance to be saved
    :param base_path: optional - override repository base path
    :param version: optional - save with version override
    :param labels: optional - user defined labels
    :return: saved_path: file path to where the BentoService is saved
    """
    logger.warning(
        "`from bentoml import save` is being deprecated soon, use BentoService#save "
        "and BentoService#save_to_dir instead.")
    # Imports are deferred to avoid a circular dependency at module load time.
    from bentoml.yatai.client import YataiClient
    from bentoml.yatai.yatai_service import get_yatai_service

    if not base_path:
        client = YataiClient()
    else:
        # A custom base path means a dedicated file-system backed Yatai service.
        client = YataiClient(get_yatai_service(file_system_directory=base_path))
    return client.repository.upload(bento_service, version, labels)
def save(bento_service, base_path=None, version=None):
    """Upload the given BentoService to the Yatai repository.

    :param bento_service: BentoService instance to upload
    :param base_path: optional repository base url override
    :param version: optional version override
    :return: result of the repository upload
    """
    # Deferred imports avoid circular module dependencies.
    from bentoml.yatai.client import YataiClient
    from bentoml.yatai import get_yatai_service

    client = (
        YataiClient(get_yatai_service(repo_base_url=base_path))
        if base_path
        else YataiClient()
    )
    return client.repository.upload(bento_service, version)
def test_easyocr_artifact_packs():
    """EasyOCR reader predictions must survive a save/load round trip."""
    svc = EasyOCRService()
    languages = ['ch_sim', 'en']
    network = "zh_sim_g2"
    reader = easyocr.Reader(
        lang_list=languages,
        gpu=False,
        download_enabled=True,
        recog_network=network,
    )
    svc.pack('chinese_small', reader, lang_list=languages, recog_network=network)

    recognized = [item[1] for item in reader.readtext(IMAGE_PATH)]
    assert recognized == (TEST_RESULT), 'Run inference before saving the artifact'

    bundle_path = svc.save()
    restored = bentoml.load(bundle_path)
    assert restored.predict(imageio.imread(IMAGE_PATH))['text'] == (
        TEST_RESULT), 'Run inference after saving the artifact'

    # clean up saved bundle
    yc = YataiClient()
    yc.repository.delete(f'{svc.name}:{svc.version}')
def delete(bento):
    """Delete a saved BentoService bundle identified by a ``name:version`` tag.

    Prompts for confirmation before permanently removing the bundle files.

    :param bento: BentoService tag in ``name:version`` form
    """
    yatai_client = YataiClient()
    # BUG FIX: partition() never raises, so a malformed tag (missing ':')
    # reaches the friendly error below instead of an unhandled ValueError.
    # Also, the validation must trigger when EITHER part is missing
    # (previously `and` only matched the literal input ":").
    name, _, version = bento.partition(':')
    if not name or not version:
        _echo(
            'BentoService name or version is missing. Please provide in the '
            'format of name:version',
            CLI_COLOR_ERROR,
        )
        return
    if not click.confirm(
            f'Are you sure about delete {bento}? This will delete the BentoService '
            f'saved bundle files permanently'):
        return
    result = yatai_client.repository.dangerously_delete_bento(
        name=name, version=version)
    if result.status.status_code != status_pb2.Status.OK:
        error_code, error_message = status_pb_to_error_code_and_message(
            result.status)
        _echo(
            f'Failed to delete Bento {name}:{version} '
            f'{error_code}:{error_message}',
            CLI_COLOR_ERROR,
        )
        return
    _echo(f'BentoService {name}:{version} deleted')
def update(
    name, namespace, bento, min_instances, max_burst, premium_plan_sku, output, wait
):
    """Update an existing Azure Functions deployment and print the result."""
    yatai_client = YataiClient()
    bento_name = bento_version = None
    if bento:
        bento_name, bento_version = bento.split(':')
    with Spinner(f'Updating Azure Functions deployment {name}'):
        update_result = yatai_client.deployment.update_azure_functions_deployment(
            namespace=namespace,
            deployment_name=name,
            bento_name=bento_name,
            bento_version=bento_version,
            min_instances=min_instances,
            max_burst=max_burst,
            premium_plan_sku=premium_plan_sku,
            wait=wait,
        )
        if update_result.status.status_code != status_pb2.Status.OK:
            error_code, error_message = status_pb_to_error_code_and_message(
                update_result.status
            )
            raise CLIException(f'{error_code}:{error_message}')
        _echo(
            f'Successfully updated Azure Functions deployment {name}',
            CLI_COLOR_SUCCESS,
        )
        _print_deployment_info(update_result.deployment, output)
def list_deployment(namespace, limit, labels, order_by, asc, output):
    """List Azure Functions deployments matching the given filters."""
    yatai_client = YataiClient()
    track_cli('deploy-list', PLATFORM_NAME)
    try:
        listing = yatai_client.deployment.list_azure_functions_deployments(
            limit=limit,
            labels_query=labels,
            namespace=namespace,
            order_by=order_by,
            ascending_order=asc,
        )
        if listing.status.status_code != status_pb2.Status.OK:
            error_code, error_message = status_pb_to_error_code_and_message(
                listing.status
            )
            _echo(
                f'Failed to list Azure Functions deployments '
                f'{error_code}:{error_message}',
                CLI_COLOR_ERROR,
            )
            return
        _print_deployments_info(listing.deployments, output)
    except BentoMLException as e:
        _echo(
            f'Failed to list Azure Functions deployments {str(e)}', CLI_COLOR_ERROR
        )
def delete(name, namespace, force):
    """Delete a deployment, tracking analytics (including deployment uptime)."""
    yatai_client = YataiClient()
    lookup = yatai_client.deployment.get(namespace, name)
    if lookup.status.status_code != status_pb2.Status.OK:
        error_code, error_message = status_pb_to_error_code_and_message(
            lookup.status)
        _echo(
            f'Failed to get deployment {name} for deletion. '
            f'{error_code}:{error_message}',
            CLI_COLOR_ERROR,
        )
        return
    # Resolve the operator name from the proto enum for the analytics event.
    platform = DeploymentSpec.DeploymentOperator.Name(
        lookup.deployment.spec.operator)
    track_cli('deploy-delete', platform)
    delete_result = yatai_client.deployment.delete(name, namespace, force)
    if delete_result.status.status_code != status_pb2.Status.OK:
        error_code, error_message = status_pb_to_error_code_and_message(
            delete_result.status)
        _echo(
            f'Failed to delete deployment {name}. {error_code}:{error_message}',
            CLI_COLOR_ERROR,
        )
        return
    extra_properties = {}
    if lookup.deployment.created_at:
        # Report how long the deployment was alive before deletion.
        created = lookup.deployment.created_at.ToDatetime()
        extra_properties['uptime'] = int(
            (datetime.utcnow() - created).total_seconds())
    track_cli('deploy-delete-success', platform, extra_properties)
    _echo('Successfully deleted deployment "{}"'.format(name), CLI_COLOR_SUCCESS)
def get(name, namespace, output):
    """Print an AWS Sagemaker deployment together with its latest state."""
    yatai_client = YataiClient()
    track_cli('deploy-get', PLATFORM_NAME)
    try:
        fetched = yatai_client.deployment.get(namespace, name)
        if fetched.status.status_code != status_pb2.Status.OK:
            error_code, error_message = status_pb_to_error_code_and_message(
                fetched.status)
            _echo(
                f'Failed to get AWS Sagemaker deployment {name}. '
                f'{error_code}:{error_message}',
                CLI_COLOR_ERROR,
            )
            return
        described = yatai_client.deployment.describe(namespace, name)
        if described.status.status_code != status_pb2.Status.OK:
            error_code, error_message = status_pb_to_error_code_and_message(
                described.status)
            _echo(
                f'Failed to retrieve the latest status for AWS Sagemaker '
                f'deployment {name}. {error_code}:{error_message}',
                CLI_COLOR_ERROR,
            )
            return
        # Merge the freshly described state into the stored record.
        fetched.deployment.state.CopyFrom(described.state)
        _print_deployment_info(fetched.deployment, output)
    except BentoMLException as e:
        _echo(
            f'Failed to get AWS Sagemaker deployment {name} {str(e)}',
            CLI_COLOR_ERROR,
        )
def resolve_bundle_path(bento, pip_installed_bundle_path):
    """Resolve *bento* to a loadable saved-bundle location.

    Accepts a local directory, an s3 url, or a ``name:version`` tag that is
    looked up in the Yatai repository.
    """
    if pip_installed_bundle_path:
        assert (
            bento is None
        ), "pip installed BentoService commands should not have Bento argument"
        return pip_installed_bundle_path

    if os.path.isdir(bento) or is_s3_url(bento):
        # bundler already support loading local and s3 path
        return bento

    if ":" in bento:
        # assuming passing in BentoService in the form of Name:Version tag
        yatai_client = YataiClient()
        name, version = bento.split(':')
        lookup = yatai_client.repository.get(name, version)
        if lookup.status.status_code != status_pb2.Status.OK:
            error_code, error_message = status_pb_to_error_code_and_message(
                lookup.status)
            raise BentoMLException(
                f'BentoService {name}:{version} not found - '
                f'{error_code}:{error_message}')
        return lookup.bento.uri.uri

    raise BentoMLException(
        f'BentoService "{bento}" not found - either specify the file path of '
        f'the BentoService saved bundle, or the BentoService id in the form of '
        f'"name:version"')
def deploy(
    name,
    namespace,
    bento,
    labels,
    region,
    api_name,
    memory_size,
    timeout,
    output,
    wait,
):
    """Create an AWS Lambda deployment for the given ``name:version`` bento."""
    yatai_client = YataiClient()
    bento_name, bento_version = bento.split(':')
    with Spinner(f'Deploying "{bento}" to AWS Lambda '):
        create_result = yatai_client.deployment.create_lambda_deployment(
            name=name,
            namespace=namespace,
            bento_name=bento_name,
            bento_version=bento_version,
            api_name=api_name,
            region=region,
            memory_size=memory_size,
            timeout=timeout,
            labels=labels,
            wait=wait,
        )
        if create_result.status.status_code != status_pb2.Status.OK:
            error_code, error_message = status_pb_to_error_code_and_message(
                create_result.status)
            raise CLIException(f'{error_code}:{error_message}')
        _echo(f'Successfully created AWS Lambda deployment {name}', CLI_COLOR_SUCCESS)
        _print_deployment_info(create_result.deployment, output)
def describe(name, output, namespace):
    """Describe a deployment's latest state and print the combined record.

    :param name: deployment name
    :param output: output format passed through to _print_deployment_info
    :param namespace: deployment namespace
    """
    track_cli('deploy-describe')
    yatai_client = YataiClient()
    result = yatai_client.deployment.describe(namespace, name)
    if result.status.status_code != status_pb2.Status.OK:
        _echo(
            'Failed to describe deployment {name}. {error_code}:'
            '{error_message}'.format(
                name=name,
                error_code=status_pb2.Status.Code.Name(
                    result.status.status_code),
                error_message=result.status.error_message,
            ),
            CLI_COLOR_ERROR,
        )
    else:
        get_result = yatai_client.deployment.get(namespace, name)
        if get_result.status.status_code != status_pb2.Status.OK:
            # BUG FIX: report the status of the failed `get` call (previously
            # echoed the earlier, successful `describe` status) and return
            # instead of falling through to use an invalid deployment record.
            _echo(
                'Failed to describe deployment {name}. {error_code}:'
                '{error_message}'.format(
                    name=name,
                    error_code=status_pb2.Status.Code.Name(
                        get_result.status.status_code),
                    error_message=get_result.status.error_message,
                ),
                CLI_COLOR_ERROR,
            )
            return
        deployment_pb = get_result.deployment
        # Merge the freshly described state into the stored deployment record.
        deployment_pb.state.CopyFrom(result.state)
        _print_deployment_info(deployment_pb, output)
def update(name, namespace, bento, memory_size, timeout, output, wait):
    """Update an AWS Lambda deployment; only supplied fields are changed."""
    yatai_client = YataiClient()
    bento_name = bento_version = None
    if bento:
        bento_name, bento_version = bento.split(':')
    with Spinner('Updating Lambda deployment '):
        update_result = yatai_client.deployment.update_lambda_deployment(
            bento_name=bento_name,
            bento_version=bento_version,
            deployment_name=name,
            namespace=namespace,
            memory_size=memory_size,
            timeout=timeout,
            wait=wait,
        )
        if update_result.status.status_code != status_pb2.Status.OK:
            error_code, error_message = status_pb_to_error_code_and_message(
                update_result.status)
            raise CLIException(f'{error_code}:{error_message}')
        _echo(
            f'Successfully updated AWS Lambda deployment {name}',
            CLI_COLOR_SUCCESS,
        )
        _print_deployment_info(update_result.deployment, output)
def get(name, namespace, output):
    """Print a deployment's info combined with its most recent state."""
    yatai_client = YataiClient()
    track_cli('deploy-get')
    fetched = yatai_client.deployment.get(namespace, name)
    if fetched.status.status_code != status_pb2.Status.OK:
        error_code, error_message = status_pb_to_error_code_and_message(
            fetched.status
        )
        _echo(
            f'Failed to get deployment {name}. '
            f'{error_code}:{error_message}',
            CLI_COLOR_ERROR,
        )
        return
    described = yatai_client.deployment.describe(
        namespace=namespace, name=name
    )
    if described.status.status_code != status_pb2.Status.OK:
        error_code, error_message = status_pb_to_error_code_and_message(
            described.status
        )
        _echo(
            f'Failed to retrieve the latest status for deployment'
            f' {name}. {error_code}:{error_message}',
            CLI_COLOR_ERROR,
        )
        return
    # Merge the latest state into the stored record before printing.
    fetched.deployment.state.CopyFrom(described.state)
    _print_deployment_info(fetched.deployment, output)
def test_fasttext_artifact_pack(fasttext_classifier_class):
    """A tiny fasttext model's predictions must survive a save/load round trip."""

    @contextlib.contextmanager
    def _temp_filename_with_contents(contents):
        # Write the training data to a named temp file and hand out its path;
        # the file is removed when the context exits.
        tmp = tempfile.NamedTemporaryFile(suffix=".txt", mode="w+")
        tmp.write(contents)
        # Set file pointer to beginning to ensure correct read
        tmp.seek(0)
        yield tmp.name
        tmp.close()

    with _temp_filename_with_contents("__label__bar foo") as filename:
        model = fasttext.train_supervised(input=filename)

    svc = fasttext_classifier_class()
    svc.pack('model', model)
    assert svc.predict(test_json)[0] == (
        '__label__bar',
    ), 'Run inference before saving the artifact'

    bundle_path = svc.save()
    restored = bentoml.load(bundle_path)
    assert restored.predict(test_json)[0] == (
        '__label__bar',
    ), 'Run inference after saving the artifact'

    # clean up saved bundle
    yc = YataiClient()
    yc.repository.dangerously_delete_bento(svc.name, svc.version)
def create(deployment_yaml, output, wait):
    """Create a deployment from a parsed deployment YAML spec."""
    yatai_client = YataiClient()
    platform_name = deployment_yaml.get('spec', {}).get('operator')
    deployment_name = deployment_yaml.get('name')
    track_cli('deploy-create', platform_name)
    try:
        with Spinner('Creating deployment '):
            create_result = yatai_client.deployment.create(deployment_yaml, wait)
        if create_result.status.status_code != status_pb2.Status.OK:
            error_code, error_message = status_pb_to_error_code_and_message(
                create_result.status
            )
            track_cli(
                'deploy-create-failure',
                platform_name,
                {'error_code': error_code, 'error_message': error_message},
            )
            _echo(
                f'Failed to create deployment {deployment_name} '
                f'{error_code}:{error_message}',
                CLI_COLOR_ERROR,
            )
            return
        track_cli('deploy-create-success', platform_name)
        _echo(
            f'Successfully created deployment {deployment_name}',
            CLI_COLOR_SUCCESS,
        )
        _print_deployment_info(create_result.deployment, output)
    except BentoMLException as e:
        _echo(
            f'Failed to create deployment {deployment_name} {str(e)}',
            CLI_COLOR_ERROR,
        )
def get(bento, limit, ascending_order, print_location, output):
    """Show one BentoService (``name:version``) or list all saved versions
    for a bare name."""
    if ':' not in bento:
        name, version = bento, None
    else:
        name, version = bento.split(':')
    yatai_client = YataiClient()

    if name and version:
        output = output or 'json'
        fetched = yatai_client.repository.get(name, version)
        if fetched.status.status_code != status_pb2.Status.OK:
            error_code, error_message = status_pb_to_error_code_and_message(
                fetched.status)
            raise CLIException(f'{error_code}:{error_message}')
        if print_location:
            # Only the storage uri is wanted, nothing else.
            _echo(fetched.bento.uri.uri)
            return
        _print_bento_info(fetched.bento, output)
    elif name:
        output = output or 'table'
        listing = yatai_client.repository.list(
            bento_name=name, limit=limit, ascending_order=ascending_order)
        if listing.status.status_code != status_pb2.Status.OK:
            error_code, error_message = status_pb_to_error_code_and_message(
                listing.status)
            raise CLIException(f'{error_code}:{error_message}')
        _print_bentos_info(listing.bentos, output)
def delete(bentos, yes):
    """Delete saved BentoService.

    BENTO is the target BentoService to be deleted, referenced by its name and
    version in format of name:version. For example: "iris_classifier:v1.2.0"

    `bentoml delete` command also supports deleting multiple saved BentoService at
    once, by providing name version tag separated by ",", for example:

    `bentoml delete iris_classifier:v1.2.0,my_svc:v1,my_svc2:v3`
    """
    yatai_client = YataiClient()
    for bento in bentos:
        # BUG FIX: partition() never raises, so a malformed tag reaches the
        # explicit CLIException below instead of an unhandled ValueError, and
        # the validation fires when EITHER part is missing (previously `and`
        # only matched the literal input ":").
        name, _, version = bento.partition(':')
        if not name or not version:
            raise CLIException(
                'BentoService name or version is missing. Please provide in the '
                'format of name:version')
        if not yes and not click.confirm(
                f'Are you sure about delete {bento}? This will delete the BentoService '
                f'saved bundle files permanently'):
            # Declining the confirmation aborts the whole batch.
            return
        result = yatai_client.repository.dangerously_delete_bento(
            name=name, version=version)
        if result.status.status_code != status_pb2.Status.OK:
            error_code, error_message = status_pb_to_error_code_and_message(
                result.status)
            raise CLIException(f'{error_code}:{error_message}')
        _echo(f'BentoService {name}:{version} deleted')
def deploy(
    name,
    namespace,
    bento,
    labels,
    region,
    api_name,
    memory_size,
    timeout,
    output,
    wait,
):
    """Create an AWS Lambda deployment, emitting usage-tracking events."""
    track_cli('deploy-create', PLATFORM_NAME)
    yatai_client = YataiClient()
    bento_name, bento_version = bento.split(':')
    try:
        with Spinner(f'Deploying "{bento}" to AWS Lambda '):
            create_result = yatai_client.deployment.create_lambda_deployment(
                name=name,
                namespace=namespace,
                bento_name=bento_name,
                bento_version=bento_version,
                api_name=api_name,
                region=region,
                memory_size=memory_size,
                timeout=timeout,
                labels=labels,
                wait=wait,
            )
            if create_result.status.status_code != status_pb2.Status.OK:
                error_code, error_message = status_pb_to_error_code_and_message(
                    create_result.status)
                track_cli(
                    'deploy-create-failure',
                    PLATFORM_NAME,
                    {'error_code': error_code, 'error_message': error_message},
                )
                _echo(
                    f'Failed to create AWS Lambda deployment {name} '
                    f'{error_code}:{error_message}',
                    CLI_COLOR_ERROR,
                )
                return
        track_cli('deploy-create-success', PLATFORM_NAME)
        _echo(f'Successfully created AWS Lambda deployment {name}', CLI_COLOR_SUCCESS)
        _print_deployment_info(create_result.deployment, output)
    except BentoMLException as e:
        track_cli('deploy-create-failure', PLATFORM_NAME, {'error_message': str(e)})
        _echo(
            f'Failed to create AWS Lambda deployment {name} {str(e)}',
            CLI_COLOR_ERROR,
        )
def deploy(
    name,
    bento,
    namespace,
    labels,
    region,
    instance_type,
    instance_count,
    num_of_gunicorn_workers_per_instance,
    api_name,
    timeout,
    output,
    wait,
):
    """Create an AWS Sagemaker deployment for the given bento tag."""
    # use the DeploymentOperator name in proto to be consistent with amplitude
    track_cli('deploy-create', PLATFORM_NAME)
    bento_name, bento_version = bento.split(':')
    yatai_client = YataiClient()
    try:
        with Spinner('Deploying Sagemaker deployment '):
            create_result = yatai_client.deployment.create_sagemaker_deployment(
                name=name,
                namespace=namespace,
                labels=labels,
                bento_name=bento_name,
                bento_version=bento_version,
                instance_count=instance_count,
                instance_type=instance_type,
                num_of_gunicorn_workers_per_instance=num_of_gunicorn_workers_per_instance,  # noqa E501
                api_name=api_name,
                timeout=timeout,
                region=region,
                wait=wait,
            )
            if create_result.status.status_code != status_pb2.Status.OK:
                error_code, error_message = status_pb_to_error_code_and_message(
                    create_result.status)
                _echo(
                    f'Failed to create AWS Sagemaker deployment {name} '
                    f'{error_code}:{error_message}',
                    CLI_COLOR_ERROR,
                )
                return
        track_cli('deploy-create-success', PLATFORM_NAME)
        _echo(
            f'Successfully created AWS Sagemaker deployment {name}',
            CLI_COLOR_SUCCESS,
        )
        _print_deployment_info(create_result.deployment, output)
    except BentoMLException as e:
        _echo(
            'Failed to create AWS Sagemaker deployment {}.: {}'.format(
                name, str(e)),
            CLI_COLOR_ERROR,
        )
def delete(name, namespace, force):
    """Delete an AWS Lambda deployment, reporting uptime analytics."""
    yatai_client = YataiClient()
    lookup = yatai_client.deployment.get(namespace=namespace, name=name)
    if lookup.status.status_code != status_pb2.Status.OK:
        error_code, error_message = status_pb_to_error_code_and_message(
            lookup.status
        )
        _echo(
            f'Failed to get AWS Lambda deployment {name} for deletion '
            f'{error_code}:{error_message}',
            CLI_COLOR_ERROR,
        )
        return
    track_cli('deploy-delete', PLATFORM_NAME)
    try:
        delete_result = yatai_client.deployment.delete(
            namespace=namespace, deployment_name=name, force_delete=force
        )
        if delete_result.status.status_code != status_pb2.Status.OK:
            error_code, error_message = status_pb_to_error_code_and_message(
                delete_result.status
            )
            track_cli(
                'deploy-delete-failure',
                PLATFORM_NAME,
                {'error_code': error_code, 'error_message': error_message},
            )
            _echo(
                f'Failed to delete AWS Lambda deployment {name} '
                f'{error_code}:{error_message}',
                CLI_COLOR_ERROR,
            )
            return
        extra_properties = {}
        if lookup.deployment.created_at:
            # Report how long the deployment was alive before deletion.
            created = lookup.deployment.created_at.ToDatetime()
            extra_properties['uptime'] = int(
                (datetime.utcnow() - created).total_seconds()
            )
        track_cli('deploy-delete-success', PLATFORM_NAME, extra_properties)
        _echo(
            f'Successfully deleted AWS Lambda deployment "{name}"',
            CLI_COLOR_SUCCESS,
        )
    except BentoMLException as e:
        track_cli('deploy-delete-failure', PLATFORM_NAME, {'error_message': str(e)})
        _echo(
            f'Failed to delete AWS Lambda deployment {name} {str(e)}',
            CLI_COLOR_ERROR,
        )
def test_fastai2_artifact_pack(fastai_learner):
    """Fastai learner predictions must survive a save/load round trip."""
    svc = FastaiClassifier()
    svc.pack('model', fastai_learner)
    assert svc.predict(test_df) == 5.0, 'Run inference before saving'

    bundle_path = svc.save()
    restored = bentoml.load(bundle_path)
    assert restored.predict(test_df) == 5.0, 'Run inference from saved model'

    # remove the saved bundle from the local repository
    yc = YataiClient()
    yc.repository.delete(f'{svc.name}:{svc.version}')
def deploy(
    namespace,
    name,
    bento,
    location,
    min_instances,
    max_burst,
    premium_plan_sku,
    labels,
    function_auth_level,
    output,
    wait,
):
    """Create an Azure Functions deployment for the given bento tag."""
    track_cli('deploy-create', PLATFORM_NAME)
    bento_name, bento_version = bento.split(':')
    yatai_client = YataiClient()
    try:
        with Spinner(f'Deploying {bento} to Azure Functions'):
            create_result = yatai_client.deployment.create_azure_functions_deployment(
                name=name,
                namespace=namespace,
                labels=labels,
                bento_name=bento_name,
                bento_version=bento_version,
                location=location,
                min_instances=min_instances,
                max_burst=max_burst,
                premium_plan_sku=premium_plan_sku,
                function_auth_level=function_auth_level,
                wait=wait,
            )
            if create_result.status.status_code != status_pb2.Status.OK:
                error_code, error_message = status_pb_to_error_code_and_message(
                    create_result.status
                )
                _echo(
                    f'Failed to create Azure Functions deployment {name} '
                    f'{error_code}:{error_message}',
                    CLI_COLOR_ERROR,
                )
                return
        track_cli('deploy-create-success', PLATFORM_NAME)
        _echo(
            f'Successfully created Azure Functions deployment {name}',
            CLI_COLOR_SUCCESS,
        )
        _print_deployment_info(create_result.deployment, output)
    except BentoMLException as e:
        _echo(
            f'Failed to create Azure Functions deployment {name}. {str(e)}',
            CLI_COLOR_ERROR,
        )
def apply(deployment_yaml, output, wait):
    """Apply a deployment spec (create-or-update) from a parsed YAML dict.

    Tracks analytics events, optionally waits for the deployment to reach a
    terminal state, and prints the resulting deployment info.

    :param deployment_yaml: parsed deployment spec dict (keys: name,
        namespace, spec.operator, ...)
    :param output: output format passed through to _print_deployment_info
    :param wait: when truthy, poll until the apply action completes
    """
    track_cli('deploy-apply', deployment_yaml.get('spec', {}).get('operator'))
    try:
        yatai_client = YataiClient()
        result = yatai_client.deployment.apply(deployment_yaml)
        if result.status.status_code != status_pb2.Status.OK:
            _echo(
                'Failed to apply deployment {name}. '
                '{error_code}:{error_message}'.format(
                    name=deployment_yaml.get('name'),
                    error_code=status_pb2.Status.Code.Name(
                        result.status.status_code),
                    error_message=result.status.error_message,
                ),
                CLI_COLOR_ERROR,
            )
        else:
            if wait:
                # Block until the apply action finishes, then fetch the
                # resulting state so the printed info is up to date.
                result_state = get_state_after_await_action_complete(
                    yatai_client=yatai_client,
                    name=deployment_yaml.get('name'),
                    namespace=deployment_yaml.get('namespace'),
                    message='Applying deployment',
                )
                if result_state.status.status_code != status_pb2.Status.OK:
                    # The apply itself succeeded; only the status refresh
                    # failed, so report and bail out without printing info.
                    _echo(
                        'Created deployment {name}, failed to retrieve latest'
                        ' status. {error_code}:{error_message}'.format(
                            name=deployment_yaml.get('name'),
                            error_code=status_pb2.Status.Code.Name(
                                result_state.status.status_code),
                            error_message=result_state.status.error_message,
                        ))
                    return
                # Merge the awaited state into the apply result record.
                result.deployment.state.CopyFrom(result_state.state)
            track_cli(
                'deploy-apply-success',
                deployment_yaml.get('spec', {}).get('operator'),
            )
            _echo(
                'Successfully applied spec to deployment {}'.format(
                    deployment_yaml.get('name')),
                CLI_COLOR_SUCCESS,
            )
            _print_deployment_info(result.deployment, output)
    except BentoMLException as e:
        _echo(
            'Failed to apply deployment {name}. Error message: {message}'.format(
                name=deployment_yaml.get('name'), message=e))
def test_dangerously_delete(example_bento_service_class):
    """Deleting a bento succeeds, and re-saving the same name:version followed
    by a second delete also succeeds."""
    yc = YataiClient()
    svc = example_bento_service_class()
    svc.set_version(version_str=f"test_{uuid.uuid4().hex}")
    svc.save()

    first_delete = yc.repository.dangerously_delete_bento(svc.name, svc.version)
    assert first_delete.status.status_code == 0

    logger.debug('Saving the bento service again with the same name:version')
    svc.save()
    second_delete = yc.repository.dangerously_delete_bento(
        svc.name, svc.version)
    assert second_delete.status.status_code == 0
def test_pytorch_artifact_pack(pytorch_classifier_class):
    """Pytorch model predictions must survive a save/load round trip."""
    svc = pytorch_classifier_class()
    svc.pack('model', PytorchModel())
    assert svc.predict(test_df) == 5.0, 'Run inference before save the artifact'

    bundle_path = svc.save()
    restored = bentoml.load(bundle_path)
    assert restored.predict(test_df) == 5.0, 'Run inference from saved artifact'

    # clean up saved bundle
    yc = YataiClient()
    yc.repository.delete(f'{svc.name}:{svc.version}')
def test_gluon_artifact_pack(gluon_classifier, trained_gluon_model):
    """Gluon model predictions must survive a save/load round trip."""
    gluon_classifier.pack('model', trained_gluon_model)
    assert gluon_classifier.predict([0]) == [0]

    bundle_path = gluon_classifier.save()
    restored = bentoml.load(bundle_path)
    assert restored.predict([0]) == [0]

    # clean up saved bundle
    yc = YataiClient()
    yc.repository.delete(f'{gluon_classifier.name}:{gluon_classifier.version}')
def list_deployment(namespace, limit, labels, order_by, asc, output):
    """List AWS Sagemaker deployments matching the given filters."""
    yatai_client = YataiClient()
    listing = yatai_client.deployment.list_sagemaker_deployments(
        limit=limit,
        labels_query=labels,
        namespace=namespace,
        order_by=order_by,
        ascending_order=asc,
    )
    if listing.status.status_code != status_pb2.Status.OK:
        error_code, error_message = status_pb_to_error_code_and_message(
            listing.status)
        raise CLIException(f'{error_code}:{error_message}')
    _print_deployments_info(listing.deployments, output)
def test_onnx_model_artifact_pack_modelproto_with_onnxruntime_backend(
    onnx_iris_classifier_class, sklearn_onnx_model
):
    """ONNX ModelProto predictions must survive a save/load round trip."""
    svc = onnx_iris_classifier_class()
    svc.pack('model', sklearn_onnx_model)
    assert svc.predict(test_df)[0] == [1], "Run inference before saving onnx artifact"

    bundle_path = svc.save()
    restored = bentoml.load(bundle_path)
    assert restored.predict(test_df)[0] == [1], 'Run inference after save onnx model'

    # clean up saved bundle
    yc = YataiClient()
    yc.repository.dangerously_delete_bento(svc.name, svc.version)
def get(name, namespace, output):
    """Verify a deployment is describable, then fetch and print its record."""
    yatai_client = YataiClient()
    described = yatai_client.deployment.describe(namespace, name)
    if described.status.status_code != status_pb2.Status.OK:
        error_code, error_message = status_pb_to_error_code_and_message(
            described.status)
        raise CLIException(f'{error_code}:{error_message}')
    fetched = yatai_client.deployment.get(namespace, name)
    if fetched.status.status_code != status_pb2.Status.OK:
        error_code, error_message = status_pb_to_error_code_and_message(
            fetched.status)
        raise CLIException(f'{error_code}:{error_message}')
    _print_deployment_info(fetched.deployment, output)
def update(
    name,
    namespace,
    bento,
    api_name,
    instance_type,
    instance_count,
    num_of_gunicorn_workers_per_instance,
    timeout,
    output,
    wait,
):
    """Update an AWS Sagemaker deployment.

    :param bento: optional ``name:version`` tag; when omitted the deployment
        keeps its current bento.
    """
    yatai_client = YataiClient()
    track_cli('deploy-update', PLATFORM_NAME)
    if bento:
        bento_name, bento_version = bento.split(':')
    else:
        bento_name = None
        bento_version = None
    try:
        with Spinner('Updating Sagemaker deployment '):
            result = yatai_client.deployment.update_sagemaker_deployment(
                namespace=namespace,
                deployment_name=name,
                bento_name=bento_name,
                bento_version=bento_version,
                instance_count=instance_count,
                instance_type=instance_type,
                num_of_gunicorn_workers_per_instance=num_of_gunicorn_workers_per_instance,  # noqa E501
                timeout=timeout,
                api_name=api_name,
                wait=wait,
            )
            if result.status.status_code != status_pb2.Status.OK:
                error_code, error_message = status_pb_to_error_code_and_message(
                    result.status)
                # BUG FIX: previously fell through after the failure echo,
                # tracking 'deploy-update-success' and printing the success
                # message even though the update failed. Also echo the
                # failure in the error color like the sibling commands.
                _echo(
                    f'Failed to update AWS Sagemaker deployment {name}.'
                    f'{error_code}:{error_message}',
                    CLI_COLOR_ERROR,
                )
                return
        track_cli('deploy-update-success', PLATFORM_NAME)
        _echo(
            f'Successfully updated AWS Sagemaker deployment {name}',
            CLI_COLOR_SUCCESS,
        )
        _print_deployment_info(result.deployment, output)
    except BentoMLException as e:
        _echo(
            f'Failed to update AWS Sagemaker deployment {name}: {str(e)}',
            CLI_COLOR_ERROR,
        )