Code Example #1
File: aws_ec2.py  Project: Nikunj3masarani/bentoml
 def deploy(
     name,
     bento,
     namespace,
     region,
     min_size,
     desired_capacity,
     max_size,
     instance_type,
     ami_id,
     output,
     wait,
 ):
     yatai_client = get_default_yatai_client()
     bento_name, bento_version = bento.split(":")
     with Spinner(f"Deploying {bento} to AWS EC2"):
         result = yatai_client.deployment.create_ec2_deployment(
             name=name,
             namespace=namespace,
             bento_name=bento_name,
             bento_version=bento_version,
             region=region,
             min_size=min_size,
             desired_capacity=desired_capacity,
             max_size=max_size,
             instance_type=instance_type,
             ami_id=ami_id,
             wait=wait,
         )
     if result.status.status_code != yatai_proto.status_pb2.Status.OK:
         error_code, error_message = status_pb_to_error_code_and_message(
             result.status)
         raise CLIException(f'{error_code}:{error_message}')
     _print_deployment_info(result.deployment, output)
     _echo("Successfully created AWS EC2 deployment", CLI_COLOR_SUCCESS)
Code Example #2
 def create(deployment_yaml, output, wait):
     yatai_client = YataiClient()
     platform_name = deployment_yaml.get('spec', {}).get('operator')
     deployment_name = deployment_yaml.get('name')
     track_cli('deploy-create', platform_name)
     try:
         with Spinner('Creating deployment '):
             result = yatai_client.deployment.create(deployment_yaml, wait)
         if result.status.status_code != status_pb2.Status.OK:
             error_code, error_message = status_pb_to_error_code_and_message(
                 result.status
             )
             track_cli(
                 'deploy-create-failure',
                 platform_name,
                 {'error_code': error_code, 'error_message': error_message},
             )
             _echo(
                 f'Failed to create deployment {deployment_name} '
                 f'{error_code}:{error_message}',
                 CLI_COLOR_ERROR,
             )
             return
         track_cli('deploy-create-success', platform_name)
         _echo(
             f'Successfully created deployment {deployment_name}', CLI_COLOR_SUCCESS,
         )
         _print_deployment_info(result.deployment, output)
     except BentoMLException as e:
         _echo(
             f'Failed to create deployment {deployment_name} {str(e)}',
             CLI_COLOR_ERROR,
         )
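For context, the create and apply handlers shown here read only two fields from the parsed deployment YAML directly: the deployment name and the operator name under spec. A hypothetical minimal structure, written as the Python dict these handlers receive, is sketched below; the concrete values are illustrative, and a real deployment spec carries additional operator-specific configuration that yatai_client.deployment.create/apply consume.

# Illustrative only: the shape that create()/apply() above access directly.
deployment_yaml = {
    "name": "my-deployment",        # read via deployment_yaml.get('name')
    "spec": {
        "operator": "aws-lambda",   # read via .get('spec', {}).get('operator')
        # ...operator-specific settings are passed through to the Yatai client
    },
}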
Code Example #3
File: aws_lambda.py  Project: subhayuroy/BentoML
 def update(name, namespace, bento, memory_size, timeout, output, wait):
     _echo(
         message=
         'AWS Lambda deployment functionalities are being migrated to a '
         'separate tool and related CLI commands will be deprecated in BentoML '
         'itself, please use https://github.com/bentoml/aws-lambda-deploy '
         'going forward.',
         color='yellow',
     )
     yatai_client = get_default_yatai_client()
     if bento:
         bento_name, bento_version = bento.split(':')
     else:
         bento_name = None
         bento_version = None
     with Spinner('Updating Lambda deployment '):
         result = yatai_client.deployment.update_lambda_deployment(
             bento_name=bento_name,
             bento_version=bento_version,
             deployment_name=name,
             namespace=namespace,
             memory_size=memory_size,
             timeout=timeout,
             wait=wait,
         )
     if result.status.status_code != yatai_proto.status_pb2.Status.OK:
         error_code, error_message = status_pb_to_error_code_and_message(
             result.status)
         raise CLIException(f'{error_code}:{error_message}')
     _echo(
         f'Successfully updated AWS Lambda deployment {name}',
         CLI_COLOR_SUCCESS,
     )
     _print_deployment_info(result.deployment, output)
Code Example #4
File: aws_lambda.py  Project: KarimSamehSadek/BentoML
 def update(name, namespace, bento, memory_size, timeout, output, wait):
     yatai_client = YataiClient()
     if bento:
         bento_name, bento_version = bento.split(':')
     else:
         bento_name = None
         bento_version = None
     with Spinner('Updating Lambda deployment '):
         result = yatai_client.deployment.update_lambda_deployment(
             bento_name=bento_name,
             bento_version=bento_version,
             deployment_name=name,
             namespace=namespace,
             memory_size=memory_size,
             timeout=timeout,
             wait=wait,
         )
     if result.status.status_code != status_pb2.Status.OK:
         error_code, error_message = status_pb_to_error_code_and_message(
             result.status)
         raise CLIException(f'{error_code}:{error_message}')
     _echo(
         f'Successfully updated AWS Lambda deployment {name}',
         CLI_COLOR_SUCCESS,
     )
     _print_deployment_info(result.deployment, output)
Code Example #5
 def update(name, namespace, bento, min_instances, max_burst,
            premium_plan_sku, output, wait):
     yatai_client = get_default_yatai_client()
     if bento:
         bento_name, bento_version = bento.split(':')
     else:
         bento_name = None
         bento_version = None
     with Spinner(f'Updating Azure Functions deployment {name}'):
         result = yatai_client.deployment.update_azure_functions_deployment(
             namespace=namespace,
             deployment_name=name,
             bento_name=bento_name,
             bento_version=bento_version,
             min_instances=min_instances,
             max_burst=max_burst,
             premium_plan_sku=premium_plan_sku,
             wait=wait,
         )
         if result.status.status_code != yatai_proto.status_pb2.Status.OK:
             error_code, error_message = status_pb_to_error_code_and_message(
                 result.status)
             raise CLIException(f'{error_code}:{error_message}')
         _echo(
             f'Successfully updated Azure Functions deployment {name}',
             CLI_COLOR_SUCCESS,
         )
         _print_deployment_info(result.deployment, output)
Code Example #6
File: aws_lambda.py  Project: KarimSamehSadek/BentoML
 def deploy(
     name,
     namespace,
     bento,
     labels,
     region,
     api_name,
     memory_size,
     timeout,
     output,
     wait,
 ):
     yatai_client = YataiClient()
     bento_name, bento_version = bento.split(':')
     with Spinner(f'Deploying "{bento}" to AWS Lambda '):
         result = yatai_client.deployment.create_lambda_deployment(
             name=name,
             namespace=namespace,
             bento_name=bento_name,
             bento_version=bento_version,
             api_name=api_name,
             region=region,
             memory_size=memory_size,
             timeout=timeout,
             labels=labels,
             wait=wait,
         )
     if result.status.status_code != status_pb2.Status.OK:
         error_code, error_message = status_pb_to_error_code_and_message(
             result.status)
         raise CLIException(f'{error_code}:{error_message}')
     _echo(f'Successfully created AWS Lambda deployment {name}',
           CLI_COLOR_SUCCESS)
     _print_deployment_info(result.deployment, output)
Code Example #7
 def deploy(
     name,
     namespace,
     bento,
     labels,
     region,
     api_name,
     memory_size,
     timeout,
     output,
     wait,
 ):
     track_cli('deploy-create', PLATFORM_NAME)
     yatai_client = YataiClient()
     bento_name, bento_version = bento.split(':')
     try:
         with Spinner(f'Deploying "{bento}" to AWS Lambda '):
             result = yatai_client.deployment.create_lambda_deployment(
                 name=name,
                 namespace=namespace,
                 bento_name=bento_name,
                 bento_version=bento_version,
                 api_name=api_name,
                 region=region,
                 memory_size=memory_size,
                 timeout=timeout,
                 labels=labels,
                 wait=wait,
             )
         if result.status.status_code != status_pb2.Status.OK:
             error_code, error_message = status_pb_to_error_code_and_message(
                 result.status)
             track_cli(
                 'deploy-create-failure',
                 PLATFORM_NAME,
                 {
                     'error_code': error_code,
                     'error_message': error_message
                 },
             )
             _echo(
                 f'Failed to create AWS Lambda deployment {name} '
                 f'{error_code}:{error_message}',
                 CLI_COLOR_ERROR,
             )
             return
         track_cli('deploy-create-success', PLATFORM_NAME)
         _echo(f'Successfully created AWS Lambda deployment {name}',
               CLI_COLOR_SUCCESS)
         _print_deployment_info(result.deployment, output)
     except BentoMLException as e:
         track_cli('deploy-create-failure', PLATFORM_NAME,
                   {'error_message': str(e)})
         _echo(
             f'Failed to create AWS Lambda deployment {name} {str(e)}',
             CLI_COLOR_ERROR,
         )
Code Example #8
 def deploy(
     name,
     bento,
     namespace,
     labels,
     region,
     instance_type,
     instance_count,
     num_of_gunicorn_workers_per_instance,
     api_name,
     timeout,
     output,
     wait,
 ):
     # use the DeploymentOperator name in proto to be consistent with amplitude
     track_cli('deploy-create', PLATFORM_NAME)
     bento_name, bento_version = bento.split(':')
     yatai_client = YataiClient()
     try:
         with Spinner('Deploying Sagemaker deployment '):
             result = yatai_client.deployment.create_sagemaker_deployment(
                 name=name,
                 namespace=namespace,
                 labels=labels,
                 bento_name=bento_name,
                 bento_version=bento_version,
                 instance_count=instance_count,
                 instance_type=instance_type,
                 num_of_gunicorn_workers_per_instance=
                 num_of_gunicorn_workers_per_instance,  # noqa E501
                 api_name=api_name,
                 timeout=timeout,
                 region=region,
                 wait=wait,
             )
         if result.status.status_code != status_pb2.Status.OK:
             error_code, error_message = status_pb_to_error_code_and_message(
                 result.status)
             _echo(
                 f'Failed to create AWS Sagemaker deployment {name} '
                 f'{error_code}:{error_message}',
                 CLI_COLOR_ERROR,
             )
             return
         track_cli('deploy-create-success', PLATFORM_NAME)
         _echo(
             f'Successfully created AWS Sagemaker deployment {name}',
             CLI_COLOR_SUCCESS,
         )
         _print_deployment_info(result.deployment, output)
     except BentoMLException as e:
         _echo(
             f'Failed to create AWS Sagemaker deployment {name}: {str(e)}',
             CLI_COLOR_ERROR,
         )
Code Example #9
    def containerize(bento, push, tag, build_arg, yatai_url):
        """Containerize specified BentoService.

        BENTO is the target BentoService to be containerized, referenced by its name
        and version in format of name:version. For example: "iris_classifier:v1.2.0"

        `bentoml containerize` command also supports the use of the `latest` tag
        which will automatically use the last built version of your Bento.

        You can provide a tag for the image built by Bento using the
        `--tag` flag. Additionally, you can provide a `--push` flag,
        which will push the built image to the Docker repository specified by the
        image tag.

        You can also prefix the tag with a hostname for the repository you wish
        to push to.
        e.g. `bentoml containerize IrisClassifier:latest --push --tag
        repo-address.com:username/iris` would build a Docker image called
        `username/iris:latest` and push that to the Docker repository at
        repo-address.com.

        By default, the `containerize` command will use the current credentials
        provided by Docker daemon.
        """
        saved_bundle_path = resolve_bundle_path(bento,
                                                pip_installed_bundle_path,
                                                yatai_url)

        _echo(f"Found Bento: {saved_bundle_path}")

        # fmt: off
        bento_metadata: "BentoServiceMetadata" = load_bento_service_metadata(
            saved_bundle_path)  # noqa: E501
        # fmt: on

        bento_tag = f'{bento_metadata.name}:{bento_metadata.version}'
        yatai_client: "YataiClient" = get_yatai_client(yatai_url)
        docker_build_args = {}
        if build_arg:
            for arg in build_arg:
                key, value = arg.split("=", 1)
                docker_build_args[key] = value
        if yatai_url is not None:
            spinner_message = f'Sending containerize RPC to YataiService at {yatai_url}'
        else:
            spinner_message = (
                f'Containerizing {bento_tag} with local YataiService and docker '
                f'daemon from local environment')
        with Spinner(spinner_message):
            tag: str = yatai_client.repository.containerize(
                bento=bento_tag,
                tag=tag,
                build_args=docker_build_args,
                push=push,
            )
            _echo(f'\nBuild container image: {tag}', CLI_COLOR_SUCCESS)
Code Example #10
File: aws_sagemaker.py  Project: subhayuroy/BentoML
 def deploy(
     name,
     bento,
     namespace,
     labels,
     region,
     instance_type,
     instance_count,
     num_of_gunicorn_workers_per_instance,
     api_name,
     timeout,
     output,
     wait,
     data_capture_s3_prefix,
     data_capture_sample_percent,
 ):
     _echo(
         message=
         'AWS Sagemaker deployment functionalities are being migrated to a '
         'separate tool and related CLI commands will be deprecated in BentoML '
         'itself, please use https://github.com/bentoml/aws-sagemaker-deploy '
         'going forward.',
         color='yellow',
     )
     # use the DeploymentOperator name in proto to be consistent with amplitude
     bento_name, bento_version = bento.split(':')
     yatai_client = get_default_yatai_client()
     with Spinner('Deploying Sagemaker deployment '):
         result = yatai_client.deployment.create_sagemaker_deployment(
             name=name,
             namespace=namespace,
             labels=labels,
             bento_name=bento_name,
             bento_version=bento_version,
             instance_count=instance_count,
             instance_type=instance_type,
             num_of_gunicorn_workers_per_instance=
             num_of_gunicorn_workers_per_instance,  # noqa E501
             api_name=api_name,
             timeout=timeout,
             region=region,
             wait=wait,
             data_capture_s3_prefix=data_capture_s3_prefix,
             data_capture_sample_percent=data_capture_sample_percent,
         )
     if result.status.status_code != yatai_proto.status_pb2.Status.OK:
         error_code, error_message = status_pb_to_error_code_and_message(
             result.status)
         raise CLIException(f'{error_code}:{error_message}')
     _echo(
         f'Successfully created AWS Sagemaker deployment {name}',
         CLI_COLOR_SUCCESS,
     )
     _print_deployment_info(result.deployment, output)
Code Example #11
 def deploy(
     namespace,
     name,
     bento,
     location,
     min_instances,
     max_burst,
     premium_plan_sku,
     labels,
     function_auth_level,
     output,
     wait,
 ):
     track_cli('deploy-create', PLATFORM_NAME)
     bento_name, bento_version = bento.split(':')
     yatai_client = YataiClient()
     try:
         with Spinner(f'Deploying {bento} to Azure Functions'):
             result = yatai_client.deployment.create_azure_functions_deployment(
                 name=name,
                 namespace=namespace,
                 labels=labels,
                 bento_name=bento_name,
                 bento_version=bento_version,
                 location=location,
                 min_instances=min_instances,
                 max_burst=max_burst,
                 premium_plan_sku=premium_plan_sku,
                 function_auth_level=function_auth_level,
                 wait=wait,
             )
             if result.status.status_code != status_pb2.Status.OK:
                 error_code, error_message = status_pb_to_error_code_and_message(
                     result.status
                 )
                 _echo(
                     f'Failed to create Azure Functions deployment {name} '
                     f'{error_code}:{error_message}',
                     CLI_COLOR_ERROR,
                 )
                 return
             track_cli('deploy-create-success', PLATFORM_NAME)
             _echo(
                 f'Successfully created Azure Functions deployment {name}',
                 CLI_COLOR_SUCCESS,
             )
             _print_deployment_info(result.deployment, output)
     except BentoMLException as e:
         _echo(
             f'Failed to create Azure Functions deployment {name}. {str(e)}',
             CLI_COLOR_ERROR,
         )
Code Example #12
 def update(
     name,
     namespace,
     bento,
     api_name,
     instance_type,
     instance_count,
     num_of_gunicorn_workers_per_instance,
     timeout,
     output,
     wait,
 ):
     yatai_client = YataiClient()
     track_cli('deploy-update', PLATFORM_NAME)
     if bento:
         bento_name, bento_version = bento.split(':')
     else:
         bento_name = None
         bento_version = None
     try:
         with Spinner('Updating Sagemaker deployment '):
             result = yatai_client.deployment.update_sagemaker_deployment(
                 namespace=namespace,
                 deployment_name=name,
                 bento_name=bento_name,
                 bento_version=bento_version,
                 instance_count=instance_count,
                 instance_type=instance_type,
                 num_of_gunicorn_workers_per_instance=
                 num_of_gunicorn_workers_per_instance,  # noqa E501
                 timeout=timeout,
                 api_name=api_name,
                 wait=wait,
             )
         if result.status.status_code != status_pb2.Status.OK:
             error_code, error_message = status_pb_to_error_code_and_message(
                 result.status)
             _echo(
                 f'Failed to update AWS Sagemaker deployment {name} '
                 f'{error_code}:{error_message}',
                 CLI_COLOR_ERROR,
             )
             return
         track_cli('deploy-update-success', PLATFORM_NAME)
         _echo(
             f'Successfully updated AWS Sagemaker deployment {name}',
             CLI_COLOR_SUCCESS,
         )
         _print_deployment_info(result.deployment, output)
     except BentoMLException as e:
         _echo(
             f'Failed to update AWS Sagemaker deployment {name}: {str(e)}',
             CLI_COLOR_ERROR,
         )
Code Example #13
File: deployment.py  Project: KarimSamehSadek/BentoML
 def apply(deployment_yaml, output, wait):
     deployment_name = deployment_yaml.get('name')
     yatai_client = YataiClient()
     with Spinner('Applying deployment'):
         result = yatai_client.deployment.apply(deployment_yaml, wait)
     if result.status.status_code != status_pb2.Status.OK:
         error_code, error_message = status_pb_to_error_code_and_message(
             result.status)
         raise CLIException(f'{error_code}:{error_message}')
     _echo(
         f'Successfully applied deployment {deployment_name}',
         CLI_COLOR_SUCCESS,
     )
     _print_deployment_info(result.deployment, output)
Code Example #14
 def create(deployment_yaml, output, wait):
     yatai_client = get_default_yatai_client()
     deployment_name = deployment_yaml.get('name')
     with Spinner('Creating deployment '):
         result = yatai_client.deployment.create(deployment_yaml, wait)
     if result.status.status_code != yatai_proto.status_pb2.Status.OK:
         error_code, error_message = status_pb_to_error_code_and_message(
             result.status)
         raise CLIException(f'{error_code}:{error_message}')
     _echo(
         f'Successfully created deployment {deployment_name}',
         CLI_COLOR_SUCCESS,
     )
     _print_deployment_info(result.deployment, output)
Code Example #15
 def update(name, namespace, bento, memory_size, timeout, output, wait):
     yatai_client = YataiClient()
     if bento:
         bento_name, bento_version = bento.split(':')
     else:
         bento_name = None
         bento_version = None
     try:
         with Spinner('Updating Lambda deployment '):
             result = yatai_client.deployment.update_lambda_deployment(
                 bento_name=bento_name,
                 bento_version=bento_version,
                 deployment_name=name,
                 namespace=namespace,
                 memory_size=memory_size,
                 timeout=timeout,
                 wait=wait,
             )
             if result.status.status_code != status_pb2.Status.OK:
                 error_code, error_message = status_pb_to_error_code_and_message(
                     result.status)
                 track_cli(
                     'deploy-update-failure',
                     PLATFORM_NAME,
                     {
                         'error_code': error_code,
                         'error_message': error_message
                     },
                 )
                 _echo(
                     f'Failed to update AWS Lambda deployment {name} '
                     f'{error_code}:{error_message}',
                     CLI_COLOR_ERROR,
                 )
                 return
             track_cli('deploy-update-success', PLATFORM_NAME)
             _echo(
                 f'Successfully updated AWS Lambda deployment {name}',
                 CLI_COLOR_SUCCESS,
             )
             _print_deployment_info(result.deployment, output)
     except BentoMLException as e:
         track_cli('deploy-update-failure', PLATFORM_NAME,
                   {'error_message': str(e)})
         _echo(
             f'Failed to update AWS Lambda deployment {name}: {str(e)}',
             CLI_COLOR_ERROR,
         )
Code Example #16
    def update(
        name,
        bento,
        namespace,
        min_size,
        desired_capacity,
        max_size,
        instance_type,
        ami_id,
        output,
        wait,
    ):
        _echo(
            message='AWS EC2 deployment functionalities are being migrated to a '
            'separate tool and related CLI commands will be deprecated in BentoML '
            'itself, please use https://github.com/bentoml/aws-ec2-deploy '
            'going forward.',
            color='yellow',
        )
        yatai_client = get_default_yatai_client()
        if bento:
            bento_name, bento_version = bento.split(":")
        else:
            bento_name = None
            bento_version = None

        with Spinner("Updating EC2 deployment"):
            update_result = yatai_client.deployment.update_ec2_deployment(
                deployment_name=name,
                bento_name=bento_name,
                bento_version=bento_version,
                namespace=namespace,
                min_size=min_size,
                desired_capacity=desired_capacity,
                max_size=max_size,
                instance_type=instance_type,
                ami_id=ami_id,
                wait=wait,
            )
            if update_result.status.status_code != yatai_proto.status_pb2.Status.OK:
                error_code, error_message = status_pb_to_error_code_and_message(
                    update_result.status)
                raise CLIException(f"{error_code}:{error_message}")

        _print_deployment_info(update_result.deployment, output)
        _echo(f"Successfiully updated AWS EC2 deployment '{name}'",
              CLI_COLOR_SUCCESS)
Code Example #17
 def deploy(
     name,
     bento,
     namespace,
     labels,
     region,
     instance_type,
     instance_count,
     num_of_gunicorn_workers_per_instance,
     api_name,
     timeout,
     output,
     wait,
     data_capture_s3_prefix,
     data_capture_sample_percent,
 ):
     # use the DeploymentOperator name in proto to be consistent with amplitude
     bento_name, bento_version = bento.split(':')
     yatai_client = get_default_yatai_client()
     with Spinner('Deploying Sagemaker deployment '):
         result = yatai_client.deployment.create_sagemaker_deployment(
             name=name,
             namespace=namespace,
             labels=labels,
             bento_name=bento_name,
             bento_version=bento_version,
             instance_count=instance_count,
             instance_type=instance_type,
             num_of_gunicorn_workers_per_instance=
             num_of_gunicorn_workers_per_instance,  # noqa E501
             api_name=api_name,
             timeout=timeout,
             region=region,
             wait=wait,
             data_capture_s3_prefix=data_capture_s3_prefix,
             data_capture_sample_percent=data_capture_sample_percent,
         )
     if result.status.status_code != yatai_proto.status_pb2.Status.OK:
         error_code, error_message = status_pb_to_error_code_and_message(
             result.status)
         raise CLIException(f'{error_code}:{error_message}')
     _echo(
         f'Successfully created AWS Sagemaker deployment {name}',
         CLI_COLOR_SUCCESS,
     )
     _print_deployment_info(result.deployment, output)
Code Example #18
File: azure_functions.py  Project: subhayuroy/BentoML
 def deploy(
     namespace,
     name,
     bento,
     location,
     min_instances,
     max_burst,
     premium_plan_sku,
     labels,
     function_auth_level,
     output,
     wait,
 ):
     _echo(
         message=
         'Azure Functions deployment functionalities are being migrated to '
         'a separate tool and related CLI commands will be deprecated in BentoML '
         'itself, please use https://github.com/bentoml/azure-functions-deploy '
         'going forward.',
         color='yellow',
     )
     bento_name, bento_version = bento.split(':')
     yatai_client = get_default_yatai_client()
     with Spinner(f'Deploying {bento} to Azure Functions'):
         result = yatai_client.deployment.create_azure_functions_deployment(
             name=name,
             namespace=namespace,
             labels=labels,
             bento_name=bento_name,
             bento_version=bento_version,
             location=location,
             min_instances=min_instances,
             max_burst=max_burst,
             premium_plan_sku=premium_plan_sku,
             function_auth_level=function_auth_level,
             wait=wait,
         )
         if result.status.status_code != yatai_proto.status_pb2.Status.OK:
             error_code, error_message = status_pb_to_error_code_and_message(
                 result.status)
             raise CLIException(f'{error_code}:{error_message}')
         _echo(
             f'Successfully created Azure Functions deployment {name}',
             CLI_COLOR_SUCCESS,
         )
         _print_deployment_info(result.deployment, output)
Code Example #19
 def update(
     name,
     namespace,
     bento,
     api_name,
     instance_type,
     instance_count,
     num_of_gunicorn_workers_per_instance,
     timeout,
     output,
     wait,
     data_capture_s3_prefix,
     data_capture_sample_percent,
 ):
     yatai_client = get_default_yatai_client()
     if bento:
         bento_name, bento_version = bento.split(':')
     else:
         bento_name = None
         bento_version = None
     with Spinner('Updating Sagemaker deployment '):
         result = yatai_client.deployment.update_sagemaker_deployment(
             namespace=namespace,
             deployment_name=name,
             bento_name=bento_name,
             bento_version=bento_version,
             instance_count=instance_count,
             instance_type=instance_type,
             num_of_gunicorn_workers_per_instance=
             num_of_gunicorn_workers_per_instance,  # noqa E501
             timeout=timeout,
             api_name=api_name,
             wait=wait,
             data_capture_s3_prefix=data_capture_s3_prefix,
             data_capture_sample_percent=data_capture_sample_percent,
         )
     if result.status.status_code != yatai_proto.status_pb2.Status.OK:
         error_code, error_message = status_pb_to_error_code_and_message(
             result.status)
         raise CLIException(f'{error_code}:{error_message}')
     _echo(
         f'Successfully updated AWS Sagemaker deployment {name}',
         CLI_COLOR_SUCCESS,
     )
     _print_deployment_info(result.deployment, output)
Code Example #20
def get_state_after_await_action_complete(yatai_service,
                                          name,
                                          namespace,
                                          message,
                                          timeout_limit=600,
                                          wait_time=5):
    start_time = time.time()

    with Spinner(message):
        while (time.time() - start_time) < timeout_limit:
            result = describe_deployment(namespace, name, yatai_service)
            if (result.status.status_code == status_pb2.Status.OK
                    and result.state.state is DeploymentState.PENDING):
                time.sleep(wait_time)
                continue
            else:
                break
    return result
Code Example #21
File: deployment.py  Project: sonicviz/BentoML
def get_state_after_await_action_complete(yatai_service,
                                          name,
                                          namespace,
                                          message,
                                          timeout_limit=600,
                                          wait_time=5):
    start_time = time.time()

    with Spinner(message):
        while (time.time() - start_time) < timeout_limit:
            result = yatai_service.DescribeDeployment(
                DescribeDeploymentRequest(deployment_name=name,
                                          namespace=namespace))
            if result.state.state is DeploymentState.PENDING:
                time.sleep(wait_time)
                continue
            else:
                break
    return result
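Both variants of get_state_after_await_action_complete implement the same poll-until-settled loop: re-query the deployment every wait_time seconds until its state leaves PENDING or timeout_limit seconds have elapsed. The sketch below isolates that pattern with a generic callback in place of the Yatai describe call; State and wait_until_settled are illustrative names, not BentoML APIs.

import time
from enum import Enum

class State(Enum):
    PENDING = 1
    RUNNING = 2
    FAILED = 3

def wait_until_settled(get_state, timeout_limit=600, wait_time=5):
    # Poll get_state() every wait_time seconds until it stops returning
    # State.PENDING or timeout_limit seconds have elapsed; return the last state.
    start_time = time.time()
    state = get_state()
    while state is State.PENDING and (time.time() - start_time) < timeout_limit:
        time.sleep(wait_time)
        state = get_state()
    return state

# Usage sketch: a fake deployment that becomes RUNNING on the third check.
states = iter([State.PENDING, State.PENDING, State.RUNNING])
print(wait_until_settled(lambda: next(states), wait_time=0))  # State.RUNNING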
Code Example #22
 def deploy(
     name,
     bento,
     namespace,
     region,
     min_size,
     desired_capacity,
     max_size,
     instance_type,
     ami_id,
     output,
     wait,
 ):
     _echo(
         message='AWS EC2 deployment functionalities are being migrated to a '
         'separate tool and related CLI commands will be deprecated in BentoML '
         'itself, please use https://github.com/bentoml/aws-ec2-deploy '
         'going forward.',
         color='yellow',
     )
     yatai_client = get_default_yatai_client()
     bento_name, bento_version = bento.split(":")
     with Spinner(f"Deploying {bento} to AWS EC2"):
         result = yatai_client.deployment.create_ec2_deployment(
             name=name,
             namespace=namespace,
             bento_name=bento_name,
             bento_version=bento_version,
             region=region,
             min_size=min_size,
             desired_capacity=desired_capacity,
             max_size=max_size,
             instance_type=instance_type,
             ami_id=ami_id,
             wait=wait,
         )
     if result.status.status_code != yatai_proto.status_pb2.Status.OK:
         error_code, error_message = status_pb_to_error_code_and_message(
             result.status)
         raise CLIException(f'{error_code}:{error_message}')
     _print_deployment_info(result.deployment, output)
     _echo("Successfully created AWS EC2 deployment", CLI_COLOR_SUCCESS)
Code Example #23
 def apply(deployment_yaml, output, wait):
     track_cli('deploy-apply',
               deployment_yaml.get('spec', {}).get('operator'))
     platform_name = deployment_yaml.get('spec', {}).get('operator')
     deployment_name = deployment_yaml.get('name')
     try:
         yatai_client = YataiClient()
         with Spinner('Applying deployment'):
             result = yatai_client.deployment.apply(deployment_yaml, wait)
         if result.status.status_code != status_pb2.Status.OK:
             error_code, error_message = status_pb_to_error_code_and_message(
                 result.status)
             track_cli(
                 'deploy-apply-failure',
                 platform_name,
                 {
                     'error_code': error_code,
                     'error_message': error_message
                 },
             )
             _echo(
                 f'Failed to apply deployment {deployment_name} '
                 f'{error_code}:{error_message}',
                 CLI_COLOR_ERROR,
             )
             return
         track_cli('deploy-create-success', platform_name)
         _echo(
             f'Successfully applied deployment {deployment_name}',
             CLI_COLOR_SUCCESS,
         )
         _print_deployment_info(result.deployment, output)
     except BentoMLException as e:
         track_cli(
             'deploy-apply-failure',
             platform_name,
             {'error_message': str(e)},
         )
         _echo(
             f'Failed to apply deployment {deployment_name}. Error message: {str(e)}',
             CLI_COLOR_ERROR,
         )
Code Example #24
 def update(
     name, namespace, bento, min_instances, max_burst, premium_plan_sku, output, wait
 ):
     yatai_client = YataiClient()
     track_cli('deploy-update', PLATFORM_NAME)
     if bento:
         bento_name, bento_version = bento.split(':')
     else:
         bento_name = None
         bento_version = None
     try:
         with Spinner(f'Updating Azure Functions deployment {name}'):
             result = yatai_client.deployment.update_azure_functions_deployment(
                 namespace=namespace,
                 deployment_name=name,
                 bento_name=bento_name,
                 bento_version=bento_version,
                 min_instances=min_instances,
                 max_burst=max_burst,
                 premium_plan_sku=premium_plan_sku,
                 wait=wait,
             )
             if result.status.status_code != status_pb2.Status.OK:
                 error_code, error_message = status_pb_to_error_code_and_message(
                     result.status
                 )
                 _echo(
                     f'Failed to update Azure Functions deployment {name}. '
                     f'{error_code}:{error_message}',
                     CLI_COLOR_ERROR,
                 )
                 return
             track_cli('deploy-update-success', PLATFORM_NAME)
             _echo(
                 f'Successfully updated Azure Functions deployment {name}',
                 CLI_COLOR_SUCCESS,
             )
             _print_deployment_info(result.deployment, output)
     except BentoMLException as e:
         _echo(
             f'Failed to update Azure Functions deployment {name}: {str(e)}',
             CLI_COLOR_ERROR,
         )
Code Example #25
File: aws_ec2.py  Project: Nikunj3masarani/bentoml
    def update(
        name,
        bento,
        namespace,
        min_size,
        desired_capacity,
        max_size,
        instance_type,
        ami_id,
        output,
        wait,
    ):
        yatai_client = get_default_yatai_client()
        if bento:
            bento_name, bento_version = bento.split(":")
        else:
            bento_name = None
            bento_version = None

        with Spinner("Updating EC2 deployment"):
            update_result = yatai_client.deployment.update_ec2_deployment(
                deployment_name=name,
                bento_name=bento_name,
                bento_version=bento_version,
                namespace=namespace,
                min_size=min_size,
                desired_capacity=desired_capacity,
                max_size=max_size,
                instance_type=instance_type,
                ami_id=ami_id,
                wait=wait,
            )
            if update_result.status.status_code != yatai_proto.status_pb2.Status.OK:
                error_code, error_message = status_pb_to_error_code_and_message(
                    update_result.status)
                raise CLIException(f"{error_code}:{error_message}")

        _print_deployment_info(update_result.deployment, output)
        _echo(f"Successfiully updated AWS EC2 deployment '{name}'",
              CLI_COLOR_SUCCESS)
Code Example #26
 def deploy(
     namespace,
     name,
     bento,
     location,
     min_instances,
     max_burst,
     premium_plan_sku,
     labels,
     function_auth_level,
     output,
     wait,
 ):
     bento_name, bento_version = bento.split(':')
     yatai_client = YataiClient()
     with Spinner(f'Deploying {bento} to Azure Functions'):
         result = yatai_client.deployment.create_azure_functions_deployment(
             name=name,
             namespace=namespace,
             labels=labels,
             bento_name=bento_name,
             bento_version=bento_version,
             location=location,
             min_instances=min_instances,
             max_burst=max_burst,
             premium_plan_sku=premium_plan_sku,
             function_auth_level=function_auth_level,
             wait=wait,
         )
         if result.status.status_code != status_pb2.Status.OK:
             error_code, error_message = status_pb_to_error_code_and_message(
                 result.status
             )
             raise CLIException(f'{error_code}:{error_message}')
         _echo(
             f'Successfully created Azure Functions deployment {name}',
             CLI_COLOR_SUCCESS,
         )
         _print_deployment_info(result.deployment, output)
Code Example #27
    def containerize(bento, push, tag, username, password):
        """Containerize specified BentoService.

        BENTO is the target BentoService to be containerized, referenced by its name
        and version in format of name:version. For example: "iris_classifier:v1.2.0"

        `bentoml containerize` command also supports the use of the `latest` tag
        which will automatically use the last built version of your Bento.

        You can provide a tag for the image built by Bento using the
        `--docker-image-tag` flag. Additionally, you can provide a `--push` flag,
        which will push the built image to the Docker repository specified by the
        image tag.

        You can also prefix the tag with a hostname for the repository you wish
        to push to.
        e.g. `bentoml containerize IrisClassifier:latest --push --tag username/iris`
        would build a Docker image called `username/iris:latest` and push that to
        Docker Hub.

        By default, the `containerize` command will use the credentials provided by
        Docker. You may provide your own through `--username` and `--password`.
        """
        bento_service_bundle_path = resolve_bundle_path(
            bento, pip_installed_bundle_path
        )

        _echo(f"Found Bento: {bento_service_bundle_path}")

        name, version = get_name_version_from_tag(bento)
        # use tag name and version where applicable
        if tag is not None:
            name, v = get_name_version_from_tag(tag)
            if v is not None:
                version = v

        name, version = make_bento_name_docker_compatible(name, version)

        full_tag = f"{name}:{version}"
        if full_tag != bento:
            _echo(
                f'Bento tag was changed to be Docker compatible. \n'
                f'"{bento}" -> "{full_tag}"',
                CLI_COLOR_WARNING,
            )

        import docker

        docker_api = docker.APIClient()
        try:
            with Spinner(f"Building Docker image: {name}\n"):
                for line in _echo_docker_api_result(
                    docker_api.build(
                        path=bento_service_bundle_path, tag=full_tag, decode=True,
                    )
                ):
                    _echo(line)
        except docker.errors.APIError as error:
            raise CLIException(f'Could not build Docker image: {error}')

        _echo(
            f'Finished building {full_tag} from {bento}', CLI_COLOR_SUCCESS,
        )

        if push:
            auth_config_payload = (
                {"username": username, "password": password}
                if username or password
                else None
            )

            try:
                with Spinner(f"Pushing docker image to {full_tag}\n"):
                    for line in _echo_docker_api_result(
                        docker_api.push(
                            repository=name,
                            tag=version,
                            stream=True,
                            decode=True,
                            auth_config=auth_config_payload,
                        )
                    ):
                        _echo(line)
                _echo(
                    f'Pushed {full_tag} to {name}', CLI_COLOR_SUCCESS,
                )
            except (docker.errors.APIError, BentoMLException) as error:
                raise CLIException(f'Could not push Docker image: {error}')
Code Example #28
    def containerize(bento, push, tag, build_arg, username, password):
        """Containerize specified BentoService.

        BENTO is the target BentoService to be containerized, referenced by its name
        and version in format of name:version. For example: "iris_classifier:v1.2.0"

        `bentoml containerize` command also supports the use of the `latest` tag
        which will automatically use the last built version of your Bento.

        You can provide a tag for the image built by Bento using the
        `--docker-image-tag` flag. Additionally, you can provide a `--push` flag,
        which will push the built image to the Docker repository specified by the
        image tag.

        You can also prefix the tag with a hostname for the repository you wish
        to push to.
        e.g. `bentoml containerize IrisClassifier:latest --push --tag username/iris`
        would build a Docker image called `username/iris:latest` and push that to
        Docker Hub.

        By default, the `containerize` command will use the credentials provided by
        Docker. You may provide your own through `--username` and `--password`.
        """
        saved_bundle_path = resolve_bundle_path(bento,
                                                pip_installed_bundle_path)

        _echo(f"Found Bento: {saved_bundle_path}")

        bento_metadata = load_bento_service_metadata(saved_bundle_path)
        name = to_valid_docker_image_name(bento_metadata.name)
        version = to_valid_docker_image_version(bento_metadata.version)

        if not tag:
            _echo("Tag not specified, using tag parsed from "
                  f"BentoService: '{name}:{version}'")
            tag = f"{name}:{version}"
        if ":" not in tag:
            _echo(
                "Image version not specified, using version parsed "
                f"from BentoService: '{version}'",
                CLI_COLOR_WARNING,
            )
            tag = f"{tag}:{version}"

        docker_build_args = {}
        if build_arg:
            for arg in build_arg:
                key, value = arg.split("=")
                docker_build_args[key] = value

        import docker

        docker_api = docker.APIClient()
        try:
            with Spinner(f"Building Docker image {tag} from {bento} \n"):
                for line in echo_docker_api_result(
                        docker_api.build(
                            path=saved_bundle_path,
                            tag=tag,
                            decode=True,
                            buildargs=docker_build_args,
                        )):
                    _echo(line)
        except docker.errors.APIError as error:
            raise CLIException(f'Could not build Docker image: {error}')

        _echo(
            f'Finished building {tag} from {bento}',
            CLI_COLOR_SUCCESS,
        )

        if push:
            auth_config_payload = ({
                "username": username,
                "password": password
            } if username or password else None)

            try:
                with Spinner(f"Pushing docker image to {tag}\n"):
                    for line in echo_docker_api_result(
                            docker_api.push(
                                repository=tag,
                                stream=True,
                                decode=True,
                                auth_config=auth_config_payload,
                            )):
                        _echo(line)
                _echo(
                    f'Pushed {tag} to {name}',
                    CLI_COLOR_SUCCESS,
                )
            except (docker.errors.APIError, BentoMLException) as error:
                raise CLIException(f'Could not push Docker image: {error}')
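The --build-arg handling in examples #9 and #28 turns repeated KEY=VALUE options into a dict of Docker build arguments (passed as build_args to the Yatai containerize RPC in #9, and as buildargs to docker_api.build in #28). A small self-contained sketch of that parsing, splitting on the first '=' only so values may themselves contain '=':

def parse_build_args(build_arg):
    # Turn an iterable of 'KEY=VALUE' strings into a dict of Docker build args.
    docker_build_args = {}
    for arg in build_arg or ():
        key, value = arg.split("=", 1)  # split on the first '=' only
        docker_build_args[key] = value
    return docker_build_args

print(parse_build_args(["PYTHON_VERSION=3.8", "PIP_INDEX_URL=https://pypi.org/simple?x=1"]))
# {'PYTHON_VERSION': '3.8', 'PIP_INDEX_URL': 'https://pypi.org/simple?x=1'}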