Code Example #1
File: python_api.py  Project: krtk30/BentoML
def apply_deployment(deployment_info, yatai_service=None):
    if yatai_service is None:
        from bentoml.yatai import get_yatai_service

        yatai_service = get_yatai_service()

    try:
        if isinstance(deployment_info, dict):
            deployment_pb = deployment_dict_to_pb(deployment_info)
        elif isinstance(deployment_info, str):
            deployment_pb = deployment_yaml_string_to_pb(deployment_info)
        else:
            raise YataiDeploymentException(
                'Unexpected argument type, expect deployment info to be str in yaml '
                'format or a dict, instead got: {}'.format(
                    str(type(deployment_info))))

        validation_errors = validate_deployment_pb_schema(deployment_pb)
        if validation_errors:
            return ApplyDeploymentResponse(status=Status.INVALID_ARGUMENT(
                'Failed to validate deployment: {errors}'.format(
                    errors=validation_errors)))

        return yatai_service.ApplyDeployment(
            ApplyDeploymentRequest(deployment=deployment_pb))
    except BentoMLException as error:
        return ApplyDeploymentResponse(status=Status.INTERNAL(str(error)))
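Below is a minimal usage sketch for the standalone helper above. The YAML layout, the deployment name, and the bento name/version are illustrative assumptions rather than values taken from this page, and status_pb2 is assumed to be importable the same way as in the later snippets.

# Hypothetical caller of apply_deployment(); the field names in the YAML are assumptions.
deployment_yaml = """
name: my-deployment
namespace: dev
spec:
  bento_name: IrisClassifier
  bento_version: 0.1.0
  operator: AWS_LAMBDA
"""

response = apply_deployment(deployment_yaml)
if response.status.status_code != status_pb2.Status.OK:
    print('Apply failed: {}'.format(response.status.error_message))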
Code Example #2
    def apply(self, deployment_info, wait):
        if isinstance(deployment_info, dict):
            deployment_pb = deployment_dict_to_pb(deployment_info)
        elif isinstance(deployment_info, str):
            deployment_pb = deployment_yaml_string_to_pb(deployment_info)
        elif isinstance(deployment_info, Deployment):
            deployment_pb = deployment_info
        else:
            raise YataiDeploymentException(
                'Unexpected argument type, expect deployment info to be str in yaml '
                'format or a dict or a deployment protobuf obj, instead got: {}'
                .format(str(type(deployment_info))))

        validation_errors = validate_deployment_pb_schema(deployment_pb)
        if validation_errors:
            raise YataiDeploymentException(
                f'Failed to validate deployment {deployment_pb.name}: '
                f'{validation_errors}')

        apply_result = self.yatai_service.ApplyDeployment(
            ApplyDeploymentRequest(deployment=deployment_pb))
        if apply_result.status.status_code != status_pb2.Status.OK:
            error_code, error_message = status_pb_to_error_code_and_message(
                apply_result.status)
            raise YataiDeploymentException(f'{error_code}:{error_message}')
        if wait:
            self._wait_deployment_action_complete(deployment_pb.name,
                                                  deployment_pb.namespace)
        return self.get(namespace=deployment_pb.namespace,
                        name=deployment_pb.name)
Code Example #3
File: deployment_api.py  Project: zeuxion23/BentoML
    def create(self, deployment_info, wait):
        if isinstance(deployment_info, dict):
            deployment_pb = deployment_dict_to_pb(deployment_info)
        elif isinstance(deployment_info, str):
            deployment_pb = deployment_yaml_string_to_pb(deployment_info)
        elif isinstance(deployment_info, Deployment):
            deployment_pb = deployment_info
        else:
            raise YataiDeploymentException(
                'Unexpected argument type, expect deployment info to be str in yaml '
                'format or a dict or a deployment protobuf obj, instead got: {}'.format(
                    str(type(deployment_info))
                )
            )

        validation_errors = validate_deployment_pb_schema(deployment_pb)
        if validation_errors:
            raise YataiDeploymentException(
                f'Failed to validate deployment {deployment_pb.name}: '
                f'{validation_errors}'
            )

        # Make sure there is no active deployment with the same deployment name
        get_deployment_pb = self.yatai_service.GetDeployment(
            GetDeploymentRequest(
                deployment_name=deployment_pb.name, namespace=deployment_pb.namespace
            )
        )
        if get_deployment_pb.status.status_code != status_pb2.Status.NOT_FOUND:
            raise BentoMLException(
                f'Deployment "{deployment_pb.name}" already existed, use Update or '
                f'Apply for updating existing deployment, delete the deployment, '
                f'or use a different deployment name'
            )
        apply_result = self.yatai_service.ApplyDeployment(
            ApplyDeploymentRequest(deployment=deployment_pb)
        )
        if apply_result.status.status_code != status_pb2.Status.OK:
            error_code, error_message = status_pb_to_error_code_and_message(
                apply_result.status
            )
            raise YataiDeploymentException(f'{error_code}:{error_message}')
        if wait:
            self._wait_deployment_action_complete(
                deployment_pb.name, deployment_pb.namespace
            )
        return self.get(namespace=deployment_pb.namespace, name=deployment_pb.name)
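A hedged sketch of how a caller might drive create() above, contrasted with apply(): create() refuses to overwrite an existing deployment (it checks GetDeployment for NOT_FOUND first), while apply() updates it in place. The client object name and the dict fields below are illustrative assumptions, not taken from this page.

# `deployment_api` is a hypothetical instance of the class exposing create()/apply() above.
deployment_dict = {
    'name': 'my-deployment',
    'namespace': 'dev',
    'spec': {'bento_name': 'IrisClassifier', 'bento_version': '0.1.0'},
}

try:
    deployment = deployment_api.create(deployment_dict, wait=True)
except BentoMLException:
    # e.g. a deployment with the same name already exists; fall back to apply(),
    # which updates the existing deployment instead of rejecting it.
    deployment = deployment_api.apply(deployment_dict, wait=True)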
Code Example #4
    def apply(deployment_yaml, output, wait):
        track_cli('deploy-apply', deployment_yaml.get('spec', {}).get('operator'))
        try:
            deployment_pb = deployment_yaml_to_pb(deployment_yaml)
            yatai_service = get_yatai_service()
            result = yatai_service.ApplyDeployment(
                ApplyDeploymentRequest(deployment=deployment_pb)
            )
            if result.status.status_code != Status.OK:
                _echo(
                    'Failed to apply deployment {name}. code: {error_code}, message: '
                    '{error_message}'.format(
                        name=deployment_pb.name,
                        error_code=Status.Code.Name(result.status.status_code),
                        error_message=result.status.error_message,
                    ),
                    CLI_COLOR_ERROR,
                )
            else:
                if wait:
                    result_state = get_state_after_await_action_complete(
                        yatai_service=yatai_service,
                        name=deployment_pb.name,
                        namespace=deployment_pb.namespace,
                        message='Applying deployment...',
                    )
                    result.deployment.state.CopyFrom(result_state.state)

                _echo(
                    'Successfully applied spec to deployment {}'.format(
                        deployment_pb.name
                    ),
                    CLI_COLOR_SUCCESS,
                )
                display_deployment_info(result.deployment, output)
        except BentoMLException as e:
            # deployment_pb is not bound if deployment_yaml_to_pb itself raised,
            # so fall back to the name from the raw yaml dict
            _echo(
                'Failed to apply deployment {name}. Error message: {message}'.format(
                    name=deployment_yaml.get('name'), message=e
                ),
                CLI_COLOR_ERROR,
            )
Code Example #5
File: deployment_api.py  Project: yoavz/BentoML
    def apply_deployment(self, deployment_info):
        if isinstance(deployment_info, dict):
            deployment_pb = deployment_dict_to_pb(deployment_info)
        elif isinstance(deployment_info, str):
            deployment_pb = deployment_yaml_string_to_pb(deployment_info)
        elif isinstance(deployment_info, Deployment):
            deployment_pb = deployment_info
        else:
            raise YataiDeploymentException(
                'Unexpected argument type, expect deployment info to be str in yaml '
                'format or a dict or a deployment protobuf obj, instead got: {}'
                .format(str(type(deployment_info))))

        validation_errors = validate_deployment_pb_schema(deployment_pb)
        if validation_errors:
            return ApplyDeploymentResponse(status=Status.INVALID_ARGUMENT(
                'Failed to validate deployment: {errors}'.format(
                    errors=validation_errors)))

        return self.yatai_service.ApplyDeployment(
            ApplyDeploymentRequest(deployment=deployment_pb))
Code Example #6
File: deployment.py  Project: sonicviz/BentoML
    def create(
        name,
        bento,
        platform,
        output,
        namespace,
        labels,
        annotations,
        region,
        instance_type,
        instance_count,
        api_name,
        kube_namespace,
        replicas,
        service_name,
        service_type,
        wait,
    ):
        # converting platform parameter to DeploymentOperator name in proto
        # e.g. 'aws-lambda' to 'AWS_LAMBDA'
        platform = platform.replace('-', '_').upper()
        operator = DeploymentSpec.DeploymentOperator.Value(platform)

        track_cli('deploy-create', platform)

        yatai_service = get_yatai_service()

        # Make sure there is no active deployment with the same deployment name
        get_deployment = yatai_service.GetDeployment(
            GetDeploymentRequest(deployment_name=name, namespace=namespace))
        if get_deployment.status.status_code != Status.NOT_FOUND:
            raise BentoMLDeploymentException(
                'Deployment {name} already existed, please use update or apply command'
                ' instead'.format(name=name))

        if operator == DeploymentSpec.AWS_SAGEMAKER:
            if not api_name:
                raise click.BadParameter(
                    'api-name is required for Sagemaker deployment')

            sagemaker_operator_config = DeploymentSpec.SageMakerOperatorConfig(
                region=region or config().get('aws', 'default_region'),
                instance_count=instance_count
                or config().getint('sagemaker', 'instance_count'),
                instance_type=instance_type
                or config().get('sagemaker', 'instance_type'),
                api_name=api_name,
            )
            spec = DeploymentSpec(
                sagemaker_operator_config=sagemaker_operator_config)
        elif operator == DeploymentSpec.AWS_LAMBDA:
            aws_lambda_operator_config = DeploymentSpec.AwsLambdaOperatorConfig(
                region=region or config().get('aws', 'default_region'))
            if api_name:
                aws_lambda_operator_config.api_name = api_name
            spec = DeploymentSpec(
                aws_lambda_operator_config=aws_lambda_operator_config)
        elif operator == DeploymentSpec.GCP_FUNCTION:
            gcp_function_operator_config = DeploymentSpec.GcpFunctionOperatorConfig(
                region=region
                or config().get('google-cloud', 'default_region'))
            if api_name:
                gcp_function_operator_config.api_name = api_name
            spec = DeploymentSpec(
                gcp_function_operator_config=gcp_function_operator_config)
        elif operator == DeploymentSpec.KUBERNETES:
            kubernetes_operator_config = DeploymentSpec.KubernetesOperatorConfig(
                kube_namespace=kube_namespace,
                replicas=replicas,
                service_name=service_name,
                service_type=service_type,
            )
            spec = DeploymentSpec(
                kubernetes_operator_config=kubernetes_operator_config)
        else:
            raise BentoMLDeploymentException(
                'Custom deployment is not supported in the current version of BentoML'
            )

        bento_name, bento_version = bento.split(':')
        spec.bento_name = bento_name
        spec.bento_version = bento_version
        spec.operator = operator

        result = yatai_service.ApplyDeployment(
            ApplyDeploymentRequest(deployment=Deployment(
                namespace=namespace,
                name=name,
                annotations=parse_key_value_pairs(annotations),
                labels=parse_key_value_pairs(labels),
                spec=spec,
            )))
        if result.status.status_code != Status.OK:
            _echo(
                'Failed to create deployment {name}. {error_code}: '
                '{error_message}'.format(
                    name=name,
                    error_code=Status.Code.Name(result.status.status_code),
                    error_message=result.status.error_message,
                ),
                CLI_COLOR_ERROR,
            )
        else:
            if wait:
                result_state = get_state_after_await_action_complete(
                    yatai_service=yatai_service,
                    name=name,
                    namespace=namespace,
                    message='Creating deployment ',
                )
                result.deployment.state.CopyFrom(result_state.state)

            _echo('Successfully created deployment {}'.format(name),
                  CLI_COLOR_SUCCESS)
            display_deployment_info(result.deployment, output)
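A short note on the create command above: the bento argument is expected in name:version form (it is split on ':'), and the platform option is normalized into the protobuf DeploymentOperator enum name. A tiny illustration of that mapping; the sample values are assumptions.

# Mirrors the normalization at the top of create(); the values are illustrative only.
platform = 'aws-sagemaker'
operator_name = platform.replace('-', '_').upper()  # -> 'AWS_SAGEMAKER'
operator = DeploymentSpec.DeploymentOperator.Value(operator_name)

bento = 'IrisClassifier:0.1.0'
bento_name, bento_version = bento.split(':')  # -> ('IrisClassifier', '0.1.0')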
Code Example #7
    def apply(
        bento,
        deployment_name,
        platform,
        output,
        namespace,
        labels,
        annotations,
        region,
        stage,
        instance_type,
        instance_count,
        api_name,
        kube_namespace,
        replicas,
        service_name,
        service_type,
    ):
        track_cli('deploy-apply', platform)

        bento_name, bento_version = bento.split(':')
        spec = DeploymentSpec(
            bento_name=bento_name,
            bento_version=bento_version,
            operator=get_deployment_operator_type(platform),
        )
        if platform == 'aws_sagemaker':
            # Composite protobuf fields cannot be assigned directly, so CopyFrom is used
            spec.sagemaker_operator_config.CopyFrom(
                DeploymentSpec.SageMakerOperatorConfig(
                    region=region,
                    instance_count=instance_count,
                    instance_type=instance_type,
                    api_name=api_name,
                )
            )
        elif platform == 'aws_lambda':
            spec.aws_lambda_operator_config.CopyFrom(
                DeploymentSpec.AwsLambdaOperatorConfig(region=region, stage=stage)
            )
        elif platform == 'gcp_function':
            spec.gcp_function_operator_config.CopyFrom(
                DeploymentSpec.GcpFunctionOperatorConfig(region=region, stage=stage)
            )
        elif platform == 'kubernetes':
            spec.kubernetes_operator_config.CopyFrom(
                DeploymentSpec.KubernetesOperatorConfig(
                    kube_namespace=kube_namespace,
                    replicas=replicas,
                    service_name=service_name,
                    service_type=service_type,
                )
            )
        else:
            raise BentoMLDeploymentException(
                'Custom deployment is not supported in current version of BentoML'
            )

        result = get_yatai_service().ApplyDeployment(
            ApplyDeploymentRequest(deployment=Deployment(
                namespace=namespace,
                name=deployment_name,
                annotations=parse_key_value_pairs(annotations),
                labels=parse_key_value_pairs(labels),
                spec=spec,
            )))
        if result.status.status_code != Status.OK:
            _echo(
                'Failed to apply deployment {name}. code: {error_code}, message: '
                '{error_message}'.format(
                    name=deployment_name,
                    error_code=Status.Code.Name(result.status.status_code),
                    error_message=result.status.error_message,
                ),
                CLI_COLOR_ERROR,
            )
        else:
            _echo(
                'Successfully applied deployment {}'.format(deployment_name),
                CLI_COLOR_SUCCESS,
            )
            display_deployment_info(result.deployment, output)