Example #1
    def _add(self, deployment_pb, bento_pb, bento_path):
        if loader._is_remote_path(bento_path):
            with loader._resolve_remote_bundle_path(bento_path) as local_path:
                return self._add(deployment_pb, bento_pb, local_path)

        deployment_spec = deployment_pb.spec
        lambda_deployment_config = deployment_spec.aws_lambda_operator_config
        bento_service_metadata = bento_pb.bento.bento_service_metadata
        lambda_s3_bucket = generate_aws_compatible_string(
            'btml-{namespace}-{name}-{random_string}'.format(
                namespace=deployment_pb.namespace,
                name=deployment_pb.name,
                random_string=uuid.uuid4().hex[:6].lower(),
            ))
        try:
            create_s3_bucket_if_not_exists(lambda_s3_bucket,
                                           lambda_deployment_config.region)
            _deploy_lambda_function(
                deployment_pb=deployment_pb,
                bento_service_metadata=bento_service_metadata,
                deployment_spec=deployment_spec,
                lambda_s3_bucket=lambda_s3_bucket,
                lambda_deployment_config=lambda_deployment_config,
                bento_path=bento_path,
            )
            return ApplyDeploymentResponse(status=Status.OK(),
                                           deployment=deployment_pb)
        except BentoMLException as error:
            if lambda_s3_bucket and lambda_deployment_config:
                cleanup_s3_bucket_if_exist(lambda_s3_bucket,
                                           lambda_deployment_config.region)
            raise error
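
Example #1 provisions a scratch S3 bucket for the Lambda bundle and tears it back down if the deployment fails. `create_s3_bucket_if_not_exists` and `cleanup_s3_bucket_if_exist` are BentoML-internal helpers; the sketch below is only an assumption of what they might wrap in plain boto3, not the library's actual implementation.

import boto3
from botocore.exceptions import ClientError


def create_s3_bucket_if_not_exists(bucket_name, region):
    # Hypothetical helper: create the bucket only if it is not already there.
    s3_client = boto3.client('s3', region)
    try:
        s3_client.head_bucket(Bucket=bucket_name)
    except ClientError:
        if region == 'us-east-1':
            # us-east-1 rejects an explicit LocationConstraint
            s3_client.create_bucket(Bucket=bucket_name)
        else:
            s3_client.create_bucket(
                Bucket=bucket_name,
                CreateBucketConfiguration={'LocationConstraint': region},
            )


def cleanup_s3_bucket_if_exist(bucket_name, region):
    # Hypothetical helper: empty and delete the bucket, ignoring errors if it
    # was never created in the first place.
    s3 = boto3.resource('s3', region)
    try:
        bucket = s3.Bucket(bucket_name)
        bucket.objects.all().delete()
        bucket.delete()
    except ClientError:
        pass
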
Example #2
    def _update(self, deployment_pb, current_deployment, bento_pb, bento_path):
        if loader._is_remote_path(bento_path):
            with loader._resolve_remote_bundle_path(bento_path) as local_path:
                return self._update(deployment_pb, current_deployment,
                                    bento_pb, local_path)
        updated_deployment_spec = deployment_pb.spec
        updated_lambda_deployment_config = (
            updated_deployment_spec.aws_lambda_operator_config)
        updated_bento_service_metadata = bento_pb.bento.bento_service_metadata
        describe_result = self.describe(deployment_pb)
        if describe_result.status.status_code != status_pb2.Status.OK:
            error_code, error_message = status_pb_to_error_code_and_message(
                describe_result.status)
            raise YataiDeploymentException(
                f'Failed to fetch current status of Lambda deployment - '
                f'{error_code}:{error_message}')
        latest_deployment_state = json.loads(describe_result.state.info_json)
        if 's3_bucket' in latest_deployment_state:
            lambda_s3_bucket = latest_deployment_state['s3_bucket']
        else:
            raise BentoMLException(
                'S3 Bucket is missing in the AWS Lambda deployment, please make sure '
                'it exists and try again')

        _deploy_lambda_function(
            deployment_pb=deployment_pb,
            bento_service_metadata=updated_bento_service_metadata,
            deployment_spec=updated_deployment_spec,
            lambda_s3_bucket=lambda_s3_bucket,
            lambda_deployment_config=updated_lambda_deployment_config,
            bento_path=bento_path,
        )

        return ApplyDeploymentResponse(deployment=deployment_pb,
                                       status=Status.OK())
Example #3
 def _add(self, deployment_pb, bento_pb, bento_path):
     if loader._is_remote_path(bento_path):
         with loader._resolve_remote_bundle_path(bento_path) as local_path:
             return self._add(deployment_pb, bento_pb, local_path)
     try:
         _deploy_azure_functions(
             deployment_spec=deployment_pb.spec,
             deployment_name=deployment_pb.name,
             namespace=deployment_pb.namespace,
             bento_pb=bento_pb,
             bento_path=bento_path,
         )
         return ApplyDeploymentResponse(status=Status.OK(),
                                        deployment=deployment_pb)
     except AzureServiceError as error:
         resource_group_name, _, _, _, _, = _generate_azure_resource_names(
             deployment_pb.namespace, deployment_pb.name)
         logger.debug(
             'Failed to create Azure Functions. Cleaning up Azure resources'
         )
         try:
             _call_az_cli(
                 command=[
                     'az',
                     'group',
                     'delete',
                     '-y',
                     '--name',
                     resource_group_name,
                 ],
                 message='delete Azure resource group',
             )
         except AzureServiceError:
             pass
         raise error
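
Example #3 rolls back a failed deployment by deleting the entire Azure resource group through the `az` CLI. `_call_az_cli` is another internal helper; a minimal sketch of such a wrapper, assuming it shells out with `subprocess` and raises the same `AzureServiceError` the caller already handles, could look like this.

import subprocess


class AzureServiceError(Exception):
    # Stand-in for the BentoML exception class caught in Example #3.
    pass


def _call_az_cli(command, message):
    # Run an `az ...` command; surface a non-zero exit code as AzureServiceError
    # so callers can retry, ignore, or report the failure uniformly.
    proc = subprocess.run(command, capture_output=True, text=True)
    if proc.returncode != 0:
        raise AzureServiceError(
            'Failed to {}: {}'.format(message, proc.stderr.strip()))
    return proc.stdout
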
Example #4
    def add(self, deployment_pb):
        try:
            deployment_spec = deployment_pb.spec
            deployment_spec.aws_lambda_operator_config.region = (
                deployment_spec.aws_lambda_operator_config.region
                or get_default_aws_region())
            if not deployment_spec.aws_lambda_operator_config.region:
                raise InvalidArgument('AWS region is missing')

            bento_pb = self.yatai_service.GetBento(
                GetBentoRequest(
                    bento_name=deployment_spec.bento_name,
                    bento_version=deployment_spec.bento_version,
                ))
            if bento_pb.bento.uri.type not in (BentoUri.LOCAL, BentoUri.S3):
                raise BentoMLException(
                    'BentoML currently does not support {} repository'.format(
                        BentoUri.StorageType.Name(bento_pb.bento.uri.type)))

            return self._add(deployment_pb, bento_pb, bento_pb.bento.uri.uri)
        except BentoMLException as error:
            deployment_pb.state.state = DeploymentState.ERROR
            deployment_pb.state.error_message = f'Error: {str(error)}'
            return ApplyDeploymentResponse(status=error.status_proto,
                                           deployment=deployment_pb)
Example #5
    def update(self, deployment_pb, previous_deployment):
        try:
            ensure_sam_available_or_raise()
            ensure_docker_available_or_raise()

            deployment_spec = deployment_pb.spec
            bento_pb = self.yatai_service.GetBento(
                GetBentoRequest(
                    bento_name=deployment_spec.bento_name,
                    bento_version=deployment_spec.bento_version,
                )
            )
            if bento_pb.bento.uri.type not in (BentoUri.LOCAL, BentoUri.S3):
                raise BentoMLException(
                    'BentoML currently does not support {} repository'.format(
                        BentoUri.StorageType.Name(bento_pb.bento.uri.type)
                    )
                )

            return self._update(
                deployment_pb, previous_deployment, bento_pb, bento_pb.bento.uri.uri
            )
        except BentoMLException as error:
            deployment_pb.state.state = DeploymentState.ERROR
            deployment_pb.state.error_message = f'Error: {str(error)}'
            return ApplyDeploymentResponse(
                status=error.status_proto, deployment=deployment_pb
            )
Example #6
    def update(self, deployment_pb, previous_deployment):
        try:
            ensure_sam_available_or_raise()
            ensure_docker_available_or_raise()
            deployment_spec = deployment_pb.spec
            ec2_deployment_config = deployment_spec.aws_ec2_operator_config
            ec2_deployment_config.region = (ec2_deployment_config.region
                                            or get_default_aws_region())
            if not ec2_deployment_config.region:
                raise InvalidArgument("AWS region is missing")

            bento_pb = self.yatai_service.GetBento(
                GetBentoRequest(
                    bento_name=deployment_spec.bento_name,
                    bento_version=deployment_spec.bento_version,
                ))

            if bento_pb.bento.uri.type not in (BentoUri.LOCAL, BentoUri.S3):
                raise BentoMLException(
                    "BentoML currently does not support {} repository".format(
                        BentoUri.StorageType.Name(bento_pb.bento.uri.type)))

            return self._update(
                deployment_pb,
                previous_deployment,
                bento_pb.bento.uri.uri,
                ec2_deployment_config.region,
            )
        except BentoMLException as error:
            deployment_pb.state.state = DeploymentState.ERROR
            deployment_pb.state.error_message = f"Error: {str(error)}"
            return ApplyDeploymentResponse(status=error.status_proto,
                                           deployment=deployment_pb)
Example #7
    def _add(self, deployment_pb, bento_pb, bento_path):
        try:
            if loader._is_remote_path(bento_path):
                with loader._resolve_remote_bundle_path(
                        bento_path) as local_path:
                    return self._add(deployment_pb, bento_pb, local_path)

            deployment_spec = deployment_pb.spec
            aws_ec2_deployment_config = deployment_spec.aws_ec2_operator_config

            user_id = get_aws_user_id()
            artifact_s3_bucket_name = generate_aws_compatible_string(
                "btml-{user_id}-{namespace}".format(
                    user_id=user_id,
                    namespace=deployment_pb.namespace,
                ))
            create_s3_bucket_if_not_exists(artifact_s3_bucket_name,
                                           aws_ec2_deployment_config.region)
            self.deploy_service(
                deployment_pb,
                deployment_spec,
                bento_path,
                aws_ec2_deployment_config,
                artifact_s3_bucket_name,
                aws_ec2_deployment_config.region,
            )
        except BentoMLException as error:
            if artifact_s3_bucket_name and aws_ec2_deployment_config.region:
                cleanup_s3_bucket_if_exist(artifact_s3_bucket_name,
                                           aws_ec2_deployment_config.region)
            raise error
        return ApplyDeploymentResponse(status=Status.OK(),
                                       deployment=deployment_pb)
Example #8
    def add(self, deployment_pb):
        try:
            deployment_spec = deployment_pb.spec
            sagemaker_config = deployment_spec.sagemaker_operator_config
            if sagemaker_config is None:
                raise YataiDeploymentException(
                    "Sagemaker configuration is missing.")

            sagemaker_config.region = (sagemaker_config.region
                                       or get_default_aws_region())

            bento_pb = self.yatai_service.GetBento(
                GetBentoRequest(
                    bento_name=deployment_spec.bento_name,
                    bento_version=deployment_spec.bento_version,
                ))
            if bento_pb.bento.uri.type not in (BentoUri.LOCAL, BentoUri.S3):
                raise BentoMLException(
                    "BentoML currently does not support {} repository".format(
                        BentoUri.StorageType.Name(bento_pb.bento.uri.type)))
            return self._add(deployment_pb, bento_pb, bento_pb.bento.uri.uri)

        except BentoMLException as error:
            deployment_pb.state.state = DeploymentState.ERROR
            deployment_pb.state.error_message = (
                f"Error creating SageMaker deployment: {str(error)}")
            return ApplyDeploymentResponse(status=error.status_proto,
                                           deployment=deployment_pb)
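
Several of these operators fall back to `get_default_aws_region()` when the deployment spec leaves the region empty. A minimal sketch of such a helper, assuming it simply reads whatever region the default boto3 session resolves, is shown below; the real BentoML helper may do more.

import boto3


def get_default_aws_region():
    # Hypothetical fallback: let boto3 resolve the region from the environment
    # or ~/.aws/config; return an empty string if none is configured.
    return boto3.session.Session().region_name or ''
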
Example #9
    def _add(self, deployment_pb, bento_pb, bento_path):
        if loader._is_remote_path(bento_path):
            with loader._resolve_remote_bundle_path(bento_path) as local_path:
                return self._add(deployment_pb, bento_pb, local_path)

        deployment_spec = deployment_pb.spec
        sagemaker_config = deployment_spec.sagemaker_operator_config

        raise_if_api_names_not_found_in_bento_service_metadata(
            bento_pb.bento.bento_service_metadata, [sagemaker_config.api_name])

        sagemaker_client = boto3.client("sagemaker", sagemaker_config.region)

        with TempDirectory() as temp_dir:
            sagemaker_project_dir = os.path.join(temp_dir,
                                                 deployment_spec.bento_name)
            _init_sagemaker_project(
                sagemaker_project_dir,
                bento_path,
                bento_pb.bento.bento_service_metadata.env.docker_base_image,
            )
            ecr_image_path = create_and_push_docker_image_to_ecr(
                sagemaker_config.region,
                deployment_spec.bento_name,
                deployment_spec.bento_version,
                sagemaker_project_dir,
            )

        try:
            (
                sagemaker_model_name,
                sagemaker_endpoint_config_name,
                sagemaker_endpoint_name,
            ) = _get_sagemaker_resource_names(deployment_pb)

            _create_sagemaker_model(sagemaker_client, sagemaker_model_name,
                                    ecr_image_path, sagemaker_config)
            _create_sagemaker_endpoint_config(
                sagemaker_client,
                sagemaker_model_name,
                sagemaker_endpoint_config_name,
                sagemaker_config,
            )
            _create_sagemaker_endpoint(
                sagemaker_client,
                sagemaker_endpoint_name,
                sagemaker_endpoint_config_name,
            )
        except AWSServiceError as e:
            delete_sagemaker_deployment_resources_if_exist(deployment_pb)
            raise e

        return ApplyDeploymentResponse(status=Status.OK(),
                                       deployment=deployment_pb)
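
Example #9 pushes a Docker image to ECR and then creates a SageMaker model, endpoint configuration, and endpoint through private helpers. The sketch below shows the underlying boto3 SageMaker calls such helpers presumably wrap; the role ARN, variant name, and instance settings are placeholders, since the real helpers derive these from `sagemaker_config`.

import boto3


def deploy_sagemaker_endpoint(region, model_name, endpoint_config_name,
                              endpoint_name, ecr_image_path, role_arn):
    # Illustrative only: roughly what _create_sagemaker_model,
    # _create_sagemaker_endpoint_config and _create_sagemaker_endpoint
    # need to do against the SageMaker API.
    client = boto3.client('sagemaker', region)
    client.create_model(
        ModelName=model_name,
        PrimaryContainer={'Image': ecr_image_path},
        ExecutionRoleArn=role_arn,
    )
    client.create_endpoint_config(
        EndpointConfigName=endpoint_config_name,
        ProductionVariants=[{
            'VariantName': 'AllTraffic',
            'ModelName': model_name,
            'InitialInstanceCount': 1,
            'InstanceType': 'ml.t2.medium',
        }],
    )
    client.create_endpoint(
        EndpointName=endpoint_name,
        EndpointConfigName=endpoint_config_name,
    )
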
Example #10
 def _update(self, deployment_pb, current_deployment, bento_pb, bento_path):
     if loader._is_remote_path(bento_path):
         with loader._resolve_remote_bundle_path(bento_path) as local_path:
             return self._update(deployment_pb, current_deployment,
                                 bento_pb, local_path)
     if (deployment_pb.spec.bento_name != current_deployment.spec.bento_name
             or deployment_pb.spec.bento_version !=
             current_deployment.spec.bento_version):
         logger.debug(
             'BentoService tag is different from current Azure Functions '
             'deployment, creating a new Azure Functions project and pushing to ACR'
         )
         _update_azure_functions(
             deployment_spec=deployment_pb.spec,
             deployment_name=deployment_pb.name,
             namespace=deployment_pb.namespace,
             bento_pb=bento_pb,
             bento_path=bento_path,
         )
     (
         resource_group_name,
         _,
         function_plan_name,
         _,
         _,
     ) = _generate_azure_resource_names(namespace=deployment_pb.namespace,
                                        deployment_name=deployment_pb.name)
     _call_az_cli(
         command=[
             'az',
             'functionapp',
             'plan',
             'update',
             '--name',
             function_plan_name,
             '--resource-group',
             resource_group_name,
             '--max-burst',
             str(deployment_pb.spec.azure_functions_operator_config.
                 max_burst),
             '--min-instances',
             str(deployment_pb.spec.azure_functions_operator_config.
                 min_instances),
             '--sku',
             deployment_pb.spec.azure_functions_operator_config.
             premium_plan_sku,
         ],
         message='update Azure functionapp plan',
     )
     return ApplyDeploymentResponse(deployment=deployment_pb,
                                    status=Status.OK())
Example #11
 def update(self, deployment_pb, previous_deployment):
     try:
         bento_repo_pb = self.yatai_service.GetBento(
             GetBentoRequest(
                 bento_name=deployment_pb.spec.bento_name,
                 bento_version=deployment_pb.spec.bento_version,
             ))
         bento_pb = bento_repo_pb.bento
         return self._update(deployment_pb, previous_deployment, bento_pb,
                             bento_pb.uri.uri)
     except BentoMLException as error:
         deployment_pb.state.state = DeploymentState.ERROR
         deployment_pb.state.error_message = (
             f'Encountered an error when updating Azure Functions deployment: '
             f'{str(error)}')
         return ApplyDeploymentResponse(status=error.status_proto,
                                        deployment=deployment_pb)
Example #12
 def add(self, deployment_pb):
     try:
         deployment_spec = deployment_pb.spec
         if not deployment_spec.azure_functions_operator_config.location:
             raise YataiDeploymentException(
                 'Azure Functions parameter "location" is missing')
         bento_repo_pb = self.yatai_service.GetBento(
             GetBentoRequest(
                 bento_name=deployment_spec.bento_name,
                 bento_version=deployment_spec.bento_version,
             ))
         return self._add(deployment_pb, bento_repo_pb.bento,
                          bento_repo_pb.bento.uri.uri)
     except BentoMLException as error:
         deployment_pb.state.state = DeploymentState.ERROR
         deployment_pb.state.error_message = f'Error: {str(error)}'
         return ApplyDeploymentResponse(status=error.status_proto,
                                        deployment=deployment_pb)
Example #13
 def update(self, deployment_pb, previous_deployment):
     try:
         deployment_spec = deployment_pb.spec
         bento_pb = self.yatai_service.GetBento(
             GetBentoRequest(
                 bento_name=deployment_spec.bento_name,
                 bento_version=deployment_spec.bento_version,
             ))
         if bento_pb.bento.uri.type not in (BentoUri.LOCAL, BentoUri.S3):
             raise BentoMLException(
                 "BentoML currently does not support {} repository".format(
                     BentoUri.StorageType.Name(bento_pb.bento.uri.type)))
         return self._update(deployment_pb, previous_deployment, bento_pb,
                             bento_pb.bento.uri.uri)
     except BentoMLException as error:
         deployment_pb.state.state = DeploymentState.ERROR
         deployment_pb.state.error_message = (
             f"Error updating SageMaker deployment: {str(error)}")
         return ApplyDeploymentResponse(status=error.status_proto,
                                        deployment=deployment_pb)
Example #14
    def _update(self, deployment_pb, previous_deployment_pb, bento_path, region):
        if loader._is_remote_path(bento_path):
            with loader._resolve_remote_bundle_path(bento_path) as local_path:
                return self._update(
                    deployment_pb, previous_deployment_pb, local_path, region
                )

        updated_deployment_spec = deployment_pb.spec
        updated_deployment_config = updated_deployment_spec.aws_ec2_operator_config

        describe_result = self.describe(deployment_pb)
        if describe_result.status.status_code != status_pb2.Status.OK:
            error_code, error_message = status_pb_to_error_code_and_message(
                describe_result.status
            )
            raise YataiDeploymentException(
                f"Failed to fetch current status of EC2 deployment - "
                f"{error_code}:{error_message}"
            )

        previous_deployment_state = json.loads(describe_result.state.info_json)
        if "S3Bucket" in previous_deployment_state:
            s3_bucket_name = previous_deployment_state.get("S3Bucket")
        else:
            raise BentoMLException(
                "S3 Bucket is missing in the AWS EC2 deployment, please make sure "
                "it exists and try again"
            )

        self.deploy_service(
            deployment_pb,
            updated_deployment_spec,
            bento_path,
            updated_deployment_config,
            s3_bucket_name,
            region,
        )

        return ApplyDeploymentResponse(status=Status.OK(), deployment=deployment_pb)
Example #15
    def ApplyDeployment(self, request, context=None):
        try:
            # apply default namespace if not set
            request.deployment.namespace = (request.deployment.namespace
                                            or self.default_namespace)

            validation_errors = validate_deployment_pb(request.deployment)
            if validation_errors:
                raise InvalidArgument(
                    'Failed to validate deployment. {errors}'.format(
                        errors=validation_errors))

            previous_deployment = self.deployment_store.get(
                request.deployment.name, request.deployment.namespace)
            if not previous_deployment:
                request.deployment.created_at.GetCurrentTime()
            request.deployment.last_updated_at.GetCurrentTime()

            self.deployment_store.insert_or_update(request.deployment)
            # find deployment operator based on deployment spec
            operator = get_deployment_operator(self, request.deployment)

            # deploying to target platform
            if previous_deployment:
                response = operator.update(request.deployment,
                                           previous_deployment)
            else:
                response = operator.add(request.deployment)

            if response.status.status_code == status_pb2.Status.OK:
                # update deployment state
                if response and response.deployment:
                    self.deployment_store.insert_or_update(response.deployment)
                else:
                    raise BentoMLException(
                        "DeploymentOperator Internal Error: failed to add or update "
                        "deployment metadata to database")
                logger.info(
                    "ApplyDeployment (%s, namespace %s) succeeded",
                    request.deployment.name,
                    request.deployment.namespace,
                )
            else:
                if not previous_deployment:
                    # When failed to create the deployment, delete it from active
                    # deployments records
                    self.deployment_store.delete(request.deployment.name,
                                                 request.deployment.namespace)
                logger.debug(
                    "ApplyDeployment (%s, namespace %s) failed: %s",
                    request.deployment.name,
                    request.deployment.namespace,
                    response.status.error_message,
                )

            return response

        except BentoMLException as e:
            logger.error("RPC ERROR ApplyDeployment: %s", e)
            return ApplyDeploymentResponse(status=e.status_proto)
        except Exception as e:
            logger.error("URPC ERROR ApplyDeployment: %s", e)
            return ApplyDeploymentResponse(status=Status.INTERNAL(str(e)))
Example #16
    def _update(self, deployment_pb, current_deployment, bento_pb, bento_path):
        if loader._is_remote_path(bento_path):
            with loader._resolve_remote_bundle_path(bento_path) as local_path:
                return self._update(deployment_pb, current_deployment,
                                    bento_pb, local_path)
        updated_deployment_spec = deployment_pb.spec
        updated_sagemaker_config = updated_deployment_spec.sagemaker_operator_config
        sagemaker_client = boto3.client(
            "sagemaker", updated_sagemaker_config.region
            or get_default_aws_region())

        try:
            raise_if_api_names_not_found_in_bento_service_metadata(
                bento_pb.bento.bento_service_metadata,
                [updated_sagemaker_config.api_name],
            )
            describe_latest_deployment_state = self.describe(deployment_pb)
            current_deployment_spec = current_deployment.spec
            current_sagemaker_config = current_deployment_spec.sagemaker_operator_config
            latest_deployment_state = json.loads(
                describe_latest_deployment_state.state.info_json)

            current_ecr_image_tag = latest_deployment_state[
                "ProductionVariants"][0]["DeployedImages"][0]["SpecifiedImage"]
            if (updated_deployment_spec.bento_name !=
                    current_deployment_spec.bento_name
                    or updated_deployment_spec.bento_version !=
                    current_deployment_spec.bento_version):
                logger.debug(
                    "BentoService tag is different from current deployment, "
                    "creating a new Docker image and pushing it to ECR")
                with TempDirectory() as temp_dir:
                    sagemaker_project_dir = os.path.join(
                        temp_dir, updated_deployment_spec.bento_name)
                    _init_sagemaker_project(
                        sagemaker_project_dir,
                        bento_path,
                        bento_pb.bento.bento_service_metadata.env.
                        docker_base_image,
                    )
                    ecr_image_path = create_and_push_docker_image_to_ecr(
                        updated_sagemaker_config.region,
                        updated_deployment_spec.bento_name,
                        updated_deployment_spec.bento_version,
                        sagemaker_project_dir,
                    )
            else:
                logger.debug("Using existing ECR image for Sagemaker model")
                ecr_image_path = current_ecr_image_tag

            (
                updated_sagemaker_model_name,
                updated_sagemaker_endpoint_config_name,
                sagemaker_endpoint_name,
            ) = _get_sagemaker_resource_names(deployment_pb)
            (
                current_sagemaker_model_name,
                current_sagemaker_endpoint_config_name,
                _,
            ) = _get_sagemaker_resource_names(current_deployment)

            if (updated_sagemaker_config.api_name !=
                    current_sagemaker_config.api_name
                    or updated_sagemaker_config.
                    num_of_gunicorn_workers_per_instance !=
                    current_sagemaker_config.
                    num_of_gunicorn_workers_per_instance
                    or ecr_image_path != current_ecr_image_tag):
                logger.debug(
                    "Sagemaker model requires update. Deleting current sagemaker model %s "
                    "and creating new model %s",
                    current_sagemaker_model_name,
                    updated_sagemaker_model_name,
                )
                _delete_sagemaker_model_if_exist(sagemaker_client,
                                                 current_sagemaker_model_name)
                _create_sagemaker_model(
                    sagemaker_client,
                    updated_sagemaker_model_name,
                    ecr_image_path,
                    updated_sagemaker_config,
                )
            # When bento service tag is not changed, we need to delete the current
            # endpoint configuration in order to create a new one and avoid a name collision
            if (current_sagemaker_endpoint_config_name ==
                    updated_sagemaker_endpoint_config_name):
                logger.debug(
                    "Current sagemaker config name %s is the same as the updated one, "
                    "deleting it before creating the new endpoint config",
                    current_sagemaker_endpoint_config_name,
                )
                _delete_sagemaker_endpoint_config_if_exist(
                    sagemaker_client, current_sagemaker_endpoint_config_name)
            logger.debug(
                "Create new endpoint configuration %s",
                updated_sagemaker_endpoint_config_name,
            )
            _create_sagemaker_endpoint_config(
                sagemaker_client,
                updated_sagemaker_model_name,
                updated_sagemaker_endpoint_config_name,
                updated_sagemaker_config,
            )
            logger.debug(
                "Updating endpoint to new endpoint configuration %s",
                updated_sagemaker_endpoint_config_name,
            )
            _update_sagemaker_endpoint(
                sagemaker_client,
                sagemaker_endpoint_name,
                updated_sagemaker_endpoint_config_name,
            )
            if not (current_sagemaker_endpoint_config_name
                    == updated_sagemaker_endpoint_config_name):
                logger.debug(
                    'Delete old sagemaker endpoint config %s',
                    current_sagemaker_endpoint_config_name,
                )
                _delete_sagemaker_endpoint_config_if_exist(
                    sagemaker_client, current_sagemaker_endpoint_config_name)
        except AWSServiceError as e:
            delete_sagemaker_deployment_resources_if_exist(deployment_pb)
            raise e

        return ApplyDeploymentResponse(status=Status.OK(),
                                       deployment=deployment_pb)
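
Example #16 relies on several `*_if_exist` deletion helpers so that rollbacks and repeated updates stay idempotent. A minimal sketch of one of them, assuming it simply swallows the "not found" error that boto3 raises for a missing endpoint configuration, is shown below.

import boto3
from botocore.exceptions import ClientError


def _delete_sagemaker_endpoint_config_if_exist(sagemaker_client,
                                               endpoint_config_name):
    # Hypothetical variant of the helper used above: delete the endpoint
    # configuration and treat "it does not exist" as success.
    try:
        sagemaker_client.delete_endpoint_config(
            EndpointConfigName=endpoint_config_name)
    except ClientError as e:
        if e.response['Error']['Code'] != 'ValidationException':
            raise
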
Example #17
def test_deployment_labels():
    runner = CliRunner()
    cli = create_bentoml_cli()

    failed_result = runner.invoke(
        cli.commands['lambda'],
        [
            'deploy',
            'failed-name',
            '-b',
            'ExampleBentoService:version',
            '--labels',
            'test=abc',
        ],
    )
    assert failed_result.exit_code == 2

    with mock.patch(
            'bentoml.yatai.deployment.aws_lambda.operator.AwsLambdaDeploymentOperator.add'
    ) as mock_operator_add:
        bento_name = 'MockService'
        bento_version = 'MockVersion'
        deployment_name = f'test-label-{uuid.uuid4().hex[:8]}'
        deployment_namespace = 'test-namespace'
        mocked_deployment_pb = Deployment(name=deployment_name,
                                          namespace=deployment_namespace)
        mocked_deployment_pb.spec.bento_name = bento_name
        mocked_deployment_pb.spec.bento_version = bento_version
        mocked_deployment_pb.spec.operator = DeploymentSpec.AWS_LAMBDA
        mocked_deployment_pb.spec.aws_lambda_operator_config.memory_size = 1000
        mocked_deployment_pb.spec.aws_lambda_operator_config.timeout = 60
        mocked_deployment_pb.spec.aws_lambda_operator_config.region = 'us-west-2'
        mock_operator_add.return_value = ApplyDeploymentResponse(
            status=Status.OK(), deployment=mocked_deployment_pb)

        success_result = runner.invoke(
            cli.commands['lambda'],
            [
                'deploy',
                deployment_name,
                '-b',
                f'{bento_name}:{bento_version}',
                '--namespace',
                deployment_namespace,
                '--labels',
                'created_by:admin,cicd:passed',
                '--region',
                'us-west-2',
            ],
        )
        assert success_result.exit_code == 0

        list_result = runner.invoke(
            cli.commands['deployment'],
            [
                'list',
                '--labels',
                'created_by=admin,cicd NotIn (failed, unsuccessful)',
                '--output',
                'wide',
            ],
        )
        assert list_result.exit_code == 0
        assert deployment_name in list_result.output.strip()
        assert 'created_by:admin' in list_result.output.strip()
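
Example #17 drives the BentoML CLI with Click's `CliRunner` and patches the Lambda operator's `add` so the test never touches AWS. The same testing pattern in isolation, against a toy Click command rather than the real CLI, looks roughly like this.

import click
from click.testing import CliRunner


@click.command()
@click.option('--labels')
def deploy(labels):
    # Toy stand-in for a real deployment command.
    click.echo('labels={}'.format(labels))


def test_deploy_cli():
    runner = CliRunner()
    result = runner.invoke(deploy, ['--labels', 'created_by:admin'])
    assert result.exit_code == 0
    assert 'created_by:admin' in result.output
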