Example #1
    def __init__(self, archive_path, bento_name, bento_version, _cleanup=True):
        self.archive_path = archive_path
        self.bento_name = bento_name
        self.bento_version = bento_version
        self.temp_directory = TempDirectory()
        self._cleanup = _cleanup
        self.path = None
Example #2
class TemporaryServerlessConfig(object):
    def __init__(
        self,
        archive_path,
        deployment_name,
        region,
        stage,
        functions,
        provider_name,
        _cleanup=True,
    ):
        self.archive_path = archive_path
        self.temp_directory = TempDirectory()
        self.deployment_name = deployment_name
        self.region = region
        self.stage = stage
        self.functions = functions
        self.provider_name = provider_name
        self._cleanup = _cleanup
        self.path = None

    def __enter__(self):
        self.generate()
        return self.path

    def __exit__(self, exc_type, exc_val, exc_tb):
        if self._cleanup:
            self.cleanup()

    def generate(self):
        install_serverless_package()
        serverless_config = {
            "service": self.deployment_name,
            "provider": {
                "region": self.region,
                "stage": self.stage,
                "name": self.provider_name,
            },
            "functions": self.functions,
        }

        # if self.platform == "google-python":
        #     serverless_config["provider"]["name"] = "google"
        #     for api in apis:
        #         serverless_config["functions"][api.name] = {
        #             "handler": api.name,
        #             "events": [{"http": "path"}],
        #         }

        yaml = YAML()
        self.temp_directory.create()
        tempdir = self.temp_directory.path
        saved_path = os.path.join(tempdir, "serverless.yml")
        yaml.dump(serverless_config, Path(saved_path))
        self.path = tempdir

    def cleanup(self):
        self.temp_directory.cleanup()
        self.path = None
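The class above is a context manager; the sketch below is not part of the original source and only illustrates the flow, assuming TemporaryServerlessConfig and os are in scope and the serverless CLI is installed. All names and values are placeholders.

# Hedged usage sketch (illustrative values only)
functions = {
    "predict": {
        "handler": "handler.predict",
        "events": [{"http": {"path": "/predict", "method": "post"}}],
    }
}
with TemporaryServerlessConfig(
    archive_path="/tmp/my_bento_bundle",  # hypothetical bundle location
    deployment_name="my-deployment",
    region="us-west-2",
    stage="dev",
    functions=functions,
    provider_name="aws",
) as project_dir:
    # __enter__ ran generate(): project_dir now contains serverless.yml
    print(os.listdir(project_dir))
# __exit__ ran cleanup() because _cleanup defaults to True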
Example #3
def save_to_dir(bento_service, path, version=None, silent=False):
    """Save given BentoService along with all its artifacts, source code and
    dependencies to target file path, assuming path exist and empty. If target path
    is not empty, this call may override existing files in the given path.

    :param bento_service (bentoml.service.BentoService): a Bento Service instance
    :param path (str): Destination of where the bento service will be saved. The
        destination can be local path or remote path. The remote path supports both
        AWS S3('s3://bucket/path') and Google Cloud Storage('gs://bucket/path').
    :param version (str): Override the service version with given version string
    :param silent (boolean): whether to hide the log message showing target save path
    """
    track_save(bento_service)

    from bentoml.service import BentoService

    if not isinstance(bento_service, BentoService):
        raise BentoMLException(
            "save_to_dir only works with instances of custom BentoService class"
        )

    if version is not None:
        # If the version parameter is provided, set the bento_service version
        # Otherwise it will be set the first time the `version` property is accessed
        bento_service.set_version(version)

    if _is_remote_path(path):
        # If the user-provided path is a remote location, the bundle will first be
        # saved to a temporary directory and then uploaded to the remote location
        logger.info(
            'Saving bento to a remote path. BentoML will first save the bento '
            'to a local temporary directory and then upload it to the remote path.'
        )
        with TempDirectory() as temp_dir:
            _write_bento_content_to_dir(bento_service, temp_dir)
            with TempDirectory() as tarfile_dir:
                file_name = f'{bento_service.name}.tar'
                tarfile_path = f'{tarfile_dir}/{file_name}'
                with tarfile.open(tarfile_path, mode="w:gz") as tar:
                    tar.add(temp_dir, arcname=bento_service.name)
                # Upload while the temporary tar file still exists
                _upload_file_to_remote_path(path, tarfile_path, file_name)
    else:
        _write_bento_content_to_dir(bento_service, path)

    copy_zip_import_archives(
        os.path.join(path, bento_service.name, ZIPIMPORT_DIR),
        bento_service.__class__.__module__,
        list(get_zipmodules().keys()),
        bento_service.env._zipimport_archives or [],
    )

    if not silent:
        logger.info(
            "BentoService bundle '%s:%s' created at: %s",
            bento_service.name,
            bento_service.version,
            path,
        )
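For illustration, a hedged usage sketch of save_to_dir follows; MyService and my_model stand for a user-defined BentoService subclass and its artifact, and the import path is assumed from BentoML 0.x.

import tempfile

from bentoml.saved_bundle import save_to_dir  # assumed import path (BentoML 0.x)

svc = MyService()            # hypothetical BentoService subclass
svc.pack('model', my_model)  # hypothetical artifact

# Local destination: the bundle is written directly into the directory
save_to_dir(svc, tempfile.mkdtemp())

# Remote destination: the bundle is tarred locally and then uploaded
save_to_dir(svc, 's3://my-bucket/bentos', version='20210101_abc123', silent=True)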
Example #4
    def __init__(self,
                 archive_path,
                 deployment_name,
                 bento_name,
                 template_type,
                 _cleanup=True):
        self.archive_path = archive_path
        self.deployment_name = deployment_name
        self.bento_name = bento_name
        self.temp_directory = TempDirectory()
        self.template_type = template_type
        self._cleanup = _cleanup
        self.path = None
Example #5
    def pull(self, bento):
        """
        Pull a BentoService from a remote yatai service. The BentoService will be saved
        and registered with the local yatai service.

        Args:
            bento: a BentoService identifier in the form of NAME:VERSION

        Returns:
            BentoService saved path

        Example:

        >>> remote_yatai_client = get_yatai_client('remote_yatai_service_address')
        >>> saved_path = remote_yatai_client.repository.pull('MyService:version')
        """
        track('py-api-pull')
        if isinstance(self.yatai_service, YataiService):
            raise BentoMLException(
                'yatai_service_url must be set when pulling from a remote yatai service'
            )
        bento_pb = self.get(bento)
        with TempDirectory() as tmpdir:
            # Create a non-existent directory for safe_retrieve
            target_bundle_path = os.path.join(tmpdir, 'bundle')
            self.download_to_directory(bento_pb, target_bundle_path)

            from bentoml.yatai.client import get_yatai_client

            local_yc = get_yatai_client()
            return local_yc.repository.upload_from_dir(target_bundle_path)

    def upload(
        self,
        bento_service: "BentoService",
        version: str = None,
        labels: Dict = None,
    ) -> "BentoUri":
        """
        Save and upload given :class:`~bentoml.BentoService`
        to YataiService.

        Args:
            bento_service (:class:`~bentoml.BentoService`):
                a BentoService instance
            version (`str`, `optional`):
                version of ``bento_service``
            labels (`dict`, `optional`):
                :class:`~bentoml.BentoService` metadata

        Returns:
            BentoUri as gRPC stub for save location of BentoService.

        Example::

            from bentoml.yatai.client import get_yatai_client

            svc = MyBentoService()
            svc.save()

            remote_yatai_client = get_yatai_client('https://remote.yatai.service:50050')
            remote_path = remote_yatai_client.repository.upload(svc)
        """
        with TempDirectory() as tmpdir:
            save_to_dir(bento_service, tmpdir, version, silent=True)
            return self.upload_from_dir(tmpdir, labels)
Example #7
def test_s3(minio_address):
    yatai_server_command = [
        'bentoml',
        'yatai-service-start',
        '--no-ui',
        '--grpc-port',
        '50051',
        '--repo-base-url',
        f's3://{bucket_name}/',
        '--s3-endpoint-url',
        'localhost:9000',
    ]
    proc = subprocess.Popen(yatai_server_command,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
    yatai_server_url = "localhost:50051"
    svc = ExampleBentoService()
    svc.pack('model', {'model': 'abc'})
    bento_tag = f'{svc.name}:{svc.version}'
    saved_path = svc.save(yatai_url=yatai_server_url)
    yc = get_yatai_client(yatai_server_url)

    assert saved_path.startswith('s3://')

    bento_pb = yc.repository.get(bento_tag)
    with TempDirectory() as temp_dir:
        yc.repository.download_to_directory(bento_pb, f'{temp_dir}/bundle')
        assert os.path.exists(f'{temp_dir}/bundle/bentoml.yml')
    proc.kill()

    def _download_bento(self, bento_name, bento_version):
        with TempDirectory(cleanup=False) as temp_dir:
            try:
                temp_tar_path = os.path.join(temp_dir,
                                             f'{uuid.uuid4().hex[:12]}.tar')
                response_iterator = self.yatai_service.DownloadBento(
                    DownloadBentoRequest(bento_name=bento_name,
                                         bento_version=bento_version),
                    timeout=DEFAULT_GRPC_REQUEST_TIMEOUT,
                )
                with open(temp_tar_path, 'wb+') as file:
                    for response in response_iterator:
                        if response.status.status_code != status_pb2.Status.OK:
                            raise BentoMLException(
                                response.status.error_message)
                        file.write(response.bento_bundle)
                    file.seek(0)
                    temp_bundle_path = os.path.join(
                        temp_dir, f'{bento_name}_{bento_version}')
                    with tarfile.open(fileobj=file, mode='r') as tar:
                        tar.extractall(path=temp_bundle_path)
                return temp_bundle_path
            except grpc.RpcError as e:
                raise BentoMLRpcError(
                    e,
                    f'Failed to download {bento_name}:{bento_version} from '
                    f'the remote yatai server',
                )
Example #9
    def pull(self, bento):
        """
        Pull a BentoService from a remote yatai service. The BentoService will be saved
        and registered with the local yatai service.

        Args:
            bento: a BentoService identifier in the form of NAME:VERSION

        Returns:
            BentoService saved path

        Example:

        >>> client = get_yatai_client('127.0.0.1:50051')
        >>> saved_path = client.repository.pull('MyService:')
        """
        track('py-api-pull')

        bento_pb = self.get(bento)
        with TempDirectory() as tmpdir:
            # Create a non-existent directory for safe_retrieve
            target_bundle_path = os.path.join(tmpdir, 'bundle')
            self.download_to_directory(bento_pb, target_bundle_path)

            from bentoml.yatai.client import get_yatai_client

            labels = (dict(bento_pb.bento_service_metadata.labels)
                      if bento_pb.bento_service_metadata.labels else None)

            local_yc = get_yatai_client()
            return local_yc.repository.upload_from_dir(target_bundle_path,
                                                       labels=labels)
Example #10
    def pull(self, bento: str) -> "BentoUri":
        """
        Pull a :class:`~bentoml.BentoService` from a remote Yatai.
        The BentoService will then be saved and registered with the local Yatai.

        Args:
            bento (`str`):
                a BentoService identifier in the form of ``NAME:VERSION``

        Returns:
            :class:`reflection.GeneratedProtocolMessageType`:
                URI as gRPC stub for save location of BentoService.

        Example::

            from bentoml.yatai.client import get_yatai_client
            client = get_yatai_client('127.0.0.1:50051')
            saved_path = client.repository.pull('MyService:20210808_E38F3')
        """
        bento_pb: "Bento" = self.get(bento)
        with TempDirectory() as tmpdir:
            # Create a non-existent directory for safe_retrieve
            target_bundle_path = os.path.join(tmpdir, 'bundle')
            self.download_to_directory(bento_pb, target_bundle_path)

            from bentoml.yatai.client import get_yatai_client

            labels = (dict(bento_pb.bento_service_metadata.labels)
                      if bento_pb.bento_service_metadata.labels else None)

            local_yc = get_yatai_client()
            return local_yc.repository.upload_from_dir(target_bundle_path,
                                                       labels=labels)
Example #11
def save(bento_service, dst, version=None):
    """
    Save given BentoService along with all artifacts to target path
    """

    if version is None:
        version = _generate_new_version_str()
    _validate_version_str(version)

    if (bento_service._version_major is not None
            and bento_service._version_minor is not None):
        # BentoML uses semantic versioning for BentoService distribution
        # when user specified the MAJOR and MINOR version number along with
        # the BentoService class definition with '@ver' decorator.
        # The parameter version(or auto generated version) here will be used as
        # PATCH field in the final version:
        version = '.'.join(
            [str(bento_service._version_major),
             str(bento_service._version_minor), version])

    # Full path containing the saved BentoArchive; it is the dst path with the
    # service name and service version appended, e.g.:
    # - s3://my-bucket/base_path => s3://my-bucket/base_path/service_name/version/
    # - /tmp/my_bento_archive/ => /tmp/my_bento_archive/service_name/version/
    full_saved_path = os.path.join(dst, bento_service.name, version)

    if is_s3_url(dst):
        with TempDirectory() as tempdir:
            _save(bento_service, tempdir, version)
            upload_to_s3(full_saved_path, tempdir)
    else:
        _save(bento_service, dst, version)

    return full_saved_path
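A hedged usage sketch of the save helper above; svc stands for a BentoService instance created elsewhere and the destination paths are illustrative.

saved_path = save(svc, '/tmp/my_bento_archive/')  # auto-generated version string

# Explicit version; with MAJOR/MINOR set via the '@ver' decorator, this string
# becomes the PATCH field of the final version
saved_path = save(svc, 's3://my-bucket/base_path', version='20190101')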
Example #12
class TemporaryServerlessContent(object):
    def __init__(self,
                 archive_path,
                 deployment_name,
                 bento_name,
                 template_type,
                 _cleanup=True):
        self.archive_path = archive_path
        self.deployment_name = deployment_name
        self.bento_name = bento_name
        self.temp_directory = TempDirectory()
        self.template_type = template_type
        self._cleanup = _cleanup
        self.path = None

    def __enter__(self):
        self.generate()
        return self.path

    def __exit__(self, exc_type, exc_val, exc_tb):
        if self._cleanup:
            self.cleanup()

    def generate(self):
        self.temp_directory.create()
        tempdir = self.temp_directory.path
        call_serverless_command(
            [
                "serverless",
                "create",
                "--template",
                self.template_type,
                "--name",
                self.deployment_name,
            ],
            tempdir,
        )
        shutil.copy(os.path.join(self.archive_path, "requirements.txt"),
                    tempdir)
        model_service_archive_path = os.path.join(tempdir, self.bento_name)
        model_path = os.path.join(self.archive_path, self.bento_name)
        shutil.copytree(model_path, model_service_archive_path)
        self.path = tempdir

    def cleanup(self):
        self.temp_directory.cleanup()
        self.path = None
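Like TemporaryServerlessConfig above, this class is used as a context manager. A hedged sketch follows; the paths and names are illustrative, and call_serverless_command is the helper used elsewhere in this listing (its exact argument form varies between the snippets shown here).

with TemporaryServerlessContent(
    archive_path='/tmp/my_bento_bundle',  # hypothetical saved bundle
    deployment_name='my-deployment',
    bento_name='MyService',
    template_type='aws-python3',
) as project_dir:
    # project_dir now holds the generated serverless template, the bundle's
    # requirements.txt and a copy of the MyService bundle
    call_serverless_command(['deploy'], project_dir)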
Example #13
        def ContainerizeBento(self, request, context=None):
            try:
                ensure_docker_available_or_raise()
                tag = request.tag
                if tag is None or len(tag) == 0:
                    name = to_valid_docker_image_name(request.bento_name)
                    version = to_valid_docker_image_version(request.bento_version)
                    tag = f"{name}:{version}"
                if ":" not in tag:
                    version = to_valid_docker_image_version(request.bento_version)
                    tag = f"{tag}:{version}"
                import docker

                docker_client = docker.from_env()
                bento_pb = self.bento_metadata_store.get(
                    request.bento_name, request.bento_version
                )
                if not bento_pb:
                    raise YataiRepositoryException(
                        f'BentoService {request.bento_name}:{request.bento_version} '
                        f'does not exist'
                    )

                with TempDirectory() as temp_dir:
                    temp_bundle_path = f'{temp_dir}/{bento_pb.name}'
                    bento_service_bundle_path = bento_pb.uri.uri
                    if bento_pb.uri.type == BentoUri.S3:
                        bento_service_bundle_path = self.repo.get(
                            bento_pb.name, bento_pb.version
                        )
                    elif bento_pb.uri.type == BentoUri.GCS:
                        bento_service_bundle_path = self.repo.get(
                            bento_pb.name, bento_pb.version
                        )
                    safe_retrieve(bento_service_bundle_path, temp_bundle_path)
                    try:
                        docker_client.images.build(
                            path=temp_bundle_path,
                            tag=tag,
                            buildargs=dict(request.build_args),
                        )
                    except (docker.errors.APIError, docker.errors.BuildError) as error:
                        logger.error(f'Encounter container building issue: {error}')
                        raise YataiRepositoryException(error)
                    if request.push is True:
                        try:
                            docker_client.images.push(
                                repository=request.repository, tag=tag
                            )
                        except docker.errors.APIError as error:
                            raise YataiRepositoryException(error)

                    return ContainerizeBentoResponse(status=Status.OK(), tag=tag)
            except BentoMLException as e:
                logger.error(f"RPC ERROR ContainerizeBento: {e}")
                return ContainerizeBentoResponse(status=e.status_proto)
            except Exception as e:  # pylint: disable=broad-except
                logger.error(f"RPC ERROR ContainerizeBento: {e}")
                return ContainerizeBentoResponse(status=Status.INTERNAL(e))
Example #14
    def check_status(self):
        """Check deployment status for the bentoml service.
        return True, if it is active else return false
        """

        apis = self.bento_service.get_service_apis()
        config = {
            "service": self.bento_service.name,
            "provider": {
                "region": self.region,
                "stage": self.stage
            },
            "functions": {},
        }
        if self.platform == "google-python":
            config["provider"]["name"] = "google"
            for api in apis:
                config["functions"][api.name] = {
                    "handler": api.name,
                    "events": [{
                        "http": "path"
                    }],
                }
        elif self.platform == "aws-lambda" or self.platform == "aws-lambda-py2":
            config["provider"]["name"] = "aws"
            for api in apis:
                config["functions"][api.name] = {
                    "handler":
                    "handler." + api.name,
                    "events": [{
                        "http": {
                            "path": "/" + api.name,
                            "method": "post"
                        }
                    }],
                }
        else:
            raise BentoMLException(
                "check serverless does not support platform %s at the moment" %
                self.platform)
        yaml = YAML()

        with TempDirectory() as tempdir:
            saved_path = os.path.join(tempdir, "serverless.yml")
            yaml.dump(config, Path(saved_path))
            with subprocess.Popen(["serverless", "info"],
                                  cwd=tempdir,
                                  stdout=PIPE,
                                  stderr=PIPE) as proc:
                # We don't use the parse_response function here.
                # Instead of raising an error, we just return False.
                content = proc.stdout.read().decode("utf-8")
                response = content.strip().split("\n")
                logger.debug("Serverless response: %s", "\n".join(response))
                error = [s for s in response if "Serverless Error" in s]
                if error:
                    return False, "\n".join(response)
                else:
                    return True, "\n".join(response)
Example #15
    def check_status(self):
        """Check deployment status for the bentoml service.
        return True, if it is active else return false
        """

        apis = self.bento_service.get_service_apis()
        config = {
            "service": self.bento_service.name,
            "provider": {
                "region": self.region,
                "stage": self.stage
            },
            "functions": {}
        }
        if self.platform == 'google-python':
            config['provider']['name'] = 'google'
            for api in apis:
                config['functions'][api.name] = {
                    'handler': api.name,
                    'events': [{
                        'http': 'path'
                    }]
                }
        elif self.platform == 'aws-lambda' or self.platform == 'aws-lambda-py2':
            config['provider']['name'] = 'aws'
            for api in apis:
                config['functions'][api.name] = {
                    'handler':
                    'handler.' + api.name,
                    'events': [{
                        'http': {
                            "path": '/' + api.name,
                            "method": 'post'
                        }
                    }]
                }
        else:
            raise BentoMLException(
                'check serverless does not support platform %s at the moment' %
                self.platform)
        yaml = YAML()

        with TempDirectory() as tempdir:
            saved_path = os.path.join(tempdir, 'serverless.yml')
            yaml.dump(config, Path(saved_path))
            with subprocess.Popen(['serverless', 'info'],
                                  cwd=tempdir,
                                  stdout=PIPE,
                                  stderr=PIPE) as proc:
                # We don't use the parse_response function here.
                # Instead of raising an error, we just return False.
                content = proc.stdout.read().decode('utf-8')
                response = content.strip().split('\n')
                logger.debug('Serverless response: %s', '\n'.join(response))
                error = [s for s in response if 'Serverless Error' in s]
                if error:
                    return False, '\n'.join(response)
                else:
                    return True, '\n'.join(response)
Example #16
    def delete(self, deployment_pb, yatai_service=None):
        try:
            state = self.describe(deployment_pb, yatai_service).state
            if state.state != DeploymentState.RUNNING:
                message = (
                    'Failed to delete, no active deployment {name}. '
                    'The current state is {state}'.format(
                        name=deployment_pb.name,
                        state=DeploymentState.State.Name(state.state),
                    )
                )
                return DeleteDeploymentResponse(status=Status.ABORTED(message))

            deployment_spec = deployment_pb.spec
            aws_config = deployment_spec.aws_lambda_operator_config

            bento_pb = yatai_service.GetBento(
                GetBentoRequest(
                    bento_name=deployment_spec.bento_name,
                    bento_version=deployment_spec.bento_version,
                )
            )
            bento_service_metadata = bento_pb.bento.bento_service_metadata
            # We are not validating api_name, because for delete, you don't
            # need them.
            api_names = (
                [aws_config.api_name]
                if aws_config.api_name
                else [api.name for api in bento_service_metadata.apis]
            )

            with TempDirectory() as serverless_project_dir:
                generate_aws_lambda_serverless_config(
                    bento_service_metadata.env.python_version,
                    deployment_pb.name,
                    api_names,
                    serverless_project_dir,
                    aws_config.region,
                    # BentoML deployment namespace is mapping to serverless `stage`
                    # concept
                    stage=deployment_pb.namespace,
                )
                response = call_serverless_command(['remove'], serverless_project_dir)
                stack_name = '{name}-{namespace}'.format(
                    name=deployment_pb.name, namespace=deployment_pb.namespace
                )
                if "Serverless: Stack removal finished..." in response:
                    status = Status.OK()
                elif "Stack '{}' does not exist".format(stack_name) in response:
                    status = Status.NOT_FOUND(
                        'Deployment {} not found'.format(stack_name)
                    )
                else:
                    status = Status.ABORTED()

            return DeleteDeploymentResponse(status=status)
        except BentoMLException as error:
            return DeleteDeploymentResponse(status=exception_to_return_status(error))
Example #17
def yatai_server_container():
    ensure_docker_available_or_raise()
    docker_client = docker.from_env()
    local_bentoml_repo_path = os.path.abspath(__file__ + "/../../../")
    yatai_docker_image_tag = f'bentoml/yatai-service:e2e-test-{uuid.uuid4().hex[:6]}'

    # Note: When both `custom_context` and `fileobj` are set, the docker api will not
    #       use the `path` provided (docker/api/build.py L138). The solution is to
    #       create an actual Dockerfile along with the path, instead of using fileobj
    #       and custom_context.
    with TempDirectory() as temp_dir:
        temp_docker_file_path = os.path.join(temp_dir, 'Dockerfile')
        with open(temp_docker_file_path, 'w') as f:
            f.write(
                f"""\
FROM bentoml/yatai-service:{LAST_PYPI_RELEASE_VERSION}
ADD . /bentoml-local-repo
RUN pip install /bentoml-local-repo
            """
            )
        logger.info(f'building docker image {yatai_docker_image_tag}')
        docker_client.images.build(
            path=local_bentoml_repo_path,
            dockerfile=temp_docker_file_path,
            tag=yatai_docker_image_tag,
        )

        container_name = f'e2e-test-yatai-service-container-{uuid.uuid4().hex[:6]}'
        yatai_service_url = 'localhost:50051'
        command = [
            'docker',
            'run',
            '--rm',
            '--name',
            container_name,
            '-e',
            'BENTOML_HOME=/tmp',
            '-p',
            '50051:50051',
            '-p',
            '3000:3000',
            yatai_docker_image_tag,
        ]

        logger.info(f"Starting docker container {container_name}: {command}")
        docker_proc = subprocess.Popen(
            command, stdout=subprocess.PIPE, stderr=subprocess.PIPE
        )
        wait_until_container_ready(
            container_name, b'* Starting BentoML YataiService gRPC Server'
        )

        with modified_environ(BENTOML__YATAI_SERVICE__URL=yatai_service_url):
            yield yatai_service_url

        logger.info(f"Shutting down docker container: {container_name}")
        os.kill(docker_proc.pid, signal.SIGINT)
Example #18
    def __init__(
        self,
        archive_path,
        deployment_name,
        region,
        stage,
        functions,
        provider_name,
        _cleanup=True,
    ):
        self.archive_path = archive_path
        self.temp_directory = TempDirectory()
        self.deployment_name = deployment_name
        self.region = region
        self.stage = stage
        self.functions = functions
        self.provider_name = provider_name
        self._cleanup = _cleanup
        self.path = None
Example #19
    def _apply(self, deployment_pb, bento_pb, yatai_service, bento_path):
        if loader._is_remote_path(bento_path):
            with loader._resolve_remote_bundle_path(bento_path) as local_path:
                return self._apply(deployment_pb, bento_pb, yatai_service,
                                   local_path)

        deployment_spec = deployment_pb.spec
        aws_config = deployment_spec.aws_lambda_operator_config

        bento_service_metadata = bento_pb.bento.bento_service_metadata

        template = 'aws-python3'
        if version.parse(bento_service_metadata.env.python_version
                         ) < version.parse('3.0.0'):
            template = 'aws-python'

        api_names = ([aws_config.api_name] if aws_config.api_name else
                     [api.name for api in bento_service_metadata.apis])
        ensure_deploy_api_name_exists_in_bento(
            [api.name for api in bento_service_metadata.apis], api_names)

        with TempDirectory() as serverless_project_dir:
            init_serverless_project_dir(
                serverless_project_dir,
                bento_path,
                deployment_pb.name,
                deployment_spec.bento_name,
                template,
            )
            generate_aws_lambda_handler_py(deployment_spec.bento_name,
                                           api_names, serverless_project_dir)
            generate_aws_lambda_serverless_config(
                bento_service_metadata.env.python_version,
                deployment_pb.name,
                api_names,
                serverless_project_dir,
                aws_config.region,
                # BentoML deployment namespace is mapping to serverless `stage`
                # concept
                stage=deployment_pb.namespace,
            )
            logger.info(
                'Installing additional packages: serverless-python-requirements'
            )
            install_serverless_plugin("serverless-python-requirements",
                                      serverless_project_dir)
            logger.info('Deploying to AWS Lambda')
            call_serverless_command(["deploy"], serverless_project_dir)

        res_deployment_pb = Deployment(state=DeploymentState())
        res_deployment_pb.CopyFrom(deployment_pb)
        state = self.describe(res_deployment_pb, yatai_service).state
        res_deployment_pb.state.CopyFrom(state)
        return ApplyDeploymentResponse(status=Status.OK(),
                                       deployment=res_deployment_pb)
Example #20
def temporary_yatai_service_url():
    ensure_docker_available_or_raise()
    docker_client = docker.from_env()
    local_bentoml_repo_path = os.path.abspath(
        os.path.join(__file__, '..', '..'))
    docker_tag = f'bentoml/yatai-service:e2e-test-{uuid.uuid4().hex[:6]}'

    # Note: When both `custom_context` and `fileobj` are set, the docker api will not
    #       use the `path` provided (docker/api/build.py L138). The solution is to
    #       create an actual Dockerfile along with the path, instead of using fileobj
    #       and custom_context.
    with TempDirectory() as temp_dir:
        temp_docker_file_path = os.path.join(temp_dir, 'Dockerfile')
        with open(temp_docker_file_path, 'w') as f:
            f.write(f"""\
FROM bentoml/yatai-service:{PREV_PYPI_RELEASE_VERSION}
ADD . /bentoml-local-repo
RUN pip install /bentoml-local-repo
            """)
        logger.info('building docker image')
        docker_client.images.build(
            path=local_bentoml_repo_path,
            dockerfile=temp_docker_file_path,
            tag=docker_tag,
        )
        logger.info('completed building docker image')

        container_name = f'e2e-test-yatai-service-container-{uuid.uuid4().hex[:6]}'
        yatai_service_url = 'localhost:50051'

        command = [
            'docker',
            'run',
            '--rm',
            '--name',
            container_name,
            '-p',
            '50051:50051',
            '-p',
            '3000:3000',
            docker_tag,
            '--repo-base-url',
            '/tmp',
        ]

        logger.info(f'Running docker command {" ".join(command)}')

        docker_proc = subprocess.Popen(command,
                                       stdout=subprocess.PIPE,
                                       stderr=subprocess.PIPE)
        wait_for_docker_container_ready(
            container_name, b'* Starting BentoML YataiService gRPC Server')

        yield yatai_service_url
        docker_proc.terminate()
Example #21
    def _add(self, deployment_pb, bento_pb, bento_path):
        if loader._is_remote_path(bento_path):
            with loader._resolve_remote_bundle_path(bento_path) as local_path:
                return self._add(deployment_pb, bento_pb, local_path)

        deployment_spec = deployment_pb.spec
        sagemaker_config = deployment_spec.sagemaker_operator_config

        raise_if_api_names_not_found_in_bento_service_metadata(
            bento_pb.bento.bento_service_metadata, [sagemaker_config.api_name])

        sagemaker_client = boto3.client("sagemaker", sagemaker_config.region)

        with TempDirectory() as temp_dir:
            sagemaker_project_dir = os.path.join(temp_dir,
                                                 deployment_spec.bento_name)
            _init_sagemaker_project(
                sagemaker_project_dir,
                bento_path,
                bento_pb.bento.bento_service_metadata.env.docker_base_image,
            )
            ecr_image_path = create_and_push_docker_image_to_ecr(
                sagemaker_config.region,
                deployment_spec.bento_name,
                deployment_spec.bento_version,
                sagemaker_project_dir,
            )

        try:
            (
                sagemaker_model_name,
                sagemaker_endpoint_config_name,
                sagemaker_endpoint_name,
            ) = _get_sagemaker_resource_names(deployment_pb)

            _create_sagemaker_model(sagemaker_client, sagemaker_model_name,
                                    ecr_image_path, sagemaker_config)
            _create_sagemaker_endpoint_config(
                sagemaker_client,
                sagemaker_model_name,
                sagemaker_endpoint_config_name,
                sagemaker_config,
            )
            _create_sagemaker_endpoint(
                sagemaker_client,
                sagemaker_endpoint_name,
                sagemaker_endpoint_config_name,
            )
        except AWSServiceError as e:
            delete_sagemaker_deployment_resources_if_exist(deployment_pb)
            raise e

        return ApplyDeploymentResponse(status=Status.OK(),
                                       deployment=deployment_pb)
Example #22
class TemporarySageMakerContent(object):
    def __init__(self, archive_path, bento_name, bento_version, _cleanup=True):
        self.archive_path = archive_path
        self.bento_name = bento_name
        self.bento_version = bento_version
        self.temp_directory = TempDirectory()
        self._cleanup = _cleanup
        self.path = None

    def __enter__(self):
        self.generate()
        return self.path

    def generate(self):
        self.temp_directory.create()
        tempdir = self.temp_directory.path
        saved_path = os.path.join(tempdir, self.bento_name, self.bento_version)
        shutil.copytree(self.archive_path, saved_path)

        with open(os.path.join(saved_path, "nginx.conf"), "w") as f:
            f.write(DEFAULT_NGINX_CONFIG)
        with open(os.path.join(saved_path, "wsgi.py"), "w") as f:
            f.write(DEFAULT_WSGI_PY)
        with open(os.path.join(saved_path, "serve"), "w") as f:
            f.write(DEFAULT_SERVE_SCRIPT)

        # permission 755 is required for entry script 'serve'
        permission = "755"
        octal_permission = int(permission, 8)
        os.chmod(os.path.join(saved_path, "serve"), octal_permission)
        self.path = saved_path

    def cleanup(self):
        self.temp_directory.cleanup()
        self.path = None

    def __exit__(self, exc_type, exc_val, exc_tb):
        if self._cleanup:
            self.cleanup()
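A hedged usage sketch for TemporarySageMakerContent; the bundle path, name and version below are illustrative.

with TemporarySageMakerContent(
    archive_path='/tmp/my_bento_bundle',
    bento_name='MyService',
    bento_version='20210101_abc123',
) as sagemaker_project_dir:
    # The directory now contains the bundle plus nginx.conf, wsgi.py and an
    # executable 'serve' script, ready to be a SageMaker docker build context
    print(os.listdir(sagemaker_project_dir))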
Example #23
    def upload(self, bento_service, version=None):
        """Save and upload given bento_service to yatai_service, which manages all your
        saved BentoService bundles and model serving deployments.

        Args:
            bento_service (bentoml.service.BentoService): a Bento Service instance
            version (str): optional,
        Return:
            URI to where the BentoService is being saved to
        """
        with TempDirectory() as tmpdir:
            save_to_dir(bento_service, tmpdir, version, silent=True)
            return self._upload_bento_service(tmpdir)
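A hedged usage sketch for upload; client stands for a yatai client object exposing this method and svc for a packed BentoService instance.

uri = client.upload(svc)                      # auto-generated version
uri = client.upload(svc, version='20210101')  # explicit version override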
Example #24
def test_pytorch_lightning_model_artifact_with_saved_lightning_model():
    with TempDirectory() as temp_dir:
        svc = PytorchLightningService()
        model = TorchLightningModel()
        script = model.to_torchscript()
        script_path = f'{temp_dir}/model.pt'
        torch.jit.save(script, script_path)
        svc.pack('model', script_path)

        saved_path = svc.save()
        svc = bentoml.load(saved_path)
        result = svc.predict(pd.DataFrame([[5, 4, 3, 2]]))
        assert result.tolist() == [[6, 5, 4, 3]]
Example #25
    def apply(self, deployment_pb, yatai_service, prev_deployment=None):
        try:
            deployment_spec = deployment_pb.spec
            gcp_config = deployment_spec.gcp_function_operator_config
            bento_pb = yatai_service.GetBento(
                GetBentoRequest(
                    bento_name=deployment_spec.bento_name,
                    bento_version=deployment_spec.bento_version,
                ))
            if bento_pb.bento.uri.type != BentoUri.LOCAL:
                raise BentoMLException(
                    'BentoML currently only supports local repositories')
            else:
                bento_path = bento_pb.bento.uri.uri
            bento_service_metadata = bento_pb.bento.bento_service_metadata

            api_names = ([gcp_config.api_name] if gcp_config.api_name else
                         [api.name for api in bento_service_metadata.apis])
            ensure_deploy_api_name_exists_in_bento(
                [api.name for api in bento_service_metadata.apis], api_names)
            with TempDirectory() as serverless_project_dir:
                init_serverless_project_dir(
                    serverless_project_dir,
                    bento_path,
                    deployment_pb.name,
                    deployment_spec.bento_name,
                    'google-python',
                )
                generate_gcp_function_main_py(deployment_spec.bento_name,
                                              api_names,
                                              serverless_project_dir)
                generate_gcp_function_serverless_config(
                    deployment_pb.name,
                    api_names,
                    serverless_project_dir,
                    gcp_config.region,
                    # BentoML namespace is mapping to serverless stage.
                    stage=deployment_pb.namespace,
                )
                call_serverless_command(["deploy"], serverless_project_dir)

            res_deployment_pb = Deployment(state=DeploymentState())
            res_deployment_pb.CopyFrom(deployment_pb)
            state = self.describe(res_deployment_pb, yatai_service).state
            res_deployment_pb.state.CopyFrom(state)

            return ApplyDeploymentResponse(status=Status.OK(),
                                           deployment=res_deployment_pb)
        except BentoMLException as error:
            return ApplyDeploymentResponse(
                status=exception_to_return_status(error))

def test_save_load(yatai_service_url, example_bento_service_class):
    yc = get_yatai_client(yatai_service_url)
    test_model = TestModel()
    svc = example_bento_service_class()
    svc.pack('model', test_model)

    saved_path = svc.save(yatai_url=yatai_service_url)
    assert saved_path

    bento_pb = yc.repository.get(f'{svc.name}:{svc.version}')
    with TempDirectory() as temp_dir:
        new_temp_dir = os.path.join(temp_dir, uuid.uuid4().hex[:12])
        yc.repository.download_to_directory(bento_pb, new_temp_dir)
        bento_service = load_from_dir(new_temp_dir)
        assert bento_service.predict(1) == 2
Example #27
def _update_azure_functions(
    namespace,
    deployment_name,
    deployment_spec,
    bento_pb,
    bento_path,
):
    azure_functions_config = deployment_spec.azure_functions_operator_config
    (
        resource_group_name,
        _,
        _,
        function_name,
        container_registry_name,
    ) = _generate_azure_resource_names(namespace, deployment_name)
    with TempDirectory() as temp_dir:
        azure_functions_project_dir = os.path.join(temp_dir,
                                                   deployment_spec.bento_name)
        _init_azure_functions_project(
            azure_functions_project_dir,
            bento_path,
            azure_functions_config,
        )
        docker_tag = _build_and_push_docker_image_to_azure_container_registry(
            azure_functions_project_dir=azure_functions_project_dir,
            container_registry_name=container_registry_name,
            resource_group_name=resource_group_name,
            bento_name=bento_pb.name,
            bento_version=bento_pb.version,
            bento_python_version=bento_pb.bento_service_metadata.env.
            python_version,
        )
        _call_az_cli(
            command=[
                'az',
                'functionapp',
                'config',
                'container',
                'set',
                '--name',
                function_name,
                '--resource-group',
                resource_group_name,
                '--docker-custom-image-name',
                docker_tag,
            ],
            message='update Azure functionapp settings',
        )
Example #28
def save(bento_service, dst, version=None):
    """Save given BentoService along with all its artifacts, source code and
    dependencies to target path

    Args:
        bento_service (bentoml.service.BentoService): a Bento Service instance
        dst (str): Destination of where the bento service will be saved. It could
            be a local file path or a s3 path
        version (:obj:`str`, optional): version text to use for saved archive

    Returns:
        string: The complete path of saved Bento service.
    """

    if version is None:
        version = _generate_new_version_str()
    _validate_version_str(version)

    if (bento_service._version_major is not None
            and bento_service._version_minor is not None):
        # BentoML uses semantic versioning for BentoService distribution
        # when user specified the MAJOR and MINOR version number along with
        # the BentoService class definition with '@ver' decorator.
        # The parameter version(or auto generated version) here will be used as
        # PATCH field in the final version:
        version = ".".join([
            str(bento_service._version_major),
            str(bento_service._version_minor),
            version,
        ])

    # Full path containing the saved BentoArchive; it is the dst path with the
    # service name and service version appended, e.g.:
    # - s3://my-bucket/base_path => s3://my-bucket/base_path/service_name/version/
    # - /tmp/my_bento_archive/ => /tmp/my_bento_archive/service_name/version/
    full_saved_path = os.path.join(dst, bento_service.name, version)

    if is_s3_url(dst):
        with TempDirectory() as tempdir:
            _save(bento_service, tempdir, version)
            upload_to_s3(full_saved_path, tempdir)
    else:
        _save(bento_service, dst, version)

    LOG.info("BentoService %s:%s saved to %s", bento_service.name, version,
             full_saved_path)
    return full_saved_path
Example #29
def local_yatai_server(db_url=None, repo_base_url=None):
    ensure_docker_available_or_raise()
    docker_client = docker.from_env()
    local_bentoml_repo_path = os.path.abspath(__file__ + "/../../../")
    yatai_docker_image_tag = f'bentoml/yatai-service:e2e-test-{uuid.uuid4().hex[:6]}'

    # Note: When both `custom_context` and `fileobj` are set, the docker api will not
    #   use the `path` provided (docker/api/build.py L138). The solution is to create
    #   an actual Dockerfile along with the path, instead of using fileobj and
    #   custom_context.
    with TempDirectory() as temp_dir:
        temp_docker_file_path = os.path.join(temp_dir, 'Dockerfile')
        with open(temp_docker_file_path, 'w') as f:
            f.write(f"""\
FROM bentoml/yatai-service:{LAST_PYPI_RELEASE_VERSION}
ADD . /bentoml-local-repo
RUN pip install /bentoml-local-repo
            """)
        logger.info(f'building docker image {yatai_docker_image_tag}')
        docker_client.images.build(
            path=local_bentoml_repo_path,
            dockerfile=temp_docker_file_path,
            tag=yatai_docker_image_tag,
        )

        container_name = f'yatai-service-container-{uuid.uuid4().hex[:6]}'
        yatai_service_url = 'localhost:50051'
        yatai_server_command = ['bentoml', 'yatai-service-start', '--no-ui']
        if db_url:
            yatai_server_command.extend(['--db-url', db_url])
        if repo_base_url:
            yatai_server_command.extend(['--repo-base-url', repo_base_url])
        container = docker_client.containers.run(
            image=yatai_docker_image_tag,
            environment=['BENTOML_HOME=/tmp'],
            ports={'50051/tcp': 50051},
            command=yatai_server_command,
            name=container_name,
            detach=True,
        )

        wait_until_container_ready(container)
        yield yatai_service_url

        logger.info(f"Shutting down docker container: {container_name}")
        container.kill()
Example #30
    def delete(self, deployment_pb, yatai_service=None):
        try:
            state = self.describe(deployment_pb, yatai_service).state
            if state.state != DeploymentState.RUNNING:
                message = ('Failed to delete, no active deployment {name}. '
                           'The current state is {state}'.format(
                               name=deployment_pb.name,
                               state=DeploymentState.State.Name(state.state),
                           ))
                return DeleteDeploymentResponse(status=Status.ABORTED(message))

            deployment_spec = deployment_pb.spec
            gcp_config = deployment_spec.gcp_function_operator_config

            bento_pb = yatai_service.GetBento(
                GetBentoRequest(
                    bento_name=deployment_spec.bento_name,
                    bento_version=deployment_spec.bento_version,
                ))
            bento_service_metadata = bento_pb.bento.bento_service_metadata
            api_names = ([gcp_config.api_name] if gcp_config.api_name else
                         [api.name for api in bento_service_metadata.apis])
            with TempDirectory() as serverless_project_dir:
                generate_gcp_function_serverless_config(
                    deployment_pb.name,
                    api_names,
                    serverless_project_dir,
                    gcp_config.region,
                    # BentoML namespace is mapping to serverless stage.
                    stage=deployment_pb.namespace,
                )
                try:
                    response = call_serverless_command(['remove'],
                                                       serverless_project_dir)
                    if "Serverless: Stack removal finished..." in response:
                        status = Status.OK()
                    else:
                        status = Status.ABORTED()
                except BentoMLException as e:
                    status = Status.INTERNAL(str(e))

            return DeleteDeploymentResponse(status=status)
        except BentoMLException as error:
            return DeleteDeploymentResponse(
                status=exception_to_return_status(error))