Example #1
def postgres_db_container_url():
    ensure_docker_available_or_raise()
    container_name = f'e2e-test-yatai-service-postgres-db-{uuid.uuid4().hex[:6]}'
    # Default 'postgres' user with the POSTGRES_PASSWORD set on the container below
    db_url = 'postgresql://postgres:postgres@localhost:5432/bentoml'

    command = [
        'docker',
        'run',
        '--rm',
        '--name',
        container_name,
        '-e',
        'POSTGRES_PASSWORD=postgres',
        '-p',
        '5432:5432',
        'postgres',
    ]
    logger.info(
        f"Starting Postgres Server container {container_name}: {command}")
    docker_proc = subprocess.Popen(command,
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE)
    wait_until_container_ready(
        container_name, b'database system is ready to accept connections')

    from sqlalchemy_utils import create_database

    create_database(db_url)
    yield db_url

    logger.info(f"Shutting down Postgres Server container: {container_name}")
    os.kill(docker_proc.pid, signal.SIGINT)
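Assuming the generator above is decorated with @pytest.fixture in the full source (the decorator is not shown in this excerpt), a test would receive the yielded URL simply by naming the fixture; a minimal sketch of such a consumer:

def test_with_postgres_backend(postgres_db_container_url):
    # The yielded value is the connection string for the throwaway container.
    assert postgres_db_container_url.startswith('postgresql://')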
Example #2
def yatai_server_container():
    ensure_docker_available_or_raise()

    yatai_docker_image_tag = f'bentoml/yatai-service:{LAST_PYPI_RELEASE_VERSION}'
    container_name = f'e2e-test-yatai-service-container-{uuid.uuid4().hex[:6]}'
    port = '50055'
    command = [
        'docker',
        'run',
        '--rm',
        '--name',
        container_name,
        '-e',
        'BENTOML_HOME=/tmp',
        '-p',
        f'{port}:{port}',
        '-p',
        '3000:3000',
        yatai_docker_image_tag,
        '--grpc-port',
        port,
    ]

    logger.info(f"Starting docker container {container_name}: {command}")
    docker_proc = subprocess.Popen(
        command, stdout=subprocess.PIPE, stderr=subprocess.PIPE
    )
    wait_until_container_ready(
        container_name, b'* Starting BentoML YataiService gRPC Server', 120
    )

    yield f'localhost:{port}'

    logger.info(f"Shutting down docker container: {container_name}")
    os.kill(docker_proc.pid, signal.SIGINT)
Example #3
    def update(self, deployment_pb, previous_deployment):
        try:
            ensure_sam_available_or_raise()
            ensure_docker_available_or_raise()
            deployment_spec = deployment_pb.spec
            ec2_deployment_config = deployment_spec.aws_ec2_operator_config
            ec2_deployment_config.region = (ec2_deployment_config.region
                                            or get_default_aws_region())
            if not ec2_deployment_config.region:
                raise InvalidArgument("AWS region is missing")

            bento_pb = self.yatai_service.GetBento(
                GetBentoRequest(
                    bento_name=deployment_spec.bento_name,
                    bento_version=deployment_spec.bento_version,
                ))

            if bento_pb.bento.uri.type not in (BentoUri.LOCAL, BentoUri.S3):
                raise BentoMLException(
                    "BentoML currently not support {} repository".format(
                        BentoUri.StorageType.Name(bento_pb.bento.uri.type)))

            return self._update(
                deployment_pb,
                previous_deployment,
                bento_pb.bento.uri.uri,
                ec2_deployment_config.region,
            )
        except BentoMLException as error:
            deployment_pb.state.state = DeploymentState.ERROR
            deployment_pb.state.error_message = f"Error: {str(error)}"
            return ApplyDeploymentResponse(status=error.status_proto,
                                           deployment=deployment_pb)
Example #4
        def ContainerizeBento(self, request, context=None):
            try:
                ensure_docker_available_or_raise()
                tag = request.tag
                if tag is None or len(tag) == 0:
                    name = to_valid_docker_image_name(request.bento_name)
                    version = to_valid_docker_image_version(
                        request.bento_version)
                    tag = f"{name}:{version}"
                if ":" not in tag:
                    version = to_valid_docker_image_version(
                        request.bento_version)
                    tag = f"{tag}:{version}"
                import docker

                docker_client = docker.from_env()
                bento_pb = self.bento_metadata_store.get(
                    request.bento_name, request.bento_version)
                if not bento_pb:
                    raise YataiRepositoryException(
                        f'BentoService {request.bento_name}:{request.bento_version} '
                        f'does not exist')

                with TempDirectory() as temp_dir:
                    temp_bundle_path = f'{temp_dir}/{bento_pb.name}'
                    bento_service_bundle_path = bento_pb.uri.uri
                    if bento_pb.uri.type == BentoUri.S3:
                        bento_service_bundle_path = self.repo.get(
                            bento_pb.name, bento_pb.version)
                    elif bento_pb.uri.type == BentoUri.GCS:
                        bento_service_bundle_path = self.repo.get(
                            bento_pb.name, bento_pb.version)
                    safe_retrieve(bento_service_bundle_path, temp_bundle_path)
                    try:
                        docker_client.images.build(
                            path=temp_bundle_path,
                            tag=tag,
                            buildargs=dict(request.build_args),
                        )
                    except (docker.errors.APIError,
                            docker.errors.BuildError) as error:
                        logger.error(
                            f'Encountered a container build issue: {error}')
                        raise YataiRepositoryException(error)
                    if request.push is True:
                        try:
                            docker_client.images.push(
                                repository=request.repository, tag=tag)
                        except docker.errors.APIError as error:
                            raise YataiRepositoryException(error)

                    return ContainerizeBentoResponse(status=Status.OK(),
                                                     tag=tag)
            except BentoMLException as e:
                logger.error(f"RPC ERROR ContainerizeBento: {e}")
                return ContainerizeBentoResponse(status=e.status_proto)
            except Exception as e:  # pylint: disable=broad-except
                logger.error(f"RPC ERROR ContainerizeBento: {e}")
                return ContainerizeBentoResponse(status=Status.INTERNAL(e))
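The tag fallback at the top of the handler is worth reading in isolation; below is a simplified standalone sketch of the same logic, omitting the to_valid_docker_image_name/to_valid_docker_image_version sanitization for brevity:

def resolve_image_tag(tag, bento_name, bento_version):
    # No tag given: default to "<name>:<version>".
    if not tag:
        return f"{bento_name}:{bento_version}"
    # Repository given without a tag: append the bento version.
    if ":" not in tag:
        return f"{tag}:{bento_version}"
    return tag

# resolve_image_tag(None, "IrisClassifier", "20210101")          -> "IrisClassifier:20210101"
# resolve_image_tag("myrepo/iris", "IrisClassifier", "20210101") -> "myrepo/iris:20210101"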
Example #5
def test_ensure_docker_available_or_raise():
    with patch('docker.from_env') as from_env_mock:
        from_env_mock.side_effect = docker.errors.DockerException(
            'no docker error')
        with pytest.raises(MissingDependencyException) as error:
            ensure_docker_available_or_raise()
        assert str(error.value).startswith('Docker is required')

        from_env_mock.side_effect = None
        mock_docker_client = Mock()
        mock_docker_client.ping = Mock(
            side_effect=docker.errors.APIError('no response'))
        from_env_mock.return_value = mock_docker_client
        with pytest.raises(MissingDependencyException) as server_error:
            ensure_docker_available_or_raise()
        assert str(
            server_error.value).startswith('Docker server is not responsive.')
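The test pins down the helper's contract: it must raise MissingDependencyException when docker.from_env() fails and again when client.ping() fails. A plausible sketch reconstructed from the test alone (not necessarily the actual BentoML implementation; the exception import path is an assumption):

import docker
from bentoml.exceptions import MissingDependencyException  # import path assumed

def ensure_docker_available_or_raise_sketch():
    # Raise if the docker client cannot be constructed at all.
    try:
        client = docker.from_env()
    except docker.errors.DockerException as error:
        raise MissingDependencyException(
            f'Docker is required for this operation: {error}')
    # Raise if the daemon is reachable but not responding to pings.
    try:
        client.ping()
    except docker.errors.APIError as error:
        raise MissingDependencyException(
            f'Docker server is not responsive. {error}')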
Example #6
def local_yatai_service_container(db_url=None, repo_base_url=None):
    ensure_docker_available_or_raise()
    docker_client = docker.from_env()
    local_bentoml_repo_path = os.path.abspath(__file__ + "/../../../")
    yatai_docker_image_tag = f'bentoml/yatai-service:e2e-test-{uuid.uuid4().hex[:6]}'

    # Note: when both `custom_context` and `fileobj` are set, the docker API ignores
    #   the `path` argument (see docker/api/build.py L138). The workaround is to write
    #   an actual Dockerfile next to the build path instead of using fileobj with
    #   custom_context.
    with TempDirectory() as temp_dir:
        temp_docker_file_path = os.path.join(temp_dir, 'Dockerfile')
        with open(temp_docker_file_path, 'w') as f:
            f.write(f"""\
FROM bentoml/yatai-service:{LAST_PYPI_RELEASE_VERSION}
ADD . /bentoml-local-repo
RUN pip install -U /bentoml-local-repo
            """)
        logger.info(f'building docker image {yatai_docker_image_tag}')
        docker_client.images.build(
            path=local_bentoml_repo_path,
            dockerfile=temp_docker_file_path,
            tag=yatai_docker_image_tag,
        )

        container_name = f'yatai-service-container-{uuid.uuid4().hex[:6]}'
        yatai_service_url = 'localhost:50051'
        yatai_server_command = ['bentoml', 'yatai-service-start', '--no-ui']
        if db_url:
            yatai_server_command.extend(['--db-url', db_url])
        if repo_base_url:
            yatai_server_command.extend(['--repo-base-url', repo_base_url])
        container = docker_client.containers.run(
            image=yatai_docker_image_tag,
            environment=['BENTOML_HOME=/tmp'],
            ports={'50051/tcp': 50051},
            command=yatai_server_command,
            name=container_name,
            detach=True,
        )

        wait_until_container_ready(container)
        yield yatai_service_url

        logger.info(f"Shutting down docker container: {container_name}")
        container.kill()
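Like the earlier fixtures, this generator yields the service address and tears the container down afterwards. One hedged way to consume it outside of a pytest fixture is to wrap it with contextlib; the wrapper below is an illustration, not part of the source:

from contextlib import contextmanager

# Turn the generator function into a reusable context manager.
local_yatai_service = contextmanager(local_yatai_service_container)

with local_yatai_service() as address:
    print(f'YataiService is reachable at {address}')  # e.g. localhost:50051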
Example #7
    def __init__(self, yatai_service):
        super(SageMakerDeploymentOperator, self).__init__(yatai_service)
        ensure_docker_available_or_raise()
Example #8
    def __init__(self, yatai_service):
        super(AwsLambdaDeploymentOperator, self).__init__(yatai_service)
        ensure_docker_available_or_raise()
        ensure_sam_available_or_raise()
Example #9
    def __init__(self, yatai_service):
        super(AzureFunctionsDeploymentOperator, self).__init__(yatai_service)
        ensure_docker_available_or_raise()
        _assert_azure_cli_available()
        _assert_az_cli_logged_in()
Example #10
def deploy_bentoml(clipper_conn,
                   bundle_path,
                   api_name,
                   model_name=None,
                   labels=None,
                   build_envs=None):
    """Deploy bentoml bundle to clipper cluster

    Args:
        clipper_conn(clipper_admin.ClipperConnection): Clipper connection instance
        bundle_path(str): Path to the saved BentomlService bundle.
        api_name(str): name of the api that will be used as prediction function for
            clipper cluster
        model_name(str): Model's name for clipper cluster
        labels(:obj:`list(str)`, optional): labels for clipper model

    Returns:
        tuple: Model name and model version that deployed to clipper

    """
    track("clipper-deploy", {'bento_service_bundle_path': bundle_path})

    build_envs = {} if build_envs is None else build_envs
    # docker is required to build clipper model image
    ensure_docker_available_or_raise()

    if not clipper_conn.connected:
        raise BentoMLException(
            "No connection to Clipper cluster. CallClipperConnection.connect to "
            "connect to an existing cluster or ClipperConnection.start_clipper to "
            "create a new one")

    bento_service_metadata = load_bento_service_metadata(bundle_path)

    try:
        api_metadata = next((api for api in bento_service_metadata.apis
                             if api.name == api_name))
    except StopIteration:
        raise BentoMLException(
            "Can't find API '{}' in BentoService bundle {}".format(
                api_name, bento_service_metadata.name))

    if api_metadata.input_type not in ADAPTER_TYPE_TO_INPUT_TYPE:
        raise BentoMLException(
            "Only BentoService APIs using ClipperInput can be deployed to Clipper"
        )

    input_type = ADAPTER_TYPE_TO_INPUT_TYPE[api_metadata.input_type]
    model_name = model_name or get_clipper_compatible_string(
        bento_service_metadata.name + "-" + api_metadata.name)
    model_version = get_clipper_compatible_string(
        bento_service_metadata.version)

    with TempDirectory() as tempdir:
        entry_py_content = CLIPPER_ENTRY.format(api_name=api_name)
        build_path = os.path.join(tempdir, "bento")
        shutil.copytree(bundle_path, build_path)

        with open(os.path.join(build_path, "clipper_entry.py"), "w") as f:
            f.write(entry_py_content)

        if bento_service_metadata.env.python_version.startswith("3.6"):
            base_image = "clipper/python36-closure-container:0.4.1"
        elif bento_service_metadata.env.python_version.startswith("2.7"):
            base_image = "clipper/python-closure-container:0.4.1"
        else:
            raise BentoMLException(
                "Python version {} is not supported in Clipper".format(
                    bento_service_metadata.env.python_version))

        docker_content = CLIPPER_DOCKERFILE.format(
            model_name=model_name,
            model_version=model_version,
            base_image=base_image,
            pip_index_url=build_envs.get("PIP_INDEX_URL", ""),
            pip_trusted_url=build_envs.get("PIP_TRUSTED_HOST", ""),
        )
        with open(os.path.join(build_path, "Dockerfile-clipper"), "w") as f:
            f.write(docker_content)

        clipper_model_docker_image_tag = "clipper-model-{}:{}".format(
            bento_service_metadata.name.lower(),
            bento_service_metadata.version)
        build_docker_image(build_path, 'Dockerfile-clipper',
                           clipper_model_docker_image_tag)

        logger.info(
            "Successfully built docker image %s for Clipper deployment",
            clipper_model_docker_image_tag,
        )

    clipper_conn.deploy_model(
        name=model_name,
        version=model_version,
        input_type=input_type,
        image=clipper_model_docker_image_tag,
        labels=labels,
    )

    track("clipper-deploy-success", {'bento_service_bundle_path': bundle_path})
    return model_name, model_version
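A minimal, hedged call sketch, assuming a Clipper cluster managed locally through clipper_admin's DockerContainerManager; the bundle path and API name below are placeholders:

from clipper_admin import ClipperConnection, DockerContainerManager

clipper_conn = ClipperConnection(DockerContainerManager())
clipper_conn.connect()  # or clipper_conn.start_clipper() to create a new cluster

model_name, model_version = deploy_bentoml(
    clipper_conn,
    bundle_path='/path/to/saved/IrisClassifier',  # placeholder bundle path
    api_name='predict',  # must be a BentoService API that uses a ClipperInput adapter
)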