Example #1
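A minimal development-server launcher: it loads a saved bundle from disk and serves it on the requested port, with the Swagger UI toggled by a flag.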
def _start_dev_server(
    saved_bundle_path: str,
    api_server_port: int,
    enable_swagger: bool,
):
    logger.info("Starting BentoML API server in development mode...")

    from bentoml.server.api_server import BentoAPIServer
    from bentoml.saved_bundle import load_from_dir

    bento_service = load_from_dir(saved_bundle_path)
    api_server = BentoAPIServer(bento_service, enable_swagger=enable_swagger)
    api_server.start(port=api_server_port)
Example #2
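This variant loads the service via bentoml.load, optionally opens an ngrok tunnel to the dev server, and, when micro-batching is enabled, places a MarshalService on the public port in front of an API server bound to a freshly reserved local port.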
def start_dev_server(saved_bundle_path: str, port: int,
                     enable_microbatch: bool, run_with_ngrok: bool):
    logger.info("Starting BentoML API server in development mode...")

    from bentoml import load
    from bentoml.server.api_server import BentoAPIServer
    from bentoml.marshal.marshal import MarshalService
    from bentoml.utils import reserve_free_port

    bento_service = load(saved_bundle_path)

    if run_with_ngrok:
        from bentoml.utils.flask_ngrok import start_ngrok
        from threading import Timer

        # Open an ngrok tunnel shortly after the dev server comes up
        thread = Timer(1, start_ngrok, args=(port,))
        thread.daemon = True
        thread.start()

    if enable_microbatch:
        with reserve_free_port() as api_server_port:
            # Start the server right after the reserved port is released
            # to keep the window for a port-grab race small
            marshal_server = MarshalService(
                saved_bundle_path,
                outbound_host="localhost",
                outbound_port=api_server_port,
                outbound_workers=1,
            )
            api_server = BentoAPIServer(bento_service, port=api_server_port)
        marshal_server.async_start(port=port)
        api_server.start()
    else:
        api_server = BentoAPIServer(bento_service, port=port)
        api_server.start()
Example #3
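The same dev-server flow with dependency-injected defaults (Provide[...] and BentoMLContainer are assumed to be imported at module level, as is the start_dev_batching_server helper); with micro-batching enabled, the marshal server runs in a separate daemon process.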
def start_dev_server(
    saved_bundle_path: str,
    port: int = Provide[BentoMLContainer.config.api_server.port],
    enable_microbatch: bool = Provide[
        BentoMLContainer.config.api_server.enable_microbatch],
    mb_max_batch_size: int = Provide[
        BentoMLContainer.config.marshal_server.max_batch_size],
    mb_max_latency: int = Provide[
        BentoMLContainer.config.marshal_server.max_latency],
    run_with_ngrok: bool = Provide[
        BentoMLContainer.config.api_server.run_with_ngrok],
    enable_swagger: bool = Provide[
        BentoMLContainer.config.api_server.enable_swagger],
):
    logger.info("Starting BentoML API server in development mode...")

    import multiprocessing

    from bentoml.saved_bundle import load_from_dir
    from bentoml.server.api_server import BentoAPIServer
    from bentoml.utils import reserve_free_port

    if run_with_ngrok:
        from threading import Timer

        from bentoml.utils.flask_ngrok import start_ngrok

        # Open an ngrok tunnel shortly after the dev server comes up
        thread = Timer(1, start_ngrok, args=(port,))
        thread.daemon = True
        thread.start()

    if enable_microbatch:
        with reserve_free_port() as api_server_port:
            # Start the server right after the reserved port is released
            # to keep the window for a port-grab race small

            marshal_proc = multiprocessing.Process(
                target=start_dev_batching_server,
                kwargs=dict(
                    api_server_port=api_server_port,
                    saved_bundle_path=saved_bundle_path,
                    port=port,
                    mb_max_latency=mb_max_latency,
                    mb_max_batch_size=mb_max_batch_size,
                ),
                daemon=True,
            )
        marshal_proc.start()

        bento_service = load_from_dir(saved_bundle_path)
        api_server = BentoAPIServer(bento_service,
                                    port=api_server_port,
                                    enable_swagger=enable_swagger)
        api_server.start()
    else:
        bento_service = load_from_dir(saved_bundle_path)
        api_server = BentoAPIServer(bento_service,
                                    port=port,
                                    enable_swagger=enable_swagger)
        api_server.start()
Example #4
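A Flask test-client walkthrough of the server's built-in routes (/, /healthz, /metadata, /docs.json) and of inference endpoints taking DataFrame and image inputs.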
def test_api_function_route(bento_service, img_file):
    import json
    from io import BytesIO

    import imageio  # noqa # pylint: disable=unused-import
    import numpy as np  # noqa # pylint: disable=unused-import
    from bentoml.server.api_server import BentoAPIServer

    rest_server = BentoAPIServer(bento_service)
    test_client = rest_server.app.test_client()

    index_list = []
    for rule in rest_server.app.url_map.iter_rules():
        index_list.append(rule.endpoint)

    response = test_client.get("/")
    assert 200 == response.status_code

    response = test_client.get("/healthz")
    assert 200 == response.status_code

    response = test_client.get("/metadata")
    assert 200 == response.status_code

    response = test_client.get("/docs.json")
    assert 200 == response.status_code

    assert "predict_dataframe" in index_list
    data = [{"col1": 10}, {"col1": 20}]
    response = test_client.post(
        "/predict_dataframe", data=json.dumps(data), content_type="application/json"
    )
    assert response.data.decode().strip() == '[{"col1":20},{"col1":40}]'

    assert "predict_dataframe_v1" in index_list
    data = [{"col1": 10}, {"col1": 20}]
    response = test_client.post(
        "/predict_dataframe_v1", data=json.dumps(data), content_type="application/json"
    )
    assert response.data.decode().strip() == '[{"col1":20},{"col1":40}]'

    # Test ImageInput.
    with open(str(img_file), "rb") as f:
        img = f.read()

    response = test_client.post(
        "/predict_image", data={'image': (BytesIO(img), 'test_img.png')}
    )
    assert 200 == response.status_code
    assert "[10, 10, 3]" in str(response.data)

    response = test_client.post(
        "/predict_multi_images",
        data={
            'original': (BytesIO(img), 'original.jpg'),
            'compared': (BytesIO(img), 'compared.jpg'),
        },
    )
    assert 200 == response.status_code
Example #5
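This launcher wires a BentoMLContainer from an explicit BentoMLConfiguration before starting the server; both classes are assumed to be imported at module level, typically from bentoml.configuration.containers.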
def _start_dev_server(
    saved_bundle_path: str,
    api_server_port: int,
    config: BentoMLConfiguration,
):

    logger.info("Starting BentoML API server in development mode...")

    import sys

    from bentoml.saved_bundle import load_from_dir

    bento_service = load_from_dir(saved_bundle_path)

    from bentoml.server.api_server import BentoAPIServer

    container = BentoMLContainer()
    container.config.from_dict(config.as_dict())
    container.wire(packages=[sys.modules[__name__]])

    api_server = BentoAPIServer(bento_service)
    api_server.start(port=api_server_port)
Example #6
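With enable_swagger=False, the index page and /docs return 404 while /healthz and /docs.json remain available.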
def test_api_function_route_with_disabled_swagger(bento_service):
    from bentoml.server.api_server import BentoAPIServer

    rest_server = BentoAPIServer(bento_service, enable_swagger=False)
    test_client = rest_server.app.test_client()

    response = test_client.get("/")
    assert 404 == response.status_code

    response = test_client.get("/docs")
    assert 404 == response.status_code

    response = test_client.get("/healthz")
    assert 200 == response.status_code

    response = test_client.get("/docs.json")
    assert 200 == response.status_code
Example #7
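The same disabled-swagger check against the fuller BentoAPIServer constructor, which also enables the metrics and feedback endpoints and sets the header flag used to mark merged micro-batch requests.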
def test_api_function_route_with_disabled_swagger(bento_service):
    from bentoml.server.api_server import BentoAPIServer

    rest_server = BentoAPIServer(
        bento_service=bento_service,
        enable_swagger=False,
        enable_metrics=True,
        enable_feedback=True,
        request_header_flag="BentoML-Is-Merged-Request",
    )
    test_client = rest_server.app.test_client()

    response = test_client.get("/")
    assert 404 == response.status_code

    response = test_client.get("/docs")
    assert 404 == response.status_code

    response = test_client.get("/healthz")
    assert 200 == response.status_code

    response = test_client.get("/docs.json")
    assert 200 == response.status_code
Example #8
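A complete Azure Functions entry point: the saved bundle is loaded once at import time, and each HTTP request is handed to the Flask WSGI app through func.WsgiMiddleware.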
# Copyright 2019 Atalaya Tech, Inc.

# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at

# http://www.apache.org/licenses/LICENSE-2.0

# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import azure.functions as func  # pylint: disable=import-error

from bentoml.server.api_server import BentoAPIServer
from bentoml.saved_bundle import load_from_dir

# The saved bundle lives one directory above this function's folder
bento_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
svc = load_from_dir(bento_path)

bento_server = BentoAPIServer(svc)


def main(req: func.HttpRequest, context: func.Context) -> func.HttpResponse:
    return func.WsgiMiddleware(bento_server.app.wsgi_app).handle(req, context)
Example #9
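A fuller route test that additionally exercises a custom route (CUSTOM_ROUTE, a constant defined elsewhere in the test module) and checks that it appears in the generated OpenAPI document at /docs.json.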
def test_api_function_route(bento_service, img_file):
    import json
    from io import BytesIO

    import imageio  # noqa # pylint: disable=unused-import
    import numpy as np  # noqa # pylint: disable=unused-import
    from bentoml.server.api_server import BentoAPIServer

    rest_server = BentoAPIServer(
        bento_service=bento_service,
        port=5000,
        enable_swagger=True,
        enable_metrics=True,
        enable_feedback=True,
        request_header_flag="BentoML-Is-Merged-Request",
    )
    test_client = rest_server.app.test_client()

    index_list = []
    for rule in rest_server.app.url_map.iter_rules():
        index_list.append(rule.endpoint)

    response = test_client.get("/")
    assert 200 == response.status_code

    response = test_client.get("/healthz")
    assert 200 == response.status_code

    response = test_client.get("/metadata")
    assert 200 == response.status_code

    response = test_client.get("/docs.json")
    assert 200 == response.status_code
    docs = json.loads(response.data.decode())
    assert f"/{CUSTOM_ROUTE}" in docs["paths"]

    response = test_client.post(
        f"/{CUSTOM_ROUTE}",
        data='{"a": 1}',
    )
    assert 200 == response.status_code
    assert '{"a": 1}' == response.data.decode()

    assert "predict_dataframe" in index_list
    data = [{"col1": 10}, {"col1": 20}]
    response = test_client.post("/predict_dataframe",
                                data=json.dumps(data),
                                content_type="application/json")
    assert response.data.decode().strip() == '[{"col1":20},{"col1":40}]'

    assert "predict_dataframe_v1" in index_list
    data = [{"col1": 10}, {"col1": 20}]
    response = test_client.post("/predict_dataframe_v1",
                                data=json.dumps(data),
                                content_type="application/json")
    assert response.data.decode().strip() == '[{"col1":20},{"col1":40}]'

    # Test ImageInput.
    with open(str(img_file), "rb") as f:
        img = f.read()

    response = test_client.post("/predict_image",
                                data={'image': (BytesIO(img), 'test_img.png')})
    assert 200 == response.status_code
    assert "[10, 10, 3]" in str(response.data)

    response = test_client.post(
        "/predict_multi_images",
        data={
            'original': (BytesIO(img), 'original.jpg'),
            'compared': (BytesIO(img), 'compared.jpg'),
        },
    )
    assert 200 == response.status_code