def serve(port, archive_path=archive_path, with_conda=False):
    if with_conda:
        config = load_bentoml_config(archive_path)
        metadata = config['metadata']
        env_name = metadata['service_name'] + '_' + metadata['service_version']
        pip_req = os.path.join(archive_path, 'requirements.txt')

        subprocess.call(
            'command -v conda >/dev/null 2>&1 || {{ echo >&2 "--with-conda '
            'parameter requires conda but it\'s not installed."; exit 1; }} && '
            'conda env update -n {env_name} -f {env_file} && '
            'conda init bash && '
            'eval "$(conda shell.bash hook)" && '
            'conda activate {env_name} && '
            '{{ [ -f {pip_req} ] && pip install -r {pip_req} || echo "no pip '
            'dependencies."; }} && '
            'bentoml serve {archive_path} --port {port}'.format(
                env_name=env_name,
                env_file=os.path.join(archive_path, 'environment.yml'),
                archive_path=archive_path,
                port=port,
                pip_req=pip_req,
            ),
            shell=True,
        )
        return

    track_cli('serve')
    bento_service = load(archive_path)
    server = BentoAPIServer(bento_service, port=port)
    server.start()
def serve(port, archive_path=installed_archive_path):
    """
    Start REST API server hosting BentoService loaded from archive
    """
    model_service = load(archive_path)
    server = BentoAPIServer(model_service, port=port)
    server.start()
def serve(port, bento=None, with_conda=False, enable_microbatch=False):
    track_cli('serve')
    bento_service_bundle_path = resolve_bundle_path(
        bento, pip_installed_bundle_path
    )
    bento_service = load(bento_service_bundle_path)

    if with_conda:
        run_with_conda_env(
            bento_service_bundle_path,
            'bentoml serve {bento} --port {port} {flags}'.format(
                bento=bento_service_bundle_path,
                port=port,
                flags="--enable-microbatch" if enable_microbatch else "",
            ),
        )
        return

    if enable_microbatch:
        with reserve_free_port() as api_server_port:
            # start server right after port released
            # to reduce potential race
            marshal_server = MarshalService(
                bento_service_bundle_path,
                outbound_host="localhost",
                outbound_port=api_server_port,
                outbound_workers=1,
            )
            api_server = BentoAPIServer(bento_service, port=api_server_port)
        marshal_server.async_start(port=port)
        api_server.start()
    else:
        api_server = BentoAPIServer(bento_service, port=port)
        api_server.start()
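The microbatch path above grabs a temporary port for the inner API server via reserve_free_port. A minimal sketch of such a helper, assuming it simply binds an ephemeral socket and releases it on exit (hypothetical implementation, not BentoML's actual code):

import contextlib
import socket


@contextlib.contextmanager
def reserve_free_port(host="localhost"):
    # Bind port 0 so the OS picks an unused port, hand that number to the
    # caller, then close the socket so the real server can bind it.
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.bind((host, 0))
    try:
        yield sock.getsockname()[1]
    finally:
        # Releasing here is why the caller starts its server immediately
        # afterwards: another process could still grab the port in between.
        sock.close()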
def test_api_function_route(bento_service):
    rest_server = BentoAPIServer(bento_service)
    test_client = rest_server.app.test_client()

    index_list = []
    for rule in rest_server.app.url_map.iter_rules():
        index_list.append(rule.endpoint)

    response = test_client.get("/")
    assert 200 == response.status_code

    response = test_client.get("/healthz")
    assert 200 == response.status_code

    response = test_client.get("/docs.json")
    assert 200 == response.status_code

    assert "predict" in index_list
    data = [{"age": 10}]
    response = test_client.post(
        "/predict", data=json.dumps(data), content_type="application/json"
    )
    response_data = json.loads(response.data)
    assert 15 == response_data[0]["age"]

    # Test Image handlers.
    with open(os.path.join(CUR_PATH, "white-plane-sky.jpg"), "rb") as f:
        img = f.read()

    response = test_client.post("/predictImage", data=img, content_type="image/png")
    assert 200 == response.status_code

    response = test_client.post(
        "/predictImages",
        data={
            'original': (BytesIO(img), 'original.jpg'),
            'compared': (BytesIO(img), 'compared.jpg'),
        },
    )
    assert 200 == response.status_code

    # Test Fastai Image Handlers.
    if sys.version_info >= (3, 6):
        # fastai requires Python 3.6 or higher.
        response = test_client.post(
            "/predictFastaiImage", data=img, content_type="image/png"
        )
        assert 200 == response.status_code

        response = test_client.post(
            "/predictFastaiImages",
            data={
                'original': (BytesIO(img), 'original.jpg'),
                'compared': (BytesIO(img), 'compared.jpg'),
            },
        )
        assert 200 == response.status_code
def serve_gunicorn(port, workers, archive_path=installed_archive_path):
    """
    Start REST API gunicorn server hosting BentoService loaded from archive
    """
    model_service = load(archive_path)
    server = BentoAPIServer(model_service, port=port)
    gunicorn_app = GunicornApplication(server.app, port, workers)
    gunicorn_app.run()
def serve(port, bento=None, with_conda=False):
    track_cli('serve')
    bento_service_bundle_path = resolve_bundle_path(
        bento, pip_installed_bundle_path
    )

    if with_conda:
        run_with_conda_env(
            bento_service_bundle_path,
            'bentoml serve {bento} --port {port}'.format(
                bento=bento_service_bundle_path,
                port=port,
            ),
        )
        return

    bento_service = load(bento_service_bundle_path)
    server = BentoAPIServer(bento_service, port=port)
    server.start()
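run_with_conda_env factors out the inline conda invocation shown in the first snippet. A rough sketch under that assumption (hypothetical helper; the argument names and environment-name parsing are illustrative):

import os
import subprocess


def run_with_conda_env(bundle_path, command):
    # Hypothetical sketch, modeled on the inline shell pipeline above:
    # recreate the bundle's conda environment, activate it, install pip
    # requirements if present, then run the given command inside it.
    env_file = os.path.join(bundle_path, 'environment.yml')
    pip_req = os.path.join(bundle_path, 'requirements.txt')

    # Read the environment name from environment.yml (illustrative parsing;
    # the real helper may derive the name from the bundle's metadata instead).
    env_name = 'bentoml_env'
    with open(env_file) as f:
        for line in f:
            if line.startswith('name:'):
                env_name = line.split(':', 1)[1].strip()
                break

    subprocess.call(
        'command -v conda >/dev/null 2>&1 || '
        '{{ echo >&2 "conda is required but not installed."; exit 1; }} && '
        'conda env update -n {env_name} -f {env_file} && '
        'eval "$(conda shell.bash hook)" && '
        'conda activate {env_name} && '
        '{{ [ -f {pip_req} ] && pip install -r {pip_req} || '
        'echo "no pip dependencies."; }} && '
        '{command}'.format(
            env_name=env_name,
            env_file=env_file,
            pip_req=pip_req,
            command=command,
        ),
        shell=True,
    )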
def test_api_function_route(bento_service, tmpdir, img_file):
    import imageio  # noqa # pylint: disable=unused-import
    import numpy as np  # noqa # pylint: disable=unused-import

    rest_server = BentoAPIServer(bento_service)
    test_client = rest_server.app.test_client()

    index_list = []
    for rule in rest_server.app.url_map.iter_rules():
        index_list.append(rule.endpoint)

    response = test_client.get("/")
    assert 200 == response.status_code

    response = test_client.get("/healthz")
    assert 200 == response.status_code

    response = test_client.get("/docs.json")
    assert 200 == response.status_code

    assert "predict_dataframe" in index_list
    data = [{"col1": 10}, {"col1": 20}]
    response = test_client.post(
        "/predict_dataframe",
        data=json.dumps(data),
        content_type="application/json",
    )
    assert response.data.decode().strip() == '30'

    assert "predict_dataframe_v1" in index_list
    data = [{"col1": 10}, {"col1": 20}]
    response = test_client.post(
        "/predict_dataframe_v1",
        data=json.dumps(data),
        content_type="application/json",
    )
    assert response.data.decode().strip() == '30'

    # Test ImageInput.
    with open(str(img_file), "rb") as f:
        img = f.read()

    response = test_client.post(
        "/predict_image", data={'image': (BytesIO(img), 'test_img.png')}
    )
    assert 200 == response.status_code
    assert "[10, 10, 3]" in str(response.data)

    response = test_client.post(
        "/predict_legacy_images",
        data={
            'original': (BytesIO(img), 'original.jpg'),
            'compared': (BytesIO(img), 'compared.jpg'),
        },
    )
    assert 200 == response.status_code
def test_api_function_route(bento_service, tmpdir):
    import imageio
    import numpy as np

    rest_server = BentoAPIServer(bento_service)
    test_client = rest_server.app.test_client()

    index_list = []
    for rule in rest_server.app.url_map.iter_rules():
        index_list.append(rule.endpoint)

    response = test_client.get("/")
    assert 200 == response.status_code

    response = test_client.get("/healthz")
    assert 200 == response.status_code

    response = test_client.get("/docs.json")
    assert 200 == response.status_code

    assert "predict_dataframe" in index_list
    data = [{"col1": 10}, {"col1": 20}]
    response = test_client.post(
        "/predict_dataframe",
        data=json.dumps(data),
        content_type="application/json",
    )
    assert response.data.decode().strip() == '"30"'

    # Test Image handlers.
    img_file = tmpdir.join("test_img.png")
    imageio.imwrite(str(img_file), np.zeros((10, 10)))
    with open(str(img_file), "rb") as f:
        img = f.read()

    response = test_client.post(
        "/predict_image", data={'image': (BytesIO(img), 'test_img.png')}
    )
    assert 200 == response.status_code
    assert "[10, 10, 3]" in str(response.data)

    response = test_client.post(
        "/predict_images",
        data={
            'original': (BytesIO(img), 'original.jpg'),
            'compared': (BytesIO(img), 'compared.jpg'),
        },
    )
    assert 200 == response.status_code
def test_api_function_route(bento_service):
    rest_server = BentoAPIServer(bento_service)
    test_client = rest_server.app.test_client()

    index_list = []
    for rule in rest_server.app.url_map.iter_rules():
        index_list.append(rule.endpoint)

    assert 'predict' in index_list
    data = [{'age': 10}]
    response = test_client.post(
        '/predict', data=json.dumps(data), content_type='application/json'
    )
    response_data = json.loads(response.data)
    assert 15 == response_data[0]['age']
def test_api_function_route(bento_service):
    rest_server = BentoAPIServer(bento_service)
    test_client = rest_server.app.test_client()

    index_list = []
    for rule in rest_server.app.url_map.iter_rules():
        index_list.append(rule.endpoint)

    response = test_client.get("/")
    assert 200 == response.status_code

    response = test_client.get("/docs.json")
    assert 200 == response.status_code

    assert "predict" in index_list
    data = [{"age": 10}]
    response = test_client.post(
        "/predict", data=json.dumps(data), content_type="application/json"
    )
    response_data = json.loads(response.data)
    assert 15 == response_data[0]["age"]
def load(self):
    bento_service = load(self.bento_archive_path)
    api_server = BentoAPIServer(bento_service, port=self.port)
    return api_server.app
def serve_gunicorn(port, workers, timeout, archive_path=archive_path):
    model_service = load(archive_path)
    server = BentoAPIServer(model_service, port=port)
    gunicorn_app = GunicornApplication(server.app, port, workers, timeout)
    gunicorn_app.run()
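GunicornApplication wraps the Flask WSGI app in a programmatic gunicorn server. A minimal sketch following gunicorn's documented custom-application pattern (an assumed shape for illustration, not necessarily how BentoML implements it):

import gunicorn.app.base


class GunicornApplication(gunicorn.app.base.BaseApplication):
    # Assumed wrapper: feed bind address, worker count and timeout into
    # gunicorn's config, then serve the given WSGI application.
    def __init__(self, app, port, workers, timeout=None):
        self.application = app
        self.options = {'bind': '0.0.0.0:{}'.format(port), 'workers': workers}
        if timeout is not None:
            self.options['timeout'] = timeout
        super().__init__()

    def load_config(self):
        for key, value in self.options.items():
            self.cfg.set(key, value)

    def load(self):
        return self.application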
def serve(port, archive_path=archive_path):
    model_service = load(archive_path)
    server = BentoAPIServer(model_service, port=port)
    server.start()
def create_rest_server():
    saved_path = generate_fake_dataframe_model()
    model_service = bentoml.load(saved_path)
    rest_server = BentoAPIServer(model_service)
    return rest_server
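One possible way to exercise the helper above with Flask's test client, in the style of the other tests in this section (the /predict route name and DataFrame payload are assumptions about the fake model, shown for illustration only):

import json

rest_server = create_rest_server()
test_client = rest_server.app.test_client()

# Assumed endpoint and payload for the fake dataframe model.
response = test_client.post(
    '/predict',
    data=json.dumps([{'col1': 10}, {'col1': 20}]),
    content_type='application/json',
)
assert response.status_code == 200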
def serve(port, archive_path=archive_path):
    track_cli('serve')
    bento_service = load(archive_path)
    server = BentoAPIServer(bento_service, port=port)
    server.start()
# Copyright 2019 Atalaya Tech, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os

import azure.functions as func  # pylint: disable=import-error

from bentoml.server import BentoAPIServer
from bentoml import load

bento_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
svc = load(bento_path)
bento_server = BentoAPIServer(svc)


def main(req: func.HttpRequest, context: func.Context) -> func.HttpResponse:
    return func.WsgiMiddleware(bento_server.app.wsgi_app).handle(req, context)