def open_api_spec(bento=None):
    """Print the OpenAPI specification of a saved bundle as pretty JSON.

    Resolves *bento* (a name/tag or path; may be None when running from a
    pip-installed bundle) to a bundle path, loads the service, and echoes
    its OpenAPI spec with 2-space indentation.
    """
    bundle_path = resolve_bundle_path(bento, pip_installed_bundle_path)
    service = load(bundle_path)
    spec = get_open_api_spec_json(service)
    _echo(json.dumps(spec, indent=2))
def serve(port, bento=None, with_conda=False, enable_microbatch=False):
    """Start a local API server for the given saved bundle.

    Three mutually exclusive modes:
      * with_conda — re-invoke ``bentoml serve`` inside the bundle's conda env;
      * enable_microbatch — run a MarshalService front-end on *port* that
        forwards batched requests to a BentoAPIServer on a free local port;
      * default — run a plain BentoAPIServer directly on *port*.
    """
    bundle_path = resolve_bundle_path(bento, pip_installed_bundle_path)
    service = load(bundle_path)

    if with_conda:
        flags = "--enable-microbatch" if enable_microbatch else ""
        command = 'bentoml serve {bento} --port {port} {flags}'.format(
            bento=bundle_path, port=port, flags=flags
        )
        return run_with_conda_env(bundle_path, command)

    if not enable_microbatch:
        # Plain mode: single API server bound straight to the requested port.
        api_server = BentoAPIServer(service, port=port)
        api_server.start()
        return

    # Deferred import: marshal dependencies are only needed in this mode.
    from bentoml.marshal.marshal import MarshalService

    with reserve_free_port() as api_server_port:
        # start server right after port released
        # to reduce potential race
        marshal_server = MarshalService(
            bundle_path,
            outbound_host="localhost",
            outbound_port=api_server_port,
            outbound_workers=1,
        )
        api_server = BentoAPIServer(service, port=api_server_port)
    marshal_server.async_start(port=port)
    api_server.start()
def open_api_spec(bento=None):
    """Record CLI usage, then print the bundle's OpenAPI spec as JSON.

    Same behavior as the untracked variant, with a ``track_cli`` call
    emitted first for usage analytics.
    """
    track_cli('open-api-spec')
    path = resolve_bundle_path(bento, pip_installed_bundle_path)
    service = load(path)
    _echo(json.dumps(get_open_api_spec_json(service), indent=2))
def load():
    """Load and return the BentoService saved at this module's bundle path."""
    service = saved_bundle.load(__module_path)
    return service
def load(self):
    """Load the configured bundle and return its WSGI application.

    Builds a GunicornBentoAPIServer around the service loaded from
    ``self.bento_service_bundle_path``, listening on ``self.port``.
    """
    service = load(self.bento_service_bundle_path)
    server = GunicornBentoAPIServer(service, port=self.port)
    return server.app
# Copyright 2019 Atalaya Tech, Inc. # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os from bentoml.saved_bundle import load from bentoml.yatai.deployment.sagemaker.model_server import BentomlSagemakerServer api_name = os.environ.get('API_NAME', None) model_service = load('/bento') server = BentomlSagemakerServer(model_service, api_name) app = server.app