Example 1
import bentoml

# Declare a minimal BentoML Service with no runners and no API endpoints.
svc = bentoml.Service("test.bentob")
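By itself the Service above exposes nothing useful; an endpoint is normally attached with the `@svc.api` decorator. A minimal sketch, assuming the `JSON` IO descriptor and a hypothetical `echo` endpoint (neither appears in the original example):

from bentoml.io import JSON


# Hypothetical endpoint attached to the Service above: it simply echoes the
# request payload back to the client.
@svc.api(input=JSON(), output=JSON())
def echo(payload):
    return payload

# The module can then be served for development with the BentoML CLI, e.g.
#   bentoml serve service:svc
# assuming the file is saved as service.py.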
Example 2
    name="multi_ndarray_pred_runner",
    batch=True,
)
echo_multi_ndarray_pred_runner = bentoml.picklable_model.load_runner(
    "sk_model",
    method_name="echo_multi_ndarray",
    name="echo_multi_ndarray_pred_runner",
    batch=True,
)

svc = bentoml.Service(
    name="general_sync",
    runners=[
        json_echo_runner,
        ndarray_pred_runner,
        dataframe_pred_runner,
        file_pred_runner,
        multi_ndarray_pred_runner,
        echo_multi_ndarray_pred_runner,
    ],
)


@svc.api(input=JSON(), output=JSON())
def echo_json(json_obj: JSONSerializable) -> JSONSerializable:
    return json_echo_runner.run(json_obj)


@svc.api(
    input=JSON(pydantic_model=_Schema),
    output=JSON(),
)
def echo_json_validated(json_obj: _Schema) -> JSONSerializable:
    # Hypothetical handler body (not part of the original excerpt): echo the
    # validated payload through the same runner as echo_json above.
    return json_echo_runner.run(json_obj.dict())
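Once the service is running (e.g. via `bentoml serve`), each `@svc.api` function is exposed as an HTTP route under its own name. A client-side sketch, assuming the development server's default address of 127.0.0.1:3000:

import requests

# POST a JSON body to the echo_json endpoint defined above; the host, port
# and route are assumptions based on BentoML's development-server defaults.
response = requests.post(
    "http://127.0.0.1:3000/echo_json",
    json={"hello": "world"},
)
print(response.json())  # expected to echo {"hello": "world"} back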
Example 3
import bentoml

# import numpy as np
# import bentoml.sklearn
# from bentoml.io import NumpyNdarray

# iris_model_runner = bentoml.sklearn.load_runner('iris_classifier:latest')
svc = bentoml.Service(
    "test.simplebento",
    # runners=[iris_model_runner]
)

# @svc.api(input=NumpyNdarray(), output=NumpyNdarray())
# def predict(request_data: np.ndarray):
#     return iris_model_runner.predict(request_data)

# For simple use cases, only the models list is required:
# svc.bento_options.models = []
# svc.bento_files.include = ["*"]
# svc.bento_env.pip_install = "./requirements.txt"
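
For reference, a sketch of the same service with the commented-out pieces enabled, assuming an sklearn model has already been saved under the tag iris_classifier and that its runner exposes a predict method (as the comments above suggest):

import numpy as np

import bentoml
import bentoml.sklearn
from bentoml.io import NumpyNdarray

# Load a runner for the previously saved sklearn model referenced in the
# comments above.
iris_model_runner = bentoml.sklearn.load_runner("iris_classifier:latest")

svc = bentoml.Service(
    "test.simplebento",
    runners=[iris_model_runner],
)


@svc.api(input=NumpyNdarray(), output=NumpyNdarray())
def predict(request_data: np.ndarray):
    # Delegate inference to the runner, exactly as the commented-out code sketches.
    return iris_model_runner.predict(request_data)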