def test_dataframe_request_schema():
    """A DataframeInput with an explicit dtype map should publish a JSON
    request schema with one array-typed property per declared column."""
    adapter = DataframeInput(
        dtype={"col1": "int", "col2": "float", "col3": "string"}
    )
    schema = adapter.request_schema["application/json"]["schema"]

    assert schema["type"] == "object"
    properties = schema["properties"]
    assert len(properties) == 3
    assert properties["col1"]["type"] == "array"
    # Each pandas dtype maps onto the matching JSON-schema item type.
    assert properties["col1"]["items"]["type"] == "integer"
    assert properties["col2"]["items"]["type"] == "number"
    assert properties["col3"]["items"]["type"] == "string"
class ExampleBentoService(bentoml.BentoService):
    """
    Example BentoService class made for testing purpose
    """

    @bentoml.api(input=JsonInput(), mb_max_latency=1000, mb_max_batch_size=2000, batch=True)
    def predict_with_sklearn(self, jsons):
        """Batched JSON API backed by the packed sklearn model."""
        return self.artifacts.sk_model.predict(jsons)

    @bentoml.api(
        input=DataframeInput(dtype={"col1": "int"}),
        mb_max_latency=1000,
        mb_max_batch_size=2000,
        batch=True,
    )
    def predict_dataframe(self, df):
        """predict_dataframe expects dataframe as input"""
        return self.artifacts.model.predict_dataframe(df)

    @bentoml.api(DataframeHandler, dtype={"col1": "int"}, batch=True)  # deprecated
    def predict_dataframe_v1(self, df):
        """predict_dataframe expects dataframe as input"""
        return self.artifacts.model.predict_dataframe(df)

    @bentoml.api(input=ImageInput(), batch=True)
    def predict_image(self, images):
        """Batched image API delegating to the packed model."""
        return self.artifacts.model.predict_image(images)

    @bentoml.api(input=FileInput(), batch=True)
    def predict_file(self, files):
        """Batched file-upload API delegating to the packed model."""
        return self.artifacts.model.predict_file(files)

    @bentoml.api(input=LegacyImageInput(input_names=('original', 'compared')))
    def predict_legacy_images(self, original, compared):
        """Two-image legacy input API."""
        return self.artifacts.model.predict_legacy_images(original, compared)

    @bentoml.api(input=JsonInput(), batch=True)
    def predict_json(self, input_datas):
        """Batched JSON API delegating to the packed model."""
        return self.artifacts.model.predict_json(input_datas)

    @bentoml.api(input=JsonInput(), mb_max_latency=10000 * 1000, batch=True)
    def echo_with_delay(self, input_datas):
        """Echo the batch after a delay computed from the first payload —
        used to exercise micro-batching latency behavior."""
        data = input_datas[0]
        time.sleep(data['b'] + data['a'] * len(input_datas))
        return input_datas
def test_dataframe_handle_request_csv(make_api):
    """A text/csv request body should be parsed into a DataFrame and the
    JSON response should contain only the column selected by the API."""
    def select_name(df):
        return df["name"]

    api = make_api(DataframeInput(), select_name)

    request = MagicMock(spec=flask.Request)
    request.headers = {'Content-Type': 'text/csv'}
    request.get_data.return_value = b'name,game,city\njohn,mario,sf'

    response = api.handle_request(request)
    assert response.get_data().decode('utf-8') == '[{"name":"john"}]'
class ExampleBentoService(bentoml.BentoService):
    """
    Example BentoService class made for testing purpose
    """

    @bentoml.api(input=DataframeInput(), mb_max_latency=1000, mb_max_batch_size=2000)
    def predict(self, df):
        """An API for testing simple bento model service"""
        return self.artifacts.model.predict(df)

    @bentoml.api(input=DataframeInput(input_dtypes={"col1": "int"}))
    def predict_dataframe(self, df):
        """predict_dataframe expects dataframe as input"""
        return self.artifacts.model.predict_dataframe(df)

    @bentoml.api(DataframeHandler, input_dtypes={"col1": "int"})  # deprecated
    def predict_dataframe_v1(self, df):
        """predict_dataframe expects dataframe as input"""
        return self.artifacts.model.predict_dataframe(df)

    @bentoml.api(input=ImageInput())
    def predict_image(self, images):
        """Image API delegating to the packed model."""
        return self.artifacts.model.predict_image(images)

    @bentoml.api(input=LegacyImageInput(input_names=('original', 'compared')))
    def predict_legacy_images(self, original, compared):
        """Two-image legacy input API."""
        return self.artifacts.model.predict_legacy_images(original, compared)

    @bentoml.api(input=JsonInput())
    def predict_json(self, input_data):
        """JSON API delegating to the packed model."""
        return self.artifacts.model.predict_json(input_data)

    @bentoml.api(input=LegacyJsonInput())
    def predict_legacy_json(self, input_data):
        """Legacy JSON API delegating to the packed model."""
        return self.artifacts.model.predict_legacy_json(input_data)
class ElectricityConsumptionRegressorService(BentoService):
    """
    A prediction service exposing a sklearn model
    """

    @api(input=DataframeInput(), batch=True)
    def predict(self, df: pd.DataFrame):
        """
        An inference API named `predict` with Dataframe input adapter,
        which codifies how HTTP requests or CSV files are converted to a
        pandas Dataframe object as the inference API function input
        """
        # Fix: `df` already arrives as a pandas DataFrame from the
        # DataframeInput adapter, so re-wrapping it in pd.DataFrame(df)
        # only produced a redundant copy of the whole batch.
        return self.artifacts.model.predict(df)
class IrisClassifier(bentoml.BentoService):
    @bentoml.api(input=DataframeInput())
    def predict(self, df):
        """Thread the frame through each local-dependency helper, then
        delegate to the packed model."""
        df = dummy_util_func(df)
        df = dependency_in_local_module_directory(df)
        df = nested_dependency_func(df)
        # Deliberately imported inside the function body: this exercises
        # detection of dynamically imported local dependencies.
        from tests.bento_service_examples.local_dependencies.dynamically_imported_dependency import (  # noqa: E501
            dynamically_imported_dependency_func,
        )

        df = dynamically_imported_dependency_func(df)
        return self.artifacts.model.predict(df)
class IrisClassifier(BentoService):
    """
    A minimum prediction service exposing a Scikit-learn model
    """

    @api(input=DataframeInput(), batch=True)
    def predict(self, df: pd.DataFrame):
        """Inference API `predict` using the Dataframe input adapter:
        incoming HTTP requests or CSV files are converted into the pandas
        DataFrame this function receives."""
        return self.artifacts.model.predict(df)
class LinearRegressionService(BentoService):
    """
    BentoService serving an LR model loaded from MLflow
    """

    @api(input=DataframeInput(), batch=True)
    def predict(self, df: pd.DataFrame):
        """Inference API `predict` using the Dataframe input adapter:
        incoming HTTP requests or CSV files are converted into the pandas
        DataFrame this function receives."""
        return self.artifacts.model.predict(df)
def test_dataframe_handle_cli(capsys, make_api, tmpdir):
    """handle_cli should parse a CSV input file when `--format csv` is given
    and print the selected column to stdout."""
    def test_func(df):
        return df["name"]

    input_adapter = DataframeInput()
    api = make_api(input_adapter, test_func)

    # Fix: the fixture is a CSV file, so the variable is named `csv_file`
    # (it was previously, misleadingly, called `json_file`).
    csv_file = tmpdir.join("test.csv")
    with open(str(csv_file), "w") as f:
        f.write('name,game,city\njohn,mario,sf')

    test_args = ["--input-file", str(csv_file), "--format", "csv"]
    api.handle_cli(test_args)
    out, _ = capsys.readouterr()
    assert "john" in out
def test_dataframe_handle_cli(capsys, make_api, tmpdir):
    """handle_cli should parse a JSON records file (the default format) and
    print the selected column to stdout."""
    def select_name(df):
        return df["name"]

    api = make_api(DataframeInput(), select_name)

    json_file = tmpdir.join("test.json")
    with open(str(json_file), "w") as f:
        f.write('[{"name": "john","game": "mario","city": "sf"}]')

    api.handle_cli(["--input-file", str(json_file)])
    out, _ = capsys.readouterr()
    assert "john" in out
class ToxicCommentClassification(BentoService):
    def tokenize_df(self, df):
        """Turn the `comment_text` column into padded token sequences
        using the packed Keras tokenizer."""
        comments = df['comment_text'].values
        tokenized = self.artifacts.x_tokenizer.texts_to_sequences(comments)
        return sequence.pad_sequences(tokenized, maxlen=max_text_length)

    @api(input=DataframeInput(), output=JsonOutput(), batch=True)
    def predict(self, df: pd.DataFrame) -> List[str]:
        """Return the highest-scoring class label for each input row."""
        model_input = self.tokenize_df(df)
        scores = self.artifacts.model.predict(model_input)
        # argmax over each row's class scores picks the predicted label.
        return [list_of_classes[np.argmax(row)] for row in scores]
class PaddleService(bentoml.BentoService):
    @bentoml.api(input=DataframeInput(), batch=True)
    def predict(self, df):
        """Run the batch through a Paddle inference predictor and return
        the raw output array."""
        features = df.to_numpy().astype(np.float32)
        predictor = self.artifacts.model

        # Copy the batch into the predictor's first input handle.
        first_input = predictor.get_input_names()[0]
        input_handle = predictor.get_input_handle(first_input)
        input_handle.reshape(features.shape)
        input_handle.copy_from_cpu(features)

        predictor.run()

        # Read the first output handle back to host memory.
        first_output = predictor.get_output_names()[0]
        output_handle = predictor.get_output_handle(first_output)
        return output_handle.copy_to_cpu()
def test_custom_api_name():
    """Identifier-style api_name values are accepted; names containing
    spaces, leading digits, or dashes raise InvalidArgument."""
    def declare(name):
        bentoml.api(input=DataframeInput(), api_name=name)(lambda x: x)

    # these names should work:
    for valid in ("a_valid_name", "AValidName", "_AValidName", "a_valid_name_123"):
        declare(valid)

    for invalid in ("a invalid name", "123_a_invalid_name", "a-invalid-name"):
        with pytest.raises(InvalidArgument) as e:
            declare(invalid)
        assert str(e.value).startswith("Invalid API name")
class IrisClassifier(BentoService):
    @api(input=DataframeInput(), batch=True)
    def predict(self, df: pd.DataFrame):
        """Batched DataFrame inference API backed by the packed model."""
        return self.artifacts.model.predict(df)
class IrisClassifier(BentoService):
    @api(input=DataframeInput())
    def predict(self, df):
        """DataFrame inference API backed by the packed `clf` artifact."""
        return self.artifacts.clf.predict(df)
class NycTaxiPredictionRFService(bentoml.BentoService):
    @bentoml.api(input=DataframeInput(), batch=True)
    def predict(self, df):
        """Log the incoming batch, then delegate to the packed model."""
        print(f"Prediction! data : {df}")
        return self.artifacts.model.predict(df)
class MyClassifier(BentoService):
    @api(input=DataframeInput(), batch=True)
    def predict(self, values):
        """Batched DataFrame inference API backed by the packed model."""
        return self.artifacts.model.predict(values)
class TestBentoWithOutArtifact(bentoml.BentoService):
    # Not a pytest test class despite the name.
    __test__ = False

    @bentoml.api(input=DataframeInput())
    def test(self, df):
        """Identity API: echoes the input frame (service has no artifact)."""
        return df
class ServiceWithList(bentoml.BentoService):
    @bentoml.api(input=DataframeInput(), batch=True)
    def predict(self, df):
        """Identity API: echoes the input frame back to the caller."""
        return df
class PytorchLightningService(bentoml.BentoService):
    @bentoml.api(input=DataframeInput(), batch=True)
    def predict(self, df):
        """Run the DataFrame batch through the packed PyTorch model and
        return the result as a numpy array.

        Fix: inference is wrapped in torch.no_grad() and the output is
        detached before .numpy() — calling .numpy() on a tensor that
        requires grad raises a RuntimeError, and tracking gradients wastes
        memory at serving time.
        """
        input_tensor = torch.from_numpy(df.to_numpy())
        with torch.no_grad():
            output = self.artifacts.model(input_tensor)
        return output.detach().numpy()
class LgbModelService(bentoml.BentoService):
    @bentoml.api(input=DataframeInput(), batch=True)
    def predict(self, df):
        """Batched DataFrame inference API backed by the packed LightGBM
        model."""
        return self.artifacts.model.predict(df)
class ExampleService(bentoml.BentoService):
    """
    Example BentoService class made for testing purpose
    """

    @bentoml.api(
        input=DataframeInput(dtype={"col1": "int"}),
        mb_max_latency=1000,
        mb_max_batch_size=2000,
        batch=True,
    )
    def predict_dataframe(self, df):
        """Batched DataFrame API delegating to the packed model."""
        return self.artifacts.model.predict_dataframe(df)

    @bentoml.api(DataframeHandler, dtype={"col1": "int"}, batch=True)  # deprecated
    def predict_dataframe_v1(self, df):
        """Deprecated handler-style variant of predict_dataframe."""
        return self.artifacts.model.predict_dataframe(df)

    @bentoml.api(
        input=MultiImageInput(input_names=('original', 'compared')), batch=True
    )
    def predict_multi_images(self, originals, compareds):
        """Batched two-image API delegating to the packed model."""
        return self.artifacts.model.predict_multi_images(originals, compareds)

    @bentoml.api(input=ImageInput(), batch=True)
    def predict_image(self, images):
        """Batched image API delegating to the packed model."""
        return self.artifacts.model.predict_image(images)

    @bentoml.api(
        input=JsonInput(),
        mb_max_latency=1000,
        mb_max_batch_size=2000,
        batch=True,
    )
    def predict_with_sklearn(self, jsons):
        """Batched JSON API backed by the packed sklearn model."""
        return self.artifacts.sk_model.predict(jsons)

    @bentoml.api(input=FileInput(), batch=True)
    def predict_file(self, files):
        """Batched file-upload API delegating to the packed model."""
        return self.artifacts.model.predict_file(files)

    @bentoml.api(input=JsonInput(), batch=True)
    def predict_json(self, input_datas):
        """Batched JSON API delegating to the packed model."""
        return self.artifacts.model.predict_json(input_datas)

    # Deliberately hostile route string to exercise route escaping.
    CUSTOM_ROUTE = "$~!@%^&*()_-+=[]\\|;:,./predict"

    @bentoml.api(
        route=CUSTOM_ROUTE,
        input=JsonInput(),
        batch=True,
    )
    def customezed_route(self, input_datas):
        """Echo API mounted on the custom route above."""
        return input_datas

    # Hand-written OpenAPI request schema used by customezed_schema.
    CUSTOM_SCHEMA = {
        "application/json": {
            "schema": {
                "type": "object",
                "required": ["field1", "field2"],
                "properties": {
                    "field1": {"type": "string"},
                    "field2": {"type": "uuid"},
                },
            },
        }
    }

    @bentoml.api(input=JsonInput(request_schema=CUSTOM_SCHEMA), batch=True)
    def customezed_schema(self, input_datas):
        """Echo API advertising the custom request schema above."""
        return input_datas

    @bentoml.api(input=JsonInput(), batch=True)
    def predict_strict_json(self, input_datas, tasks: Sequence[InferenceTask] = None):
        """Reject any task whose Content-Type is not application/json, then
        predict on the remaining payloads."""
        filtered_jsons = []
        for payload, task in zip(input_datas, tasks):
            if task.http_headers.content_type != "application/json":
                task.discard(http_status=400, err_msg="application/json only")
            else:
                filtered_jsons.append(payload)
        return self.artifacts.model.predict_json(filtered_jsons)

    @bentoml.api(input=JsonInput(), batch=True)
    def predict_direct_json(self, input_datas, tasks: Sequence[InferenceTask] = None):
        """Like predict_strict_json, but returns InferenceResult objects
        directly instead of raw values."""
        filtered_jsons = []
        for payload, task in zip(input_datas, tasks):
            if task.http_headers.content_type != "application/json":
                task.discard(http_status=400, err_msg="application/json only")
            else:
                filtered_jsons.append(payload)
        rets = self.artifacts.model.predict_json(filtered_jsons)
        return [
            InferenceResult(http_status=200, data=json.dumps(result))
            for result in rets
        ]

    @bentoml.api(input=JsonInput(), mb_max_latency=10000 * 1000, batch=True)
    def echo_with_delay(self, input_datas):
        """Echo the batch after a delay derived from the first payload —
        used to exercise micro-batching latency behavior."""
        data = input_datas[0]
        time.sleep(data['b'] + data['a'] * len(input_datas))
        return input_datas

    @bentoml.api(input=JsonInput(), mb_max_latency=10000 * 1000, batch=True)
    def echo_batch_size(self, input_datas=10):
        """Sleep like echo_with_delay, then report the observed batch size
        once per task in the batch."""
        data = input_datas[0]
        time.sleep(data['b'] + data['a'] * len(input_datas))
        batch_size = len(input_datas)
        return [batch_size] * batch_size

    @bentoml.api(input=JsonInput())
    def echo_json(self, input_data):
        """Non-batched JSON echo API."""
        return input_data

    # JsonOutput(ensure_ascii=...) only exists on newer BentoML releases.
    if version.parse(BENTOML_VERSION) > version.parse("0.12.1+0"):

        @bentoml.api(input=JsonInput(), output=JsonOutput(ensure_ascii=True))
        def echo_json_ensure_ascii(self, input_data):
            """JSON echo API forcing ASCII-escaped output."""
            return input_data
class LoanPrediction(BentoService):
    @api(input=DataframeInput())
    def predict(self, df):
        """Convert the frame to an H2OFrame, predict with the packed H2O
        model, and return the result as a pandas DataFrame."""
        h2o_frame = h2o.H2OFrame(df, na_strings=['NaN'])
        predictions = self.artifacts.model.predict(h2o_frame)
        return predictions.as_data_frame()
class ServiceWithCondaDeps(bentoml.BentoService):
    @bentoml.api(input=DataframeInput())
    def predict(self, df):
        """Identity API: echoes the input frame back to the caller."""
        return df
class IrisClassifier(BentoService):
    @api(input=DataframeInput())
    def predict(self, df):
        """DataFrame inference API backed by the packed model.

        Optional pre-processing, post-processing code goes here.
        """
        return self.artifacts.model.predict(df)
class LeagueWinPrediction(BentoService):
    @api(input=DataframeInput(), batch=True)
    def predict(self, df):
        """Wrap the frame in an XGBoost DMatrix before predicting with the
        packed booster."""
        dmatrix = xgb.DMatrix(df)
        return self.artifacts.model.predict(dmatrix)
class IrisClassifier(bentoml.BentoService):
    @bentoml.api(input=DataframeInput())
    def predict(self, df):
        """DataFrame inference API backed by the packed model."""
        return self.artifacts.model.predict(df)
class EvalMLClassifier(bentoml.BentoService):
    @bentoml.api(input=DataframeInput(), batch=True)
    def predict(self, df):
        """Predict with the packed EvalML pipeline; its woodwork output is
        converted to a plain numpy array for serialization."""
        predictions = self.artifacts.model.predict(df)
        return predictions.to_series().to_numpy()
class ExampleService(bentoml.BentoService):
    """
    Example BentoService class made for testing purpose
    """

    @bentoml.api(
        input=DataframeInput(dtype={"col1": "int"}),
        mb_max_latency=1000,
        mb_max_batch_size=2000,
        batch=True,
    )
    def predict_dataframe(self, df):
        """Batched DataFrame API delegating to the packed model."""
        return self.artifacts.model.predict_dataframe(df)

    @bentoml.api(DataframeHandler, dtype={"col1": "int"}, batch=True)  # deprecated
    def predict_dataframe_v1(self, df):
        """Deprecated handler-style variant of predict_dataframe."""
        return self.artifacts.model.predict_dataframe(df)

    @bentoml.api(input=MultiImageInput(input_names=('original', 'compared')), batch=True)
    def predict_multi_images(self, originals, compareds):
        """Batched two-image API delegating to the packed model."""
        return self.artifacts.model.predict_multi_images(originals, compareds)

    @bentoml.api(input=ImageInput(), batch=True)
    def predict_image(self, images):
        """Batched image API delegating to the packed model."""
        return self.artifacts.model.predict_image(images)

    @bentoml.api(
        input=JsonInput(),
        mb_max_latency=1000,
        mb_max_batch_size=2000,
        batch=True,
    )
    def predict_with_sklearn(self, jsons):
        """Batched JSON API backed by the packed sklearn model."""
        return self.artifacts.sk_model.predict(jsons)

    @bentoml.api(input=FileInput(), batch=True)
    def predict_file(self, files):
        """Batched file-upload API delegating to the packed model."""
        return self.artifacts.model.predict_file(files)

    @bentoml.api(input=JsonInput(), batch=True)
    def predict_json(self, input_datas):
        """Batched JSON API delegating to the packed model."""
        return self.artifacts.model.predict_json(input_datas)

    # Deliberately hostile route string to exercise route escaping.
    CUSTOM_ROUTE = "$~!@%^&*()_-+=[]\\|;:,./predict"

    @bentoml.api(
        route=CUSTOM_ROUTE,
        input=JsonInput(),
        batch=True,
    )
    def customezed_route(self, input_datas):
        """Echo API mounted on the custom route above."""
        return input_datas

    @bentoml.api(input=JsonInput(), batch=True)
    def predict_strict_json(self, input_datas, tasks: Sequence[InferenceTask] = None):
        """Reject any task whose Content-Type is not application/json, then
        predict on the remaining payloads."""
        filtered_jsons = []
        for payload, task in zip(input_datas, tasks):
            if task.http_headers.content_type != "application/json":
                task.discard(http_status=400, err_msg="application/json only")
            else:
                filtered_jsons.append(payload)
        return self.artifacts.model.predict_json(filtered_jsons)

    @bentoml.api(input=JsonInput(), batch=True)
    def predict_direct_json(self, input_datas, tasks: Sequence[InferenceTask] = None):
        """Like predict_strict_json, but returns InferenceResult objects
        directly instead of raw values."""
        filtered_jsons = []
        for payload, task in zip(input_datas, tasks):
            if task.http_headers.content_type != "application/json":
                task.discard(http_status=400, err_msg="application/json only")
            else:
                filtered_jsons.append(payload)
        rets = self.artifacts.model.predict_json(filtered_jsons)
        return [
            InferenceResult(http_status=200, data=json.dumps(result))
            for result in rets
        ]

    @bentoml.api(input=JsonInput(), mb_max_latency=10000 * 1000, batch=True)
    def echo_with_delay(self, input_datas):
        """Echo the batch after a delay derived from the first payload —
        used to exercise micro-batching latency behavior."""
        data = input_datas[0]
        time.sleep(data['b'] + data['a'] * len(input_datas))
        return input_datas
class TitanicSurvivalPredictionXgBoost(bentoml.BentoService):
    @bentoml.api(input=DataframeInput())
    def predict(self, df):
        """Select the model's feature columns, wrap them in an XGBoost
        DMatrix, and predict with the packed booster."""
        feature_columns = ['Pclass', 'Age', 'Fare', 'SibSp', 'Parch']
        data = xgb.DMatrix(data=df[feature_columns])
        return self.artifacts.model.predict(data)