Example #1
0
from bentoml import api, env, BentoService, artifacts
from bentoml.artifact import TensorflowSavedModelArtifact, PickleArtifact
from bentoml.handlers import JsonHandler

import numpy as np
from scipy.special import softmax, expit

from aispace.datasets.tokenizer import BertTokenizer
from aispace.utils.hparams import Hparams
from aispace.utils.str_utils import uuid_maker, preprocess_text, compute_md5_hash


@artifacts([
        TensorflowSavedModelArtifact('model'),
        PickleArtifact('tokenizer'),
        PickleArtifact("hparams"),
    ])
@env(auto_pip_dependencies=True)
class BertQAWithImpossibleService(BentoService):
    # BentoML service packing a TensorFlow BERT QA model plus its tokenizer and
    # hyper-parameters. NOTE(review): the class body is truncated in this
    # excerpt — the loop below continues past the visible lines.

    def preprocessing(self, parsed_json):
        """Read per-request decoding options and query payload from parsed JSON.

        Each element of ``parsed_json`` may override the decoding defaults
        (n-best size, answer/query length caps, document stride); the actual
        query text travels in ``question_text`` / ``trigger``.
        """
        unique_id = 100000  # starting id for generated features — presumably incremented later; truncated here
        for one_json in parsed_json:
            n_best_size = one_json.get('n_best_size', 5)
            threshold = one_json.get('threshold', 0.5)
            max_answer_length = one_json.get("max_answer_length", 64)
            max_query_length = one_json.get("max_query_length", 64)
            doc_stride = one_json.get("doc_stride", 128)
            question_text = one_json.get("question_text", "")
            trigger = one_json.get("trigger", "")
Example #2
0
    # Build the BentoML CLI invocation that force-deletes the SageMaker deployment.
    delete_deployment_command = [
        'bentoml',
        'sagemaker',
        'delete',
        deployment_name,
        '--force',
    ]
    logger.info(f'Delete command: {delete_deployment_command}')
    # Run the CLI and capture its stdout so the outcome can be logged below.
    with subprocess.Popen(
        delete_deployment_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE
    ) as proc:
        delete_deployment_stdout = proc.stdout.read().decode('utf-8')
    logger.info(delete_deployment_stdout)


@artifacts([PickleArtifact('clf')])
@env(pip_dependencies=['scikit-learn'])
class IrisClassifier(BentoService):
    """Iris classification service that delegates to the packed `clf` artifact."""

    @api(DataframeHandler)
    def predict(self, df):
        """Run the pickled classifier on an incoming dataframe."""
        classifier = self.artifacts.clf
        return classifier.predict(df)


if __name__ == '__main__':
    deployment_failed = False
    # Random suffix keeps concurrent e2e test runs from colliding on names.
    random_hash = uuid.uuid4().hex[:6]
    deployment_name = f'tests-lambda-e2e-{random_hash}'
    region = 'us-west-2'

    args = sys.argv
    bento_name = None  # NOTE(review): excerpt truncated — argument parsing continues elsewhere
import pandas as pd
import bentoml
from bentoml.artifact import PickleArtifact
from bentoml.handlers import DataframeHandler


@bentoml.artifacts([PickleArtifact('sentiment_lr')])
@bentoml.env(conda_dependencies=["scikit-learn", "pandas"])
class SentimentLRModel(bentoml.BentoService):
    """Sentiment service backed by a pickled logistic-regression model."""

    @bentoml.api(DataframeHandler)
    def predict(self, df):
        """Run the packed `sentiment_lr` model on an input dataframe."""
        model = self.artifacts.sentiment_lr
        return model.predict(df)
Example #4
0
# holt.py
from bentoml import env, artifacts, api, BentoService
from bentoml.handlers import DataframeHandler, DataframeInput
from bentoml.artifact import PickleArtifact
import numpy as np


@env(pip_dependencies=["statsmodels==0.10.1", "joblib", "numpy"],
     conda_dependencies=["ruamel.yaml==0.16"])
@artifacts([PickleArtifact('model')])
class holt(BentoService):
    """Holt forecasting service: the horizon (in weeks) arrives in the request dataframe."""

    @api(input=DataframeInput())
    def predict(self, df):
        """Forecast `df.iat[0, 0]` periods ahead with the packed Holt model."""
        # Log the incoming frame to cross-check the payload.
        print(df.head())

        # The forecast horizon is expected in the first cell of the frame.
        horizon = int(df.iat[0, 0])
        print(horizon)

        return self.artifacts.model.forecast(horizon)
import bentoml
from bentoml.adapters import (
    DataframeInput,
    ImageInput,
    LegacyImageInput,
    JsonInput,
    LegacyJsonInput,
    # FastaiImageInput,
)
from bentoml.handlers import DataframeHandler  # deprecated
from bentoml.artifact import PickleArtifact


@bentoml.artifacts([PickleArtifact("model")])
@bentoml.env(auto_pip_dependencies=True)
class ExampleBentoService(bentoml.BentoService):
    """
    Example BentoService class made for testing purpose
    """
    # Micro-batching knobs: queue up to 2000 rows for at most 1s before serving.
    @bentoml.api(input=DataframeInput(),
                 mb_max_latency=1000,
                 mb_max_batch_size=2000)
    def predict(self, df):
        """An API for testing simple bento model service
        """
        return self.artifacts.model.predict(df)

    # NOTE(review): this method's body is truncated in the excerpt.
    @bentoml.api(input=DataframeInput(input_dtypes={"col1": "int"}))
    def predict_dataframe(self, df):
        """predict_dataframe expects dataframe as input
        """
import base64

from bentoml import BentoService, api, artifacts
from bentoml.artifact import PickleArtifact
from bentoml.handlers import ImageHandler


class TestImageModel(object):
    """Stand-in model for the image-handler test: echoes the input array's shape."""

    def predict(self, image_ndarray):
        # No inference — the test only needs a deterministic, shape-dependent value.
        return image_ndarray.shape


@artifacts([PickleArtifact("clf")])
class ImageHandlerModel(BentoService):
    """Image service that feeds the decoded image array to the packed `clf` model."""

    @api(ImageHandler)
    def predict(self, input_data):
        """Delegate prediction on the decoded image to the pickled classifier."""
        classifier = self.artifacts.clf
        return classifier.predict(input_data)


def test_image_handler(capsys, tmpdir):
    """Exercise the ImageHandler API end to end with a tiny on-disk PNG."""
    test_model = TestImageModel()
    ms = ImageHandlerModel.pack(clf=test_model)
    api = ms.get_service_apis()[0]

    # Imported inside the test — presumably to keep cv2 optional at module import.
    import cv2
    import numpy as np

    # A 10x10 black image is enough: the stub model only reports the shape.
    img_file = tmpdir.join("img.png")
    cv2.imwrite(str(img_file), np.zeros((10, 10)))

    test_args = ["--input={}".format(img_file)]  # NOTE(review): test body truncated here
Example #7
0
import pytest

sys.path.insert(0,
                os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
import bentoml  # noqa: E402
from bentoml.artifact import PickleArtifact  # noqa: E402


class TestModel(object):
    """Toy model for service tests: bumps every value in the `age` column by 5."""

    def predict(self, df):
        # Mutates the input frame's `age` column in place and returns the frame.
        df['age'] = df['age'] + 5
        return df


@bentoml.artifacts([PickleArtifact('model')])
@bentoml.env()
class TestBentoService(bentoml.BentoService):
    """My RestServiceTestModel packaging with BentoML
    """

    @bentoml.api(bentoml.handlers.DataframeHandler, input_columns=['age'])
    def predict(self, df):
        """predict expects dataframe as input
        """
        packed_model = self.artifacts.model
        return packed_model.predict(df)


@pytest.fixture()
def bento_service():
    """Create a new TestBentoService
    """
    # NOTE(review): fixture body is truncated in this excerpt.
import pandas as pd
import bentoml
from bentoml.artifact import PickleArtifact
from bentoml.handlers import DataframeHandler
from data_preprocess import Posts
from word_embedding_vectorizer import WordEmbeddingVectorizer
from gensim.models import Word2Vec

@bentoml.artifacts([PickleArtifact('word_vectorizer'),
                    PickleArtifact('word_embedding_rf')])
@bentoml.env(pip_dependencies=["pandas", "numpy", "gensim", "scikit-learn", "nltk"])
class WordEmbeddingModel(bentoml.BentoService):
    """Stress-detection service: word-embedding features + a random forest."""

    @bentoml.api(DataframeHandler, typ='series')
    def preprocess(self, series):
        """Clean the raw text series and embed it into a feature matrix."""
        cleaned = Posts(series).preprocess()
        vectorizer = self.artifacts.word_vectorizer
        return vectorizer.fit(cleaned).transform(cleaned)

    @bentoml.api(DataframeHandler, typ='series')
    def predict(self, series):
        """Label each post as stress/non-stress, with a confidence score."""
        features = self.preprocess(series)
        forest = self.artifacts.word_embedding_rf
        labels = forest.predict(features)
        # Confidence = probability of the positive ("stress") class.
        scores = [probs[1] for probs in forest.predict_proba(features)]
        result = pd.DataFrame({'text': series,
                               'confidence_score': scores,
                               'labels': labels})
        result['labels'] = result['labels'].map({1: 'stress', 0: 'non-stress'})

        return result
from sklearn.ensemble import RandomForestRegressor

import bentoml
from bentoml.saved_bundle import save_to_dir
from bentoml.adapters import (
    DataframeInput,
    ImageInput,
    LegacyImageInput,
    JsonInput,
    # FastaiImageInput,
)
from bentoml.handlers import DataframeHandler  # deprecated
from bentoml.artifact import PickleArtifact, SklearnModelArtifact


@bentoml.artifacts([PickleArtifact("model"), SklearnModelArtifact('sk_model')])
@bentoml.env(auto_pip_dependencies=True)
class ExampleBentoService(bentoml.BentoService):
    """
    Example BentoService class made for testing purpose
    """

    # Micro-batching knobs: queue up to 2000 JSON payloads for at most 1s.
    @bentoml.api(
        input=JsonInput(), mb_max_latency=1000, mb_max_batch_size=2000,
    )
    def predict_with_sklearn(self, jsons):
        """predict_dataframe expects dataframe as input
        """
        return self.artifacts.sk_model.predict(jsons)

    # NOTE(review): the next API definition is truncated in this excerpt.
    @bentoml.api(
Example #10
0
import pandas as pd
from tensorflow import keras
import tensorflow as tf
from string import digits
from bentoml import api, env, BentoService, artifacts
from bentoml.artifact import TfKerasModelArtifact, PickleArtifact
from bentoml.handlers import JsonHandler


@artifacts([TfKerasModelArtifact('model'), PickleArtifact('vectorizer')])
@env(conda_dependencies=['tensorflow', 'scikit-learn'])
class TextClassificationService(BentoService):
    """Binary sentiment service: vectorizes JSON text and classifies with a Keras model."""

    @api(JsonHandler)
    def predict(self, parsed_json):
        """Return {'Sentiment': 'Negative'|'Positive'} for parsed_json['text'].

        Raises:
            ValueError: if the model emits anything other than 0 or 1.
                (The original code left `response` unbound on that path,
                crashing with UnboundLocalError instead.)
        """
        text = parsed_json['text']
        # Strip digits before vectorizing; the vectorizer/model never see them.
        remove_digits = str.maketrans('', '', digits)
        text = text.translate(remove_digits)
        text = self.artifacts.vectorizer.transform([text])
        prediction = self.artifacts.model.predict_classes(text)[0][0]
        if prediction == 0:
            response = {'Sentiment': 'Negative'}
        elif prediction == 1:
            response = {'Sentiment': 'Positive'}
        else:
            # Guard against unexpected model output instead of UnboundLocalError.
            raise ValueError(f"unexpected prediction value: {prediction!r}")

        return response
import bentoml
import pandas as pd
import numpy as np

from bentoml.artifact import PickleArtifact
# from bentoml.adapters import DataframeInput
from bentoml.handlers import DataframeHandler
from bentoml.handlers import JsonHandler

@bentoml.ver(1, 0)
@bentoml.artifacts([
    PickleArtifact("knn"),
    PickleArtifact("index_map"),
    PickleArtifact("cluster_path"),
    PickleArtifact("pop_matrix"),
])


class ClusteredKNN(bentoml.BentoService):
    # Recommender service combining a KNN model with per-cluster popularity
    # counts. NOTE(review): the class body is truncated in this excerpt.

    def get_index(self, item):
        """Look up `item` in the packed index map; unknown items map to index 0."""
        if item in self.artifacts.index_map:
            return self.artifacts.index_map[item]
        else:
            return 0

    def setup_scores(self, features, n_neighbors):
        """Gather popularity counts for the clusters of `features`' nearest neighbors."""
        neighbors_idxs = self.artifacts.knn.kneighbors(X=features, n_neighbors=n_neighbors, return_distance=False) # get indexes of neighbors
        knclusters = self.artifacts.cluster_path.labels_[neighbors_idxs] # get clusters of neighbors
        clicks = [self.artifacts.pop_matrix[c] for c in knclusters] # create an array with the number of item iteractions per cluster (per item)
        clicks = np.asarray(clicks[0])  # NOTE(review): method truncated here in the excerpt
import pandas as pd
import numpy as np
from tensorflow import keras
from tensorflow.keras.preprocessing import sequence, text
from bentoml import api, env, BentoService, artifacts
from bentoml.artifact import KerasModelArtifact, PickleArtifact
from bentoml.handlers import JsonHandler

max_features = 1000


@artifacts([KerasModelArtifact('model'), PickleArtifact('word_index')])
@env(pip_dependencies=['tensorflow', 'numpy', 'pandas'])
class TextClassificationService(BentoService):
    def word_to_index(self, word):
        """Map a token to its vocabulary index, falling back to the <UNK> index."""
        vocab = self.artifacts.word_index
        # Only tokens whose index fits the model's vocabulary cap are usable.
        if word in vocab and vocab[word] <= max_features:
            return vocab[word]
        return vocab["<UNK>"]

    def preprocessing(self, text_str):
        """Tokenize a raw string and convert each token to its vocabulary index."""
        tokens = text.text_to_word_sequence(text_str)
        return [self.word_to_index(token) for token in tokens]

    @api(JsonHandler)
    def predict(self, parsed_json):
        """Accept either a list of raw strings or a single {'text': ...} dict.

        NOTE(review): the method body is truncated in this excerpt — the model
        call on `input_data` is not visible.
        """
        if type(parsed_json) == list:
            input_data = list(map(self.preprocessing, parsed_json))
        else:  # expecting type(parsed_json) == dict:
            input_data = [self.preprocessing(parsed_json['text'])]
Example #13
0
    with pytest.raises(ValueError):
        _validate_version_str('44&')


def test_validate_version_str_pass():
    """A string of letters, digits and underscores must be accepted as a version."""
    _validate_version_str('abc_123')


class MyTestModel(object):
    """Trivial stand-in model: doubles the numeric value of its input."""

    def predict(self, input_data):
        # Coerce first, so numeric strings are accepted as well as numbers.
        return 2 * int(input_data)


@bentoml.env(conda_pip_dependencies=['scikit-learn'])
@bentoml.artifacts([
    PickleArtifact('model')
])
class MyTestBentoService(bentoml.BentoService):
    """Packaged test service exposing a single dataframe prediction API."""

    @bentoml.api(bentoml.handlers.DataframeHandler)
    def predict(self, df):
        """
        An API for testing simple bento model service
        """
        packed = self.artifacts.model
        return packed.predict(df)


BASE_TEST_PATH = "/tmp/bentoml-test"


def test_save_and_load_model():
Example #14
0
import numpy as np

import bentoml
from bentoml.artifact import OnnxModelArtifact, PickleArtifact
from bentoml.handlers import ImageHandler


@bentoml.env(auto_pip_dependencies=True)
@bentoml.artifacts([OnnxModelArtifact('model'), PickleArtifact('labels')])
class OnnxResnet50(bentoml.BentoService):
    """ResNet-50 (ONNX) image service with per-channel input normalization."""

    def preprocess(self, input_data):
        """Turn an HWC image array into a normalized 1x3x224x224 float32 batch."""
        # HWC -> CHW: the model expects channels first.
        chw = input_data.transpose(2, 0, 1).astype('float32')

        # Per-channel normalization (values match common ImageNet statistics).
        means = np.array([0.485, 0.456, 0.406])
        stddevs = np.array([0.229, 0.224, 0.225])
        normalized = np.zeros(chw.shape).astype('float32')
        for channel in range(chw.shape[0]):
            normalized[channel, :, :] = (
                chw[channel, :, :] / 255 - means[channel]
            ) / stddevs[channel]

        # Prepend the batch dimension (assumes a 224x224 input image).
        return normalized.reshape(1, 3, 224, 224).astype('float32')

    def softmax(self, x):
        """Numerically stable softmax over a flattened array."""
        flat = x.reshape(-1)
        shifted = np.exp(flat - np.max(flat))
        return shifted / shifted.sum(axis=0)
from bentoml import api, artifacts, env, BentoService
from bentoml.artifact import PickleArtifact
from bentoml.handlers import DataframeHandler

from keras.preprocessing import text, sequence
import numpy as np

list_of_classes = [
    "toxic", "severe_toxic", "obscene", "threat", "insult", "identity_hate"
]
max_text_length = 400


@env(pip_dependencies=['keras', 'pandas', 'numpy'])
@artifacts([PickleArtifact('x_tokenizer'), PickleArtifact('model')])
class ToxicCommentClassification(BentoService):
    """Toxicity service: tokenizes `comment_text` and reports the top class per row."""

    def tokenize_df(self, df):
        """Turn the `comment_text` column into fixed-length padded index sequences."""
        comments = df['comment_text'].values
        sequences = self.artifacts.x_tokenizer.texts_to_sequences(comments)
        return sequence.pad_sequences(sequences, maxlen=max_text_length)

    @api(DataframeHandler)
    def predict(self, df):
        """Return the highest-scoring class label for every row of `df`."""
        model_input = self.tokenize_df(df)
        scores = self.artifacts.model.predict(model_input)
        return [list_of_classes[np.argmax(row)] for row in scores]