Example #1
import numpy

import bentoml
import torch  # pylint: disable=import-error
from bentoml.adapters import DataframeInput
from bentoml.frameworks.pytorch import PytorchModelArtifact


@bentoml.env(infer_pip_packages=True)
@bentoml.artifacts([PytorchModelArtifact('model')])
class PytorchClassifier(bentoml.BentoService):
    @bentoml.api(input=DataframeInput(), batch=True)
    def predict(self, df):
        # Convert the input DataFrame into a float32 tensor for the model
        input_data = df.to_numpy().astype(numpy.float32)
        input_tensor = torch.from_numpy(input_data)
        output = self.artifacts.model(input_tensor)

        # A batch=True API must return one prediction per input row,
        # so return a list rather than a single scalar
        return output.squeeze(dim=-1).tolist()
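A minimal pack-and-save sketch for this service, assuming a trained torch.nn.Module named trained_model (a hypothetical name, not from the snippet above):

svc = PytorchClassifier()
svc.pack('model', trained_model)  # hypothetical trained model
saved_path = svc.save()
# Serve locally with: bentoml serve PytorchClassifier:latest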
Example #2
import torch
import torch.nn as nn
from bentoml import BentoService, api, artifacts, env
from bentoml.adapters import JsonInput, JsonOutput
from bentoml.frameworks.pytorch import PytorchModelArtifact
from bentoml.service.artifacts.common import PickleArtifact

from train import vocab

device = torch.device("cpu")


@env(infer_pip_packages=True)
@artifacts([PytorchModelArtifact("model"), PickleArtifact("tokenizer")])
class ProfanityFilterService(BentoService):
    def model_pred(self, sentence):
        # Switch to inference mode (disables dropout etc.)
        self.artifacts.model.eval()

        # Tokenize and map tokens to indices from the training vocabulary
        tokens = self.artifacts.tokenizer.tokenize(sentence)
        length = torch.LongTensor([len(tokens)]).to(device)
        idx = [vocab.stoi[token] for token in tokens]
        # Shape (seq_len, 1): sequence first, a batch of one second
        tensor = torch.LongTensor(idx).unsqueeze(-1).to(device)

        prediction = self.artifacts.model(tensor, length)
        probabilities = nn.functional.softmax(prediction, dim=-1)
        # Probability assigned to the last class
        return probabilities.squeeze()[-1].item()

    @api(input=JsonInput(), output=JsonOutput())
    def predict(self, parsed_json):
        return self.model_pred(parsed_json["text"])
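A minimal pack-and-save sketch for this service; trained_model and tokenizer are assumed to come out of train.py (hypothetical names):

svc = ProfanityFilterService()
svc.pack("model", trained_model)  # hypothetical trained torch model
svc.pack("tokenizer", tokenizer)  # hypothetical tokenizer object
saved_path = svc.save()
# Serve locally with: bentoml serve ProfanityFilterService:latest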
Example #3
import joblib
from typing import List

import bentoml
from bentoml.adapters import JsonInput, JsonOutput
from bentoml.frameworks.pytorch import PytorchModelArtifact

from config import config
from dataset import EntityDataset
import utils
from model import EntityModel

meta_data = joblib.load("meta.bin")
enc_pos = meta_data["enc_pos"]
enc_tag = meta_data["enc_tag"]

num_pos = len(list(enc_pos.classes_))
num_tag = len(list(enc_tag.classes_))


@bentoml.env(infer_pip_packages=True)
@bentoml.artifacts([PytorchModelArtifact('ner')])
class PyTorchModel(bentoml.BentoService):
    '''
    @bentoml.utils.cached_property  # reuse transformer
    def transform(self):
        return transforms.Compose([transforms.CenterCrop((29, 29)), transforms.ToTensor()])
    '''
    @bentoml.api(input=JsonInput(), output=JsonOutput(), batch=True)
    def predict(self, input_json) -> List[str]:
        sentence = input_json[0]['sentence']

        # Sub-word token ids for the model input
        tokenized_sentence = config.TOKENIZER.encode(sentence)

        # Whitespace tokens from the raw sentence
        sentence = sentence.split()
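For context, a hedged sketch of how meta.bin might be produced at training time, assuming enc_pos and enc_tag are sklearn LabelEncoders (suggested by the .classes_ attribute) and df is a hypothetical training DataFrame:

import joblib
import pandas as pd
from sklearn.preprocessing import LabelEncoder

df = pd.read_csv("train.csv")            # hypothetical training data path
enc_pos = LabelEncoder().fit(df["pos"])  # hypothetical column names
enc_tag = LabelEncoder().fit(df["tag"])
joblib.dump({"enc_pos": enc_pos, "enc_tag": enc_tag}, "meta.bin")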
Example #4
if model_framework == 'pytorch':
    from PIL import Image
    import torch
    from torchvision import transforms

    import bentoml
    from bentoml.frameworks.pytorch import PytorchModelArtifact
    # Legacy handler API used by the decorator below; newer BentoML
    # releases use bentoml.adapters.ImageInput instead
    from bentoml.handlers import ImageHandler

    classes = ['ant', 'bee']
    transform = transforms.Compose([
        transforms.Resize(256),
        transforms.CenterCrop(224),
        transforms.ToTensor(),
        transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
    ])
    cpu = torch.device('cpu')

    @bentoml.env(pip_dependencies=['torch', 'torchvision'])
    @bentoml.artifacts([PytorchModelArtifact('mymodel_id')])
    class AntOrBeeClassifier(bentoml.BentoService):
        @bentoml.api(ImageHandler)
        def predict(self, img):
            img = Image.fromarray(img)
            img = transform(img)

            # The artifact must be referenced by the name it was declared
            # with above ('mymodel_id'), not as 'model'
            self.artifacts.mymodel_id.eval()
            outputs = self.artifacts.mymodel_id(img.unsqueeze(0))
            _, idxs = outputs.topk(1)
            idx = idxs.squeeze().item()
            return classes[idx]


################# Save to Bento Space ###################################
def bento_save():
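    # Hedged sketch of the save step: pack the trained model and save the
    # service to the local Bento store; `trained_model` is a hypothetical
    # torch.nn.Module produced by earlier training code.
    svc = AntOrBeeClassifier()
    svc.pack('mymodel_id', trained_model)
    saved_path = svc.save()
    return saved_path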