class OnnxService(BentoService):
    def __init__(self):
        super().__init__()
        self.news_label = {
            1: 'World',
            2: 'Sports',
            3: 'Business',
            4: 'Sci/Tec'
        }

    def classify_categories(self, sentence):
        text_pipeline, _ = get_pipeline(self.artifacts.tokenizer,
                                        self.artifacts.vocab)
        text = to_numpy(torch.tensor(text_pipeline(sentence)).to(device))
        tensor_name = self.artifacts.model.get_inputs()[0].name
        output_name = self.artifacts.model.get_outputs()[0].name
        onnx_inputs = {tensor_name: text}
        print(f'providers: {self.artifacts.model.get_providers()}')

        try:
            r = self.artifacts.model.run([output_name], onnx_inputs)[0]
            # the model emits 0-based class ids; news_label keys are 1-based
            return r.argmax(1).item() + 1
        except (RuntimeError, InvalidArgument) as e:
            print(f"ERROR with shape: {onnx_inputs[tensor_name].shape} - {e}")

    @api(input=JsonInput(), output=JsonOutput())
    def predict(self, parsed_json):
        sentence = parsed_json.get('text')
        return {
            'categories': self.news_label[self.classify_categories(sentence)]
        }
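
The service above assumes that model, tokenizer, and vocab were packed as BentoML artifacts. A minimal sketch of how that packing might look with BentoML 0.13's artifact API; the artifact declarations, backend, and file names below are assumptions, not part of the original example:

import bentoml
from bentoml.frameworks.onnx import OnnxModelArtifact
from bentoml.service.artifacts.common import PickleArtifact

@bentoml.env(infer_pip_packages=True)
@bentoml.artifacts([
    OnnxModelArtifact('model', backend='onnxruntime'),  # assumed backend
    PickleArtifact('tokenizer'),
    PickleArtifact('vocab'),
])
class OnnxService(BentoService):
    ...  # class body as defined above

svc = OnnxService()
svc.pack('model', 'news_classifier.onnx')  # hypothetical model file
svc.pack('tokenizer', tokenizer)
svc.pack('vocab', vocab)
saved_path = svc.save()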
Example 2
class EmotionRecognitionService(bentoml.BentoService):
    @bentoml.api(input=ImageInput(), output=JsonOutput(), batch=False)
    def predict(self, image):
        print(image)
        return DeepFace.analyze(np.array(image),
                                actions=['emotion'],
                                enforce_detection=False)
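
ImageInput accepts the picture as an HTTP file upload. A hedged client sketch; the endpoint, port (BentoML 0.13's dev server defaults to 5000), form-field name, and file name are assumptions:

import requests

with open("face.jpg", "rb") as f:
    resp = requests.post(
        "http://127.0.0.1:5000/predict",
        files={"image": f},  # the field name here is arbitrary
    )
print(resp.json())  # DeepFace returns emotion scores and a dominant_emotion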
Example 3
class PyTorchModel(bentoml.BentoService):
    '''
    @bentoml.utils.cached_property  # reuse transformer
    def transform(self):
        return transforms.Compose([transforms.CenterCrop((29, 29)), transforms.ToTensor()])
    '''
    @bentoml.api(input=JsonInput(), output=JsonOutput(), batch=True)
    def predict(self, input_json) -> List[str]:

        sentence = input_json[0]['sentence']

        tokenized_sentence = config.TOKENIZER.encode(sentence)

        sentence = sentence.split()
        #print(sentence)
        #print(tokenized_sentence)

        test_dataset = EntityDataset(texts=[sentence],
                                     pos=[[0] * len(sentence)],
                                     tags=[[0] * len(sentence)])

        device = torch.device("cpu")

        with torch.no_grad():
            data = test_dataset[0]
            for k, v in data.items():
                data[k] = v.to(device).unsqueeze(0)
            tag, pos, _ = self.artifacts.ner(**data)

            tags = enc_tag.inverse_transform(
                tag.argmax(2).cpu().numpy().reshape(
                    -1))[:len(tokenized_sentence)]

            i = 0
            names = []
            while i < len(tags):
                if tags[i] == "B-per":
                    start = i
                    # consume the full run of consecutive "B-per" tokens;
                    # the bounds check guards against a name that ends
                    # the sentence
                    while i < len(tags) and tags[i] == "B-per":
                        i += 1
                    tokenized_name = tokenized_sentence[start:i]
                    name = config.TOKENIZER.decode(tokenized_name)
                    names.append(name)
                else:
                    i += 1

            resp = ','.join(names)

            return [resp]
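
Although the API is declared with batch=True, each HTTP request still carries a single JSON payload; BentoML's micro-batcher delivers it to predict as one element of input_json. A client sketch under that assumption (host and port are assumptions):

import requests

resp = requests.post(
    "http://127.0.0.1:5000/predict",
    json={"sentence": "My name is Jane Doe and I live in Berlin"},
)
print(resp.json())  # a comma-joined string of the extracted person names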
Example 4
class ProfanityFilterService(BentoService):
    def model_pred(self, sentence):
        self.artifacts.model.eval()

        tokens = self.artifacts.tokenizer.tokenize(sentence)
        length = torch.LongTensor([len(tokens)]).to(device)
        idx = [vocab.stoi[token] for token in tokens]
        tensor = torch.LongTensor(idx).unsqueeze(-1).to(device)

        prediction = self.artifacts.model(tensor, length)
        probabilities = nn.functional.softmax(prediction, dim=-1)
        return probabilities.squeeze()[-1].item()

    @api(input=JsonInput(), output=JsonOutput())
    def predict(self, parsed_json):
        return self.model_pred(parsed_json["text"])
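Example 5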
class ToxicCommentClassification(BentoService):
    
    def tokenize_df(self, df):
        comments = df['comment_text'].values
        tokenized = self.artifacts.x_tokenizer.texts_to_sequences(comments)        
        input_data = sequence.pad_sequences(tokenized, maxlen=max_text_length)
        return input_data
    
    @api(input=DataframeInput(), output=JsonOutput(), batch=True)
    def predict(self, df: pd.DataFrame) -> List[str]:
        input_data = self.tokenize_df(df)
        prediction = self.artifacts.model.predict(input_data)
        result = []
        for i in prediction:
            result.append(list_of_classes[np.argmax(i)])
        return result
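
DataframeInput parses a pandas-style payload from the request body. A hedged client sketch; the records orientation and the endpoint are assumptions:

import requests

resp = requests.post(
    "http://127.0.0.1:5000/predict",
    json=[{"comment_text": "this is a perfectly friendly comment"}],
)
print(resp.json())  # one label per row, drawn from list_of_classes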
Example 6
def test_string_input(make_api):
    from bentoml.adapters import StringInput, JsonOutput

    api = make_api(
        input_adapter=StringInput(), output_adapter=JsonOutput(), user_func=lambda i: i,
    )

    body = b'{"a": 1}'

    request = HTTPRequest(body=body)
    response = api.handle_request(request)

    assert json.loads(response.body) == body.decode()

    responses = api.handle_batch_request([request] * 3)
    for response in responses:
        assert json.loads(response.body) == body.decode()
Example 7
class PytorchService(BentoService):
    def __init__(self):
        super().__init__()
        self.news_label = {1: 'World',
                           2: 'Sports',
                           3: 'Business',
                           4: 'Sci/Tec'}

    def classify_categories(self, sentence):
        text_pipeline, _ = get_pipeline(self.artifacts.tokenizer, self.artifacts.vocab)
        with torch.no_grad():
            text = torch.tensor(text_pipeline(sentence)).to(device)
            offsets = torch.tensor([0]).to(device)
            output = self.artifacts.model(text, offsets=offsets)
            return output.argmax(1).item() + 1

    @api(input=JsonInput(), output=JsonOutput())
    def predict(self, parsed_json):
        label = self.classify_categories(parsed_json.get("text"))
        return {'categories': self.news_label[label]}
Example 8
class ExampleService(bentoml.BentoService):
    """
    Example BentoService class made for testing purposes
    """
    @bentoml.api(
        input=DataframeInput(dtype={"col1": "int"}),
        mb_max_latency=1000,
        mb_max_batch_size=2000,
        batch=True,
    )
    def predict_dataframe(self, df):
        return self.artifacts.model.predict_dataframe(df)

    @bentoml.api(DataframeHandler, dtype={"col1": "int"},
                 batch=True)  # deprecated
    def predict_dataframe_v1(self, df):
        return self.artifacts.model.predict_dataframe(df)

    @bentoml.api(input=MultiImageInput(input_names=('original', 'compared')),
                 batch=True)
    def predict_multi_images(self, originals, compareds):
        return self.artifacts.model.predict_multi_images(originals, compareds)

    @bentoml.api(input=ImageInput(), batch=True)
    def predict_image(self, images):
        return self.artifacts.model.predict_image(images)

    @bentoml.api(
        input=JsonInput(),
        mb_max_latency=1000,
        mb_max_batch_size=2000,
        batch=True,
    )
    def predict_with_sklearn(self, jsons):
        return self.artifacts.sk_model.predict(jsons)

    @bentoml.api(input=FileInput(), batch=True)
    def predict_file(self, files):
        return self.artifacts.model.predict_file(files)

    @bentoml.api(input=JsonInput(), batch=True)
    def predict_json(self, input_datas):
        return self.artifacts.model.predict_json(input_datas)

    CUSTOM_ROUTE = "$~!@%^&*()_-+=[]\\|;:,./predict"

    @bentoml.api(
        route=CUSTOM_ROUTE,
        input=JsonInput(),
        batch=True,
    )
    def customezed_route(self, input_datas):
        return input_datas

    CUSTOM_SCHEMA = {
        "application/json": {
            "schema": {
                "type": "object",
                "required": ["field1", "field2"],
                "properties": {
                    "field1": {
                        "type": "string"
                    },
                    "field2": {
                        "type": "uuid"
                    },
                },
            },
        }
    }

    @bentoml.api(input=JsonInput(request_schema=CUSTOM_SCHEMA), batch=True)
    def customezed_schema(self, input_datas):
        return input_datas

    @bentoml.api(input=JsonInput(), batch=True)
    def predict_strict_json(self,
                            input_datas,
                            tasks: Sequence[InferenceTask] = None):
        filtered_jsons = []
        for j, t in zip(input_datas, tasks):
            if t.http_headers.content_type != "application/json":
                t.discard(http_status=400, err_msg="application/json only")
            else:
                filtered_jsons.append(j)
        return self.artifacts.model.predict_json(filtered_jsons)

    @bentoml.api(input=JsonInput(), batch=True)
    def predict_direct_json(self,
                            input_datas,
                            tasks: Sequence[InferenceTask] = None):
        filtered_jsons = []
        for j, t in zip(input_datas, tasks):
            if t.http_headers.content_type != "application/json":
                t.discard(http_status=400, err_msg="application/json only")
            else:
                filtered_jsons.append(j)
        rets = self.artifacts.model.predict_json(filtered_jsons)
        return [
            InferenceResult(http_status=200, data=json.dumps(result))
            for result in rets
        ]

    @bentoml.api(input=JsonInput(), mb_max_latency=10000 * 1000, batch=True)
    def echo_with_delay(self, input_datas):
        data = input_datas[0]
        time.sleep(data['b'] + data['a'] * len(input_datas))
        return input_datas

    @bentoml.api(input=JsonInput())
    def echo_json(self, input_data):
        return input_data

    if version.parse(BENTOML_VERSION) > version.parse("0.12.1+0"):

        @bentoml.api(input=JsonInput(), output=JsonOutput(ensure_ascii=True))
        def echo_json_ensure_ascii(self, input_data):
            return input_data
Example 9
class Service(BentoService):
    @api(input=StringInput(), output=JsonOutput())
    def say_hi(self, x):
        return f"Hello {x}!"