Example #1
class ExampleBentoServiceSingle(ExampleBentoService):
    """
    Example BentoService class made for testing purposes
    """

    @bentoml.api(
        input=MultiImageInput(input_names=('original', 'compared')), batch=False
    )
    def predict_legacy_images(self, original, compared):
        return self.artifacts.model.predict_multi_images([original], [compared])[0]

    @bentoml.api(input=ImageInput(), batch=False)
    def predict_image(self, image):
        return self.artifacts.model.predict_image([image])[0]

    @bentoml.api(
        input=JsonInput(), mb_max_latency=1000, mb_max_batch_size=2000, batch=False
    )
    def predict_with_sklearn(self, json):
        return self.artifacts.sk_model.predict([json])[0]

    @bentoml.api(input=FileInput(), batch=False)
    def predict_file(self, file_):
        return self.artifacts.model.predict_file([file_])[0]

    @bentoml.api(input=JsonInput(), batch=False)
    def predict_json(self, input_data):
        return self.artifacts.model.predict_json([input_data])[0]
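Because every API above is declared with batch=False, each handler receives a single parsed input and wraps it in a one-element list before calling the model. A minimal client sketch against the predict_json endpoint, assuming the service is served locally on the default port 5000:

import requests

# Hypothetical call to a locally served bento; JsonInput parses the body.
resp = requests.post(
    "http://127.0.0.1:5000/predict_json",
    json={"name": "john", "game": "mario"},
)
print(resp.status_code, resp.text)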
Example #2
def test_json_handle_aws_lambda_event():
    test_content = '[{"name": "john","game": "mario","city": "sf"}]'

    def test_func(obj):
        return obj[0]["name"]

    input_adapter = JsonInput()
    success_event_obj = {
        "headers": {"Content-Type": "application/json"},
        "body": test_content,
    }
    success_response = input_adapter.handle_aws_lambda_event(
        success_event_obj, test_func
    )

    assert success_response["statusCode"] == 200
    assert success_response["body"] == '"john"'

    error_event_obj = {
        "headers": {"Content-Type": "this_will_fail"},
        "body": test_content,
    }
    with pytest.raises(BadInput) as e:
        input_adapter.handle_aws_lambda_event(error_event_obj, test_func)

    assert "Request content-type must be 'application/json" in str(e.value)
Example #3
@env(infer_pip_packages=True, requirements_txt_file='requirements.txt')
class RewardsProgramService(BentoService):
    """
    REST API for RewardsProgramService.
    """

    # Helper that forwards an input to the GetPoint API. Any file path it
    # references has to exist on the machine that is running this service.
    def testInput(self, input):
        return self.GetPoint(input)

    @api(input=JsonInput())
    def GetPoint(self, parsed_json):
        """
        Takes JSON data with the following key:
        -- custID :
        Outputs a JSON/dictionary; empty if the database connection fails.
        """
        custID = parsed_json["custID"]
        # Connect to MariaDB
        cur = ConnectToDatabase()
        if cur is None:
            # Return an empty JSON object
            return {}
        print("Successfully connected to database")
        # Check whether the user exists and return their reward info
        return GetRewardInfo(cur, custID)

    @api(input=JsonInput())
    def addPoints(self, parsed_json):
        """
        Takes JSON data with the following keys:
        -- custID :
        -- points :
        -- firstName :
        -- lastName :
        Outputs a JSON/dictionary; empty if the database connection fails.
        """
        custID = parsed_json["custID"]
        pts    = parsed_json["points"]
        # Connect to MariaDB
        cur = ConnectToDatabase()
        if cur is None:
            # Return an empty JSON object
            return {}
        # Check whether the user exists, then add the points
        print("Adding points")
        return AddRewardPoints(cur, parsed_json)
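A sketch of how a service like this is typically packaged with the 0.13-style BentoML API; the serve command is shown as a comment because the exact tag depends on the saved version:

# Minimal save/serve sketch (assumes the database helpers import cleanly).
svc = RewardsProgramService()
saved_path = svc.save()
# Then, from a shell:
#   bentoml serve RewardsProgramService:latest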
Example #4
class KerasClassifier(bentoml.BentoService):
    @bentoml.api(input=JsonInput(), batch=True)
    def predict(self, jsons):
        return self.artifacts.model.predict(np.array(jsons))

    @bentoml.api(input=JsonInput(), batch=True)
    def predict2(self, jsons):
        return self.artifacts.model2.predict(np.array(jsons))
Example #5
class ExampleBentoService(bentoml.BentoService):
    """
    Example BentoService class made for testing purposes
    """
    @bentoml.api(input=JsonInput(),
                 mb_max_latency=1000,
                 mb_max_batch_size=2000,
                 batch=True)
    def predict_with_sklearn(self, jsons):
        """predict_dataframe expects dataframe as input
        """
        return self.artifacts.sk_model.predict(jsons)

    @bentoml.api(
        input=DataframeInput(dtype={"col1": "int"}),
        mb_max_latency=1000,
        mb_max_batch_size=2000,
        batch=True,
    )
    def predict_dataframe(self, df):
        """predict_dataframe expects dataframe as input
        """
        return self.artifacts.model.predict_dataframe(df)

    @bentoml.api(DataframeHandler, dtype={"col1": "int"},
                 batch=True)  # deprecated
    def predict_dataframe_v1(self, df):
        """predict_dataframe expects dataframe as input
        """
        return self.artifacts.model.predict_dataframe(df)

    @bentoml.api(input=ImageInput(), batch=True)
    def predict_image(self, images):
        return self.artifacts.model.predict_image(images)

    @bentoml.api(input=FileInput(), batch=True)
    def predict_file(self, files):
        return self.artifacts.model.predict_file(files)

    @bentoml.api(input=LegacyImageInput(input_names=('original', 'compared')))
    def predict_legacy_images(self, original, compared):
        return self.artifacts.model.predict_legacy_images(original, compared)

    @bentoml.api(input=JsonInput(), batch=True)
    def predict_json(self, input_datas):
        return self.artifacts.model.predict_json(input_datas)

    @bentoml.api(input=JsonInput(), mb_max_latency=10000 * 1000, batch=True)
    def echo_with_delay(self, input_datas):
        data = input_datas[0]
        time.sleep(data['b'] + data['a'] * len(input_datas))
        return input_datas
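mb_max_latency and mb_max_batch_size tune BentoML's adaptive micro-batching: requests are buffered until either the batch-size cap or the latency budget (milliseconds) is reached, then handed to the handler as one list. A minimal sketch of the contract a batch=True handler must honor (the class and field names are illustrative):

import bentoml
from bentoml.adapters import JsonInput


class BatchContractDemo(bentoml.BentoService):
    @bentoml.api(input=JsonInput(), mb_max_latency=1000,
                 mb_max_batch_size=2000, batch=True)
    def double(self, parsed_list):
        # Receives a list of parsed JSON bodies and must return a list of
        # the same length, in the same order.
        return [{"doubled": item.get("x", 0) * 2} for item in parsed_list]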
Example #6
def test_json_handle_cli(capsys, tmpdir):
    def test_func(obj):
        return obj[0]["name"]

    input_adapter = JsonInput()

    json_file = tmpdir.join("test.json")
    with open(str(json_file), "w") as f:
        f.write('[{"name": "john","game": "mario","city": "sf"}]')

    test_args = ["--input={}".format(json_file)]
    input_adapter.handle_cli(test_args, test_func)
    out, err = capsys.readouterr()
    assert out.strip().endswith("john")
Example #7
class NonBatchExampleService(bentoml.BentoService):
    """
    Example BentoService class made for testing purposes
    """

    @bentoml.api(
        input=MultiImageInput(input_names=('original', 'compared')), batch=False
    )
    def predict_multi_images(self, original, compared):
        return self.artifacts.model.predict_multi_images([original], [compared])[0]

    @bentoml.api(input=ImageInput(), batch=False)
    def predict_image(self, image):
        return self.artifacts.model.predict_image([image])[0]

    @bentoml.api(
        input=JsonInput(), mb_max_latency=1000, mb_max_batch_size=2000, batch=False
    )
    def predict_with_sklearn(self, json_value):
        return self.artifacts.sk_model.predict([json_value])[0]

    @bentoml.api(input=FileInput(), batch=False)
    def predict_file(self, file_):
        return self.artifacts.model.predict_file([file_])[0]

    @bentoml.api(input=JsonInput(), batch=False)
    def predict_json(self, input_data):
        return self.artifacts.model.predict_json([input_data])[0]

    @bentoml.api(input=JsonInput(), batch=False)
    def predict_strict_json(self, input_data, task: InferenceTask = None):
        if task.http_headers.content_type != "application/json":
            task.discard(http_status=400, err_msg="application/json only")
            return
        result = self.artifacts.model.predict_json([input_data])[0]
        return result

    @bentoml.api(input=JsonInput(), batch=False)
    def predict_direct_json(self, input_data, task: InferenceTask = None):
        if task.http_headers.content_type != "application/json":
            return InferenceError(http_status=400, err_msg="application/json only")
        result = self.artifacts.model.predict_json([input_data])[0]
        return InferenceResult(http_status=200, data=json.dumps(result))

    @bentoml.api(input=JsonInput(), mb_max_latency=10000 * 1000, batch=True)
    def echo_with_delay(self, input_datas):
        data = input_datas[0]
        time.sleep(data['b'] + data['a'] * len(input_datas))
        return input_datas
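predict_strict_json and predict_direct_json show the two ways to control the HTTP response directly: task.discard() rejects a request, while returning InferenceResult or InferenceError sets the status and body explicitly. A hedged client sketch (local port 5000 assumed); requests sets the application/json content type, so the strict check passes:

import requests

ok = requests.post("http://127.0.0.1:5000/predict_strict_json", json={"x": 1})
print(ok.status_code)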
Example #8
class Service(BentoService):
    @api(input=JsonInput(), batch=True)
    def invert(self, input: List[JsonSerializable]):
        input = input[0]
        output = []
        for inp in input:
            inp = inp["input"]
            output += [{"inverted": inp[::-1]}]
        output = {"result": output, "meta": None}
        return [output]

    @api(input=JsonInput(), batch=True)
    def shuffle(self, input: List[JsonSerializable]):
        input = input[0]
        output = []
        for inp in input:
            inp = inp["input"]
            linp = list(inp)
            random.shuffle(linp)
            output += [{"shuffled": "".join(linp)}]
        return [output]

    @api(input=JsonInput(), batch=True)
    def tokenize(self, input: List[JsonSerializable]):
        input = input[0]
        output = []
        for inp in input:
            inp = inp["input"]
            inp = inp[:30].ljust(30, " ")
            linp = list(inp)
            output += [{"tokenized": linp}]
        return [output]

    @api(input=JsonInput(), batch=True)
    def first_characters(self, input: List[JsonSerializable]):
        input = input[0]
        output = []
        for inp in input:
            inp = inp["input"]
            inp = inp[:3].ljust(3, " ")
            linp = list(inp)
            output += [{"fchars": linp}]
        output = {
            "result": output,
            "meta": {
                "fchars": ["char1", "char2", "char3"]
            }
        }
        return [output]
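Each handler above unwraps input[0], treating the single posted body as itself a list of records. Since none of these methods touch artifacts, the pattern can be sketched with a direct in-process call (this bypasses the HTTP layer and assumes the handler runs without packed artifacts):

svc = Service()
# One request body (a list of records) arrives as the single batch element.
print(svc.invert([[{"input": "abc"}, {"input": "xyz"}]]))
# [{'result': [{'inverted': 'cba'}, {'inverted': 'zyx'}], 'meta': None}]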
Example #9
class ExampleBentoService(bentoml.BentoService):
    """
    Example BentoService class made for testing purposes
    """
    @bentoml.api(input=DataframeInput(),
                 mb_max_latency=1000,
                 mb_max_batch_size=2000,
                 batch=True)
    def predict(self, df):
        """An API for testing simple bento model service
        """
        return self.artifacts.model.predict(df)

    @bentoml.api(input=DataframeInput(dtype={"col1": "int"}), batch=True)
    def predict_dataframe(self, df):
        """predict_dataframe expects dataframe as input
        """
        return self.artifacts.model.predict_dataframe(df)

    @bentoml.api(DataframeHandler, dtype={"col1": "int"},
                 batch=True)  # deprecated
    def predict_dataframe_v1(self, df):
        """predict_dataframe expects dataframe as input
        """
        return self.artifacts.model.predict_dataframe(df)

    @bentoml.api(input=ImageInput(), batch=True)
    def predict_image(self, images):
        return self.artifacts.model.predict_image(images)

    @bentoml.api(input=MultiImageInput(input_names=('original', 'compared')),
                 batch=False)
    def predict_multi_images(self, original, compared):
        return self.artifacts.model.predict_multi_images(original, compared)

    @bentoml.api(input=JsonInput(), batch=True)
    def predict_json(self, input_data):
        return self.artifacts.model.predict_json(input_data)

    CUSTOM_ROUTE = "$~!@%^&*()_-+=[]\\|;:,./predict"

    @bentoml.api(
        route=CUSTOM_ROUTE,
        input=JsonInput(),
        batch=True,
    )
    def customize_route(self, input_data):
        return input_data
Example #10
class SummarizerService(VersaillesService):
    @bentoml.api(input=JsonInput(), batch=False)
    def predict(self, parsed_json: JsonSerializable):
        text = parsed_json.get("text")
        model, tokenizer = self.get_artifacts()

        # tokenize
        inputs = tokenizer.encode("summarize: " + text,
                                  return_tensors="pt",
                                  max_length=512)

        # invalidate token lengths of less than 10
        if len(inputs[0]) < 10:
            return InferenceError(err_msg="text too short", http_status=400)

        # summarize text, top 4 results
        output = model.generate(inputs,
                                max_length=150,
                                min_length=40,
                                length_penalty=2.0,
                                num_beams=4,
                                early_stopping=True)

        # decode most likely
        output = tokenizer.decode(output[0],
                                  skip_special_tokens=True).replace(" .", ".")
        json_out = json.dumps({"result": output})
        return InferenceResult(
            data=json_out,
            http_status=200,
            http_headers={"Content-Type": "application/json"},
        )
Example #11
class OnnxService(BentoService):
    def __init__(self):
        super().__init__()
        self.news_label = {
            1: 'World',
            2: 'Sports',
            3: 'Business',
            4: 'Sci/Tec'
        }

    def classify_categories(self, sentence):
        text_pipeline, _ = get_pipeline(self.artifacts.tokenizer,
                                        self.artifacts.vocab)
        text = to_numpy(torch.tensor(text_pipeline(sentence)).to(device))
        tensor_name = self.artifacts.model.get_inputs()[0].name
        output_name = self.artifacts.model.get_outputs()[0].name
        onnx_inputs = {tensor_name: text}
        print(f'providers: {self.artifacts.model.get_providers()}')

        try:
            r = self.artifacts.model.run([output_name], onnx_inputs)[0]
            return r.argmax(1).item() + 1
        except (RuntimeError, InvalidArgument) as e:
            print(f"ERROR with shape: {onnx_inputs[tensor_name].shape} - {e}")

    @api(input=JsonInput(), output=JsonOutput())
    def predict(self, parsed_json):
        sentence = parsed_json.get('text')
        return {
            'categories': self.news_label[self.classify_categories(sentence)]
        }
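The snippet relies on a to_numpy helper that is not shown. A common definition, in the style of the ONNX Runtime tutorials (an assumption about this codebase):

def to_numpy(tensor):
    # Detach from the autograd graph if needed, then move to host memory.
    if tensor.requires_grad:
        return tensor.detach().cpu().numpy()
    return tensor.cpu().numpy()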
Example #12
class Service(BentoService):
    @api(input=JsonInput(), batch=True)
    def predict(self, input: List[JsonSerializable]):
        input = input[0]
        smiles_list = [inp["input"] for inp in input]
        output = self.artifacts.model.predict(smiles_list)
        return [output]
Example #13
class ExampleService(bentoml.BentoService):
    """
    Example BentoService class made for testing purposes
    """
    @bentoml.api(input=JsonInput(), mb_max_latency=3 * 1000, batch=True)
    def echo_with_delay_max3(self, input_datas):
        data = input_datas[0]
        time.sleep(float(data))
        return input_datas
Example #14
class TransformersGPT2TextGenerator(bentoml.BentoService):
    @bentoml.api(input=JsonInput(), batch=False)
    def predict(self, parsed_json):
        src_text = parsed_json.get("text")
        model = self.artifacts.gptModel.get("model")
        tokenizer = self.artifacts.gptModel.get("tokenizer")
        input_ids = tokenizer.encode(src_text, return_tensors="pt")
        output = model.generate(input_ids, max_length=50)
        output = tokenizer.decode(output[0], skip_special_tokens=True)
        return output
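In 0.13-style BentoML the transformers artifact is packed as a dict holding both pieces, which is why the handler calls .get("model") and .get("tokenizer"). A hypothetical packing sketch using the stock gpt2 checkpoint:

from transformers import GPT2LMHeadModel, GPT2Tokenizer

model = GPT2LMHeadModel.from_pretrained("gpt2")
tokenizer = GPT2Tokenizer.from_pretrained("gpt2")

svc = TransformersGPT2TextGenerator()
svc.pack("gptModel", {"model": model, "tokenizer": tokenizer})
saved_path = svc.save()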
Example #15
class FraudDetectionAndIdentityService(BentoService):
    @api(input=JsonInput(), batch=True)
    def fraud_detect(self, json_list):
        # user-defined callback function that processes inference requests
        pass

    @api(input=DataframeInput(input_json_orient='records'), batch=True)
    def identity(self, df):
        # user-defined callback function that processes inference requests
        pass
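Each @api method is exposed as its own REST endpoint named after the method. A hypothetical client exercising both (port and payloads are illustrative):

import requests

requests.post("http://127.0.0.1:5000/fraud_detect", json=[{"amount": 125.0}])
# input_json_orient='records' means the body is a list of row dicts.
requests.post("http://127.0.0.1:5000/identity",
              json=[{"name": "john", "city": "sf"}])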
Example #16
class TfModelService(bentoml.BentoService):
    @bentoml.api(input=JsonInput(), batch=False)
    def predict(self, jsonIn):
        # JsonInput already delivers parsed JSON; only decode again if the
        # client sent a double-encoded JSON string.
        if isinstance(jsonIn, str):
            jsonIn = json.loads(jsonIn)
        prediction = self.artifacts.model(
            load_img(jsonIn["inputs"]["contentPath"]),
            load_img(jsonIn["inputs"]["stylePath"]))[0]
        save_img(prediction=prediction, imgPath=jsonIn["outputPath"])
        return True
Example #17
class ExampleBentoService(bentoml.BentoService):
    """
    Example BentoService class made for testing purposes
    """
    @bentoml.api(
        input=JsonInput(),
        mb_max_latency=1000,
        mb_max_batch_size=2000,
    )
    def predict_with_sklearn(self, jsons):
        """predict_dataframe expects dataframe as input
        """
        return self.artifacts.sk_model.predict(jsons)

    @bentoml.api(
        input=DataframeInput(input_dtypes={"col1": "int"}),
        mb_max_latency=1000,
        mb_max_batch_size=2000,
    )
    def predict_dataframe(self, df):
        """predict_dataframe expects dataframe as input
        """
        return self.artifacts.model.predict_dataframe(df)

    @bentoml.api(DataframeHandler, input_dtypes={"col1": "int"})  # deprecated
    def predict_dataframe_v1(self, df):
        """predict_dataframe expects dataframe as input
        """
        return self.artifacts.model.predict_dataframe(df)

    @bentoml.api(input=ImageInput())
    def predict_image(self, images):
        return self.artifacts.model.predict_image(images)

    @bentoml.api(input=LegacyImageInput(input_names=('original', 'compared')))
    def predict_legacy_images(self, original, compared):
        return self.artifacts.model.predict_legacy_images(original, compared)

    @bentoml.api(input=JsonInput())
    def predict_json(self, input_data):
        return self.artifacts.model.predict_json(input_data)
Example #18
class PyTorchModel(bentoml.BentoService):
    '''
    @bentoml.utils.cached_property  # reuse transformer
    def transform(self):
        return transforms.Compose([transforms.CenterCrop((29, 29)), transforms.ToTensor()])
    '''
    @bentoml.api(input=JsonInput(), output=JsonOutput(), batch=True)
    def predict(self, input_json) -> List[str]:

        sentence = input_json[0]['sentence']

        tokenized_sentence = config.TOKENIZER.encode(sentence)

        sentence = sentence.split()
        #print(sentence)
        #print(tokenized_sentence)

        test_dataset = EntityDataset(texts=[sentence],
                                     pos=[[0] * len(sentence)],
                                     tags=[[0] * len(sentence)])

        device = torch.device("cpu")

        with torch.no_grad():
            data = test_dataset[0]
            for k, v in data.items():
                data[k] = v.to(device).unsqueeze(0)
            tag, pos, _ = self.artifacts.ner(**data)

            tags = enc_tag.inverse_transform(
                tag.argmax(2).cpu().numpy().reshape(
                    -1))[:len(tokenized_sentence)]

            i = 0
            names = []
            while i < len(tags):
                # Collect consecutive "B-per" tags into one name span; guard
                # the inner loop so it cannot index past the end of tags.
                if tags[i] == "B-per":
                    indices = []
                    while i < len(tags) and tags[i] == "B-per":
                        indices.append(i)
                        i += 1
                    tokenized_name = tokenized_sentence[
                        indices[0]:indices[-1] + 1]
                    name = config.TOKENIZER.decode(tokenized_name)
                    names.append(name)
                else:
                    i += 1

            resp = ','.join(names)

            return [resp]
Example #19
class TitanicSurvivalPredictionService(bentoml.BentoService):
    @bentoml.api(input=JsonInput())
    def predict(self, datain):
        # datain is a list of a json object.
        df = pd.read_json(json.dumps(datain[0]), orient="table")

        data = df[["Pclass", "Age", "Fare", "SibSp", "Parch"]]
        result = pd.DataFrame()
        result["xgb_proba"] = self.artifacts.xgb.predict_proba(data)[:, 1]
        result["lgb_proba"] = self.artifacts.lgb.predict_proba(data)[:, 1]
        # make sure to return as a list of json
        return [result.to_json(orient="table")]
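The handler expects the posted JSON to be a list whose first element is a DataFrame serialized with orient="table". A sketch of building that payload client-side (the column values are illustrative):

import json

import pandas as pd
import requests

df = pd.DataFrame([{"Pclass": 3, "Age": 22.0, "Fare": 7.25,
                    "SibSp": 1, "Parch": 0}])
payload = [json.loads(df.to_json(orient="table"))]
requests.post("http://127.0.0.1:5000/predict", json=payload)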
Example #20
class DiabetesRegressor(BentoService):
    @api(input=JsonInput())
    def predict(self, parsed_json):
        # Note: .pack(), @artifacts and self.artifacts all need to refer to the
        # same model name (here, "scikit_model").

        # Also, parsed_json holds a list of parsed inputs:
        # https://docs.bentoml.org/en/latest/api/adapters.html?highlight=JsonInput#bentoml.adapters.JsonInput
        # Without batched input, send exactly one input and take element [0].

        inp_data = np.array(parsed_json[0]['input'])
        return self.artifacts.scikit_model.predict(inp_data)
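A sketch of the pack/save flow the comments above allude to, keeping every reference to the model under the same "scikit_model" name (the Ridge model and dataset are stand-ins):

from sklearn.datasets import load_diabetes
from sklearn.linear_model import Ridge

X, y = load_diabetes(return_X_y=True)
reg = Ridge().fit(X, y)

svc = DiabetesRegressor()
svc.pack("scikit_model", reg)  # must match self.artifacts.scikit_model
saved_path = svc.save()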
Example #21
class NERService(VersaillesService):
    label_list = [
        "O",  # Outside of a named entity
        "B-MISC",  # Beginning of a miscellaneous entity right after another miscellaneous entity
        "I-MISC",  # Miscellaneous entity
        "B-PER",  # Beginning of a person's name right after another person's name
        "I-PER",  # Person's name
        "B-ORG",  # Beginning of an organisation right after another organisation
        "I-ORG",  # Organisation
        "B-LOC",  # Beginning of a location right after another location
        "I-LOC"  # Location
    ]

    @bentoml.api(input=JsonInput(), batch=False)
    def predict(self, parsed_json: JsonSerializable):
        text = parsed_json.get("text")
        model, tokenizer = self.get_artifacts()

        # tokenize
        tokens = tokenizer.tokenize(tokenizer.decode(tokenizer.encode(text)))
        inputs = tokenizer.encode(text, return_tensors="pt")

        # invalidate token lengths of less than 10
        if len(inputs[0]) < 10:
            return InferenceError(err_msg="text too short", http_status=400)

        # get logits and argmax
        outputs = model(inputs).logits
        output = torch.argmax(outputs, dim=2)[0].numpy()

        # token fragment grouping
        res = []
        prev_decoded = 'O'
        for token, prediction in zip(tokens, output):
            decoded = self.label_list[prediction]
            if decoded != 'O':
                if decoded == prev_decoded:
                    if token.startswith('##'):
                        new_token = res[-1][0] + token[2:]
                    else:
                        new_token = res[-1][0] + ' ' + token
                    res[-1] = (new_token, decoded)
                else:
                    res.append((token, decoded))
            prev_decoded = decoded

        json_out = json.dumps({"result": res})
        return InferenceResult(
            data=json_out,
            http_status=200,
            http_headers={"Content-Type": "application/json"},
        )
Example #22
class Service(BentoService):
    """The Service class uses BentoService to build multiple inference APIs"""
    @api(input=JsonInput())
    def predict(self, input: JsonSerializable):
        """
        This is a dummy test model.
        It counts atoms in a SMILES string.
        """
        mol = read_smiles(input, explicit_hydrogen=True)
        counts = collections.defaultdict(int)
        for _, atom in mol.nodes(data="element"):
            counts[atom] += 1
        return json.dumps(counts)
Example #23
class Service(BentoService):
    @api(input=JsonInput(), batch=True)
    def predict(self, input: List[JsonSerializable]):
        """
        This is a dummy test model.
        It returns the molecular weight of a molecule.
        """
        input = input[0]
        output = []
        for inp in input:
            mol = Chem.MolFromSmiles(inp["input"])
            mw = Descriptors.MolWt(mol)
            output += [{"mw": mw}]
        return [output]
Example #24
class Tensorflow2Classifier(bentoml.BentoService):
    @bentoml.api(input=TfTensorInput(), batch=True)
    def predict1(self, tensor):
        return self.artifacts.model1(tensor)

    @bentoml.api(input=TfTensorInput(), batch=True)
    def predict2(self, tensor):
        return self.artifacts.model2(tensor)

    @bentoml.api(input=JsonInput(), batch=True)
    def predict3(self, jsons):
        import tensorflow as tf

        tensor = tf.ragged.constant(jsons, dtype=tf.float64)
        return self.artifacts.model3(tensor)
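predict3 uses tf.ragged.constant because JSON rows may differ in length; a quick illustration of what the conversion produces:

import tensorflow as tf

jsons = [[1.0, 2.0], [3.0, 4.0, 5.0]]  # rows of unequal length
tensor = tf.ragged.constant(jsons, dtype=tf.float64)
print(tensor.shape)  # (2, None)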
Example #25
class ProfanityFilterService(BentoService):
    def model_pred(self, sentence):
        self.artifacts.model.eval()

        tokens = self.artifacts.tokenizer.tokenize(sentence)
        length = torch.LongTensor([len(tokens)]).to(device)
        idx = [vocab.stoi[token] for token in tokens]
        tensor = torch.LongTensor(idx).unsqueeze(-1).to(device)

        prediction = self.artifacts.model(tensor, length)
        probabilities = nn.functional.softmax(prediction, dim=-1)
        return probabilities.squeeze()[-1].item()

    @api(input=JsonInput(), output=JsonOutput())
    def predict(self, parsed_json):
        return self.model_pred(parsed_json["text"])
Example #26
class Service(BentoService):
    @api(input=JsonInput(), batch=True)
    def predict(self, input: List[JsonSerializable]):
        """
        This is a dummy test model.
        It counts atoms in a SMILES string.
        """
        input = input[0]
        output = []
        for inp in input:
            mol = read_smiles(inp["input"], explicit_hydrogen=True)
            counts = collections.defaultdict(int)
            for _, atom in mol.nodes(data="element"):
                counts[atom] += 1
            output += [{"atoms": counts}]
        return [output]
Example #27
class TensorflowService(BentoService):
    def word_to_index(self, word):
        tokenizer = self.artifacts.tokenizer
        if word in tokenizer and tokenizer[word] <= 5000:
            return tokenizer[word]
        else:
            return tokenizer["<OOV>"]

    def preprocessing(self, text_str):
        proc = text_to_word_sequence(preprocess(text_str))
        tokens = list(map(self.word_to_index, proc))
        return tokens

    @api(input=JsonInput())
    def predict(self, parsed_json):
        raw = self.preprocessing(parsed_json['text'])
        input_data = [raw[:n + 1] for n in range(len(raw))]
        input_data = pad_sequences(input_data, maxlen=100, padding="post")
        return self.artifacts.model.predict(input_data)
Example #28
class PytorchService(BentoService):
    def __init__(self):
        super().__init__()
        self.news_label = {1: 'World',
                           2: 'Sports',
                           3: 'Business',
                           4: 'Sci/Tec'}

    def classify_categories(self, sentence):
        text_pipeline, _ = get_pipeline(self.artifacts.tokenizer, self.artifacts.vocab)
        with torch.no_grad():
            text = torch.tensor(text_pipeline(sentence)).to(device)
            offsets = torch.tensor([0]).to(device)
            output = self.artifacts.model(text, offsets=offsets)
            return output.argmax(1).item() + 1

    @api(input=JsonInput(), output=JsonOutput())
    def predict(self, parsed_json):
        label = self.classify_categories(parsed_json.get("text"))
        return {'categories': self.news_label[label]}
Example #29
class CategorizationService(VersaillesService):
    categories = [
        "environmental", "defence", "education", "economy", "legal", "energy",
        "healthcare", "indigenous", "technology", "parliament",
        "infrastructure", "transportation", "agriculture", "media"
    ]

    def _get_hypotheses(self):
        return [f'This example is about {label}.' for label in self.categories]

    @bentoml.api(input=JsonInput(), batch=False)
    def predict(self, parsed_json: JsonSerializable):
        text = parsed_json.get("text")
        model, tokenizer = self.get_artifacts()

        def encode(hypothesis):
            return tokenizer.encode(text,
                                    hypothesis,
                                    padding='longest',
                                    return_tensors='pt',
                                    truncation_strategy='only_first')

        hypotheses = self._get_hypotheses()
        inputs = [encode(hypothesis) for hypothesis in hypotheses]
        stacked = torch.stack(inputs, dim=1)
        logits = model(stacked[0])[0]
        entail_contradiction_logits = logits[:, [0, 2]]
        probs = entail_contradiction_logits.softmax(dim=1)[:, 1]

        res = {}
        for label, prob in zip(self.categories, probs):
            res[label] = prob.item()

        json_out = json.dumps({"result": res})
        return InferenceResult(
            data=json_out,
            http_status=200,
            http_headers={"Content-Type": "application/json"},
        )
Example #30
class StonkClonk(BentoService):
    @api(input=JsonInput(http_input_example='{"ticker": "AAPL", "months": 3}'))
    def predict(self, request):
        stock = yf.Ticker(request["ticker"])
        hist = stock.history(period="5y", interval="1mo").to_period(freq="M")
        data = hist[~hist.index.duplicated(keep="first")]["Close"]

        forecaster = ARIMA(order=(3, 1, 0),
                           seasonal_order=(0, 1, 0, 12),
                           suppress_warnings=True)
        forecaster.fit(data)

        fh = ForecastingHorizon(
            pd.period_range(datetime.now() + timedelta(30),
                            datetime.now() + timedelta(30 * request["months"]),
                            freq="M"),
            is_relative=False,
        )
        y_pred = forecaster.predict(fh)

        # Series.append is removed in newer pandas; concat behaves the same here.
        output = pd.concat([data, y_pred])

        output.index = output.index.map(lambda date: str(date))
        return output.to_dict()
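A hypothetical request matching the http_input_example declared on the API (host and port assumed):

import requests

resp = requests.post("http://127.0.0.1:5000/predict",
                     json={"ticker": "AAPL", "months": 3})
print(resp.json())  # maps "YYYY-MM" periods to closing prices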