async def task_handler(data: Iris, background_tasks: BackgroundTasks):
    """Register a new Job and schedule the modelling task to run in the background.

    Returns the freshly created Job so the caller can poll its status by uid.
    """
    payload = data.dict()
    print("in the prediction")
    # Collect the four Iris measurements in the order the modelling task expects.
    features = [
        payload['sepal_length'],
        payload['sepal_width'],
        payload['petal_length'],
        payload['petal_width'],
    ]
    job = Job()
    jobs[job.uid] = job  # track the job in the module-level registry
    background_tasks.add_task(start_modelling_task, job.uid, features)
    return job
Example #2
0
async def predict(
    data: Iris
):
    """Run the model on the four Iris measurements and return its prediction.

    ``data`` is the request body parsed into the Iris model; an alternative
    input path would be a CSV upload via ``fastapi.UploadFile``.
    The response shape can be constrained with ``response_model`` in the
    ``@app.post()`` decorator.
    """
    payload = data.dict()  # Iris model -> plain dict
    print("in the prediction")
    # Feature order matters: it must match what the model was trained on.
    feature_keys = ('sepal_length', 'sepal_width', 'petal_length', 'petal_width')
    features = [payload[key] for key in feature_keys]
    prediction = await model(features)
    print("prediction is done")
    return {"prediction": str(prediction)}