# Example 1
def predict():
    """Serve gradient-boosting predictions for a POSTed JSON payload.

    Runs make_prediction (which also validates the input), persists the
    results, and returns predictions/version/errors as JSON; validation
    errors produce a 400 response instead.
    """
    if request.method == "POST":
        # Parse the request body as JSON.
        payload = request.get_json()

        # Score the model; make_prediction also validates the payload.
        outcome = make_prediction(input_data=payload)

        # Bail out with a 400 if validation produced errors.
        validation_errors = outcome.get("errors")
        if validation_errors:
            return Response(json.dumps(validation_errors), status=400)

        # Unpack the successful result.
        model_version = outcome.get("version")
        prediction_values = outcome.get("predictions").tolist()

        # Persist inputs and outputs for later auditing.
        PredictionPersistence(db_session=current_app.db_session).save_predictions(
            inputs=payload,
            model_version=model_version,
            predictions=prediction_values,
            db_model=ModelType.GRADIENT_BOOSTING,
        )

        # Send back the prediction payload.
        return jsonify({
            "predictions": prediction_values,
            "version": model_version,
            "errors": validation_errors,
        })
# Example 2
def prev_predict():
    """Previous prediction endpoint: neural-net model with optional shadow run.

    NOTE: request.get_json() already yields parsed Python data, so the old
    json.loads / dataframe conversion described in earlier revisions is no
    longer needed here.
    """
    if request.method == "POST":
        payload = request.get_json()
        _logger.info(
            f"Inputs  : {payload}"
            f"model : {ModelType.NEURALNET.name}"
            f"model_version : {shadow_version}"
        )

        # Validate the incoming payload before scoring.
        validated, errors = validation.validate_data(payload)

        # Score with the deep-learning model.
        result = dl_make_prediction(input_data=validated)
        _logger.info(f"Outputs : {result}")

        version = result.get("version")
        predictions = result.get("predictions").tolist()

        # Persist the live-model predictions.
        persistence = PredictionPersistence(db_session=current_app.db_session)
        persistence.save_predictions(
            inputs=payload,
            model_version=version,
            predictions=predictions,
            db_model=ModelType.NEURALNET,
        )

        # Fire the shadow model on a background thread when enabled.
        if current_app.config.get("SHADOW_MODE_ACTIVE"):
            _logger.debug(
                f"Calling shadow model asynchronously: "
                f"{ModelType.GRADIENT_BOOSTING.value}"
            )
            threading.Thread(
                target=persistence.make_save_predictions,
                kwargs={
                    "db_model": ModelType.GRADIENT_BOOSTING,
                    "input_data": validated,
                    "app": current_app._get_current_object(),
                    "json_data": payload,
                },
            ).start()

        return jsonify({
            "predictions": predictions,
            "errors": errors,
            "version": version,
        })
# Example 3
def test_data_access(model_type, model, test_inputs_df):
    """Check make_save_predictions persists exactly one record of *model*."""
    # Given: a persistence layer wired to a mocked DB session.
    session_mock = mock.MagicMock()
    persistence = PredictionPersistence(db_session=session_mock)

    # When: predictions are made and saved.
    persistence.make_save_predictions(
        db_model=model_type,
        input_data=test_inputs_df.to_dict(orient="records"),
    )

    # Then: one row was added and the transaction committed once.
    assert session_mock.add.call_count == 1
    assert session_mock.commit.call_count == 1
    assert isinstance(session_mock.add.call_args[0][0], model)
def predict():
    """LASSO prediction endpoint with shadow mode and Prometheus monitoring."""
    if request.method == "POST":
        # Parse the POSTed JSON body.
        payload = request.get_json()

        # Score with the live model and persist the outcome.
        persistence = PredictionPersistence(db_session=current_app.db_session)
        result = persistence.make_save_predictions(
            db_model=ModelType.LASSO, input_data=payload
        )

        # Kick off the shadow model on a background thread when enabled.
        if current_app.config.get("SHADOW_MODE_ACTIVE"):
            _logger.debug(f"Calling shadow model asynchronously: "
                          f"{ModelType.GRADIENT_BOOSTING.value}")
            shadow_thread = threading.Thread(
                target=persistence.make_save_predictions,
                kwargs={
                    "db_model": ModelType.GRADIENT_BOOSTING,
                    "input_data": payload,
                },
            )
            shadow_thread.start()

        # Propagate validation/prediction errors as a 400.
        if result.errors:
            _logger.warning(f"errors during prediction: {result.errors}")
            return Response(json.dumps(result.errors), status=400)

        # Record every prediction in the Prometheus histogram and gauge.
        for value in result.predictions:
            PREDICTION_TRACKER.labels(
                app_name=APP_NAME,
                model_name=ModelType.LASSO.name,
                model_version=live_version).observe(value)
            PREDICTION_GAUGE.labels(
                app_name=APP_NAME,
                model_name=ModelType.LASSO.name,
                model_version=live_version).set(value)

        # Respond with predictions, model version, and (empty) errors.
        return jsonify({
            "predictions": result.predictions,
            "version": result.model_version,
            "errors": result.errors,
        })
# Example 5
def predict():
    """CatBoost prediction endpoint with shadow mode and per-class monitoring.

    Parses the POSTed JSON, scores and persists predictions, optionally
    triggers the shadow model asynchronously, updates Prometheus counters
    for predicted positives/negatives, and returns the results as JSON
    (400 with the error payload when prediction fails).
    """
    # Probability above which a patient counts as a predicted positive.
    # Was a magic 0.30 inline in the monitoring loop.
    positive_threshold = 0.30

    if request.method == "POST":
        # Step 1: Extract POST data from request body as JSON.
        json_data = request.get_json()
        for entry in json_data:
            _logger.info(entry)

        # Step 2a: Get and save live model predictions.
        # Fix: use the module logger instead of a stray debug print().
        _logger.info(f"Input data {json_data}")
        persistence = PredictionPersistence(db_session=current_app.db_session)
        result = persistence.make_save_predictions(
            db_model=ModelType.CATBOOST, input_data=json_data
        )

        # Step 2b: Get and save shadow predictions asynchronously.
        if current_app.config.get("SHADOW_MODE_ACTIVE"):
            _logger.debug(
                f"Calling shadow model asynchronously: "
                f"{ModelType.GRADIENT_BOOSTING.value}"
            )
            thread = threading.Thread(
                target=persistence.make_save_predictions,
                kwargs={
                    "db_model": ModelType.GRADIENT_BOOSTING,
                    "input_data": json_data,
                },
            )
            thread.start()

        # Step 3: Handle errors.
        if result.errors:
            _logger.warning(f"errors during prediction: {result.errors}")
            return Response(json.dumps(result.errors), status=400)

        # Step 4: Monitoring — observe raw values and count each class.
        for _prediction in result.predictions:
            PREDICTION_TRACKER.observe(_prediction)
            NO_OF_PATIENTS.labels(
                app_name=APP_NAME,
                model_name=ModelType.CATBOOST.name,
                model_version=live_version).inc(1)
            # Fix: branch directly on the threshold instead of building a
            # throwaway 0/1 flag (old targetPredicted variable).
            if _prediction > positive_threshold:
                PREDICTION_TRACKER_PREDICTED_POSITIVE.labels(
                    app_name=APP_NAME,
                    model_name=ModelType.CATBOOST.name,
                    model_version=live_version).inc(1)
            else:
                PREDICTION_TRACKER_PREDICTED_NEGATIVE.labels(
                    app_name=APP_NAME,
                    model_name=ModelType.CATBOOST.name,
                    model_version=live_version).inc(1)
        _logger.info(
            f'Prediction results for model: {ModelType.CATBOOST.name} '
            f'version: {result.model_version} '
            f'Output values: {result.predictions}')

        # Step 5: Prepare prediction response.
        return jsonify(
            {
                "predictions": result.predictions,
                "version": result.model_version,
                "errors": result.errors,
            }
        )