def predict(): """ The second step is optional and it isn't necessary, you can comment that lines of code, but if you want to have a robust control of the data that people post in your API you have to include in a correct format in the same way that the example in validation.py """ if request.method == 'POST': # Step 1: Extract POST data from request body as JSON json_data = request.get_json() _logger.debug(f'Inputs: {json_data}') # Step 2: Validate the input using marshmallow schema input_data, errors = validate_inputs(input_data=json_data) # Step 3: Model prediction result = make_prediction(input_data=input_data) _logger.debug(f'Outputs: {result}') # Step 4: Convert numpy ndarray to list predictions = result.get('predictions').tolist() version = result.get('version') # Step 5: Return the response as JSON return jsonify({ 'predictions': predictions, 'version': version, 'errors': errors })
def batch_output_specific_model(model_id):
    """Output of a specific model on the test.csv data.

    Parameters
    ----------
    model_id: str
        Identifier of the model to use,
        e.g. lightgbm_output_v0.1-1588759220.335498

    Returns
    -------
    JSON response with the predictions, the model id and, when the
    target column is present, the AUC score.
    """
    auc_score = ''
    if request.method == 'POST':
        data_mngmnt = Data()
        data_mngmnt.from_csv("test.csv", sep=',')
        data = data_mngmnt.df

        result = make_prediction(input_data=data, id_model=model_id)
        predictions = result.get('predictions').tolist()

        if 'default' in data.columns:
            auc_score = get_auc_score(data['default'], result.get('predictions'))

        return jsonify({'result': predictions,
                        'model': model_id,
                        'auc_score': auc_score})
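The get_auc_score helper is defined elsewhere in the project; a minimal sketch, assuming it simply wraps scikit-learn's roc_auc_score over the observed 'default' labels and the predicted scores, might be:

from sklearn.metrics import roc_auc_score

def get_auc_score(y_true, y_pred):
    # Assumed behaviour: area under the ROC curve between the observed
    # 'default' labels and the model's predicted scores/probabilities.
    return roc_auc_score(y_true, y_pred)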
def test_make_single_prediction():
    # Given
    test_data = load_dataset(file_name='test.csv')
    single_test_json = test_data[0:1]

    # When
    subject = make_prediction(input_data=single_test_json)

    # Then
    assert subject is not None
    assert isinstance(subject.get('predictions')[0], float)
    assert math.ceil(subject.get('predictions')[0]) == 112476
def test_make_multiple_predictions():
    # Given
    test_data = load_dataset(file_name='test.csv')
    original_data_length = len(test_data)
    multiple_test_json = test_data

    # When
    subject = make_prediction(input_data=multiple_test_json)

    # Then
    assert subject is not None
    assert len(subject.get('predictions')) == 1451

    # We expect some rows to be filtered out
    assert len(subject.get('predictions')) != original_data_length
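load_dataset comes from the project's own helpers; a plausible sketch, assuming it just reads a CSV from the package's dataset directory with pandas (the DATASET_DIR constant and path are assumptions), is:

import pandas as pd
from pathlib import Path

DATASET_DIR = Path('packages/model/datasets')  # hypothetical location

def load_dataset(*, file_name: str) -> pd.DataFrame:
    # Read the requested CSV into a DataFrame for the tests above.
    return pd.read_csv(DATASET_DIR / file_name)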
def outputs_upload(model_id):
    """Upload a CSV file and return the prediction output."""
    if request.method == 'POST':
        auc_score = ''

        # Read the uploaded file into a DataFrame
        df = pd.read_csv(request.files.get('fileupload'))

        result = make_prediction(input_data=df, id_model=model_id)
        predictions = result.get('predictions').tolist()

        if 'default' in df.columns:
            auc_score = get_auc_score(df['default'], result.get('predictions'))

        return jsonify({'result': predictions,
                        'model': model_id,
                        'auc_score': auc_score})
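Because the view reads request.files.get('fileupload'), a client must send the CSV as multipart form data under that field name. A hedged example with requests, where the URL is a placeholder and the model id is only illustrative:

import requests

# Placeholder URL; use the route this view is actually registered under.
url = 'http://localhost:5000/v1/upload/lightgbm_output_v0.1-1588759220.335498'

with open('test.csv', 'rb') as f:
    response = requests.post(url, files={'fileupload': f})

print(response.json()['auc_score'])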
def predict():
    if request.method == "POST":
        # Step 1: Extract POST data from request body as JSON
        json_data = request.get_json()

        # Step 2: Access the model prediction function (also validates data)
        result = make_prediction(input_data=json_data)

        # Step 3: Handle errors
        errors = result.get("errors")
        if errors:
            return Response(json.dumps(errors), status=400)

        # Step 4: Split out results
        predictions = result.get("predictions").tolist()
        version = result.get("version")

        # Step 5: Prepare prediction response
        return jsonify({"predictions": predictions,
                        "version": version,
                        "errors": errors})
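Since invalid input now returns a 400 with the error payload instead of a 200, a quick check with Flask's test client might look like this; the app object, route path and the deliberately malformed field are assumptions:

# Assumes `app` is the Flask application and the view is registered
# at /v1/predict/regression (hypothetical path).
with app.test_client() as client:
    bad_payload = [{'not_a_real_feature': 'oops'}]  # deliberately invalid input
    resp = client.post('/v1/predict/regression', json=bad_payload)
    assert resp.status_code == 400  # errors short-circuit before prediction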
def output_specific_model(model_id):
    # Example model_id: lightgbm_output_v0.1-1588759220.335498
    if request.method == 'POST':
        # Step 1: Extract POST data from request body as JSON
        data = request.get_json()

        # Step 2: Validate the input using marshmallow schema
        data, errors = validate_inputs(input_data=data)

        # Step 3: Model prediction
        result = make_prediction(input_data=data, id_model=model_id)
        predictions = result.get('predictions').tolist()
        version = result.get('version')

        return jsonify({'result': predictions,
                        'model': model_id,
                        'version': version,
                        'errors': errors})
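This view takes model_id from the URL, so the route registration presumably uses a path converter. A minimal sketch of such a registration; the blueprint name and URL rule are assumptions, as the real wiring lives elsewhere:

from flask import Blueprint

# Hypothetical blueprint and URL rule.
prediction_app = Blueprint('prediction_app', __name__)

prediction_app.add_url_rule('/v1/predict/<string:model_id>',
                            view_func=output_specific_model,
                            methods=['POST'])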
def predict(): """Output of default model based on last timestamp""" if request.method == 'POST': # Step 1: Extract POST data from request body as JSON json_data = request.get_json() # _logger.debug(f'Inputs: {json_data}') # Step 2: Validate the input using marshmallow schema input_data, errors = validate_inputs(input_data=json_data) # Step 3: Model prediction result = make_prediction(input_data=input_data) _logger.debug(f'Outputs: {result}') # Step 4: Convert numpy ndarray to list predictions = result.get('predictions').tolist() version = result.get('version') # Step 5: Return the response as JSON return jsonify({ 'predictions': predictions, 'version': version, 'errors': errors })