def make_prediction(data: dict) -> dict:
    """Make a prediction with the saved price pipeline.

    NOTE(review): the annotation says ``data`` is a ``dict``, but
    ``pd.read_json`` expects a JSON string/path/buffer — confirm what
    callers actually pass before trusting the annotation.

    NOTE(review): unlike the sibling implementations, this one selects
    ``config.FEATURES`` *before* validation and passes the validated
    frame straight to the pipeline — verify this ordering is intended.

    Args:
        data: Serialized prediction inputs (see note above on type).

    Returns:
        Dict with a ``"predictions"`` key holding the per-row outputs.
    """
    data = pd.read_json(data)
    validated_data = validate_inputs(data[config.FEATURES])
    prediction = _price_pipeline.predict(validated_data)
    # np.exp inverts what is presumably a log-transformed training
    # target — TODO confirm against the training pipeline.
    output = np.exp(prediction)
    response = {"predictions": output}
    return response
def make_prediction(input_data):
    """Make a prediction using a saved model pipeline.

    Args:
        input_data: DataFrame-like of model prediction inputs.

    Returns:
        Array of predictions, one per input row (the log-scale model
        output passed through ``np.exp``).
    """
    # Fix: removed leftover debug print() calls and commented-out code;
    # corrected the docstring, which claimed a version was returned.
    validated_data = validate_inputs(input_data=input_data)
    prediction = _price_pipe.predict(validated_data[config.FEATURES])
    # Invert the log transform applied to the training target.
    output = np.exp(prediction)

    _logger.info(f"Making predictions with model version: {_version} "
                 f"Inputs: {validated_data} "
                 f"Predictions: {output}")

    return output
def make_prediction(
    *,
    input_data: t.Union[pd.DataFrame, dict],
) -> dict:
    """Run the persisted model pipeline on *input_data*.

    Args:
        input_data: Rows of prediction inputs (DataFrame or dict of columns).

    Returns:
        Dict with the per-row predictions and the model version.
    """
    frame = pd.DataFrame(input_data)
    checked = validate_inputs(input_data=frame)
    # The fitted pipeline re-applies all preprocessing before predicting;
    # np.exp maps the log-scale output back to prices.
    raw_preds = _price_pipe.predict(checked[config.FEATURES])
    results = {'predictions': np.exp(raw_preds), 'version': _version}
    _logger.info(f'Making predictions with model version: {_version} '
                 f'Inputs: {checked} '
                 f'Predictions: {results}')
    return results
def make_prediction():
    """Predict on the bundled ``test.csv`` dataset with the saved pipeline.

    Returns:
        Dict with the per-row predictions and the model version.
    """
    frame = pd.DataFrame(load_dataset(file_name='test.csv'))
    checked = validate_inputs(input_data=frame)
    preds = _price_pipe.predict(checked[config.FEATURES])
    # Invert the log transform on the model output.
    results = {'predictions': np.exp(preds), 'version': _version}
    _logger.info(
        f'Making predictions with model version: {_version} '
        f'Inputs: {checked} '
        f'Predictions: {results}')
    return results
def make_prediction(
    *,
    input_data: t.Union[pd.DataFrame, dict],
) -> dict:
    """Make a prediction using a saved model pipeline.

    Args:
        input_data: Array of model prediction inputs.

    Returns:
        Predictions for each input row, as well as the model version.
    """
    data = pd.DataFrame(input_data)
    validated_data = validate_inputs(input_data=data)
    prediction = _price_pipe.predict(validated_data[config.FEATURES])
    # Invert the log transform on the model output.
    output = np.exp(prediction)
    length = data.shape[0]
    results = {"predictions": output, "version": _version}
    # Fix: the concatenated f-strings had no separator before "Length:",
    # producing a run-together log line — added the missing space.
    _logger.info(f"Making predictions with model version: {_version} "
                 f"Inputs: {validated_data} "
                 f"Predictions: {results} "
                 f"Length: {length}")
    return results
def make_prediction(*, input_data: t.Union[pd.DataFrame, dict]) -> dict:
    """Score *input_data* with the persisted price pipeline.

    Args:
        input_data: Prediction inputs as a DataFrame or dict of columns.

    Returns:
        Dict holding the predictions and the model version.
    """
    frame = pd.DataFrame(input_data)
    clean = validate_inputs(input_data=frame)
    # Pipeline output is log-scale; np.exp maps it back.
    log_preds = _price_pipe.predict(clean[config.FEATURES])
    results = {'predictions': np.exp(log_preds), 'version': _version}
    _logger.info(
        f'Making predictions with model version: {_version} '
        f'Inputs: {clean} '
        f'Predictions: {results} ')
    return results
def test_pipeline_predict_takes_validated_input(pipeline_inputs, sample_input_data):
    """A pipeline fitted on training data should score validated inputs."""
    X_train, X_test, y_train, y_test = pipeline_inputs
    pipeline.energy_pipe.fit(X_train, y_train)

    checked = validate_inputs(input_data=sample_input_data)
    preds = pipeline.energy_pipe.predict(checked[config.FEATURES])

    assert preds is not None
def make_prediction_django(*, input_data) -> dict:
    """Validate a single input record and score it with the saved pipeline.

    Args:
        input_data: Mapping of feature name to value for one row.

    Returns:
        Dict with the raw prediction and the model version.
    """
    # index=[0] builds a one-row frame from the scalar mapping.
    frame = pd.DataFrame(input_data, index=[0])
    checked = validate_inputs(input_data=frame)
    preds = _pipe.predict(checked[config.FEATURES])
    results = {"predictions": preds, "version": _version}
    _logger.info(
        f"Making predictions with model version: {_version} "
        f"Inputs: {checked[config.FEATURES]} "
        f"Predictions: {results}"
    )
    return results
def model_predict(*, input_data):
    """Make predictions via the loaded pipeline.

    Args:
        input_data: DataFrame-like of raw prediction inputs.

    Returns:
        Dict with the float predictions and the model version.
    """
    raw_test_data = pd.DataFrame(input_data)
    test_data = validate_inputs(raw_test_data)
    output = loaded_pipeline.predict(test_data[config.FEATURES])
    # Coerce model output to plain floats.
    output = output.astype(float)
    results = {'predictions': output, 'version': _version}
    # Fix: the three concatenated f-strings had no separating spaces,
    # producing one run-together log line — added trailing spaces.
    _logger.info(f'making prediction with model version {_version} '
                 f'inputs used for prediction: {test_data} '
                 f'prediction results: {results}')
    return results
def make_prediction(*, input_data) -> dict:
    """Make a prediction using the saved model pipeline.

    Args:
        input_data: JSON string of model prediction inputs
            (parsed with ``pd.read_json``).

    Returns:
        Dict with the predictions and the model version.
    """
    data = pd.read_json(input_data)
    validated_data = validate_inputs(input_data=data)
    prediction = _price_pipe.predict(validated_data[config.FEATURES])
    # Invert the log transform on the model output.
    output = np.exp(prediction)
    results = {"predictions": output, "version": _version}
    # Fix: missing space after "Inputs: {validated_data}" caused the log
    # segments to run together.
    _logger.info(f"Making predictions with model version: {_version} "
                 f"Inputs: {validated_data} "
                 f"Predictions: {results}")
    return results
def make_prediction(*, input_data) -> dict:
    """Make a prediction using the saved model pipeline."""
    frame = pd.DataFrame(input_data)
    checked = validate_inputs(input_data=frame)
    preds = _titanic_pipe.predict(checked[config.FEATURES])

    results = {"predictions": preds, "version": _version}
    _logger.info(f"Making predictions with model version: {_version} "
                 f"Inputs: {checked} "
                 f"Predictions: {results}")
    return results
def make_prediction(*, input_data) -> dict:
    """Make a prediction using the saved model pipeline."""
    frame = pd.DataFrame(input_data)
    checked = validate_inputs(input_data=frame)
    # Pipeline predicts on the configured feature columns; np.exp maps
    # the log-scale output back.
    log_preds = _price_pipe.predict(checked[config.FEATURES])
    results = {'predictions': np.exp(log_preds), 'version': _version}
    _logger.info(f'Making predictions with model version: {_version} '
                 f'Inputs: {checked} '
                 f'Predictions: {results}')
    return results
def make_prediction(*, input_data) -> dict:
    """Make a prediction using the saved model pipeline."""
    frame = pd.read_json(input_data)  # input arrives as a JSON string
    checked = validate_inputs(input_data=frame)
    preds = _price_pipe.predict(checked[config.FEATURES])
    # Invert the log transform on the model output.
    results = {'predictions': np.exp(preds), 'version': _version}
    _logger.info(f'Making predictions with model version: {_version} '
                 f'Inputs: {checked} '
                 f'Predictions: {results}')
    return results
def make_prediction(*, input_data) -> dict:
    """Make a prediction using the defined pipeline."""
    frame = pd.read_json(input_data)
    checked = validate_inputs(input_data=frame)  # from validation.py
    preds = _price_pipe.predict(checked[config.FEATURES])
    # Invert the log transform on the model output.
    results = {"predictions": np.exp(preds), "version": _version}
    # The log records the outcome: the predicted values and the model
    # version that produced them.
    _logger.info(
        f"Making predictions with model version: {_version} "
        f"Inputs: {checked} "
        f"Predictions: {results}")
    return results
def make_prediction(
    *,
    input_data: t.Union[pd.DataFrame, dict],
) -> dict:
    """Make a prediction using a saved model pipeline.

    Args:
        input_data: Prediction inputs as a DataFrame or dict of columns.

    Returns:
        Dict with predictions (or None), the model version, and any
        validation errors.
    """
    frame = pd.DataFrame(input_data)
    checked, errors = validate_inputs(input_data=frame)
    results = {"predictions": None, "version": _version, "errors": errors}

    # Guard clause: skip scoring entirely when validation failed.
    if errors:
        return results

    preds = _price_pipe.predict(X=checked[config.model_config.features])
    # Invert the log transform per prediction.
    results["predictions"] = [np.exp(p) for p in preds]  # type: ignore
    return results
def test_validate_inputs_test_data(sample_input_data):
    """Validated test inputs must contain no missing values."""
    validated_inputs = validate_inputs(input_data=sample_input_data)
    # Fix: replaced the `== False` comparison (flake8 E712) with the
    # idiomatic truthiness assertion.
    assert not validated_inputs.isnull().any().any()
def test_validate_inputs_forecast_data(forecast_input_data):
    """Validated forecast inputs must contain no missing values."""
    validated_inputs = validate_inputs(input_data=forecast_input_data)
    # Fix: replaced the `== False` comparison (flake8 E712) with the
    # idiomatic truthiness assertion.
    assert not validated_inputs.isnull().any().any()