def run_error_server(host, port, exc_value):
    """Serve a minimal API that reports a captured pipeline error.

    Every POST to /predict/ returns the stored exception text with the
    DRUM-specific 513 status, so clients learn why the real server
    could not start.
    """
    model_api = base_api_blueprint()

    @model_api.route("/predict/", methods=["POST"])
    def predict():
        # Surface the original failure on every prediction attempt.
        return {"message": "ERROR: {}".format(exc_value)}, HTTP_513_DRUM_PIPELINE_ERROR

    app = get_flask_app(model_api)
    app.run(host, port)
def _materialize(self, parent_data_objs, user_data):
    """Run the single-threaded Flask prediction server until it exits.

    Registers /health/, /predict/ and /stats/ endpoints plus a blanket
    error handler, then blocks in ``app.run``.  Returns an empty list
    (pipeline convention) after the server stops.

    Raises:
        DrumCommonException: if the server cannot bind to host/port.
    """
    model_api = base_api_blueprint()

    @model_api.route("/health/", methods=["GET"])
    def health():
        return {"message": "OK"}, HTTP_200_OK

    @model_api.route("/predict/", methods=["POST"])
    def predict():
        logger.debug("Entering predict() endpoint")
        self._stats_collector.enable()
        self._stats_collector.mark("start")
        # try/finally guarantees the "finish" mark is recorded and the
        # collector disabled even when do_predict() raises (the exception
        # is then converted to a 500 by handle_exception below).
        try:
            response, response_status = self.do_predict(logger=logger)
        finally:
            self._stats_collector.mark("finish")
            self._stats_collector.disable()
        return response, response_status

    @model_api.route("/stats/", methods=["GET"])
    def stats():
        # Combine current memory usage with accumulated timing reports.
        mem_info = self._memory_monitor.collect_memory_info()
        ret_dict = {"mem_info": mem_info._asdict()}
        self._stats_collector.round()
        ret_dict["time_info"] = {}
        for name in self._stats_collector.get_report_names():
            d = self._stats_collector.dict_report(name)
            ret_dict["time_info"][name] = d
        self._stats_collector.stats_reset()
        return ret_dict, HTTP_200_OK

    @model_api.errorhandler(Exception)
    def handle_exception(e):
        logger.exception(e)
        return {"message": "ERROR: {}".format(e)}, HTTP_500_INTERNAL_SERVER_ERROR

    app = get_flask_app(model_api)
    # Keep werkzeug's log verbosity in sync with our own logger.
    logging.getLogger("werkzeug").setLevel(logger.getEffectiveLevel())
    host = self._params.get("host", None)
    port = self._params.get("port", None)
    try:
        app.run(host, port, threaded=False)
    except OSError as e:
        # Chain the OSError so the root cause (e.g. address already in
        # use) is preserved in the traceback.
        raise DrumCommonException(
            "{}: host: {}; port: {}".format(e, host, port)
        ) from e
    if self._stats_collector:
        self._stats_collector.print_reports()
    return []
def _materialize(self, parent_data_objs, user_data):
    """Run a Flask prediction server that accepts CSV uploads.

    /predict/ expects a CSV file under the ``X`` form key, feeds it to
    the predictor and returns JSON predictions (1 column: regression,
    2 columns: binary classification).  Returns an empty list (pipeline
    convention) after the server stops.

    Raises:
        DrumCommonException: if the server cannot bind to host/port.
    """
    model_api = base_api_blueprint()

    @model_api.route("/health/", methods=["GET"])
    def health():
        return {"message": "OK"}, HTTP_200_OK

    @model_api.route("/predict/", methods=["POST"])
    def predict():
        response_status = HTTP_200_OK
        file_key = "X"
        logger.debug("Entering predict() endpoint")
        REGRESSION_PRED_COLUMN = "Predictions"
        filename = request.files[file_key] if file_key in request.files else None
        logger.debug("Filename provided under X key: {}".format(filename))
        if not filename:
            wrong_key_error_message = "Samples should be provided as a csv file under `{}` key.".format(
                file_key
            )
            logger.error(wrong_key_error_message)
            response_status = HTTP_422_UNPROCESSABLE_ENTITY
            return {"message": "ERROR: " + wrong_key_error_message}, response_status
        in_df = pd.read_csv(filename)
        # TODO labels have to be provided as command line arguments or within configure endpoint
        self._stats_collector.enable()
        self._stats_collector.mark("start")
        # try/finally guarantees the "finish" mark is recorded and the
        # collector disabled even when prediction or serialization
        # raises (the exception becomes a 500 via handle_exception).
        try:
            out_df = self._predictor.predict(in_df)
            num_columns = len(out_df.columns)
            # float32 is not JSON serializable, so cast to float, which is float64
            out_df = out_df.astype("float")
            if num_columns == 1:
                # df.to_json() is much faster.
                # But as it returns string, we have to assemble final json using strings.
                df_json = out_df[REGRESSION_PRED_COLUMN].to_json(orient="records")
                response_json = '{{"predictions":{df_json}}}'.format(df_json=df_json)
            elif num_columns == 2:
                # df.to_json() is much faster.
                # But as it returns string, we have to assemble final json using strings.
                df_json_str = out_df.to_json(orient="records")
                response_json = '{{"predictions":{df_json}}}'.format(df_json=df_json_str)
            else:
                ret_str = (
                    "Predictions dataframe has {} columns; "
                    "Expected: 1 - for regression, 2 - for binary classification.".format(
                        num_columns
                    )
                )
                response_json = {"message": "ERROR: " + ret_str}
                response_status = HTTP_422_UNPROCESSABLE_ENTITY
        finally:
            self._stats_collector.mark("finish")
            self._stats_collector.disable()
        return response_json, response_status

    @model_api.route("/stats/", methods=["GET"])
    def stats():
        # Combine current memory usage with accumulated timing reports.
        mem_info = self._memory_monitor.collect_memory_info()
        ret_dict = {"mem_info": mem_info._asdict()}
        self._stats_collector.round()
        ret_dict["time_info"] = {}
        for name in self._stats_collector.get_report_names():
            d = self._stats_collector.dict_report(name)
            ret_dict["time_info"][name] = d
        self._stats_collector.stats_reset()
        return ret_dict, HTTP_200_OK

    @model_api.errorhandler(Exception)
    def handle_exception(e):
        logger.exception(e)
        return {"message": "ERROR: {}".format(e)}, HTTP_500_INTERNAL_SERVER_ERROR

    app = get_flask_app(model_api)
    # Keep werkzeug's log verbosity in sync with our own logger.
    logging.getLogger("werkzeug").setLevel(logger.getEffectiveLevel())
    host = self._params.get("host", None)
    port = self._params.get("port", None)
    try:
        app.run(host, port, threaded=self._threaded)
    except OSError as e:
        # Chain the OSError so the root cause (e.g. address already in
        # use) is preserved in the traceback.
        raise DrumCommonException("{}: host: {}; port: {}".format(e, host, port)) from e
    if self._stats_collector:
        self._stats_collector.print_reports()
    return []
def _materialize(self, parent_data_objs, user_data):
    """Run the full-featured Flask prediction server until it exits.

    Registers capability/info/health endpoints plus structured,
    transform and unstructured prediction routes (old and new URL
    spellings), a /stats/ endpoint and a blanket error handler, then
    blocks in ``app.run``.  Returns an empty list (pipeline
    convention) after the server stops.

    Raises:
        DrumCommonException: if the server cannot bind to host/port.
    """
    model_api = base_api_blueprint()

    @model_api.route("/capabilities/", methods=["GET"])
    def capabilities():
        return make_predictor_capabilities(self._predictor.supported_payload_formats)

    @model_api.route("/info/", methods=["GET"])
    def info():
        # Augment predictor-reported info with runtime/server metadata.
        model_info = self._predictor.model_info()
        model_info.update({ModelInfoKeys.LANGUAGE: self._run_language.value})
        model_info.update({ModelInfoKeys.DRUM_VERSION: drum_version})
        model_info.update({ModelInfoKeys.DRUM_SERVER: "flask"})
        model_info.update(
            {ModelInfoKeys.MODEL_METADATA: read_model_metadata_yaml(self._code_dir)}
        )
        return model_info, HTTP_200_OK

    @model_api.route("/health/", methods=["GET"])
    def health():
        return {"message": "OK"}, HTTP_200_OK

    @model_api.route("/predictions/", methods=["POST"])
    @model_api.route("/predict/", methods=["POST"])
    def predict():
        logger.debug("Entering predict() endpoint")
        self._stats_collector.enable()
        self._stats_collector.mark("start")
        # finally: always close the timing mark and disable the
        # collector, even when the handler raises.
        try:
            response, response_status = self.do_predict_structured(logger=logger)
        finally:
            self._stats_collector.mark("finish")
            self._stats_collector.disable()
        return response, response_status

    @model_api.route("/transform/", methods=["POST"])
    def transform():
        logger.debug("Entering transform() endpoint")
        self._stats_collector.enable()
        self._stats_collector.mark("start")
        try:
            response, response_status = self.do_transform(logger=logger)
        finally:
            self._stats_collector.mark("finish")
            self._stats_collector.disable()
        return response, response_status

    @model_api.route("/predictionsUnstructured/", methods=["POST"])
    @model_api.route("/predictUnstructured/", methods=["POST"])
    def predict_unstructured():
        logger.debug("Entering predict() endpoint")
        self._stats_collector.enable()
        self._stats_collector.mark("start")
        try:
            response, response_status = self.do_predict_unstructured(logger=logger)
        finally:
            self._stats_collector.mark("finish")
            self._stats_collector.disable()
        return response, response_status

    @model_api.route("/stats/", methods=["GET"])
    def stats():
        # Combine current memory usage with accumulated timing reports.
        mem_info = self._memory_monitor.collect_memory_info()
        ret_dict = {"mem_info": mem_info._asdict()}
        self._stats_collector.round()
        ret_dict["time_info"] = {}
        for name in self._stats_collector.get_report_names():
            d = self._stats_collector.dict_report(name)
            ret_dict["time_info"][name] = d
        self._stats_collector.stats_reset()
        return ret_dict, HTTP_200_OK

    @model_api.errorhandler(Exception)
    def handle_exception(e):
        logger.exception(e)
        return {"message": "ERROR: {}".format(e)}, HTTP_500_INTERNAL_SERVER_ERROR

    # Disables warning for development server
    cli = sys.modules["flask.cli"]
    cli.show_server_banner = lambda *x: None

    app = get_flask_app(model_api)
    host = self._params.get("host", None)
    port = self._params.get("port", None)
    try:
        app.run(host, port, threaded=False)
    except OSError as e:
        # Chain the OSError so the root cause (e.g. address already in
        # use) is preserved in the traceback.
        raise DrumCommonException("{}: host: {}; port: {}".format(e, host, port)) from e
    if self._stats_collector:
        self._stats_collector.print_reports()
    return []