def test_create_grpc_response_nparray():
    user_model = UserObject()
    request = prediction_pb2.SeldonMessage()
    raw_response = np.array([[1, 2, 3]])
    sm = scu.construct_response(user_model, True, request, raw_response)
    assert sm.data.WhichOneof("data_oneof") == "tensor"
    assert sm.data.tensor.values == [1, 2, 3]
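# Hedged sketch of the fixtures the construct_response tests in this file
# appear to assume: `scu` as seldon_core.utils, `prediction_pb2` as the
# generated Seldon protos, and `UserObject` as a bare user model. The real
# test module may define a richer UserObject; this is illustrative only.
import numpy as np
from google.protobuf import any_pb2
from google.protobuf.struct_pb2 import Value

import seldon_core.utils as scu
from seldon_core.proto import prediction_pb2


class UserObject:
    # No custom hooks defined, so construct_response falls back to its default
    # payload handling (tensor/ndarray/strData/binData/jsonData/customData).
    pass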
def route(
    user_model: Any, request: Union[prediction_pb2.SeldonMessage, List, Dict]
) -> Union[prediction_pb2.SeldonMessage, List, Dict]:
    """
    Parameters
    ----------
    user_model
       A Seldon user model
    request
       A SeldonMessage proto

    Returns
    -------
       The routing response
    """
    is_proto = isinstance(request, prediction_pb2.SeldonMessage)

    if hasattr(user_model, "route_rest"):
        logger.warning("route_rest is deprecated. Please use route_raw")
        return user_model.route_rest(request)
    elif hasattr(user_model, "route_grpc"):
        logger.warning("route_grpc is deprecated. Please use route_raw")
        return user_model.route_grpc(request)
    else:
        if hasattr(user_model, "route_raw"):
            try:
                return user_model.route_raw(request)
            except SeldonNotImplementedError:
                pass

        if is_proto:
            (features, meta, datadef, data_type) = extract_request_parts(request)
            client_response = client_route(
                user_model, features, datadef.names, meta=meta
            )
            if not isinstance(client_response, int):
                raise SeldonMicroserviceException(
                    "Routing response must be int but got " + str(client_response)
                )
            client_response_arr = np.array([[client_response]])
            return construct_response(user_model, False, request, client_response_arr)
        else:
            (features, meta, datadef, data_type) = extract_request_parts_json(request)
            class_names = datadef["names"] if datadef and "names" in datadef else []
            client_response = client_route(
                user_model, features, class_names, meta=meta
            )
            if not isinstance(client_response, int):
                raise SeldonMicroserviceException(
                    "Routing response must be int but got " + str(client_response)
                )
            client_response_arr = np.array([[client_response]])
            return construct_response_json(
                user_model, False, request, client_response_arr
            )
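# A minimal, assumed sketch (not the canonical Seldon API) of a router-style
# user model that the client_route fallback above could dispatch to. route()
# requires the returned value to be an int child index, which it then wraps
# into a 1x1 array response.
import numpy as np


class ThresholdRouter:
    def route(self, features, feature_names=None, meta=None):
        # Send traffic to child 0 or child 1 based on a trivial rule; a real
        # router might implement epsilon-greedy or Thompson sampling.
        return 0 if np.asarray(features, dtype=float).sum() < 10 else 1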
def test_create_grpc_response_ndarray():
    user_model = UserObject()
    request_data = np.array([[5, 6, 7]])
    datadef = scu.array_to_grpc_datadef("ndarray", request_data)
    request = prediction_pb2.SeldonMessage(data=datadef)
    raw_response = np.array([[1, 2, 3]])
    sm = scu.construct_response(user_model, True, request, raw_response)
    assert sm.data.WhichOneof("data_oneof") == "ndarray"
def test_create_grpc_response_strdata():
    user_model = UserObject()
    request_data = np.array([[5, 6, 7]])
    datadef = scu.array_to_grpc_datadef("ndarray", request_data)
    request = prediction_pb2.SeldonMessage(data=datadef)
    raw_response = "hello world"
    sm = scu.construct_response(user_model, True, request, raw_response)
    assert sm.data.WhichOneof("data_oneof") is None
    assert len(sm.strData) > 0
def transform_output(
    user_model: Any, request: Union[prediction_pb2.SeldonMessage, List, Dict]
) -> Union[prediction_pb2.SeldonMessage, List, Dict]:
    """
    Parameters
    ----------
    user_model
       User defined class to handle transform output
    request
       The incoming request

    Returns
    -------
       The transformed response
    """
    is_proto = isinstance(request, prediction_pb2.SeldonMessage)

    if hasattr(user_model, "transform_output_rest"):
        logger.warning(
            "transform_output_rest is deprecated. Please use transform_output_raw"
        )
        return user_model.transform_output_rest(request)
    elif hasattr(user_model, "transform_output_grpc"):
        logger.warning(
            "transform_output_grpc is deprecated. Please use transform_output_raw"
        )
        return user_model.transform_output_grpc(request)
    else:
        if hasattr(user_model, "transform_output_raw"):
            try:
                return user_model.transform_output_raw(request)
            except SeldonNotImplementedError:
                pass

        if is_proto:
            (features, meta, datadef, data_type) = extract_request_parts(request)
            client_response = client_transform_output(
                user_model, features, datadef.names, meta=meta
            )
            return construct_response(user_model, False, request, client_response)
        else:
            (features, meta, datadef, data_type) = extract_request_parts_json(request)
            class_names = datadef["names"] if datadef and "names" in datadef else []
            client_response = client_transform_output(
                user_model, features, class_names, meta=meta
            )
            return construct_response_json(user_model, False, request, client_response)
def test_create_grpc_response_binary():
    user_model = UserObject()
    request_data = np.array([[5, 6, 7]])
    datadef = scu.array_to_grpc_datadef("tensor", request_data)
    request = prediction_pb2.SeldonMessage(data=datadef)
    raw_response = b"binary"
    sm = scu.construct_response(user_model, True, request, raw_response)
    assert sm.data.WhichOneof("data_oneof") is None
    assert len(sm.strData) == 0
    assert len(sm.binData) > 0
def test_create_grpc_response_jsondata():
    user_model = UserObject()
    request_data = np.array([[5, 6, 7]])
    datadef = scu.array_to_grpc_datadef("ndarray", request_data)
    request = prediction_pb2.SeldonMessage(data=datadef)
    raw_response = {"output": "data"}
    sm = scu.construct_response(user_model, True, request, raw_response)
    assert sm.data.WhichOneof("data_oneof") is None
    emptyValue = Value()
    assert sm.jsonData != emptyValue
def test_create_grpc_response_customdata():
    user_model = UserObject()
    request_data = np.array([[5, 6, 7]])
    datadef = scu.array_to_grpc_datadef("ndarray", request_data)
    request = prediction_pb2.SeldonMessage(data=datadef)
    raw_response = any_pb2.Any(value=b"testdata")
    sm = scu.construct_response(user_model, True, request, raw_response)
    assert sm.data.WhichOneof("data_oneof") is None
    emptyValue = Value()
    assert sm.customData != emptyValue
def send_feedback(
    user_model: Any,
    request: prediction_pb2.Feedback,
    predictive_unit_id: str,
    seldon_metrics: SeldonMetrics,
) -> prediction_pb2.SeldonMessage:
    """
    Parameters
    ----------
    user_model
       A Seldon user model
    request
       A Feedback proto
    predictive_unit_id
       The ID of the enclosing container predictive unit. Will be taken from environment.
    seldon_metrics
       A SeldonMetrics instance

    Returns
    -------
       A SeldonMessage proto response
    """
    seldon_metrics.update_reward(request.reward)

    if hasattr(user_model, "send_feedback_rest"):
        logger.warning("send_feedback_rest is deprecated. Please use send_feedback_raw")
        request_json = json_format.MessageToJson(request)
        response_json = user_model.send_feedback_rest(request_json)
        return json_to_seldon_message(response_json)
    elif hasattr(user_model, "send_feedback_grpc"):
        logger.warning("send_feedback_grpc is deprecated. Please use send_feedback_raw")
        response_json = user_model.send_feedback_grpc(request)
        return json_to_seldon_message(response_json)
    else:
        if hasattr(user_model, "send_feedback_raw"):
            try:
                return user_model.send_feedback_raw(request)
            except SeldonNotImplementedError:
                pass

        (datadef_request, features, truth, reward) = extract_feedback_request_parts(
            request
        )
        routing = request.response.meta.routing.get(predictive_unit_id)

        client_response = client_send_feedback(
            user_model, features, datadef_request.names, reward, truth, routing
        )

        if client_response is None:
            client_response = np.array([])
        else:
            client_response = np.array(client_response)

        return construct_response(user_model, False, request.request, client_response)
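# Hedged sketch of a user model that the client_send_feedback fallback above
# could call; the method name and signature below are an assumption based on
# the arguments passed (features, names, reward, truth, routing), not a
# confirmed Seldon interface. Returning None yields an empty response array.
class RewardTracker:
    def __init__(self):
        self.total_reward = 0.0

    def send_feedback(self, features, feature_names, reward, truth, routing=None):
        # Accumulate reward for monitoring; a real router/model would use it
        # to update its internal state.
        self.total_reward += reward
        return None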
def predict(
    user_model: Any, request: Union[prediction_pb2.SeldonMessage, List, Dict]
) -> Union[prediction_pb2.SeldonMessage, List, Dict]:
    """
    Call the user model to get a prediction and package the response

    Parameters
    ----------
    user_model
       User defined class instance
    request
       The incoming request

    Returns
    -------
       The prediction
    """
    is_proto = isinstance(request, prediction_pb2.SeldonMessage)

    if hasattr(user_model, "predict_rest") and not is_proto:
        logger.warning("predict_rest is deprecated. Please use predict_raw")
        return user_model.predict_rest(request)
    elif hasattr(user_model, "predict_grpc") and is_proto:
        logger.warning("predict_grpc is deprecated. Please use predict_raw")
        return user_model.predict_grpc(request)
    else:
        if hasattr(user_model, "predict_raw"):
            try:
                return user_model.predict_raw(request)
            except SeldonNotImplementedError:
                pass

        if is_proto:
            (features, meta, datadef, data_type) = extract_request_parts(request)
            client_response = client_predict(
                user_model, features, datadef.names, meta=meta
            )
            return construct_response(user_model, False, request, client_response)
        else:
            (features, meta, datadef, data_type) = extract_request_parts_json(request)
            class_names = datadef["names"] if datadef and "names" in datadef else []
            client_response = client_predict(
                user_model, features, class_names, meta=meta
            )
            return construct_response_json(user_model, False, request, client_response)
def test_create_grpc_response_text_ndarray():
    user_model = UserObject()
    request_data = np.array([["hello", "world"], ["hello", "another", "world"]])
    datadef = scu.array_to_grpc_datadef("ndarray", request_data)
    request = prediction_pb2.SeldonMessage(data=datadef)
    (features, meta, datadef, data_type) = scu.extract_request_parts(request)
    raw_response = np.array([["hello", "world"], ["here", "another"]])
    sm = scu.construct_response(user_model, True, request, raw_response)
    assert sm.data.WhichOneof("data_oneof") == "ndarray"
    assert type(features[0]) == list
    assert np.array_equal(sm.data.ndarray, raw_response)
    assert datadef == request.data
    assert np.array_equal(features, request_data)
    assert data_type == "data"
def aggregate(
    user_model: Any, request: Union[prediction_pb2.SeldonMessageList, List, Dict]
) -> Union[prediction_pb2.SeldonMessage, List, Dict]:
    """
    Aggregate a list of payloads

    Parameters
    ----------
    user_model
       A Seldon user model
    request
       SeldonMessage proto

    Returns
    -------
       Aggregated SeldonMessage proto
    """
    is_proto = isinstance(request, prediction_pb2.SeldonMessageList)

    if hasattr(user_model, "aggregate_rest"):
        logger.warning("aggregate_rest is deprecated. Please use aggregate_raw")
        return user_model.aggregate_rest(request)
    elif hasattr(user_model, "aggregate_grpc"):
        logger.warning("aggregate_grpc is deprecated. Please use aggregate_raw")
        return user_model.aggregate_grpc(request)
    else:
        if hasattr(user_model, "aggregate_raw"):
            try:
                return user_model.aggregate_raw(request)
            except SeldonNotImplementedError:
                pass

        if is_proto:
            features_list = []
            names_list = []

            for msg in request.seldonMessages:
                (features, meta, datadef, data_type) = extract_request_parts(msg)
                features_list.append(features)
                names_list.append(datadef.names)

            client_response = client_aggregate(user_model, features_list, names_list)

            return construct_response(
                user_model, False, request.seldonMessages[0], client_response
            )
        else:
            features_list = []
            names_list = []

            if isinstance(request, list):
                msgs = request
            elif "seldonMessages" in request and isinstance(
                request["seldonMessages"], list
            ):
                msgs = request["seldonMessages"]
            else:
                raise SeldonMicroserviceException(
                    f"Invalid request data type: {request}"
                )

            for msg in msgs:
                (features, meta, datadef, data_type) = extract_request_parts_json(msg)
                class_names = datadef["names"] if datadef and "names" in datadef else []
                features_list.append(features)
                names_list.append(class_names)

            client_response = client_aggregate(user_model, features_list, names_list)

            return construct_response_json(user_model, False, msgs[0], client_response)
def predict(
    user_model: Any,
    request: Union[prediction_pb2.SeldonMessage, List, Dict, bytes],
    seldon_metrics: SeldonMetrics,
) -> Union[prediction_pb2.SeldonMessage, List, Dict, bytes]:
    """
    Call the user model to get a prediction and package the response

    Parameters
    ----------
    user_model
       User defined class instance
    request
       The incoming request
    seldon_metrics
       A SeldonMetrics instance

    Returns
    -------
       The prediction
    """
    # TODO: Find a way to choose predict_rest or predict_grpc when payload is
    # not decoded
    is_proto = isinstance(request, prediction_pb2.SeldonMessage)

    if hasattr(user_model, "predict_rest") and not is_proto:
        logger.warning("predict_rest is deprecated. Please use predict_raw")
        return user_model.predict_rest(request)
    elif hasattr(user_model, "predict_grpc") and is_proto:
        logger.warning("predict_grpc is deprecated. Please use predict_raw")
        return user_model.predict_grpc(request)
    else:
        if hasattr(user_model, "predict_raw"):
            try:
                response = user_model.predict_raw(request)
                handle_raw_custom_metrics(
                    response, seldon_metrics, is_proto, PREDICT_METRIC_METHOD_TAG
                )
                return response
            except SeldonNotImplementedError:
                pass

        if is_proto:
            (features, meta, datadef, data_type) = extract_request_parts(request)
            client_response = client_predict(
                user_model, features, datadef.names, meta=meta
            )
            metrics = client_custom_metrics(
                user_model,
                seldon_metrics,
                PREDICT_METRIC_METHOD_TAG,
                client_response.metrics,
            )
            return construct_response(
                user_model,
                False,
                request,
                client_response.data,
                meta,
                metrics,
                client_response.tags,
            )
        else:
            (features, meta, datadef, data_type) = extract_request_parts_json(request)
            class_names = datadef["names"] if datadef and "names" in datadef else []
            client_response = client_predict(
                user_model, features, class_names, meta=meta
            )
            metrics = client_custom_metrics(
                user_model,
                seldon_metrics,
                PREDICT_METRIC_METHOD_TAG,
                client_response.metrics,
            )
            return construct_response_json(
                user_model,
                False,
                request,
                client_response.data,
                meta,
                metrics,
                client_response.tags,
            )
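# Hedged sketch of a plain user model the client_predict fallback above could
# dispatch to. The method name and signature are assumptions based on common
# Seldon Core examples; client_predict is assumed to wrap the returned array
# into an object exposing .data, .metrics and .tags as used above.
import numpy as np


class IdentityModel:
    def predict(self, X, features_names=None, meta=None):
        # Echo the features back; a real model would run inference here.
        return np.asarray(X)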
def aggregate(
    user_model: Any,
    request: Union[prediction_pb2.SeldonMessageList, List, Dict],
    seldon_metrics: SeldonMetrics,
) -> Union[prediction_pb2.SeldonMessage, List, Dict]:
    """
    Aggregate a list of payloads

    Parameters
    ----------
    user_model
       A Seldon user model
    request
       SeldonMessage proto
    seldon_metrics
       A SeldonMetrics instance

    Returns
    -------
       Aggregated SeldonMessage proto
    """

    def merge_meta(meta_list):
        tags = {}
        for meta in meta_list:
            if meta:
                tags.update(meta.get("tags", {}))
        return {"tags": tags}

    def merge_metrics(meta_list, custom_metrics):
        metrics = []
        for meta in meta_list:
            if meta:
                metrics.extend(meta.get("metrics", []))
        metrics.extend(custom_metrics)
        return metrics

    is_proto = isinstance(request, prediction_pb2.SeldonMessageList)

    if hasattr(user_model, "aggregate_rest"):
        logger.warning("aggregate_rest is deprecated. Please use aggregate_raw")
        return user_model.aggregate_rest(request)
    elif hasattr(user_model, "aggregate_grpc"):
        logger.warning("aggregate_grpc is deprecated. Please use aggregate_raw")
        return user_model.aggregate_grpc(request)
    else:
        if hasattr(user_model, "aggregate_raw"):
            try:
                response = user_model.aggregate_raw(request)
                handle_raw_custom_metrics(
                    response, seldon_metrics, is_proto, AGGREGATE_METRIC_METHOD_TAG
                )
                return response
            except SeldonNotImplementedError:
                pass

        if is_proto:
            features_list = []
            names_list = []
            meta_list = []

            for msg in request.seldonMessages:
                (features, meta, datadef, data_type) = extract_request_parts(msg)
                features_list.append(features)
                names_list.append(datadef.names)
                meta_list.append(meta)

            client_response = client_aggregate(user_model, features_list, names_list)
            metrics = client_custom_metrics(
                user_model,
                seldon_metrics,
                AGGREGATE_METRIC_METHOD_TAG,
                client_response.metrics,
            )

            return construct_response(
                user_model,
                False,
                request.seldonMessages[0],
                client_response.data,
                merge_meta(meta_list),
                merge_metrics(meta_list, metrics),
                client_response.tags,
            )
        else:
            features_list = []
            names_list = []

            if isinstance(request, list):
                msgs = request
            elif "seldonMessages" in request and isinstance(
                request["seldonMessages"], list
            ):
                msgs = request["seldonMessages"]
            else:
                raise SeldonMicroserviceException(
                    f"Invalid request data type: {request}"
                )

            meta_list = []
            for msg in msgs:
                (features, meta, datadef, data_type) = extract_request_parts_json(msg)
                class_names = datadef["names"] if datadef and "names" in datadef else []
                features_list.append(features)
                names_list.append(class_names)
                meta_list.append(meta)

            client_response = client_aggregate(user_model, features_list, names_list)
            metrics = client_custom_metrics(
                user_model,
                seldon_metrics,
                AGGREGATE_METRIC_METHOD_TAG,
                client_response.metrics,
            )

            return construct_response_json(
                user_model,
                False,
                msgs[0],
                client_response.data,
                merge_meta(meta_list),
                merge_metrics(meta_list, metrics),
                client_response.tags,
            )
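# Hedged sketch of a combiner-style user model for the client_aggregate
# fallback above: it receives one feature array per child SeldonMessage and
# returns a single aggregated array. The method name and signature are
# assumptions, not a confirmed interface.
import numpy as np


class MeanCombiner:
    def aggregate(self, features_list, feature_names_list=None):
        # Element-wise mean across the child responses (assumes equal shapes).
        stacked = np.stack([np.asarray(f, dtype=float) for f in features_list])
        return np.mean(stacked, axis=0)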
def transform_input(
    user_model: Any,
    request: Union[prediction_pb2.SeldonMessage, List, Dict],
    seldon_metrics: SeldonMetrics,
) -> Union[prediction_pb2.SeldonMessage, List, Dict]:
    """
    Parameters
    ----------
    user_model
       User defined class to handle transform input
    request
       The incoming request
    seldon_metrics
       A SeldonMetrics instance

    Returns
    -------
       The transformed request
    """
    is_proto = isinstance(request, prediction_pb2.SeldonMessage)

    if hasattr(user_model, "transform_input_rest"):
        logger.warning(
            "transform_input_rest is deprecated. Please use transform_input_raw"
        )
        return user_model.transform_input_rest(request)
    elif hasattr(user_model, "transform_input_grpc"):
        logger.warning(
            "transform_input_grpc is deprecated. Please use transform_input_raw"
        )
        return user_model.transform_input_grpc(request)
    else:
        if hasattr(user_model, "transform_input_raw"):
            try:
                response = user_model.transform_input_raw(request)
                if is_proto:
                    metrics = seldon_message_to_json(response.meta).get("metrics", [])
                else:
                    metrics = response.get("meta", {}).get("metrics", [])
                seldon_metrics.update(metrics)
                return response
            except SeldonNotImplementedError:
                pass

        if is_proto:
            (features, meta, datadef, data_type) = extract_request_parts(request)
            client_response = client_transform_input(
                user_model, features, datadef.names, meta=meta
            )
            metrics = client_custom_metrics(user_model)
            if seldon_metrics is not None:
                seldon_metrics.update(metrics)
            return construct_response(
                user_model, False, request, client_response, meta, metrics
            )
        else:
            (features, meta, datadef, data_type) = extract_request_parts_json(request)
            class_names = datadef["names"] if datadef and "names" in datadef else []
            client_response = client_transform_input(
                user_model, features, class_names, meta=meta
            )
            metrics = client_custom_metrics(user_model)
            if seldon_metrics is not None:
                seldon_metrics.update(metrics)
            return construct_response_json(
                user_model, False, request, client_response, meta, metrics
            )
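# Hedged sketch of an input-transformer user model for the
# client_transform_input fallback above; the method name and signature are
# assumptions. The transformed array is wrapped back into a response together
# with the request meta and any custom metrics.
import numpy as np


class MinMaxTransformer:
    def transform_input(self, X, features_names=None, meta=None):
        # Scale features to [0, 1] per request; purely illustrative.
        X = np.asarray(X, dtype=float)
        span = X.max() - X.min()
        return (X - X.min()) / span if span else X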