Example #1
def route(
    user_model: Any, request: Union[prediction_pb2.SeldonMessage, List, Dict]
) -> Union[prediction_pb2.SeldonMessage, List, Dict]:
    """

    Parameters
    ----------
    user_model
       A Seldon user model
    request
       A SeldonMessage proto (or the equivalent JSON list/dict)
    Returns
    -------
       The routing decision wrapped in a response of the same format as the request

    """
    is_proto = isinstance(request, prediction_pb2.SeldonMessage)

    if hasattr(user_model, "route_rest"):
        logger.warning("route_rest is deprecated. Please use route_raw")
        return user_model.route_rest(request)
    elif hasattr(user_model, "route_grpc"):
        logger.warning("route_grpc is deprecated. Please use route_raw")
        return user_model.route_grpc(request)
    else:
        if hasattr(user_model, "route_raw"):
            try:
                return user_model.route_raw(request)
            except SeldonNotImplementedError:
                pass

        if is_proto:
            (features, meta, datadef,
             data_type) = extract_request_parts(request)
            client_response = client_route(user_model,
                                           features,
                                           datadef.names,
                                           meta=meta)
            if not isinstance(client_response, int):
                raise SeldonMicroserviceException(
                    "Routing response must be int but got " +
                    str(client_response))
            client_response_arr = np.array([[client_response]])
            return construct_response(user_model, False, request,
                                      client_response_arr)
        else:
            (features, meta, datadef,
             data_type) = extract_request_parts_json(request)
            class_names = datadef[
                "names"] if datadef and "names" in datadef else []
            client_response = client_route(user_model,
                                           features,
                                           class_names,
                                           meta=meta)
            if not isinstance(client_response, int):
                raise SeldonMicroserviceException(
                    "Routing response must be int but got " +
                    str(client_response))
            client_response_arr = np.array([[client_response]])
            return construct_response_json(user_model, False, request,
                                           client_response_arr)
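For reference, a minimal sketch of a user model this dispatcher can route for; the class name and threshold are hypothetical, only the route() contract (return a plain int) comes from the code above:

import numpy as np

class ThresholdRouter:
    # Hypothetical router: sends traffic to child 0 or child 1 based on
    # the first feature value.
    def route(self, features, feature_names):
        X = np.asarray(features)
        # Must return a plain Python int; the dispatcher above rejects
        # anything else with "Routing response must be int".
        return int(X.ravel()[0] > 0.5)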
Example #2
def client_custom_metrics(
    user_model: SeldonComponent,
    seldon_metrics: SeldonMetrics,
    method: str,
    runtime_metrics: List[Dict] = [],
) -> List[Dict]:
    """
    Get custom metrics for client and update SeldonMetrics.

    This function will return an empty list if the INCLUDE_METRICS_IN_CLIENT_RESPONSE environment
    variable is NOT set to "true" or "True".

    Parameters
    ----------
    user_model
       A Seldon user model
    seldon_metrics
        A SeldonMetrics instance
    method:
        tag of a method that collected the metrics
    runtime_metrics:
        metrics that were defined on runtime
    Returns
    -------
       A list of custom metrics

    """
    if not validate_metrics(runtime_metrics):
        raise SeldonMicroserviceException(
            f"Bad metric created during request: {json.dumps(runtime_metrics)}",
            status_code=500,
            reason="MICROSERVICE_BAD_METRIC",
        )
    seldon_metrics.update(runtime_metrics, method)

    if hasattr(user_model, "metrics"):
        try:
            metrics = user_model.metrics()
            if not validate_metrics(metrics):
                raise SeldonMicroserviceException(
                    f"Bad metric created during request: {json.dumps(metrics)}",
                    status_code=500,
                    reason="MICROSERVICE_BAD_METRIC",
                )

            seldon_metrics.update(metrics, method)
            if INCLUDE_METRICS_IN_CLIENT_RESPONSE:
                return metrics + runtime_metrics
            else:
                return []
        except SeldonNotImplementedError:
            pass
    logger.debug("custom_metrics is not implemented")
    if INCLUDE_METRICS_IN_CLIENT_RESPONSE:
        return runtime_metrics
    else:
        return []
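A sketch of the metrics() hook this helper looks for on the user model; the class and metric names are hypothetical, and the dicts follow the key/type/value shape that validate_metrics checks:

class MyModel:
    def predict(self, X, names=None, meta=None):
        return X

    def metrics(self):
        # One dict per metric; a malformed entry makes validate_metrics()
        # fail and the helper above raise MICROSERVICE_BAD_METRIC.
        return [
            {"type": "COUNTER", "key": "my_requests_total", "value": 1},
            {"type": "GAUGE", "key": "my_queue_depth", "value": 3},
        ]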
Example #3
def handle_generic_exception(e):
    error = SeldonMicroserviceException(
        message=str(e),
        status_code=500,
        reason="MICROSERVICE_INTERNAL_ERROR")
    response = jsonify(error.to_dict())
    logger.error("%s", error.to_dict())
    response.status_code = error.status_code
    return response
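Because the handler replies with jsonify, it presumably lives alongside a Flask app; a minimal registration sketch (the app object is hypothetical):

from flask import Flask

app = Flask(__name__)

# Send every unhandled exception through the handler so clients always
# receive a JSON error body with HTTP status 500.
app.register_error_handler(Exception, handle_generic_exception)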
Example #4
def extract_request_parts_json(
    request: Union[Dict, List]
) -> Tuple[
    Union[np.ndarray, str, bytes, Dict, List],
    Union[Dict, None],
    Union[np.ndarray, str, bytes, Dict, List, None],
    str,
]:
    """

    Parameters
    ----------
    request
       Input request in JSON format

    Returns
    -------
       Key parts of the request extracted

    """
    if not isinstance(request, dict):
        raise SeldonMicroserviceException(f"Invalid request data type: {request}")
    meta = request.get("meta", None)
    datadef_type = None
    datadef = None

    if "data" in request:
        data_type = "data"
        datadef = request["data"]
        if "tensor" in datadef:
            datadef_type = "tensor"
            tensor = datadef["tensor"]
            features = np.array(tensor["values"]).reshape(tensor["shape"])
        elif "ndarray" in datadef:
            datadef_type = "ndarray"
            features = np.array(datadef["ndarray"])
        elif "tftensor" in datadef:
            datadef_type = "tftensor"
            tf_proto = TensorProto()
            json_format.ParseDict(datadef["tftensor"], tf_proto)
            features = tf.make_ndarray(tf_proto)
        else:
            features = np.array([])
    elif "jsonData" in request:
        data_type = "jsonData"
        features = request["jsonData"]
    elif "strData" in request:
        data_type = "strData"
        features = request["strData"]
    elif "binData" in request:
        data_type = "binData"
        features = base64.b64decode(request["binData"])
    else:
        raise SeldonMicroserviceException(f"Invalid request data type: {request}")

    return features, meta, datadef, data_type
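A quick usage sketch of the extractor above with a "data"/"ndarray" payload (the helper is assumed importable, e.g. from seldon_core.utils):

import numpy as np

request = {
    "meta": {"puid": "abc123"},
    "data": {"names": ["a", "b"], "ndarray": [[1, 2], [3, 4]]},
}

features, meta, datadef, data_type = extract_request_parts_json(request)
# features is a (2, 2) numpy array, data_type == "data",
# meta == {"puid": "abc123"} and datadef is the raw "data" dict.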
Example #5
def json_to_seldon_messages(message_json: Dict) -> prediction_pb2.SeldonMessageList:
    message_proto = prediction_pb2.SeldonMessageList()
    try:
        json_format.ParseDict(message_json, message_proto)
        return message_proto
    except json_format.ParseError as pbExc:
        raise SeldonMicroserviceException("Invalid JSON: " + str(pbExc))
Example #6
def get_data_from_proto(
    request: prediction_pb2.SeldonMessage,
) -> Union[np.ndarray, str, bytes, dict]:
    """
    Extract the data payload from the SeldonMessage

    Parameters
    ----------
    request
       SeldonMessage

    Returns
    -------
       Data payload as numpy array or the raw message format. Numpy array will be returned if the "data" field was used.

    """
    data_type = request.WhichOneof("data_oneof")
    if data_type == "data":
        datadef = request.data
        return grpc_datadef_to_array(datadef)
    elif data_type == "binData":
        return request.binData
    elif data_type == "strData":
        return request.strData
    elif data_type == "jsonData":
        return MessageToDict(request.jsonData)
    else:
        raise SeldonMicroserviceException("Unknown data in SeldonMessage")
Example #7
def client_custom_metrics(user_model: SeldonComponent) -> List[Dict]:
    """
    Get custom metrics

    Parameters
    ----------
    user_model
       A Seldon user model

    Returns
    -------
       A list of custom metrics

    """
    if hasattr(user_model, "metrics"):
        try:
            metrics = user_model.metrics()
            if not validate_metrics(metrics):
                j_str = json.dumps(metrics)
                raise SeldonMicroserviceException(
                    "Bad metric created during request: " + j_str,
                    reason="MICROSERVICE_BAD_METRIC",
                )
            return metrics
        except SeldonNotImplementedError:
            pass
    logger.info("custom_metrics is not implemented")
    return []
Example #8
def construct_response(user_model: SeldonComponent, is_request: bool, client_request: prediction_pb2.SeldonMessage,
                       client_raw_response: Union[np.ndarray, str, bytes, dict]) -> prediction_pb2.SeldonMessage:
    """

    Parameters
    ----------
    user_model
       Client user class
    is_request
       Whether this is part of the request flow as opposed to the response flow
    client_request
       The request received
    client_raw_response
       The raw client response from their model

    Returns
    -------
       A SeldonMessage proto response

    """
    data_type = client_request.WhichOneof("data_oneof")
    meta = prediction_pb2.Meta()
    meta_json: Dict = {}
    tags = client_custom_tags(user_model)
    if tags:
        meta_json["tags"] = tags
    metrics = client_custom_metrics(user_model)
    if metrics:
        meta_json["metrics"] = metrics
    if client_request.meta:
        if client_request.meta.puid:
            meta_json["puid"] = client_request.meta.puid
    json_format.ParseDict(meta_json, meta)
    if isinstance(client_raw_response, np.ndarray) or isinstance(client_raw_response, list):
        client_raw_response = np.array(client_raw_response)
        if is_request:
            names = client_feature_names(user_model, client_request.data.names)
        else:
            names = client_class_names(user_model, client_raw_response)
        if data_type == "data":  # If request is using defaultdata then return what was sent if is numeric response else ndarray
            if np.issubdtype(client_raw_response.dtype, np.number):
                default_data_type = client_request.data.WhichOneof("data_oneof")
            else:
                default_data_type = "ndarray"
        else:  # If numeric response return as tensor else return as ndarray
            if np.issubdtype(client_raw_response.dtype, np.number):
                default_data_type = "tensor"
            else:
                default_data_type = "ndarray"
        data = array_to_grpc_datadef(default_data_type, client_raw_response, names)
        return prediction_pb2.SeldonMessage(data=data, meta=meta)
    elif isinstance(client_raw_response, str):
        return prediction_pb2.SeldonMessage(strData=client_raw_response, meta=meta)
    elif isinstance(client_raw_response, dict):
        jsonDataResponse = ParseDict(client_raw_response, prediction_pb2.SeldonMessage().jsonData)
        return prediction_pb2.SeldonMessage(jsonData=jsonDataResponse, meta=meta)
    elif isinstance(client_raw_response, (bytes, bytearray)):
        return prediction_pb2.SeldonMessage(binData=client_raw_response, meta=meta)
    else:
        raise SeldonMicroserviceException("Unknown data type returned as payload:" + client_raw_response)
Example #9
def parse_parameters(parameters: Dict) -> Dict:
    """
    Parse the user object parameters

    Parameters
    ----------
    parameters

    Returns
    -------

    """
    type_dict = {
        "INT": int,
        "FLOAT": float,
        "DOUBLE": float,
        "STRING": str,
        "BOOL": bool,
    }
    parsed_parameters = {}
    for param in parameters:
        name = param.get("name")
        value = param.get("value")
        type_ = param.get("type")
        if type_ == "BOOL":
            parsed_parameters[name] = bool(strtobool(value))
        else:
            try:
                parsed_parameters[name] = type_dict[type_](value)
            except ValueError:
                raise SeldonMicroserviceException(
                    "Bad model parameter: "
                    + name
                    + " with value "
                    + value
                    + " can't be parsed as a "
                    + type_,
                    reason="MICROSERVICE_BAD_PARAMETER",
                )
            except KeyError:
                raise SeldonMicroserviceException(
                    "Bad model parameter type: "
                    + type_
                    + " valid are INT, FLOAT, DOUBLE, STRING, BOOL",
                    reason="MICROSERVICE_BAD_PARAMETER",
                )
    return parsed_parameters
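Usage sketch for parse_parameters: parameters arrive as a list of name/value/type dicts (the names and values below are made up):

raw_parameters = [
    {"name": "n_estimators", "value": "100", "type": "INT"},
    {"name": "learning_rate", "value": "0.05", "type": "FLOAT"},
    {"name": "use_gpu", "value": "false", "type": "BOOL"},
]

parsed = parse_parameters(raw_parameters)
# parsed == {"n_estimators": 100, "learning_rate": 0.05, "use_gpu": False}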
Example #10
def Metadata():
    if metadata_data is None:
        # A None value represents a validation error in the current implementation;
        # if user_model did not define init_metadata, metadata_data would
        # just contain the default values.
        raise SeldonMicroserviceException(
            "Model metadata unavailable",
            status_code=500,
            reason="MICROSERVICE_BAD_METADATA",
        )
    logger.debug("REST Metadata Request")
    logger.debug("REST Metadata Response: %s", metadata_data)
    return jsonify(metadata_data)
Example #11
def json_to_feedback(message_json: Dict) -> prediction_pb2.Feedback:
    """
    Parse a JSON message to a Feedback proto
    Parameters
    ----------
    message_json
       Input json message
    Returns
    -------
       A Feedback proto
    """
    message_proto = prediction_pb2.Feedback()
    try:
        json_format.ParseDict(message_json, message_proto)
        return message_proto
    except json_format.ParseError as pbExc:
        raise SeldonMicroserviceException("Invalid JSON: " + str(pbExc))
Example #12
    def process_event(self, inputs: Union[List, Dict], headers: Dict) -> Dict:
        """
        Process the feedback event and return a SeldonResponse with metrics

        Parameters
        ----------
        inputs
             Input data
        headers
             Header options

        Returns
        -------
             SeldonResponse response

        """
        logging.info("PROCESSING Feedback Event.")
        logging.info(str(headers))
        logging.info("----")

        metrics = []
        output = {}

        if "truth" not in inputs:
            raise SeldonMicroserviceException(
                f"No truth value provided in: {json.dumps(inputs)}",
                status_code=400,
                reason="NO_TRUTH_VALUE",
            )

        # We automatically add any metrics provided in the incoming request
        if "metrics" in inputs:
            metrics.extend(inputs["metrics"])

        # If response is provided then we can perform a comparison
        # TODO: If Header UUID provided we could fetch from ELK to do the evaluation
        if "response" in inputs:
            response = inputs["response"]
            truth = inputs["truth"]
            r = self.model.transform(truth, response)
            metrics.extend(r.metrics)

        seldon_response = SeldonResponse(output or None, None, metrics)

        return seldon_response
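A minimal feedback payload this handler accepts (field names taken from the checks above; the values and metric entry are illustrative):

feedback_event = {
    "truth": [[1]],
    "response": [[0]],
    "metrics": [{"type": "COUNTER", "key": "seen_feedback", "value": 1}],
}
# A missing "truth" key raises a 400 NO_TRUTH_VALUE error; "response" is
# optional and only triggers the model.transform() comparison when present.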
Example #13
def json_to_seldon_message(message_json: Dict) -> prediction_pb2.SeldonMessage:
    """
    Parses JSON input to a SeldonMessage proto
    Parameters
    ----------
    message_json
       JSON input

    Returns
    -------
      SeldonMessage
    """
    if message_json is None:
        message_json = {}
    message_proto = prediction_pb2.SeldonMessage()
    try:
        json_format.ParseDict(message_json, message_proto)
        return message_proto
    except json_format.ParseError as pbExc:
        raise SeldonMicroserviceException("Invalid JSON: " + str(pbExc))
Example #14
def client_aggregate(user_model: SeldonComponent, features_list: List[Union[np.ndarray, str, bytes]],
                     feature_names_list: List) -> Union[np.ndarray, List, str, bytes]:
    """
    Aggregate payloads

    Parameters
    ----------
    user_model
       A Seldon user model
    features_list
       A list of payloads
    feature_names_list
       Column names for payloads
    Returns
    -------
       An aggregated payload
    """
    try:
        return user_model.aggregate(features_list, feature_names_list)
    except (NotImplementedError, AttributeError):
        raise SeldonMicroserviceException("Aggregate not defined")
Example #15
def client_custom_metrics(user_model: SeldonComponent,
                          seldon_metrics: SeldonMetrics) -> List[Dict]:
    """
    Get custom metrics for client and update SeldonMetrics.

    This function will return an empty list if the INCLUDE_METRICS_IN_CLIENT_RESPONSE environment
    variable is NOT set to "true" or "True".

    Parameters
    ----------
    user_model
       A Seldon user model
    seldon_metrics
        A SeldonMetrics instance

    Returns
    -------
       A list of custom metrics

    """
    if hasattr(user_model, "metrics"):
        try:
            metrics = user_model.metrics()
            if not validate_metrics(metrics):
                j_str = json.dumps(metrics)
                raise SeldonMicroserviceException(
                    "Bad metric created during request: " + j_str,
                    reason="MICROSERVICE_BAD_METRIC",
                )

            seldon_metrics.update(metrics)
            if INCLUDE_METRICS_IN_CLIENT_RESPONSE:
                return metrics
            else:
                return []
        except SeldonNotImplementedError:
            pass
    logger.debug("custom_metrics is not implemented")
    return []
Example #16
def client_route(user_model: SeldonComponent, features: Union[np.ndarray, str, bytes],
                 feature_names: Iterable[str]) -> int:
    """
    Get routing from user model

    Parameters
    ----------
    user_model
       A Seldon user model
    features
       Payload
    feature_names
       Columns for payload

    Returns
    -------
       Routing index for one of children
    """
    try:
        return user_model.route(features, feature_names)
    except (NotImplementedError, AttributeError):
        raise SeldonMicroserviceException("Route not defined")
Example #17
def json_to_seldon_model_metadata(
    metadata_json: Dict,
) -> prediction_pb2.SeldonModelMetadata:
    """
    Parses JSON input to SeldonModelMetadata proto

    Parameters
    ----------
    metadata_json
        JSON input

    Returns
    -------
        SeldonModelMetadata
    """
    if metadata_json is None:
        metadata_json = {}
    metadata_proto = prediction_pb2.SeldonModelMetadata()
    try:
        json_format.ParseDict(metadata_json, metadata_proto, ignore_unknown_fields=True)
        return metadata_proto
    except json_format.ParseError as pbExc:
        raise SeldonMicroserviceException(f"Invalid metadata: {pbExc}")
Example #18
def client_custom_metrics(user_model: SeldonComponent) -> List[Dict]:
    """
    Get custom metrics

    Parameters
    ----------
    user_model
       A Seldon user model

    Returns
    -------
       A list of custom metrics

    """
    try:
        metrics = user_model.metrics()
        if not validate_metrics(metrics):
            j_str = json.dumps(metrics)
            raise SeldonMicroserviceException(
                "Bad metric created during request: " + j_str, reason="MICROSERVICE_BAD_METRIC")
        return metrics
    except (NotImplementedError, AttributeError):
        return []
Example #19
def construct_response_json(
    user_model: SeldonComponent,
    is_request: bool,
    client_request_raw: Union[List, Dict],
    client_raw_response: Union[np.ndarray, str, bytes, dict],
    meta: dict = None,
    custom_metrics: List[Dict] = None,
) -> Union[List, Dict]:
    """
    This function converts a raw REST response into a JSON object that has the same structure as
    the SeldonMessage proto. This is necessary because conversion via the SeldonMessage proto
    changes the numeric type of all ints in the JSON into floats.

    Parameters
    ----------
    user_model
       Client user class
    is_request
       Whether this is part of the request flow as opposed to the response flow
    client_request_raw
       The request received in JSON format
    client_raw_response
       The raw client response from their model
    meta
       Optional existing meta whose tags and metrics are merged into the response
    custom_metrics
       Optional list of additional metrics appended to the response meta

    Returns
    -------
       A SeldonMessage JSON response

    """
    response = {}

    if isinstance(client_raw_response, dict):
        response["jsonData"] = client_raw_response
    elif isinstance(client_raw_response, (bytes, bytearray)):
        base64_data = base64.b64encode(client_raw_response)
        response["binData"] = base64_data.decode("utf-8")
    elif isinstance(client_raw_response, str):
        response["strData"] = client_raw_response
    else:
        is_np = isinstance(client_raw_response, np.ndarray)
        is_list = isinstance(client_raw_response, list)
        if not (is_np or is_list):
            raise SeldonMicroserviceException(
                "Unknown data type returned as payload (must be list or np array):"
                + str(client_raw_response))
        if is_np:
            np_client_raw_response = client_raw_response
            list_client_raw_response = client_raw_response.tolist()
        else:
            np_client_raw_response = np.array(client_raw_response)
            list_client_raw_response = client_raw_response

        response["data"] = {}
        if "data" in client_request_raw:
            if np.issubdtype(np_client_raw_response.dtype, np.number):
                if "tensor" in client_request_raw["data"]:
                    default_data_type = "tensor"
                    result_client_response = {
                        "values": np_client_raw_response.ravel().tolist(),
                        "shape": np_client_raw_response.shape,
                    }
                elif "tftensor" in client_request_raw["data"]:
                    default_data_type = "tftensor"
                    tf_json_str = json_format.MessageToJson(
                        tf.make_tensor_proto(np_client_raw_response))
                    result_client_response = json.loads(tf_json_str)
                else:
                    default_data_type = "ndarray"
                    result_client_response = list_client_raw_response
            else:
                default_data_type = "ndarray"
                result_client_response = list_client_raw_response
        else:
            if np.issubdtype(np_client_raw_response.dtype, np.number):
                default_data_type = "tensor"
                result_client_response = {
                    "values": np_client_raw_response.ravel().tolist(),
                    "shape": np_client_raw_response.shape,
                }
            else:
                default_data_type = "ndarray"
                result_client_response = list_client_raw_response

        response["data"][default_data_type] = result_client_response

        if is_request:
            req_names = client_request_raw.get("data", {}).get("names", [])
            names = client_feature_names(user_model, req_names)
        else:
            names = client_class_names(user_model, np_client_raw_response)
        response["data"]["names"] = names

    response["meta"] = {}
    if meta:
        tags = meta.get("tags", {})
        metrics = meta.get("metrics", [])
    else:
        tags = {}
        metrics = []
    custom_tags = client_custom_tags(user_model)
    if custom_tags:
        tags.update(custom_tags)
    if custom_metrics:
        metrics.extend(custom_metrics)
    if tags:
        response["meta"]["tags"] = tags
    if metrics:
        response["meta"]["metrics"] = metrics
    puid = client_request_raw.get("meta", {}).get("puid", None)
    if puid:
        response["meta"]["puid"] = puid

    return response
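Sketch of a call to the JSON constructor above with a numeric numpy response; EchoModel is a hypothetical user model and the helper is assumed importable (e.g. from seldon_core.utils):

import numpy as np

class EchoModel:
    # Hypothetical user model with no custom tags, metrics or class names.
    def predict(self, X, names=None, meta=None):
        return X

request_json = {"data": {"names": ["f0"], "ndarray": [[0.1], [0.9]]}}
scores = np.array([[0.2], [0.8]])

response = construct_response_json(EchoModel(), False, request_json, scores)
# The request used "ndarray", so response["data"]["ndarray"] == [[0.2], [0.8]]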
Example #20
def __init__(self, message):
    SeldonMicroserviceException.__init__(self, message)
Example #21
def aggregate(
    user_model: Any, request: Union[prediction_pb2.SeldonMessageList, List,
                                    Dict]
) -> Union[prediction_pb2.SeldonMessage, List, Dict]:
    """
    Aggregate a list of payloads

    Parameters
    ----------
    user_model
       A Seldon user model
    request
       A SeldonMessageList proto (or the equivalent JSON list/dict)

    Returns
    -------
       Aggregated SeldonMessage proto

    """
    is_proto = isinstance(request, prediction_pb2.SeldonMessageList)

    if hasattr(user_model, "aggregate_rest"):
        logger.warning(
            "aggregate_rest is deprecated. Please use aggregate_raw")
        return user_model.aggregate_rest(request)
    elif hasattr(user_model, "aggregate_grpc"):
        logger.warning(
            "aggregate_grpc is deprecated. Please use aggregate_raw")
        return user_model.aggregate_grpc(request)
    else:
        if hasattr(user_model, "aggregate_raw"):
            try:
                return user_model.aggregate_raw(request)
            except SeldonNotImplementedError:
                pass

        if is_proto:
            features_list = []
            names_list = []

            for msg in request.seldonMessages:
                (features, meta, datadef,
                 data_type) = extract_request_parts(msg)
                features_list.append(features)
                names_list.append(datadef.names)

            client_response = client_aggregate(user_model, features_list,
                                               names_list)
            return construct_response(user_model, False,
                                      request.seldonMessages[0],
                                      client_response)
        else:
            features_list = []
            names_list = []

            if isinstance(request, list):
                msgs = request
            elif "seldonMessages" in request and isinstance(
                    request["seldonMessages"], list):
                msgs = request["seldonMessages"]
            else:
                raise SeldonMicroserviceException(
                    f"Invalid request data type: {request}")

            for msg in msgs:
                (features, meta, datadef,
                 data_type) = extract_request_parts_json(msg)
                class_names = datadef[
                    "names"] if datadef and "names" in datadef else []
                features_list.append(features)
                names_list.append(class_names)

            client_response = client_aggregate(user_model, features_list,
                                               names_list)
            return construct_response_json(user_model, False, msgs[0],
                                           client_response)
Example #22
def aggregate(
    user_model: Any,
    request: Union[prediction_pb2.SeldonMessageList, List, Dict],
    seldon_metrics: SeldonMetrics,
) -> Union[prediction_pb2.SeldonMessage, List, Dict]:
    """
    Aggregate a list of payloads

    Parameters
    ----------
    user_model
       A Seldon user model
    request
       A SeldonMessageList proto (or the equivalent JSON list/dict)
    seldon_metrics
        A SeldonMetrics instance

    Returns
    -------
       Aggregated SeldonMessage proto

    """
    def merge_meta(meta_list):
        tags = {}
        for meta in meta_list:
            if meta:
                tags.update(meta.get("tags", {}))
        return {"tags": tags}

    def merge_metrics(meta_list, custom_metrics):
        metrics = []
        for meta in meta_list:
            if meta:
                metrics.extend(meta.get("metrics", []))
        metrics.extend(custom_metrics)
        return metrics

    is_proto = isinstance(request, prediction_pb2.SeldonMessageList)

    if hasattr(user_model, "aggregate_rest"):
        logger.warning(
            "aggregate_rest is deprecated. Please use aggregate_raw")
        return user_model.aggregate_rest(request)
    elif hasattr(user_model, "aggregate_grpc"):
        logger.warning(
            "aggregate_grpc is deprecated. Please use aggregate_raw")
        return user_model.aggregate_grpc(request)
    else:
        if hasattr(user_model, "aggregate_raw"):
            try:
                response = user_model.aggregate_raw(request)
                handle_raw_custom_metrics(response, seldon_metrics, is_proto,
                                          AGGREGATE_METRIC_METHOD_TAG)
                return response
            except SeldonNotImplementedError:
                pass

        if is_proto:
            features_list = []
            names_list = []
            meta_list = []

            for msg in request.seldonMessages:
                (features, meta, datadef,
                 data_type) = extract_request_parts(msg)
                features_list.append(features)
                names_list.append(datadef.names)
                meta_list.append(meta)

            client_response = client_aggregate(user_model, features_list,
                                               names_list)

            metrics = client_custom_metrics(
                user_model,
                seldon_metrics,
                AGGREGATE_METRIC_METHOD_TAG,
                client_response.metrics,
            )

            return construct_response(
                user_model,
                False,
                request.seldonMessages[0],
                client_response.data,
                merge_meta(meta_list),
                merge_metrics(meta_list, metrics),
                client_response.tags,
            )
        else:
            features_list = []
            names_list = []

            if isinstance(request, list):
                msgs = request
            elif "seldonMessages" in request and isinstance(
                    request["seldonMessages"], list):
                msgs = request["seldonMessages"]
            else:
                raise SeldonMicroserviceException(
                    f"Invalid request data type: {request}")

            meta_list = []
            for msg in msgs:
                (features, meta, datadef,
                 data_type) = extract_request_parts_json(msg)
                class_names = datadef[
                    "names"] if datadef and "names" in datadef else []
                features_list.append(features)
                names_list.append(class_names)
                meta_list.append(meta)

            client_response = client_aggregate(user_model, features_list,
                                               names_list)

            metrics = client_custom_metrics(
                user_model,
                seldon_metrics,
                AGGREGATE_METRIC_METHOD_TAG,
                client_response.metrics,
            )

            return construct_response_json(
                user_model,
                False,
                msgs[0],
                client_response.data,
                merge_meta(meta_list),
                merge_metrics(meta_list, metrics),
                client_response.tags,
            )
Example #23
    def process_event(self, inputs: Union[List, Dict], headers: Dict) -> Dict:
        """
        Process the feedback event and return a SeldonResponse with metrics

        Parameters
        ----------
        inputs
             Input data
        headers
             Header options

        Returns
        -------
             SeldonResponse response

        """
        logging.info("PROCESSING Feedback Event.")
        logging.info(str(headers))
        logging.info("----")

        metrics = []
        output = {}
        truth = None
        response = None
        error = None

        if "truth" not in inputs:
            raise SeldonMicroserviceException(
                f"No truth value provided in: {json.dumps(inputs)}",
                status_code=400,
                reason="NO_TRUTH_VALUE",
            )
        else:
            truth = inputs["truth"]

        # We automatically add any metrics provided in the incoming request
        if "metrics" in inputs:
            metrics.extend(inputs["metrics"])

        # If response is provided then we can perform a comparison
        if "response" in inputs:
            response = inputs["response"]

        elif REQUEST_ID_HEADER_NAME in headers:
            # Otherwise if UUID is provided we can fetch from elasticsearch
            if not self.elasticsearch_client:
                error = "Seldon-Puid provided but elasticsearch client not configured"
            else:
                try:
                    seldon_puid = headers.get(REQUEST_ID_HEADER_NAME, "")
                    seldon_namespace = headers.get(NAMESPACE_HEADER_NAME, "")

                    # Currently only supports SELDON inference type (not kfserving)
                    elasticsearch_index = f"inference-log-seldon-{seldon_namespace}-{SELDON_DEPLOYMENT_ID}-{SELDON_PREDICTOR_ID}"

                    doc = self.elasticsearch_client.get(
                        index=elasticsearch_index, id=seldon_puid)
                    response = (doc.get("_source",
                                        {}).get("response",
                                                None).get("instance", None))
                    if not response:
                        error = f"Elasticsearch index {elasticsearch_index} with id {seldon_puid} did not contain response value"
                except NotFoundError:
                    error = f"Elasticsearch index {elasticsearch_index} with id {seldon_puid} not found"
        else:
            error = "Neither response nor request Puid provided in headers"

        if error:
            raise SeldonMicroserviceException(error,
                                              status_code=400,
                                              reason="METRICS_SERVER_ERROR")

        logging.error(f"{truth}, {response}")
        output = self.model.transform(truth, response)
        seldon_response = SeldonResponse.create(output or None)

        seldon_response.metrics.extend(metrics)

        return seldon_response
Example #24
def predict(self, X, features_names, **kwargs):
    raise SeldonMicroserviceException("foo", status_code=403)
Example #25
    def post(self):
        """
        Handle post request. Extract data. Call event handler and optionally send a reply event.

        """
        if not self.model.ready:
            self.model.load()

        try:
            body = json.loads(self.request.body)
        except json.decoder.JSONDecodeError as e:
            raise tornado.web.HTTPError(
                status_code=HTTPStatus.BAD_REQUEST,
                reason="Unrecognized request format: %s" % e,
            )

        # Extract payload from request
        request_handler: RequestHandler = get_request_handler(
            self.protocol, body)
        request_handler.validate()
        request = request_handler.extract_request()

        # Create event from request body
        event = v02.Event()
        http_marshaller = marshaller.NewDefaultHTTPMarshaller()
        event = http_marshaller.FromRequest(event, self.request.headers,
                                            self.request.body, json.loads)
        logging.debug(json.dumps(event.Properties()))

        # Extract any desired request headers
        headers = {}

        for (key, val) in self.request.headers.get_all():
            headers[key] = val

        response = self.model.process_event(request, headers)
        seldon_response = SeldonResponse.create(response)

        if seldon_response.data is not None:
            responseStr = json.dumps(seldon_response.data)

            # Create event from response if reply_url is active
            if not self.reply_url == "":
                if event.EventID() is None or event.EventID() == "":
                    resp_event_id = uuid.uuid1().hex
                else:
                    resp_event_id = event.EventID()
                revent = (
                    v02.Event().SetContentType("application/json").SetData(
                        responseStr).SetEventID(resp_event_id).SetSource(
                            self.event_source).SetEventType(
                                self.event_type).SetExtensions(
                                    event.Extensions()))
                logging.debug(json.dumps(revent.Properties()))
                sendCloudEvent(revent, self.reply_url)
            self.write(json.dumps(seldon_response.data))

        runtime_metrics = seldon_response.metrics
        if runtime_metrics is not None:
            if not validate_metrics(runtime_metrics):
                raise SeldonMicroserviceException(
                    f"Bad metric created during request: {json.dumps(runtime_metrics)}",
                    status_code=500,
                    reason="MICROSERVICE_BAD_METRIC",
                )
            self.seldon_metrics.update(runtime_metrics, "ce_server")