Example #1
def test_model_template_app_grpc(tracing):
    with start_microservice(join(dirname(__file__), "model-template-app"),
                            tracing=tracing, grpc=True):
        data = np.array([[1, 2]])
        datadef = prediction_pb2.DefaultData(
            tensor=prediction_pb2.Tensor(
                shape=data.shape,
                values=data.flatten()
            )
        )
        request = prediction_pb2.SeldonMessage(data=datadef)
        channel = grpc.insecure_channel("0.0.0.0:5000")
        stub = prediction_pb2_grpc.ModelStub(channel)
        response = stub.Predict(request=request)
        assert response.data.tensor.shape[0] == 1
        assert response.data.tensor.shape[1] == 2
        assert response.data.tensor.values[0] == 1
        assert response.data.tensor.values[1] == 2

        arr = np.array([1, 2])
        datadef = prediction_pb2.DefaultData(
            tensor=prediction_pb2.Tensor(
                shape=(2, 1),
                values=arr
            )
        )
        request = prediction_pb2.SeldonMessage(data=datadef)
        feedback = prediction_pb2.Feedback(request=request, reward=1.0)
        response = stub.SendFeedback(request=feedback)
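
The assertions above work directly on the tensor's shape and values fields; those same two fields are all that is needed to rebuild a numpy array from a response. A minimal sketch, assuming msg is any SeldonMessage whose data.tensor is populated (for instance the Predict response checked above):

# Sketch only: rebuild a numpy array from a SeldonMessage tensor payload.
# `msg` is assumed to be a SeldonMessage with data.tensor set, e.g. the
# Predict response asserted on above.
def tensor_to_numpy(msg):
    return np.array(msg.data.tensor.values).reshape(tuple(msg.data.tensor.shape))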
Example #2
def test_proto_feedback_custom():
    user_object = UserObjectLowLevel()
    app = SeldonModelGRPC(user_object)
    arr = np.array([1, 2])
    datadef = prediction_pb2.DefaultData(
        tensor=prediction_pb2.Tensor(shape=(2, 1), values=arr))
    request = prediction_pb2.SeldonMessage(data=datadef)
    feedback = prediction_pb2.Feedback(request=request, reward=1.0)
    resp = app.SendFeedback(feedback, None)
Example #3
def json_to_feedback(message_json: Dict) -> prediction_pb2.Feedback:
    """
    Parse a JSON message to a Feedback proto
    Parameters
    ----------
    message_json
       Input json message
    Returns
    -------
       A SeldonMessage
    """
    message_proto = prediction_pb2.Feedback()
    try:
        json_format.ParseDict(message_json, message_proto)
        return message_proto
    except json_format.ParseError as pbExc:
        raise SeldonMicroserviceException("Invalid JSON: " + str(pbExc))
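
A quick usage sketch of json_to_feedback follows; the payload keys mirror the Feedback and SeldonMessage fields used throughout these examples, but the concrete values are illustrative only.

# Usage sketch (illustrative values; relies on the proto JSON mapping of
# Feedback -> request/reward and DefaultData -> tensor shape/values).
feedback_json = {
    "request": {"data": {"tensor": {"shape": [1, 2], "values": [1.0, 2.0]}}},
    "reward": 1.0,
}
feedback_proto = json_to_feedback(feedback_json)
assert feedback_proto.reward == 1.0
assert list(feedback_proto.request.data.tensor.values) == [1.0, 2.0]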
Example #4
def test_proto_feedback():
    user_object = UserObject()
    app = SeldonModelGRPC(user_object)
    arr = np.array([1, 2])
    datadef = prediction_pb2.DefaultData(
        tensor=prediction_pb2.Tensor(shape=(2, 1), values=arr))
    meta = prediction_pb2.Meta()
    metaJson = {}
    routing = {"1": 1}
    metaJson["routing"] = routing
    json_format.ParseDict(metaJson, meta)

    request = prediction_pb2.SeldonMessage(data=datadef)
    response = prediction_pb2.SeldonMessage(meta=meta, data=datadef)
    feedback = prediction_pb2.Feedback(request=request,
                                       response=response,
                                       reward=1.0)
    resp = app.SendFeedback(feedback, None)
Example #5
def test_model_template_app_grpc(microservice):
    data = np.array([[1, 2]])
    datadef = prediction_pb2.DefaultData(
        tensor=prediction_pb2.Tensor(shape=data.shape, values=data.flatten()))
    request = prediction_pb2.SeldonMessage(data=datadef)
    channel = grpc.insecure_channel("0.0.0.0:5000")
    stub = prediction_pb2_grpc.ModelStub(channel)
    response = retry_method(stub.Predict, kwargs=dict(request=request))
    assert response.data.tensor.shape[0] == 1
    assert response.data.tensor.shape[1] == 2
    assert response.data.tensor.values[0] == 1
    assert response.data.tensor.values[1] == 2

    arr = np.array([1, 2])
    datadef = prediction_pb2.DefaultData(
        tensor=prediction_pb2.Tensor(shape=(2, 1), values=arr))
    request = prediction_pb2.SeldonMessage(data=datadef)
    feedback = prediction_pb2.Feedback(request=request, reward=1.0)
    response = stub.SendFeedback(request=feedback)
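
retry_method here is a helper from the surrounding test suite and its definition is not part of this snippet; a rough sketch of what such a helper might look like (an assumption only, the real implementation may differ):

import time

def retry_method(method, args=None, kwargs=None, attempts=5, delay=1.0):
    # Hypothetical sketch of a retry helper matching the call above;
    # the actual retry_method used in the test suite may differ.
    args = args or []
    kwargs = kwargs or {}
    for attempt in range(attempts):
        try:
            return method(*args, **kwargs)
        except Exception:
            if attempt == attempts - 1:
                raise
            time.sleep(delay)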
Example #6
def run_send_feedback(args):
    contract = json.load(open(args.contract, 'r'))
    contract = unfold_contract(contract)
    feature_names = [feature["name"] for feature in contract["features"]]
    response_names = [feature["name"] for feature in contract["targets"]]

    REST_url = "http://" + args.host + ":" + str(args.port) + "/send-feedback"

    for i in range(args.n_requests):
        batch = generate_batch(contract, args.batch_size, 'features')
        response = generate_batch(contract, args.batch_size, 'targets')
        if args.prnt:
            print('-' * 40)
            print("SENDING NEW REQUEST:")

        if not args.grpc and not args.fbs:
            REST_request = gen_REST_request(batch,
                                            features=feature_names,
                                            tensor=args.tensor)
            REST_response = gen_REST_request(response,
                                             features=response_names,
                                             tensor=args.tensor)
            reward = 1.0
            REST_feedback = {
                "request": REST_request,
                "response": REST_response,
                "reward": reward
            }
            if args.prnt:
                print(REST_feedback)

            t1 = time()
            response = requests.post(REST_url,
                                     data={"json": json.dumps(REST_feedback)})
            t2 = time()

            if args.prnt:
                print("Time " + str(t2 - t1))
                print(response)
        elif args.grpc:
            GRPC_request = gen_GRPC_request(batch,
                                            features=feature_names,
                                            tensor=args.tensor)
            GRPC_response = gen_GRPC_request(response,
                                             features=response_names,
                                             tensor=args.tensor)
            reward = 1.0
            GRPC_feedback = prediction_pb2.Feedback(request=GRPC_request,
                                                    response=GRPC_response,
                                                    reward=reward)

            if args.prnt:
                print(GRPC_feedback)

            channel = grpc.insecure_channel('{}:{}'.format(
                args.host, args.port))
            stub = prediction_pb2_grpc.ModelStub(channel)
            response = stub.SendFeedback(GRPC_feedback)

            if args.prnt:
                print("RECEIVED RESPONSE:")
                print(response)
                print()
Example #7
def run_send_feedback(args):
    contract = json.load(open(args.contract, 'r'))
    contract = unfold_contract(contract)
    feature_names = [feature["name"] for feature in contract["features"]]
    response_names = [feature["name"] for feature in contract["targets"]]

    REST_url = "http://" + args.host + ":" + str(args.port) + "/send-feedback"

    for i in range(args.n_requests):
        batch = generate_batch(contract, args.batch_size, 'features')
        response = generate_batch(contract, args.batch_size, 'targets')
        if args.prnt:
            print('-' * 40)
            print("SENDING NEW REQUEST:")

        if not args.grpc:
            REST_request = gen_REST_request(batch,
                                            features=feature_names,
                                            tensor=args.tensor)
            REST_response = gen_REST_request(response,
                                             features=response_names,
                                             tensor=args.tensor)
            reward = 1.0
            REST_feedback = {
                "request": REST_request,
                "response": REST_response,
                "reward": reward
            }
            if args.prnt:
                print(REST_feedback)

            if args.oauth_key:
                token = get_token(args)
                headers = {'Authorization': 'Bearer ' + token}
                response = requests.post("http://" + args.host + ":" +
                                         str(args.port) + "/api/v0.1/feedback",
                                         json=REST_feedback,
                                         headers=headers)
            else:
                response = requests.post(
                    "http://" + args.host + ":" + str(args.port) +
                    args.ambassador_path + "/api/v0.1/feedback",
                    json=REST_feedback)

            if args.prnt:
                print(response)

        elif args.grpc:
            GRPC_request = gen_GRPC_request(batch,
                                            features=feature_names,
                                            tensor=args.tensor)
            GRPC_response = gen_GRPC_request(response,
                                             features=response_names,
                                             tensor=args.tensor)
            reward = 1.0
            GRPC_feedback = prediction_pb2.Feedback(request=GRPC_request,
                                                    response=GRPC_response,
                                                    reward=reward)

            if args.prnt:
                print(GRPC_feedback)

            channel = grpc.insecure_channel('{}:{}'.format(
                args.host, args.port))
            stub = prediction_pb2_grpc.SeldonStub(channel)

            if args.oauth_key:
                token = get_token(args)
                metadata = [('oauth_token', token)]
                response = stub.SendFeedback(request=GRPC_feedback,
                                             metadata=metadata)
            else:
                response = stub.SendFeedback(request=GRPC_feedback)

            if args.prnt:
                print("RECEIVED RESPONSE:")
                print(response)
                print()
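
run_send_feedback only consumes an args namespace; the CLI wiring is not shown in these snippets. A hedged sketch of an argparse setup covering just the attributes the function reads (flag names are assumptions, not the tool's actual interface):

import argparse

# Sketch only: flag names are guesses based on the attributes read above
# (args.contract, args.host, args.port, ...); the real CLI may differ.
parser = argparse.ArgumentParser()
parser.add_argument("--contract", required=True, help="path to the contract JSON")
parser.add_argument("--host", default="localhost")
parser.add_argument("--port", type=int, default=5000)
parser.add_argument("--n-requests", type=int, default=1, dest="n_requests")
parser.add_argument("--batch-size", type=int, default=1, dest="batch_size")
parser.add_argument("--grpc", action="store_true")
parser.add_argument("--tensor", action="store_true")
parser.add_argument("--prnt", action="store_true")
parser.add_argument("--oauth-key", default=None, dest="oauth_key")
parser.add_argument("--ambassador-path", default="", dest="ambassador_path")
args = parser.parse_args()
run_send_feedback(args)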