Esempio n. 1
0
def grpc_request_ambassador(deploymentName,
                            namespace,
                            endpoint="localhost:8004",
                            data_size=5,
                            rows=1,
                            data=None,
                            headers=None):
    """Send a gRPC Predict request to a Seldon deployment through Ambassador.

    Parameters
    ----------
    deploymentName : str
        Seldon deployment name, sent as the ``seldon`` metadata header.
    namespace : str or None
        Kubernetes namespace; added as the ``namespace`` header when given.
    endpoint : str
        ``host:port`` of the Ambassador gRPC endpoint.
    data_size : int
        Width of the random payload generated when ``data`` is None.
    rows : int
        Number of rows of the random payload generated when ``data`` is None.
    data : array-like, optional
        Payload tensor; flattened before sending (must expose ``shape`` and
        ``flatten()``, e.g. a numpy array).
    headers : dict, optional
        Extra metadata key/value pairs appended to the call metadata.

    Returns
    -------
    The SeldonMessage response returned by ``Predict``.
    """
    if data is None:
        shape, arr = create_random_data(data_size, rows)
    else:
        shape = data.shape
        arr = data.flatten()
    datadef = prediction_pb2.DefaultData(
        tensor=prediction_pb2.Tensor(shape=shape, values=arr))
    request = prediction_pb2.SeldonMessage(data=datadef)
    channel = grpc.insecure_channel(endpoint)
    stub = prediction_pb2_grpc.SeldonStub(channel)
    if namespace is None:
        metadata = [('seldon', deploymentName)]
    else:
        metadata = [('seldon', deploymentName), ('namespace', namespace)]
    # PEP 8 idiom: "x is not None", not "not x is None".
    if headers is not None:
        metadata.extend(headers.items())
    try:
        return stub.Predict(request=request, metadata=metadata)
    finally:
        # Release the channel on every exit path; the original leaked it.
        channel.close()
Esempio n. 2
0
def grpc_request_ambassador(
    deployment_name,
    namespace,
    endpoint="localhost:8004",
    data_size=5,
    rows=1,
    data=None,
):
    """Send a gRPC Predict request to a Seldon deployment through Ambassador.

    Parameters
    ----------
    deployment_name : str
        Seldon deployment name, sent as the ``seldon`` metadata header.
    namespace : str or None
        Kubernetes namespace; added as the ``namespace`` header when given.
    endpoint : str
        ``host:port`` of the Ambassador gRPC endpoint.
    data_size : int
        Width of the random payload generated when ``data`` is None.
    rows : int
        Number of rows of the random payload generated when ``data`` is None.
    data : array-like, optional
        Payload tensor; flattened before sending.

    Returns
    -------
    The SeldonMessage response returned by ``Predict``.
    """
    if data is None:
        shape, arr = create_random_data(data_size, rows)
    else:
        shape = data.shape
        arr = data.flatten()
    datadef = prediction_pb2.DefaultData(
        tensor=prediction_pb2.Tensor(shape=shape, values=arr))
    request = prediction_pb2.SeldonMessage(data=datadef)
    channel = grpc.insecure_channel(endpoint)
    stub = prediction_pb2_grpc.SeldonStub(channel)
    if namespace is None:
        metadata = [("seldon", deployment_name)]
    else:
        metadata = [("seldon", deployment_name), ("namespace", namespace)]
    try:
        # try/finally closes the channel on every exit path; the original
        # duplicated close() in an except clause and re-raised with
        # "raise e", which appends an extra frame to the traceback and
        # misses BaseException subclasses.
        return stub.Predict(request=request, metadata=metadata)
    finally:
        channel.close()
Esempio n. 3
0
def grpc_request_ambassador_bindata(deploymentName, namespace,
                                    endpoint="localhost:8080", data=None):
    """Send a gRPC Predict request carrying raw binary data (``binData``).

    Parameters
    ----------
    deploymentName : str
        Seldon deployment name, sent as the ``seldon`` metadata header.
    namespace : str or None
        Kubernetes namespace; added as the ``namespace`` header when given.
    endpoint : str
        ``host:port`` of the Ambassador gRPC endpoint.
    data : bytes, optional
        Binary payload placed in the SeldonMessage ``binData`` field.

    Returns
    -------
    The SeldonMessage response returned by ``Predict``.
    """
    request = prediction_pb2.SeldonMessage(binData=data)
    channel = grpc.insecure_channel(endpoint)
    stub = prediction_pb2_grpc.SeldonStub(channel)
    if namespace is None:
        metadata = [('seldon', deploymentName)]
    else:
        metadata = [('seldon', deploymentName), ('namespace', namespace)]
    try:
        return stub.Predict(request=request, metadata=metadata)
    finally:
        # Release the channel on every exit path; the original leaked it.
        channel.close()
def grpc_request_api_gateway(oauth_key, oauth_secret, namespace,
                             rest_endpoint="localhost:8002",
                             grpc_endpoint="localhost:8003",
                             data_size=5, rows=1, data=None):
    """Send a gRPC Predict request through the Seldon API gateway using OAuth.

    Parameters
    ----------
    oauth_key, oauth_secret : str
        OAuth credentials exchanged for a token via the REST endpoint.
    namespace : str or None
        Passed through to ``get_token``.
    rest_endpoint : str
        ``host:port`` used to obtain the OAuth token.
    grpc_endpoint : str
        ``host:port`` of the gateway gRPC endpoint.
    data_size : int
        Width of the random payload generated when ``data`` is None.
    rows : int
        Number of rows of the random payload generated when ``data`` is None.
    data : array-like, optional
        Payload tensor; flattened before sending.

    Returns
    -------
    The SeldonMessage response returned by ``Predict``.
    """
    token = get_token(oauth_key, oauth_secret, namespace, rest_endpoint)
    if data is None:
        shape, arr = create_random_data(data_size, rows)
    else:
        shape = data.shape
        arr = data.flatten()
    datadef = prediction_pb2.DefaultData(
        tensor=prediction_pb2.Tensor(shape=shape, values=arr))
    request = prediction_pb2.SeldonMessage(data=datadef)
    channel = grpc.insecure_channel(grpc_endpoint)
    stub = prediction_pb2_grpc.SeldonStub(channel)
    # Token travels as call metadata rather than in the message body.
    metadata = [('oauth_token', token)]
    try:
        return stub.Predict(request=request, metadata=metadata)
    finally:
        # Release the channel on every exit path; the original leaked it.
        channel.close()
def grpc_request_ambassador_metadata(
    deployment_name, namespace, endpoint=API_AMBASSADOR, model_name=None
):
    """Fetch graph- or model-level metadata from a Seldon deployment via gRPC.

    Parameters
    ----------
    deployment_name : str
        Seldon deployment name, sent as the ``seldon`` metadata header.
    namespace : str or None
        Kubernetes namespace; added as the ``namespace`` header when given.
    endpoint : str
        ``host:port`` of the Ambassador gRPC endpoint.
    model_name : str, optional
        When given, query metadata for that single model (``ModelMetadata``);
        otherwise query the whole inference graph (``GraphMetadata``).

    Returns
    -------
    The metadata response from the corresponding stub call.
    """
    if model_name is None:
        request = empty_pb2.Empty()
    else:
        request = prediction_pb2.SeldonModelMetadataRequest(name=model_name)
    channel = grpc.insecure_channel(endpoint)
    stub = prediction_pb2_grpc.SeldonStub(channel)
    if namespace is None:
        metadata = [("seldon", deployment_name)]
    else:
        metadata = [("seldon", deployment_name), ("namespace", namespace)]
    try:
        # try/finally closes the channel on every exit path; the original
        # duplicated close() in an except clause and used "raise e".
        if model_name is None:
            return stub.GraphMetadata(request=request, metadata=metadata)
        return stub.ModelMetadata(request=request, metadata=metadata)
    finally:
        channel.close()
Esempio n. 6
0
def run_predict(args):
    """Send ``args.n_requests`` prediction requests built from a contract file.

    Reads the JSON contract at ``args.contract``, generates random feature
    batches, and sends each either over REST (``requests.post``) or gRPC
    (``SeldonStub.Predict``) depending on ``args.grpc``. When
    ``args.oauth_key`` is set, an OAuth token is obtained and attached
    (Bearer header for REST, ``oauth_token`` metadata for gRPC); note the
    REST oauth path does NOT include ``args.ambassador_path``, matching the
    original behavior. Responses are printed when ``args.prnt`` is set.
    """
    # Context manager closes the contract file; the original leaked the handle.
    with open(args.contract, 'r') as contract_file:
        contract = json.load(contract_file)
    contract = unfold_contract(contract)
    feature_names = [feature["name"] for feature in contract["features"]]

    for i in range(args.n_requests):
        batch = generate_batch(contract, args.batch_size, 'features')
        if args.prnt:
            print('-' * 40)
            print("SENDING NEW REQUEST:")

        if not args.grpc:
            headers = {}
            REST_request = gen_REST_request(batch,
                                            features=feature_names,
                                            tensor=args.tensor)
            if args.prnt:
                print(REST_request)

            if args.oauth_key:
                token = get_token(args)
                headers = {'Authorization': 'Bearer ' + token}
                # OAuth path bypasses the ambassador prefix (original behavior).
                url = ("http://" + args.host + ":" + str(args.port) +
                       "/api/v0.1/predictions")
            else:
                url = ("http://" + args.host + ":" + str(args.port) +
                       args.ambassador_path + "/api/v0.1/predictions")
            response = requests.post(url, json=REST_request, headers=headers)

            jresp = response.json()

            if args.prnt:
                print("RECEIVED RESPONSE:")
                print(jresp)
                print()
        else:
            GRPC_request = gen_GRPC_request(batch,
                                            features=feature_names,
                                            tensor=args.tensor)
            if args.prnt:
                print(GRPC_request)

            channel = grpc.insecure_channel('{}:{}'.format(
                args.host, args.port))
            try:
                stub = prediction_pb2_grpc.SeldonStub(channel)

                if args.oauth_key:
                    token = get_token(args)
                    metadata = [('oauth_token', token)]
                    response = stub.Predict(request=GRPC_request,
                                            metadata=metadata)
                else:
                    response = stub.Predict(request=GRPC_request)
            finally:
                # Close the per-request channel; the original leaked one
                # channel per loop iteration.
                channel.close()

            if args.prnt:
                print("RECEIVED RESPONSE:")
                print(response)
                print()
Esempio n. 7
0
def run_send_feedback(args):
    """Send ``args.n_requests`` feedback messages built from a contract file.

    Reads the JSON contract at ``args.contract``, generates random
    feature/target batches, wraps them as request/response pairs with a
    fixed reward of 1.0, and sends each either over REST
    (``/api/v0.1/feedback``) or gRPC (``SeldonStub.SendFeedback``)
    depending on ``args.grpc``. When ``args.oauth_key`` is set, an OAuth
    token is attached (Bearer header for REST, ``oauth_token`` metadata
    for gRPC). Output is printed when ``args.prnt`` is set.
    """
    # NOTE(review): file handle is never closed — prefer a `with` block.
    contract = json.load(open(args.contract, 'r'))
    contract = unfold_contract(contract)
    feature_names = [feature["name"] for feature in contract["features"]]
    response_names = [feature["name"] for feature in contract["targets"]]

    # NOTE(review): REST_url is built but never used below.
    REST_url = "http://" + args.host + ":" + str(args.port) + "/send-feedback"

    for i in range(args.n_requests):
        batch = generate_batch(contract, args.batch_size, 'features')
        response = generate_batch(contract, args.batch_size, 'targets')
        if args.prnt:
            print('-' * 40)
            print("SENDING NEW REQUEST:")

        if not args.grpc:
            REST_request = gen_REST_request(batch,
                                            features=feature_names,
                                            tensor=args.tensor)
            REST_response = gen_REST_request(response,
                                             features=response_names,
                                             tensor=args.tensor)
            # Fixed positive reward for every feedback message.
            reward = 1.0
            REST_feedback = {
                "request": REST_request,
                "response": REST_response,
                "reward": reward
            }
            if args.prnt:
                print(REST_feedback)

            if args.oauth_key:
                token = get_token(args)
                headers = {'Authorization': 'Bearer ' + token}
                # OAuth path bypasses args.ambassador_path.
                response = requests.post("http://" + args.host + ":" +
                                         str(args.port) + "/api/v0.1/feedback",
                                         json=REST_feedback,
                                         headers=headers)
            else:
                # NOTE(review): bug — `headers` is only assigned in the
                # oauth branch above, so this raises UnboundLocalError when
                # args.oauth_key is falsy; initialize headers = {} before
                # the branch (as run_predict does).
                response = requests.post(
                    "http://" + args.host + ":" + str(args.port) +
                    args.ambassador_path + "/api/v0.1/feedback",
                    json=REST_feedback,
                    headers=headers)

            if args.prnt:
                print(response)

        elif args.grpc:
            GRPC_request = gen_GRPC_request(batch,
                                            features=feature_names,
                                            tensor=args.tensor)
            GRPC_response = gen_GRPC_request(response,
                                             features=response_names,
                                             tensor=args.tensor)
            reward = 1.0
            GRPC_feedback = prediction_pb2.Feedback(request=GRPC_request,
                                                    response=GRPC_response,
                                                    reward=reward)

            if args.prnt:
                print(GRPC_feedback)

            # NOTE(review): channel is never closed — leaked each iteration.
            channel = grpc.insecure_channel('{}:{}'.format(
                args.host, args.port))
            stub = prediction_pb2_grpc.SeldonStub(channel)

            if args.oauth_key:
                token = get_token(args)
                metadata = [('oauth_token', token)]
                response = stub.SendFeedback(request=GRPC_feedback,
                                             metadata=metadata)
            else:
                response = stub.SendFeedback(request=GRPC_feedback)

            if args.prnt:
                print("RECEIVED RESPONSE:")
                print()