def withoutproxy(ip, port, inputt):
    """Send `inputt` straight to the prediction server (no proxy) and
    return the model's output stream.

    Args:
        ip: Host of the prediction server.
        port: Port of the prediction server.
        inputt: Payload forwarded as a string input stream.

    Returns:
        The ``outputStream`` field of the server's response.
    """
    channel = grpc.insecure_channel('%s:%s' % (ip, port))
    stub = model_pb2_grpc.PredictionServerStub(channel)
    # NOTE(review): the original issued an extra PredictServiceStub.Predict
    # RPC first, whose response was immediately overwritten by the call
    # below — a copy-paste artifact; the dead RPC has been removed.
    response = stub.downstream(
        prediction_pb2.request(
            input_=model_pb2.input(inputType='string', inputStream=inputt)))

    # Brief pause kept from the original; presumably gives the server time
    # to settle — TODO confirm it is still needed.
    time.sleep(1)
    return response.outputStream
def oursystem(ip, port, inputt):
    """Route `inputt` through the proxy server's downstream RPC.

    Args:
        ip: Proxy server host.
        port: Proxy server port.
        inputt: Payload forwarded as a string input stream.

    Returns:
        The ``status`` field of the proxy's response.
    """
    target = '%s:%s' % (ip, port)
    proxy = prediction_pb2_grpc.ProxyServerStub(grpc.insecure_channel(target))
    payload = model_pb2.input(inputType='string', inputStream=inputt)
    reply = proxy.downstream(prediction_pb2.request(input_=payload))
    return reply.status
def withoutproxy(ip, port, inputt):
    """Call the model's Predict RPC directly (no proxy).

    NOTE(review): this redefines ``withoutproxy`` from earlier in the file;
    as written, this later definition wins at import time.

    Args:
        ip: Model server host.
        port: Model server port.
        inputt: Payload forwarded as a string input stream.

    Returns:
        The ``outputStream`` field of the prediction response.
    """
    address = '%s:%s' % (ip, port)
    service = model_pb2_grpc.PredictServiceStub(grpc.insecure_channel(address))
    request = model_pb2.input(inputStream=inputt, inputType="String")
    reply = service.Predict(request)
    return reply.outputStream
# Example #4
def produce(ip, port, seq_id):
    """Publish a timestamped "Produced output" message via the proxy server.

    Args:
        ip: Proxy server host.
        port: Proxy server port.
        seq_id: Sequence number attached to the outgoing request.

    Returns:
        The ``status`` field of the proxy's response.
    """
    channel = grpc.insecure_channel('%s:%s' % (ip, port))
    stub = prediction_pb2_grpc.ProxyServerStub(channel)
    # BUG FIX: the original bound this local to `time`, shadowing the stdlib
    # `time` module this file uses elsewhere (time.sleep); renamed to `ts`.
    ts = Timestamp()
    ts.GetCurrentTime()
    response = stub.outputstream(
        prediction_pb2.request(input_=model_pb2.input(
            inputType='string', inputStream="Produced output"),
                               src_uri="localhost",
                               seq=seq_id,
                               req_id=1,
                               timestamp=ts))
    print('Response\n{res}'.format(res=response.status))

    return response.status
# Example #5
def consume(ip, port, inputt):
    """Send `inputt` to the proxy server, print the round-trip latency,
    and return the response status.

    Args:
        ip: Proxy server host.
        port: Proxy server port.
        inputt: Payload; coerced to `str` before sending.

    Returns:
        The ``status`` field of the proxy's response.
    """
    inputt = str(inputt)

    channel = grpc.insecure_channel('%s:%s' % (ip, port))
    stub = prediction_pb2_grpc.ProxyServerStub(channel)

    a = datetime.datetime.now()
    response = stub.downstream(
        prediction_pb2.request(
            input_=model_pb2.input(inputType='string', inputStream=inputt)))
    b = datetime.datetime.now()

    # BUG FIX: `.microseconds` holds only the sub-second component, so any
    # call taking >= 1 s was under-reported; total_seconds() covers the
    # whole duration.
    print('latency', (b - a).total_seconds() * 1000, "ms")

    return response.status