Example #1
# Assumed import: pb refers to the generated KServe V2 dataplane protobufs.
from mlserver.grpc import dataplane_pb2 as pb


def test_model_infer(inference_service_stub, model_infer_request):
    # Call the ModelInfer RPC on the server through the gRPC stub fixture.
    prediction = inference_service_stub.ModelInfer(model_infer_request)

    # A single FP32 output tensor with the expected contents should come back.
    expected = pb.InferTensorContents(fp32_contents=[21.0])

    assert len(prediction.outputs) == 1
    assert prediction.outputs[0].contents == expected
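
Both arguments are pytest fixtures that the snippet does not show. Below is a minimal sketch of what they could look like, assuming an MLServer instance already listening on localhost:8081 and the generated GRPCInferenceServiceStub from the dataplane protos; the fixture bodies, the address and the tensor values are illustrative, not taken from the original test suite.

import grpc
import pytest

from mlserver.grpc import dataplane_pb2 as pb
from mlserver.grpc import dataplane_pb2_grpc


@pytest.fixture
def inference_service_stub():
    # Hypothetical address; assumes a server is already running there.
    with grpc.insecure_channel("localhost:8081") as channel:
        yield dataplane_pb2_grpc.GRPCInferenceServiceStub(channel)


@pytest.fixture
def model_infer_request():
    # Illustrative payload; the real fixture must match the deployed model.
    return pb.ModelInferRequest(
        model_name="sum-model",
        inputs=[
            pb.ModelInferRequest.InferInputTensor(
                name="input-0",
                datatype="FP32",
                shape=[1, 3],
                contents=pb.InferTensorContents(fp32_contents=[1.0, 2.0, 18.0]),
            )
        ],
    )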
Example #2
# Assumed import: pb refers to the generated KServe V2 dataplane protobufs.
from mlserver.grpc import dataplane_pb2 as pb


async def test_model_infer(inference_service_stub, model_infer_request,
                           model_name, model_version):
    # Target the (parametrised) model name and version.
    model_infer_request.model_name = model_name
    if model_version is not None:
        model_infer_request.model_version = model_version
    else:
        # Protobuf fields cannot hold None, so clear the field instead.
        model_infer_request.ClearField("model_version")

    prediction = await inference_service_stub.ModelInfer(model_infer_request)

    expected = pb.InferTensorContents(int64_contents=[6])

    assert len(prediction.outputs) == 1
    assert prediction.outputs[0].contents == expected
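
The extra model_name and model_version arguments are assumed to be parametrised fixtures, and the async test body relies on an asyncio-aware pytest plugin such as pytest-asyncio. One purely illustrative way to provide them:

import pytest


@pytest.fixture(params=["sum-model"])
def model_name(request):
    return request.param


@pytest.fixture(params=["v1.2.3", None])
def model_version(request):
    # The None case exercises the branch that clears model_version.
    return request.param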
Example #3
from google.protobuf import json_format

# Assumed import paths for MLServer's generated protobufs and gRPC converters.
from mlserver.grpc import dataplane_pb2 as pb
from mlserver.grpc.converters import ModelInferResponseConverter


def test_modelinferresponse_from_types(inference_response):
    # Convert MLServer's internal InferenceResponse into its protobuf equivalent.
    model_infer_response = ModelInferResponseConverter.from_types(inference_response)

    expected = pb.ModelInferResponse(
        model_name="sum-model",
        id="123",
        outputs=[
            pb.ModelInferResponse.InferOutputTensor(
                name="output-0",
                datatype="FP32",
                shape=[1],
                contents=pb.InferTensorContents(fp32_contents=[21.0]),
            )
        ],
    )

    assert type(model_infer_response) is pb.ModelInferResponse
    # Compare dict representations so a failure produces a readable diff.
    assert json_format.MessageToDict(model_infer_response) == json_format.MessageToDict(
        expected
    )
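
The inference_response fixture is also omitted from the snippet. Given the expected protobuf above, it presumably holds an MLServer types.InferenceResponse along these lines (a sketch; the fixture body itself is an assumption):

import pytest

from mlserver.types import InferenceResponse, ResponseOutput


@pytest.fixture
def inference_response():
    return InferenceResponse(
        model_name="sum-model",
        id="123",
        outputs=[
            ResponseOutput(
                name="output-0",
                datatype="FP32",
                shape=[1],
                data=[21.0],
            )
        ],
    )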