def test_modelinferrequest_to_types(model_infer_request):
    """Converting a ModelInferRequest yields the equivalent InferenceRequest.

    The fixture carries three INT32 inputs; the expected object is rebuilt
    here from the same payloads and compared field-by-field via dict().
    """
    result = ModelInferRequestConverter.to_types(model_infer_request)

    payloads = [[1, 2, 3], [4], [5, 6]]
    expected = types.InferenceRequest(
        id="",
        inputs=[
            types.RequestInput(
                name=f"input-{idx}",
                datatype="INT32",
                shape=[len(values)],
                data=types.TensorData.parse_obj(values),
            )
            for idx, values in enumerate(payloads)
        ],
    )

    assert type(result) is types.InferenceRequest
    assert dict(result) == dict(expected)
def generate_test_requests() -> List[types.InferenceRequest]:
    """Build benchmark requests with exponentially growing payload sizes.

    Sizes run through 2**10 .. 2**15; each request carries one FP32 input
    named "input-0" filled with uniform random values in [0, 9999).
    """
    max_value = 9999
    sizes = np.power(2, np.arange(10, 16)).astype(int)
    return [
        types.InferenceRequest(
            inputs=[
                types.RequestInput(
                    name="input-0",
                    shape=[size],
                    datatype="FP32",
                    data=types.TensorData.parse_obj(
                        (max_value * np.random.rand(size)).tolist()
                    ),
                )
            ]
        )
        for size in sizes
    ]
def test_modelinferrequest_to_types(model_infer_request):
    """to_types should decode a request holding one [1, 3] INT32 input.

    The expected input carries an "np" content_type parameter; equality is
    checked field-by-field via dict().
    """
    converted = ModelInferRequestConverter.to_types(model_infer_request)

    expected_input = types.RequestInput(
        name="input-0",
        datatype="INT32",
        shape=[1, 3],
        data=types.TensorData.parse_obj([1, 2, 3]),
        parameters=types.Parameters(content_type="np"),
    )
    expected = types.InferenceRequest(id="", inputs=[expected_input])

    assert type(converted) is types.InferenceRequest
    assert dict(converted) == dict(expected)
def extract_request(self) -> np.array:
    """Decode the first input of the stored request into a NumPy array.

    Reads ``self.request["inputs"][0]`` (a dict of RequestInput fields) and
    runs it through the default NumpyCodec.
    """
    first_input = types.RequestInput(**self.request["inputs"][0])
    return NumpyCodec().decode(first_input)