def test_infer_dynamic_network_with_set_blob_twice():
    """Set a resized input blob twice in a row on a dynamic-shape network and
    check that each inference reflects the most recently set dimensions."""
    from conftest import create_encoder
    import ngraph as ng
    shape, p_shape = [1, 4, 20, 20], [(0, 5), 4, 20, 20]
    ref_shape1, ref_shape2 = [2, 4, 20, 20], [3, 4, 20, 20]
    function = create_encoder(shape)
    net = ng.function_to_cnn(function)
    # Make the batch dimension dynamic (range 0..5) before compilation.
    net.reshape({"data": p_shape})
    ie_core = ie.IECore()
    ie_core.register_plugin("templatePlugin", "TEMPLATE")
    exec_net = ie_core.load_network(net, "TEMPLATE")
    request = exec_net.requests[0]
    # Repeat the set_blob + infer cycle for each concrete shape; the second
    # iteration verifies that a previously set blob can be replaced.
    for ref_shape in (ref_shape1, ref_shape2):
        desc = request.input_blobs['data'].tensor_desc
        desc.dims = ref_shape
        request.set_blob("data", ie.Blob(desc))
        request.infer({"data": np.ones(ref_shape)})
        assert exec_net.requests[0].input_blobs["data"].tensor_desc.dims == ref_shape
        assert request.output_blobs['out'].tensor_desc.dims == ref_shape
def test_resize_algorithm_work(device):
    """Check that RESIZE_BILINEAR preprocessing produces results close to
    inferring an already correctly-sized input."""
    ie_core = ie.IECore()
    net = ie_core.read_network(test_net_xml, test_net_bin)
    exec_net_1 = ie_core.load_network(network=net, device_name=device, num_requests=1)
    img = read_image()
    res_1 = np.sort(exec_net_1.infer({"data": img})['fc_out'])

    # Second network: let the plugin resize the raw image on the fly.
    net.input_info['data'].preprocess_info.resize_algorithm = ie.ResizeAlgorithm.RESIZE_BILINEAR
    exec_net_2 = ie_core.load_network(net, device)

    import cv2
    raw = cv2.imread(path_to_img)
    if raw is None:
        raise FileNotFoundError("Input image not found")
    raw = raw / 255
    raw = raw.transpose((2, 0, 1)).astype(np.float32)
    raw = np.expand_dims(raw, 0)
    # Describe the blob with the image's own H/W; the resize step adapts it
    # to the network's expected input size.
    desc = ie.TensorDesc("FP32", [1, 3, raw.shape[2], raw.shape[3]], "NCHW")
    img_blob = ie.Blob(desc, raw)

    request = exec_net_2.requests[0]
    assert request.preprocess_info["data"].resize_algorithm == ie.ResizeAlgorithm.RESIZE_BILINEAR
    request.set_blob('data', img_blob)
    request.infer()
    res_2 = np.sort(request.output_blobs['fc_out'].buffer)
    assert np.allclose(res_1, res_2, atol=1e-2, rtol=1e-2)
def test_blob_setter(device):
    """Feed the same image in NCHW and NHWC layouts and check that set_blob
    with an NHWC-described blob yields matching results."""
    ie_core = ie.IECore()
    if device == "CPU":
        # The ARM CPU plugin does not support this scenario.
        if ie_core.get_metric(device, "FULL_DEVICE_NAME") == "arm_compute::NEON":
            pytest.skip("Can't run on ARM plugin")
    net = ie_core.read_network(test_net_xml, test_net_bin)
    exec_net_1 = ie_core.load_network(network=net, device_name=device, num_requests=1)
    # Reload the same network with an NHWC input layout.
    net.input_info['data'].layout = "NHWC"
    exec_net_2 = ie_core.load_network(network=net, device_name=device, num_requests=1)

    img = read_image()
    res_1 = np.sort(exec_net_1.infer({"data": img})['fc_out'])

    nhwc_img = np.transpose(img, axes=(0, 2, 3, 1)).astype(np.float32)
    desc = ie.TensorDesc("FP32", [1, 3, 32, 32], "NHWC")
    request = exec_net_2.requests[0]
    request.set_blob('data', ie.Blob(desc, nhwc_img))
    request.infer()
    res_2 = np.sort(request.output_blobs['fc_out'].buffer)
    assert np.allclose(res_1, res_2, atol=1e-2, rtol=1e-2)
def test_set_blob_with_incorrect_name():
    """set_blob with a name that matches no network input/output must raise
    a RuntimeError with a descriptive message."""
    function = create_encoder([4, 4, 20, 20])
    net = ng.function_to_cnn(function)
    ie_core = ie.IECore()
    ie_core.register_plugin("ov_template_plugin", "TEMPLATE")
    exec_net = ie_core.load_network(net, "TEMPLATE")
    tensor_desc = exec_net.requests[0].input_blobs["data"].tensor_desc
    tensor_desc.dims = [4, 4, 20, 20]
    blob = ie.Blob(tensor_desc)
    with pytest.raises(RuntimeError) as e:
        exec_net.requests[0].set_blob("incorrect_name", blob)
    # Plain string literal: the original used an f-string with no placeholders.
    assert "Failed to find input or output with name: 'incorrect_name'" in str(e.value)
def test_blob_setter_with_preprocess(device):
    """PreProcessInfo passed to set_blob must be visible on the request's
    preprocess_info afterwards."""
    ie_core = ie.IECore()
    net = ie_core.read_network(test_net_xml, test_net_bin)
    exec_net = ie_core.load_network(network=net, device_name=device, num_requests=1)

    desc = ie.TensorDesc("FP32", [1, 3, 32, 32], "NCHW")
    img_blob = ie.Blob(desc, read_image())
    pp_in = ie.PreProcessInfo()
    pp_in.mean_variant = ie.MeanVariant.MEAN_IMAGE

    request = exec_net.requests[0]
    request.set_blob('data', img_blob, pp_in)
    # The per-request preprocess info must reflect what was just supplied.
    assert request.preprocess_info["data"].mean_variant == ie.MeanVariant.MEAN_IMAGE
def test_set_blob_after_async_infer():
    """set_blob on a request that is busy with an async inference must fail
    with REQUEST_BUSY."""
    function = create_encoder([1, 4, 20, 20])
    net = ng.function_to_cnn(function)
    # Dynamic batch (0..5) so different request shapes are legal.
    net.reshape({"data": [(0, 5), 4, 20, 20]})
    ie_core = ie.IECore()
    ie_core.register_plugin("ov_template_plugin", "TEMPLATE")
    exec_net = ie_core.load_network(net, "TEMPLATE")
    request = exec_net.requests[0]

    desc = request.input_blobs['data'].tensor_desc
    desc.dims = [2, 4, 20, 20]
    pending_blob = ie.Blob(desc)

    # Kick off an async inference, then try to swap the blob while it runs.
    request.async_infer({"data": np.ones([4, 4, 20, 20])})
    with pytest.raises(RuntimeError) as e:
        request.set_blob("data", pending_blob)
    assert "REQUEST_BUSY" in str(e.value)
    # Drain the in-flight request before the test returns.
    request.wait()
def test_set_blob_with_incorrect_size():
    """set_blob with a blob whose size disagrees with the network's input or
    output size must raise RuntimeError with the matching message."""
    function = create_encoder([4, 4, 20, 20])
    net = ng.function_to_cnn(function)
    ie_core = ie.IECore()
    ie_core.register_plugin("ov_template_plugin", "TEMPLATE")
    exec_net = ie_core.load_network(net, "TEMPLATE")
    tensor_desc = exec_net.requests[0].input_blobs["data"].tensor_desc
    # Double the batch so the blob no longer matches the network's size.
    tensor_desc.dims = [tensor_desc.dims[0] * 2, 4, 20, 20]
    blob = ie.Blob(tensor_desc)
    # (Removed a leftover debug print of output_blobs.)
    with pytest.raises(RuntimeError) as e:
        exec_net.requests[0].set_blob("data", blob)
    assert "Input blob size is not equal network input size" in str(e.value)
    with pytest.raises(RuntimeError) as e:
        exec_net.requests[0].set_blob("out", blob)
    assert "Output blob size is not equal network output size" in str(e.value)
def test_set_blob_after_async_infer():
    """set_blob on a request that is busy with an async inference must fail
    with REQUEST_BUSY.

    NOTE(review): a test with this same name appears earlier in this file;
    if both live in the same module the later definition shadows the earlier
    one under pytest collection — confirm and rename one of them.
    """
    from conftest import create_ngraph_function
    import ngraph as ng
    function = create_ngraph_function(
        [ng.Dimension(0, 5), ng.Dimension(4), ng.Dimension(20), ng.Dimension(20)])
    net = ng.function_to_cnn(function)
    ie_core = ie.IECore()
    ie_core.register_plugin("templatePlugin", "TEMPLATE")
    exec_net = ie_core.load_network(net, "TEMPLATE")
    request = exec_net.requests[0]
    tensor_desc = request.input_blobs['data'].tensor_desc
    tensor_desc.dims = [2, 4, 20, 20]
    blob = ie.Blob(tensor_desc)
    request.async_infer({"data": np.ones([4, 4, 20, 20])})
    with pytest.raises(RuntimeError) as e:
        request.set_blob("data", blob)
    assert "REQUEST_BUSY" in str(e.value)
    # Fix: wait for the in-flight async request so it does not outlive the
    # test (the sibling test does the same; leaving a busy request can break
    # teardown of the executable network).
    request.wait()
def test_infer_dynamic_network_with_set_blob(shape, p_shape, ref_shape):
    """Parametrized: compile a dynamic-shape network, set a concretely-shaped
    blob, then run both sync and async inference at that shape."""
    function = create_encoder(shape)
    net = ng.function_to_cnn(function)
    net.reshape({"data": p_shape})
    ie_core = ie.IECore()
    ie_core.register_plugin("ov_template_plugin", "TEMPLATE")
    exec_net = ie_core.load_network(net, "TEMPLATE")
    request = exec_net.requests[0]

    # Pin the request's input to the concrete reference shape.
    desc = request.input_blobs["data"].tensor_desc
    desc.dims = ref_shape
    request.set_blob("data", ie.Blob(desc))
    assert exec_net.requests[0].input_blobs["data"].tensor_desc.dims == ref_shape

    data = np.ones(ref_shape)
    request.infer({"data": data})
    request.async_infer({"data": data})
    assert request.wait(ie.WaitMode.RESULT_READY) == ie.StatusCode.OK
    assert request.output_blobs["out"].tensor_desc.dims == ref_shape
def test_blob_setter(device):
    """Feed the same image in NCHW and NHWC layouts and check that set_blob
    with an NHWC-described blob yields matching results.

    NOTE(review): a test with this same name (plus an ARM skip) appears
    earlier in this file; if both live in the same module, one shadows the
    other under pytest collection — confirm and deduplicate.
    """
    ie_core = ie.IECore()
    net = ie_core.read_network(test_net_xml, test_net_bin)
    exec_net_1 = ie_core.load_network(network=net, device_name=device, num_requests=1)
    # Reload the same network with an NHWC input layout.
    net.input_info['data'].layout = "NHWC"
    exec_net_2 = ie_core.load_network(network=net, device_name=device, num_requests=1)

    img = read_image()
    res_1 = np.sort(exec_net_1.infer({"data": img})['fc_out'])

    nhwc_img = np.transpose(img, axes=(0, 2, 3, 1)).astype(np.float32)
    desc = ie.TensorDesc("FP32", [1, 3, 32, 32], "NHWC")
    request = exec_net_2.requests[0]
    request.set_blob('data', ie.Blob(desc, nhwc_img))
    request.infer()
    res_2 = np.sort(request.output_blobs['fc_out'].buffer)
    assert np.allclose(res_1, res_2, atol=1e-2, rtol=1e-2)