def test_infer_dynamic_network_with_set_blob_twice():
    """Infer a dynamic-batch network twice, resizing the input blob via
    set_blob() before each run, and check that input and output dims
    follow the data shape every time."""
    shape, p_shape = [1, 4, 20, 20], [(0, 5), 4, 20, 20]
    ref_shape1, ref_shape2 = [2, 4, 20, 20], [3, 4, 20, 20]
    # create_encoder/ng come from this module's top-level imports, same as
    # every other test here (the local re-imports were redundant).
    function = create_encoder(shape)
    net = ng.function_to_cnn(function)
    # Make the batch dimension dynamic, bounded by 5.
    net.reshape({"data": p_shape})
    ie_core = ie.IECore()
    # Register under the plugin library name used consistently by the other
    # tests in this module ("ov_template_plugin", not the old "templatePlugin").
    ie_core.register_plugin("ov_template_plugin", "TEMPLATE")
    exec_net = ie_core.load_network(net, "TEMPLATE")
    request = exec_net.requests[0]

    # First inference: batch == 2.
    td = request.input_blobs['data'].tensor_desc
    td.dims = ref_shape1
    blob = ie.Blob(td)
    request.set_blob("data", blob)
    request.infer({"data": np.ones(ref_shape1)})
    assert exec_net.requests[0].input_blobs[
        "data"].tensor_desc.dims == ref_shape1
    assert request.output_blobs['out'].tensor_desc.dims == ref_shape1

    # Second inference: batch == 3 — blobs must be resized again.
    td = request.input_blobs['data'].tensor_desc
    td.dims = ref_shape2
    blob = ie.Blob(td)
    request.set_blob("data", blob)
    request.infer({"data": np.ones(ref_shape2)})
    assert exec_net.requests[0].input_blobs[
        "data"].tensor_desc.dims == ref_shape2
    assert request.output_blobs['out'].tensor_desc.dims == ref_shape2
def test_set_blob_with_incorrect_name():
    """set_blob() with a name that matches no input/output port must raise
    RuntimeError with a descriptive message."""
    function = create_encoder([4, 4, 20, 20])
    net = ng.function_to_cnn(function)
    ie_core = ie.IECore()
    ie_core.register_plugin("ov_template_plugin", "TEMPLATE")
    exec_net = ie_core.load_network(net, "TEMPLATE")
    tensor_desc = exec_net.requests[0].input_blobs["data"].tensor_desc
    tensor_desc.dims = [4, 4, 20, 20]
    blob = ie.Blob(tensor_desc)
    with pytest.raises(RuntimeError) as e:
        exec_net.requests[0].set_blob("incorrect_name", blob)
    # Plain string literal: the original f-prefix had no placeholders.
    assert "Failed to find input or output with name: 'incorrect_name'" in str(
        e.value)
def test_async_infer_dynamic_network_3_requests(shapes):
    """Run three asynchronous requests on a network with dynamic spatial
    dims and verify each output matches its request's input shape."""
    net = ng.function_to_cnn(create_encoder([3, 4, 20, 20]))
    net.reshape({"data": [3, 4, (20, 50), (20, 50)]})
    core = ie.IECore()
    core.register_plugin("ov_template_plugin", "TEMPLATE")
    exec_net = core.load_network(net, "TEMPLATE", num_requests=3)
    # Start all requests first so they are genuinely in flight together.
    for idx, req in enumerate(exec_net.requests):
        req.async_infer({"data": np.ones(shapes[idx])})
    # Then wait on each one and check its result shape.
    for idx, req in enumerate(exec_net.requests):
        status = req.wait(ie.WaitMode.RESULT_READY)
        assert status == ie.StatusCode.OK
        assert req.output_blobs['out'].tensor_desc.dims == shapes[idx]
def test_blob_set_shape_after_async_infer():
    """Resizing an input blob while its request is running must raise
    RuntimeError mentioning REQUEST_BUSY."""
    function = create_encoder([1, 4, 20, 20])
    net = ng.function_to_cnn(function)
    net.reshape({"data": [(1, 5), 4, 20, 20]})
    # Use the namespaced ie.IECore() like every other test in this module
    # (the bare IECore() depended on an extra from-import).
    ie_core = ie.IECore()
    # Plugin library name aligned with the rest of the module.
    ie_core.register_plugin("ov_template_plugin", "TEMPLATE")
    exec_net = ie_core.load_network(net, "TEMPLATE")
    request = exec_net.requests[0]
    request.async_infer({"data": np.ones([4, 4, 20, 20])})
    with pytest.raises(RuntimeError) as e:
        request.input_blobs['data'].set_shape([3, 4, 20, 20])
    assert "REQUEST_BUSY" in str(e.value)
    # Drain the in-flight request so it cannot leak into other tests.
    request.wait()
def test_infer_dynamic_network_without_set_shape(shape, p_shape, ref_shape):
    """Sync and async inference on a dynamic network without resizing blobs
    up front; blob dims must be taken from the supplied data."""
    net = ng.function_to_cnn(create_encoder(shape))
    net.reshape({"data": p_shape})
    core = ie.IECore()
    core.register_plugin("ov_template_plugin", "TEMPLATE")
    exec_net = core.load_network(net, "TEMPLATE")
    # Synchronous path: the input blob adopts the actual data shape.
    exec_net.infer({"data": np.ones(ref_shape)})
    req = exec_net.requests[0]
    assert req.input_blobs["data"].tensor_desc.dims == ref_shape
    # Asynchronous path on the same request.
    req.async_infer({"data": np.ones(ref_shape)})
    status = req.wait(ie.WaitMode.RESULT_READY)
    assert status == ie.StatusCode.OK
    assert req.output_blobs['out'].tensor_desc.dims == ref_shape
def test_set_blob_after_async_infer():
    """Calling set_blob() while the request is still running must raise
    RuntimeError mentioning REQUEST_BUSY."""
    net = ng.function_to_cnn(create_encoder([1, 4, 20, 20]))
    net.reshape({"data": [(0, 5), 4, 20, 20]})
    core = ie.IECore()
    core.register_plugin("ov_template_plugin", "TEMPLATE")
    exec_net = core.load_network(net, "TEMPLATE")
    req = exec_net.requests[0]
    # Prepare a replacement blob with a different batch size.
    desc = req.input_blobs['data'].tensor_desc
    desc.dims = [2, 4, 20, 20]
    new_blob = ie.Blob(desc)
    # Start an inference, then try to swap the blob mid-flight.
    req.async_infer({"data": np.ones([4, 4, 20, 20])})
    with pytest.raises(RuntimeError) as e:
        req.set_blob("data", new_blob)
    assert "REQUEST_BUSY" in str(e.value)
    # Let the running request finish before the test exits.
    req.wait()
def test_set_blob_with_incorrect_size():
    """set_blob() with a blob whose size does not match the port must raise
    RuntimeError, for both the input and the output port."""
    function = create_encoder([4, 4, 20, 20])
    net = ng.function_to_cnn(function)
    ie_core = ie.IECore()
    ie_core.register_plugin("ov_template_plugin", "TEMPLATE")
    exec_net = ie_core.load_network(net, "TEMPLATE")
    tensor_desc = exec_net.requests[0].input_blobs["data"].tensor_desc
    # Double the batch dimension so the blob no longer fits either port.
    tensor_desc.dims = [tensor_desc.dims[0] * 2, 4, 20, 20]
    blob = ie.Blob(tensor_desc)
    # (Removed leftover debug print of output_blobs.)
    with pytest.raises(RuntimeError) as e:
        exec_net.requests[0].set_blob("data", blob)
    assert "Input blob size is not equal network input size" in str(e.value)
    with pytest.raises(RuntimeError) as e:
        exec_net.requests[0].set_blob("out", blob)
    assert "Output blob size is not equal network output size" in str(e.value)
def test_infer_dynamic_network_twice():
    """Two consecutive synchronous inferences with different batch sizes on
    a dynamic network; blob dims must track the data shape each time."""
    net = ng.function_to_cnn(create_encoder([1, 4, 20, 20]))
    # Batch dimension is dynamic, bounded by 5.
    net.reshape({"data": [(0, 5), 4, 20, 20]})
    core = ie.IECore()
    core.register_plugin("ov_template_plugin", "TEMPLATE")
    exec_net = core.load_network(net, "TEMPLATE")
    req = exec_net.requests[0]
    # Same checks for batch 2 and then batch 3 on the same request.
    for batch_shape in ([2, 4, 20, 20], [3, 4, 20, 20]):
        req.infer({"data": np.ones(batch_shape)})
        assert exec_net.requests[0].input_blobs[
            "data"].tensor_desc.dims == batch_shape
        assert req.output_blobs['out'].tensor_desc.dims == batch_shape
def test_infer_dynamic_network_with_set_blob(shape, p_shape, ref_shape):
    """Resize the input via set_blob(), then check that both sync and async
    inference report the resized dims on input and output blobs."""
    # create_encoder/ng come from this module's top-level imports, same as
    # every other test here (the local re-imports were redundant).
    function = create_encoder(shape)
    net = ng.function_to_cnn(function)
    net.reshape({"data": p_shape})
    ie_core = ie.IECore()
    # Plugin library name aligned with the rest of the module
    # ("ov_template_plugin", not the old "templatePlugin").
    ie_core.register_plugin("ov_template_plugin", "TEMPLATE")
    exec_net = ie_core.load_network(net, "TEMPLATE")
    tensor_desc = exec_net.requests[0].input_blobs["data"].tensor_desc
    tensor_desc.dims = ref_shape
    blob = ie.Blob(tensor_desc)
    exec_net.requests[0].set_blob("data", blob)
    assert exec_net.requests[0].input_blobs[
        "data"].tensor_desc.dims == ref_shape
    request = exec_net.requests[0]
    request.infer({"data": np.ones(ref_shape)})
    request.async_infer({"data": np.ones(ref_shape)})
    status = request.wait(ie.WaitMode.RESULT_READY)
    assert status == ie.StatusCode.OK
    assert request.output_blobs["out"].tensor_desc.dims == ref_shape