def test_ie_core_class():
    input_shape = [1, 3, 4, 4]
    param = ng.parameter(input_shape, np.float32, name="parameter")
    relu = ng.relu(param, name="relu")
    func = Function([relu], [param], "test")
    func.get_ordered_ops()[2].friendly_name = "friendly"

    cnn_network = ov.IENetwork(func)

    ie_core = ov.Core()
    ie_core.set_config({}, device_name="CPU")
    executable_network = ie_core.load_network(cnn_network, "CPU", {})

    td = TensorDesc("FP32", input_shape, "NCHW")

    request = executable_network.create_infer_request()
    input_data = np.random.rand(*input_shape) - 0.5
    expected_output = np.maximum(0.0, input_data)

    input_blob = Blob(td, input_data)
    request.set_input({"parameter": input_blob})
    request.infer()

    result = request.get_blob("relu").buffer
    assert np.allclose(result, expected_output)
def create_relu(input_shape):
    import ngraph as ng
    input_shape = ng.impl.PartialShape(input_shape)
    param = ng.parameter(input_shape, dtype=np.float32, name="data")
    result = ng.relu(param, name="out")
    function = ng.Function(result, [param], "TestFunction")
    return function
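# Usage sketch (an assumption, not part of the original snippets): evaluate the
# function returned by create_relu() with the get_runtime()/computation helpers
# that test_multiple_outputs() below relies on. It assumes runtime.computation()
# accepts an ng.Function directly and that get_runtime() is importable here.
def check_create_relu():
    func = create_relu([2, 2])
    runtime = get_runtime()
    computation = runtime.computation(func)
    input_data = np.array([[-1.0, 2.0], [0.5, -3.0]], dtype=np.float32)
    result = computation(input_data)
    # relu should zero out the negative entries and pass positives through
    assert np.allclose(result, np.maximum(0.0, input_data))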
def simple_if(condition_val):
    condition = ng.constant(condition_val, dtype=np.bool)

    # then_body
    X_t = ng.parameter([2], np.float32, "X")
    Y_t = ng.parameter([2], np.float32, "Y")
    then_mul = ng.multiply(X_t, Y_t)
    then_body_res_1 = ng.result(then_mul)
    then_body = GraphBody([X_t, Y_t], [then_body_res_1])
    then_body_inputs = [TensorIteratorInvariantInputDesc(1, 0), TensorIteratorInvariantInputDesc(2, 1)]
    then_body_outputs = [TensorIteratorBodyOutputDesc(0, 0)]

    # else_body
    X_e = ng.parameter([2], np.float32, "X")
    Y_e = ng.parameter([2], np.float32, "Y")
    add_e = ng.add(X_e, Y_e)
    else_body_res_1 = ng.result(add_e)
    else_body = GraphBody([X_e, Y_e], [else_body_res_1])
    else_body_inputs = [TensorIteratorInvariantInputDesc(1, 0), TensorIteratorInvariantInputDesc(2, 1)]
    else_body_outputs = [TensorIteratorBodyOutputDesc(0, 0)]

    X = ng.constant([3, 4], dtype=np.float32)
    Y = ng.constant([2, 1], dtype=np.float32)
    if_node = ng.if_op(condition, [X, Y], (then_body, else_body),
                       (then_body_inputs, else_body_inputs),
                       (then_body_outputs, else_body_outputs))
    relu = ng.relu(if_node)
    return relu
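# Usage sketch (an assumption, not part of the original snippets): run simple_if()
# through the same get_runtime()/computation helpers used in test_multiple_outputs()
# below. With X = [3, 4] and Y = [2, 1], the then-branch (multiply) produces [6, 4]
# and the else-branch (add) produces [5, 5]; the final relu leaves both unchanged
# because all values are positive.
def check_simple_if():
    for condition_val, expected in ((True, [6.0, 4.0]), (False, [5.0, 5.0])):
        relu = simple_if(condition_val)
        runtime = get_runtime()
        # the graph has no parameters, so the computation takes no inputs
        computation = runtime.computation(relu)
        result = computation()
        assert np.allclose(result, np.array(expected, dtype=np.float32))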
def get_test_cnnnetwork():
    element_type = Type.f32
    param = Parameter(element_type, Shape([1, 3, 22, 22]))
    relu = ng.relu(param)
    func = Function([relu], [param], 'test')
    caps = Function.to_capsule(func)

    cnnNetwork = IENetwork(caps)
    assert cnnNetwork is not None
    return cnnNetwork
def test_create_IENetwork_from_nGraph():
    element_type = Type.f32
    param = Parameter(element_type, Shape([1, 3, 22, 22]))
    relu = ng.relu(param)
    func = Function([relu], [param], "test")

    cnnNetwork = ov.IENetwork(func)
    assert cnnNetwork is not None

    func2 = cnnNetwork.get_function()
    assert func2 is not None
    assert len(func2.get_ops()) == 3
def test_CreateIENetworkFromNGraph():
    element_type = Type.f32
    param = Parameter(element_type, Shape([1, 3, 22, 22]))
    relu = ng.relu(param)
    func = Function([relu], [param], 'test')
    caps = Function.to_capsule(func)

    cnnNetwork = IENetwork(caps)
    assert cnnNetwork is not None
    assert ng.function_from_cnn(cnnNetwork) is not None
    assert len(cnnNetwork.layers) == 2
def get_test_cnnnetwork():
    param = ng.parameter(Shape([1, 3, 22, 22]), name="parameter")
    relu = ng.relu(param)
    res = ng.result(relu, name='result')
    func = Function([res], [param], 'test')
    caps = Function.to_capsule(func)

    cnnNetwork = IENetwork(caps)
    assert cnnNetwork is not None
    return cnnNetwork
def test_runtime_info():
    test_shape = PartialShape([1, 1, 1, 1])
    test_type = Type.f32
    test_param = Parameter(test_type, test_shape)
    relu_node = ng.relu(test_param)

    runtime_info = relu_node.get_rt_info()
    runtime_info["affinity"] = "test_affinity"
    relu_node.set_friendly_name("testReLU")
    runtime_info_after = relu_node.get_rt_info()

    assert runtime_info_after["affinity"] == "test_affinity"
def test_get_IENetwork_from_nGraph():
    element_type = Type.f32
    param = Parameter(element_type, Shape([1, 3, 22, 22]))
    relu = ng.relu(param)
    func = Function([relu], [param], 'test')
    caps = Function.to_capsule(func)

    cnnNetwork = IENetwork(caps)
    assert cnnNetwork is not None
    assert ng.function_from_cnn(cnnNetwork) is not None

    func2 = ng.function_from_cnn(cnnNetwork)
    assert func2 is not None
def test_offline_api():
    element_type = Type.f32
    param = Parameter(element_type, Shape([1, 3, 22, 22]))
    relu = ng.relu(param)
    func = Function([relu], [param], 'test')
    caps = Function.to_capsule(func)

    cnnNetwork = IENetwork(caps)
    assert cnnNetwork is not None

    ApplyMOCTransformations(cnnNetwork, False)

    func2 = ng.function_from_cnn(cnnNetwork)
    assert func2 is not None
    assert len(func2.get_ops()) == 3
def test_multiple_outputs():
    input_shape = [4, 4]
    input_data = np.arange(-8, 8).reshape(input_shape)

    expected_output = np.split(input_data, 2, axis=1)[0]
    expected_output[expected_output < 0] = 0

    test_param = ng.parameter(input_shape, dtype=np.float32, name="A")
    split = ng.split(test_param, axis=1, num_splits=2)
    split_first_output = split.output(0)
    relu = ng.relu(split_first_output)

    runtime = get_runtime()
    computation = runtime.computation(relu, test_param)
    output = computation(input_data)

    assert np.equal(output, expected_output).all()
def simple_if_without_parameters(condition_val):
    condition = ng.constant(condition_val, dtype=np.bool)

    # then_body
    then_constant = ng.constant(0.7, dtype=np.float)
    then_body_res_1 = ng.result(then_constant)
    then_body = GraphBody([], [then_body_res_1])
    then_body_inputs = []
    then_body_outputs = [TensorIteratorBodyOutputDesc(0, 0)]

    # else_body
    else_const = ng.constant(9.0, dtype=np.float)
    else_body_res_1 = ng.result(else_const)
    else_body = GraphBody([], [else_body_res_1])
    else_body_inputs = []
    else_body_outputs = [TensorIteratorBodyOutputDesc(0, 0)]

    if_node = ng.if_op(condition, [], (then_body, else_body),
                       (then_body_inputs, else_body_inputs),
                       (then_body_outputs, else_body_outputs))
    relu = ng.relu(if_node)
    return relu
def test_runtime_info():
    test_shape = PartialShape([1, 3, 22, 22])
    test_type = Type.f32
    test_param = Parameter(test_type, test_shape)
    relu_node = ng.relu(test_param)

    runtime_info = relu_node.get_rt_info()
    runtime_info["affinity"] = "test_affinity"
    relu_node.set_friendly_name("testReLU")
    runtime_info_after = relu_node.get_rt_info()
    assert runtime_info == runtime_info_after

    params = [test_param]
    results = [relu_node]
    ng_function = Function(results, params, "testFunc")
    capsule = Function.to_capsule(ng_function)
    cnn_network = IENetwork(capsule)

    cnn_layer = cnn_network.layers["testReLU"]
    assert cnn_layer is not None
    assert cnn_layer.affinity == "test_affinity"
def create_ngraph_function(args) -> Function:
    # shape_and_length(shape) returns the shape and its element count; it is assumed
    # to be defined at module scope (the variant below defines it inline).
    weights = np.fromfile(args.model, dtype=np.float32)
    weights_offset = 0
    padding_begin = [0, 0]
    padding_end = [0, 0]

    # input
    input_shape = [64, 1, 28, 28]
    param_node = ngraph.parameter(input_shape, np.float32, 'Parameter')

    # convolution 1
    conv_1_kernel_shape, conv_1_kernel_length = shape_and_length([20, 1, 5, 5])
    conv_1_kernel = ngraph.constant(
        weights[0:conv_1_kernel_length].reshape(conv_1_kernel_shape))
    weights_offset += conv_1_kernel_length
    conv_1_node = ngraph.convolution(param_node, conv_1_kernel, [1, 1],
                                     padding_begin, padding_end, [1, 1])

    # add 1
    add_1_kernel_shape, add_1_kernel_length = shape_and_length([1, 20, 1, 1])
    add_1_kernel = ngraph.constant(
        weights[weights_offset:weights_offset + add_1_kernel_length].reshape(add_1_kernel_shape))
    weights_offset += add_1_kernel_length
    add_1_node = ngraph.add(conv_1_node, add_1_kernel)

    # maxpool 1
    maxpool_1_node = ngraph.max_pool(add_1_node, [2, 2], padding_begin, padding_end,
                                     [2, 2], 'ceil', None)

    # convolution 2
    conv_2_kernel_shape, conv_2_kernel_length = shape_and_length([50, 20, 5, 5])
    conv_2_kernel = ngraph.constant(
        weights[weights_offset:weights_offset + conv_2_kernel_length].reshape(conv_2_kernel_shape))
    weights_offset += conv_2_kernel_length
    conv_2_node = ngraph.convolution(maxpool_1_node, conv_2_kernel, [1, 1],
                                     padding_begin, padding_end, [1, 1])

    # add 2
    add_2_kernel_shape, add_2_kernel_length = shape_and_length([1, 50, 1, 1])
    add_2_kernel = ngraph.constant(
        weights[weights_offset:weights_offset + add_2_kernel_length].reshape(add_2_kernel_shape))
    weights_offset += add_2_kernel_length
    add_2_node = ngraph.add(conv_2_node, add_2_kernel)

    # maxpool 2
    maxpool_2_node = ngraph.max_pool(add_2_node, [2, 2], padding_begin, padding_end,
                                     [2, 2], 'ceil', None)

    # reshape 1
    reshape_1_dims, reshape_1_length = shape_and_length([2])
    # workaround to get int64 weights from float32 ndarray w/o unnecessary copying
    dtype_weights = np.frombuffer(
        weights[weights_offset:weights_offset + 2 * reshape_1_length], dtype=np.int64)
    reshape_1_kernel = ngraph.constant(dtype_weights)
    weights_offset += 2 * reshape_1_length
    reshape_1_node = ngraph.reshape(maxpool_2_node, reshape_1_kernel, True)

    # matmul 1
    matmul_1_kernel_shape, matmul_1_kernel_length = shape_and_length([500, 800])
    matmul_1_kernel = ngraph.constant(
        weights[weights_offset:weights_offset + matmul_1_kernel_length].reshape(matmul_1_kernel_shape))
    weights_offset += matmul_1_kernel_length
    matmul_1_node = ngraph.matmul(reshape_1_node, matmul_1_kernel, False, True)

    # add 3
    add_3_kernel_shape, add_3_kernel_length = shape_and_length([1, 500])
    add_3_kernel = ngraph.constant(
        weights[weights_offset:weights_offset + add_3_kernel_length].reshape(add_3_kernel_shape))
    weights_offset += add_3_kernel_length
    add_3_node = ngraph.add(matmul_1_node, add_3_kernel)

    # ReLU
    relu_node = ngraph.relu(add_3_node)

    # reshape 2
    reshape_2_kernel = ngraph.constant(dtype_weights)
    reshape_2_node = ngraph.reshape(relu_node, reshape_2_kernel, True)

    # matmul 2
    matmul_2_kernel_shape, matmul_2_kernel_length = shape_and_length([10, 500])
    matmul_2_kernel = ngraph.constant(
        weights[weights_offset:weights_offset + matmul_2_kernel_length].reshape(matmul_2_kernel_shape))
    weights_offset += matmul_2_kernel_length
    matmul_2_node = ngraph.matmul(reshape_2_node, matmul_2_kernel, False, True)

    # add 4
    add_4_kernel_shape, add_4_kernel_length = shape_and_length([1, 10])
    add_4_kernel = ngraph.constant(
        weights[weights_offset:weights_offset + add_4_kernel_length].reshape(add_4_kernel_shape))
    weights_offset += add_4_kernel_length
    add_4_node = ngraph.add(matmul_2_node, add_4_kernel)

    # softmax
    softmax_axis = 1
    softmax_node = ngraph.softmax(add_4_node, softmax_axis)

    # result
    result_node = ngraph.result(softmax_node)

    # nGraph function
    function = Function(result_node, [param_node], 'lenet')
    return function
def Relu(onnx_node, ng_inputs):  # type: (NodeWrapper, List[NgraphNode]) -> NgraphNode
    """Apply the Relu function, f(x) = max(0, x), to the input tensor elementwise."""
    return ng.relu(ng_inputs[0])
def create_ngraph_function(args: argparse.Namespace) -> ngraph.impl.Function:
    """Create a network on the fly from the source code using ngraph."""
    def shape_and_length(shape: list) -> typing.Tuple[list, int]:
        length = reduce(lambda x, y: x * y, shape)
        return shape, length

    weights = np.fromfile(args.model, dtype=np.float32)
    weights_offset = 0
    padding_begin = padding_end = [0, 0]

    # input
    input_shape = [64, 1, 28, 28]
    param_node = ngraph.parameter(input_shape, np.float32, 'Parameter')

    # convolution 1
    conv_1_kernel_shape, conv_1_kernel_length = shape_and_length([20, 1, 5, 5])
    conv_1_kernel = ngraph.constant(
        weights[0:conv_1_kernel_length].reshape(conv_1_kernel_shape))
    weights_offset += conv_1_kernel_length
    conv_1_node = ngraph.convolution(param_node, conv_1_kernel, [1, 1],
                                     padding_begin, padding_end, [1, 1])

    # add 1
    add_1_kernel_shape, add_1_kernel_length = shape_and_length([1, 20, 1, 1])
    add_1_kernel = ngraph.constant(
        weights[weights_offset:weights_offset + add_1_kernel_length].reshape(add_1_kernel_shape),
    )
    weights_offset += add_1_kernel_length
    add_1_node = ngraph.add(conv_1_node, add_1_kernel)

    # maxpool 1
    maxpool_1_node = ngraph.max_pool(add_1_node, [2, 2], padding_begin, padding_end,
                                     [2, 2], 'ceil', None)

    # convolution 2
    conv_2_kernel_shape, conv_2_kernel_length = shape_and_length([50, 20, 5, 5])
    conv_2_kernel = ngraph.constant(
        weights[weights_offset:weights_offset + conv_2_kernel_length].reshape(conv_2_kernel_shape),
    )
    weights_offset += conv_2_kernel_length
    conv_2_node = ngraph.convolution(maxpool_1_node, conv_2_kernel, [1, 1],
                                     padding_begin, padding_end, [1, 1])

    # add 2
    add_2_kernel_shape, add_2_kernel_length = shape_and_length([1, 50, 1, 1])
    add_2_kernel = ngraph.constant(
        weights[weights_offset:weights_offset + add_2_kernel_length].reshape(add_2_kernel_shape),
    )
    weights_offset += add_2_kernel_length
    add_2_node = ngraph.add(conv_2_node, add_2_kernel)

    # maxpool 2
    maxpool_2_node = ngraph.max_pool(add_2_node, [2, 2], padding_begin, padding_end,
                                     [2, 2], 'ceil', None)

    # reshape 1
    reshape_1_dims, reshape_1_length = shape_and_length([2])
    # workaround to get int64 weights from float32 ndarray w/o unnecessary copying
    dtype_weights = np.frombuffer(
        weights[weights_offset:weights_offset + 2 * reshape_1_length],
        dtype=np.int64,
    )
    reshape_1_kernel = ngraph.constant(dtype_weights)
    weights_offset += 2 * reshape_1_length
    reshape_1_node = ngraph.reshape(maxpool_2_node, reshape_1_kernel, True)

    # matmul 1
    matmul_1_kernel_shape, matmul_1_kernel_length = shape_and_length([500, 800])
    matmul_1_kernel = ngraph.constant(
        weights[weights_offset:weights_offset + matmul_1_kernel_length].reshape(matmul_1_kernel_shape),
    )
    weights_offset += matmul_1_kernel_length
    matmul_1_node = ngraph.matmul(reshape_1_node, matmul_1_kernel, False, True)

    # add 3
    add_3_kernel_shape, add_3_kernel_length = shape_and_length([1, 500])
    add_3_kernel = ngraph.constant(
        weights[weights_offset:weights_offset + add_3_kernel_length].reshape(add_3_kernel_shape),
    )
    weights_offset += add_3_kernel_length
    add_3_node = ngraph.add(matmul_1_node, add_3_kernel)

    # ReLU
    relu_node = ngraph.relu(add_3_node)

    # reshape 2
    reshape_2_kernel = ngraph.constant(dtype_weights)
    reshape_2_node = ngraph.reshape(relu_node, reshape_2_kernel, True)

    # matmul 2
    matmul_2_kernel_shape, matmul_2_kernel_length = shape_and_length([10, 500])
    matmul_2_kernel = ngraph.constant(
        weights[weights_offset:weights_offset + matmul_2_kernel_length].reshape(matmul_2_kernel_shape),
    )
    weights_offset += matmul_2_kernel_length
    matmul_2_node = ngraph.matmul(reshape_2_node, matmul_2_kernel, False, True)

    # add 4
    add_4_kernel_shape, add_4_kernel_length = shape_and_length([1, 10])
    add_4_kernel = ngraph.constant(
        weights[weights_offset:weights_offset + add_4_kernel_length].reshape(add_4_kernel_shape),
    )
    weights_offset += add_4_kernel_length
    add_4_node = ngraph.add(matmul_2_node, add_4_kernel)

    # softmax
    softmax_axis = 1
    softmax_node = ngraph.softmax(add_4_node, softmax_axis)

    # result
    result_node = ngraph.result(softmax_node)

    return ngraph.impl.Function(result_node, [param_node], 'lenet')
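# Usage sketch (an assumption, not part of the original sample): wrap the LeNet
# nGraph function in an IENetwork through the Function.to_capsule() pattern used
# by the tests above and load it on CPU. `lenet_args` is a hypothetical stand-in
# for the parsed command-line arguments; args.model must point to the raw float32
# weights file the function reads.
from openvino.inference_engine import IECore, IENetwork

def load_lenet_on_cpu(lenet_args: argparse.Namespace):
    func = create_ngraph_function(lenet_args)
    net = IENetwork(ngraph.impl.Function.to_capsule(func))
    ie = IECore()
    # compile the network for CPU; the returned executable network can create infer requests
    exec_net = ie.load_network(network=net, device_name='CPU')
    return exec_net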