def test_CreateIENetworkFromNGraph():
    """Build a minimal Parameter->Relu nGraph Function and check it converts to an IENetwork."""
    element_type = Type.f32
    param = Parameter(element_type, Shape([1, 3, 22, 22]))
    relu = Relu(param)
    func = Function([relu], [param], 'test')
    caps = Function.to_capsule(func)
    cnnNetwork = IENetwork(caps)
    # `is not None` is the idiomatic (identity-based) None check, not `!= None`
    assert cnnNetwork is not None
    assert cnnNetwork.get_function() is not None
    # Parameter + Relu are expected to map to two IE layers
    assert len(cnnNetwork.layers) == 2
def get_test_cnnnetwork():
    """Return a small IENetwork built from a Parameter->Relu nGraph Function."""
    element_type = Type.f32
    param = Parameter(element_type, Shape([1, 3, 22, 22]))
    relu = ng.relu(param)
    func = Function([relu], [param], 'test')
    caps = Function.to_capsule(func)
    cnnNetwork = IENetwork(caps)
    # idiomatic None check (`is not None`) instead of `!= None`
    assert cnnNetwork is not None
    return cnnNetwork
def get_test_cnnnetwork():
    """Return a small IENetwork built from a Parameter->Relu->Result nGraph Function."""
    param = ng.parameter(Shape([1, 3, 22, 22]), name="parameter")
    relu = ng.relu(param)
    res = ng.result(relu, name='result')
    func = Function([res], [param], 'test')
    caps = Function.to_capsule(func)
    cnnNetwork = IENetwork(caps)
    # idiomatic None check (`is not None`) instead of `!= None`
    assert cnnNetwork is not None
    return cnnNetwork
def __call__(self, *input_values: NumericData) -> List[NumericData]:
    """Run computation on input values and return result.

    Caches the converted IENetwork per set of input shapes, validates input
    ranks/shapes against the nGraph parameters, runs a synchronous inference
    request and returns output buffers cast back to the original nG dtypes.
    """
    input_values = [np.array(input_value) for input_value in input_values]
    input_shapes = [get_shape(input_value) for input_value in input_values]
    param_names = [param.friendly_name for param in self.parameters]

    # Hoist the cache key: previously `str(input_shapes)` was rebuilt for
    # lookup and store separately.
    cache_key = str(input_shapes)
    if self.network_cache.get(cache_key) is None:
        capsule = Function.to_capsule(self.function)
        cnn_network = IENetwork(capsule)
        if self.function.is_dynamic():
            cnn_network.reshape(dict(zip(param_names, input_shapes)))
        # Convert unsupported inputs of the network
        _convert_inputs(cnn_network)
        self.network_cache[cache_key] = cnn_network
    else:
        cnn_network = self.network_cache[cache_key]

    executable_network = self.runtime.backend.load_network(
        cnn_network, self.runtime.backend_name)

    # Input validation
    if len(input_values) != len(self.parameters):
        raise UserInputError("Expected %s parameters, received %s.",
                             len(self.parameters), len(input_values))
    # `input_value` instead of `input` — avoid shadowing the builtin
    for parameter, input_value in zip(self.parameters, input_values):
        parameter_shape = parameter.get_output_partial_shape(0)
        input_shape = PartialShape(input_value.shape)
        # Scalars (rank 0) skip the compatibility check
        if len(input_value.shape) > 0 and not parameter_shape.compatible(input_shape):
            raise UserInputError(
                "Provided tensor's shape: %s does not match the expected: %s.",
                input_shape,
                parameter_shape,
            )

    request = executable_network.requests[0]
    request.infer(dict(zip(param_names, input_values)))

    # Set order of output blobs compatible with nG Function
    result_buffers = [
        self.__get_ie_output_blob_buffer(request.output_blobs, result)
        for result in self.results
    ]

    # Since OV overwrite result data type we have to convert results to the original one.
    original_dtypes = [
        get_dtype(result.get_output_element_type(0)) for result in self.results
    ]
    converted_buffers = [
        buffer.astype(original_dtype)
        for buffer, original_dtype in zip(result_buffers, original_dtypes)
    ]
    return converted_buffers
def __init__(self, runtime: Runtime, ng_function: Function) -> None:
    """Compile *ng_function* on the runtime's backend and keep the pieces needed for inference."""
    self.runtime = runtime
    self.function = ng_function
    self.parameters = ng_function.get_parameters()
    self.results = ng_function.get_results()
    # Wrap the nGraph function in an IENetwork via a capsule, then compile it.
    cnn_network = IENetwork(Function.to_capsule(ng_function))
    self.executable_network = runtime.backend.load_network(
        cnn_network, runtime.backend_name)
def test_get_IENetwork_from_nGraph():
    """Check an nGraph Function survives the round trip through IENetwork."""
    element_type = Type.f32
    param = Parameter(element_type, Shape([1, 3, 22, 22]))
    relu = ng.relu(param)
    func = Function([relu], [param], 'test')
    caps = Function.to_capsule(func)
    cnnNetwork = IENetwork(caps)
    assert cnnNetwork is not None
    # Call the conversion once; the original asserted on one call, then
    # converted again and asserted the same thing a second time.
    func2 = ng.function_from_cnn(cnnNetwork)
    assert func2 is not None
def create_function_with_memory(input_shape, data_type):
    """Build a stateful ReadValue/Assign nGraph Function and return it as a capsule.

    :param input_shape: shape for the single input parameter.
    :param data_type: element dtype for the input parameter.
    :return: a PyCapsule wrapping the constructed Function.
    """
    # `Type` was imported here but never used — removed.
    from ngraph.impl import Function
    import ngraph as ng

    input_data = ng.parameter(input_shape, name="input_data", dtype=data_type)
    # ReadValue/Assign pair shares variable id "var_id_667" to model state
    rv = ng.read_value(input_data, "var_id_667")
    add = ng.add(rv, input_data, name="MemoryAdd")
    node = ng.assign(add, "var_id_667")
    res = ng.result(add, "res")
    # Assign node is registered as a sink so it is kept in the graph
    func = Function(results=[res], sinks=[node], parameters=[input_data], name="name")
    caps = Function.to_capsule(func)
    return caps
def test_GetIENetworkFromNGraph():
    """Check the Function can be recovered from an IENetwork via a capsule."""
    element_type = Type.f32
    param = Parameter(element_type, Shape([1, 3, 22, 22]))
    relu = Relu(param)
    func = Function([relu], [param], 'test')
    caps = Function.to_capsule(func)
    cnnNetwork = IENetwork(caps)
    assert cnnNetwork is not None
    # Fetch the capsule once; the original called get_function() twice
    # (once only to assert non-None, then again for the value).
    caps2 = cnnNetwork.get_function()
    assert caps2 is not None
    func2 = Function.from_capsule(caps2)
    assert func2 is not None
def test_offline_api():
    """Apply MOC transformations to a converted network and check the resulting op count."""
    element_type = Type.f32
    param = Parameter(element_type, Shape([1, 3, 22, 22]))
    relu = ng.relu(param)
    func = Function([relu], [param], 'test')
    caps = Function.to_capsule(func)
    cnnNetwork = IENetwork(caps)
    # idiomatic None checks (`is not None`) instead of `!= None`
    assert cnnNetwork is not None
    ApplyMOCTransformations(cnnNetwork, False)
    func2 = ng.function_from_cnn(cnnNetwork)
    assert func2 is not None
    # Parameter, Relu and Result after transformation
    assert len(func2.get_ops()) == 3
def __call__(self, *input_values: NumericData) -> List[NumericData]:
    """Run computation on input values and return result.

    Caches the converted IENetwork per set of input shapes, validates the
    inputs against the nGraph parameters and runs a synchronous inference.
    """
    input_values = [np.array(input_value) for input_value in input_values]
    input_shapes = [get_shape(input_value) for input_value in input_values]

    # Hoist the cache key: previously `str(input_shapes)` was rebuilt for
    # lookup and store separately.
    cache_key = str(input_shapes)
    if self.network_cache.get(cache_key) is None:
        capsule = Function.to_capsule(self.function)
        cnn_network = IENetwork(capsule)
        if self.function.is_dynamic():
            param_names = [param.friendly_name for param in self.parameters]
            cnn_network.reshape(dict(zip(param_names, input_shapes)))
        self.network_cache[cache_key] = cnn_network
    else:
        cnn_network = self.network_cache[cache_key]

    executable_network = self.runtime.backend.load_network(
        cnn_network, self.runtime.backend_name)

    # Input validation
    if len(input_values) != len(self.parameters):
        raise UserInputError("Expected %s parameters, received %s.",
                             len(self.parameters), len(input_values))
    # `input_value` instead of `input` — avoid shadowing the builtin
    for parameter, input_value in zip(self.parameters, input_values):
        parameter_shape = parameter.get_output_partial_shape(0)
        input_shape = PartialShape(input_value.shape)
        # Scalars (rank 0) skip the compatibility check
        if len(input_value.shape) > 0 and not parameter_shape.compatible(input_shape):
            raise UserInputError(
                "Provided tensor's shape: %s does not match the expected: %s.",
                input_shape,
                parameter_shape,
            )

    request = executable_network.requests[0]
    request.infer(dict(zip(request._inputs_list, input_values)))
    return [blob.buffer for blob in request.output_blobs.values()]
def test_runtime_info():
    """Check that rt_info set on a node survives renaming and IENetwork conversion."""
    shape = PartialShape([1, 3, 22, 22])
    dtype = Type.f32
    param = Parameter(dtype, shape)
    relu_node = ng.relu(param)

    # Attach an affinity hint to the node's runtime info, then rename it.
    rt_info = relu_node.get_rt_info()
    rt_info["affinity"] = "test_affinity"
    relu_node.set_friendly_name("testReLU")
    assert rt_info == relu_node.get_rt_info()

    # Wrap in an IENetwork and check the affinity reached the CNN layer.
    ng_function = Function([relu_node], [param], "testFunc")
    cnn_network = IENetwork(Function.to_capsule(ng_function))
    cnn_layer = cnn_network.layers["testReLU"]
    assert cnn_layer is not None
    assert cnn_layer.affinity == "test_affinity"
def main():
    """Build a network from an nGraph function, run inference on the input
    images and print the top-N classification results.

    NOTE(review): statement order and structure reconstructed from the
    collapsed one-line source; control-flow nesting (resize inside the
    shape-mismatch branch, per-image result printing) follows the code as
    written — confirm against the upstream sample if in doubt.
    """
    log.basicConfig(format='[ %(levelname)s ] %(message)s', level=log.INFO, stream=sys.stdout)
    args = build_argparser().parse_args()
    input_images = list_input_images(args.input)

    # Loading network using ngraph function
    ngraph_function = create_ngraph_function(args)
    net = IENetwork(Function.to_capsule(ngraph_function))

    assert len(net.input_info.keys()
               ) == 1, "Sample supports only single input topologies"
    assert len(
        net.outputs) == 1, "Sample supports only single output topologies"

    log.info("Preparing input blobs")
    input_blob = next(iter(net.input_info))
    out_blob = next(iter(net.outputs))
    # One batch entry per input image
    net.batch_size = len(input_images)

    # Read and pre-process input images
    n, c, h, w = net.input_info[input_blob].input_data.shape
    images = np.ndarray(shape=(n, c, h, w))
    for i in range(n):
        image = read_image(input_images[i])
        assert image is not None, log.error(
            f"Can't open an image {input_images[i]}")
        assert len(image.shape) == 2, log.error(
            'Sample supports images with 1 channel only')
        if image.shape[:] != (w, h):
            log.warning(
                f"Image {input_images[i]} is resized from {image.shape[:]} to {(w, h)}"
            )
            image = cv2.resize(image, (w, h))
        images[i] = image
    log.info(f"Batch size is {n}")

    log.info("Creating Inference Engine")
    ie = IECore()

    log.info('Loading model to the device')
    exec_net = ie.load_network(network=net, device_name=args.device.upper())

    # Start sync inference
    log.info('Creating infer request and starting inference')
    res = exec_net.infer(inputs={input_blob: images})

    # Processing results
    log.info("Processing output blob")
    res = res[out_blob]
    log.info(f"Top {args.number_top} results: ")

    # Read labels file if it is provided as argument
    labels_map = None
    if args.labels:
        with open(args.labels, 'r') as f:
            labels_map = [x.split(sep=' ', maxsplit=1)[-1].strip() for x in f]

    classid_str = "classid"
    probability_str = "probability"
    for i, probs in enumerate(res):
        probs = np.squeeze(probs)
        # Indices of the top-N probabilities, highest first
        top_ind = np.argsort(probs)[-args.number_top:][::-1]
        print(f"Image {input_images[i]}\n")
        print(classid_str, probability_str)
        print(f"{'-' * len(classid_str)} {'-' * len(probability_str)}")
        for class_id in top_ind:
            det_label = labels_map[class_id] if labels_map else f"{class_id}"
            label_length = len(det_label)
            # Center the label under the "classid" column header
            space_num_before = (len(classid_str) - label_length) // 2
            space_num_after = len(classid_str) - (space_num_before + label_length) + 2
            print(f"{' ' * space_num_before}{det_label}"
                  f"{' ' * space_num_after}{probs[class_id]:.7f}")
        print("\n")
    log.info('This sample is an API example, for any performance measurements '
             'please use the dedicated benchmark_app tool')
def function_to_cnn(ng_function: Function) -> "IENetwork":
    """Get Inference Engine CNN network from nGraph function.

    :param ng_function: nGraph Function to convert.
    :return: the IENetwork built from the function's capsule.
    """
    # Annotation corrected: the body returns an IENetwork, not a Function.
    capsule = Function.to_capsule(ng_function)
    return IENetwork(capsule)
def read_network(path_to_xml):
    """Load an IR model from *path_to_xml* via the frontend manager and return an IENetwork."""
    frontend = fem.load_by_framework(framework="ir")
    ng_function = frontend.convert(frontend.load(path_to_xml))
    capsule = Function.to_capsule(ng_function)
    return IENetwork(capsule)