def load_ov_model(path, device="CPU"):
    """Load an OpenVINO IR model from an .xml file and compile it.

    Args:
        path (str): Path to the OpenVINO .xml file. The matching .bin
            weights file is expected alongside it (same stem).
        device (str): Device to run inference on, e.g. "CPU", "GPU" or
            "MULTI:CPU,GPU"; "cuda" is accepted as an alias for "GPU".
            Case-insensitive. Defaults to "CPU".

    Returns:
        tuple: ``(compiled_model, input_layer, output_layer)`` where
            ``compiled_model`` is the compiled OpenVINO model and
            ``input_layer`` / ``output_layer`` are the lists of the
            model's input/output ports (``compiled_model.inputs`` /
            ``compiled_model.outputs``).

    Raises:
        ImportError: If the OpenVINO runtime is not installed.
    """
    try:
        from openvino import runtime as ov
    except ImportError:
        raise ImportError(
            "OpenVINO inference engine is not configured correctly.")
    core = ov.Core()
    # Normalize once so the cache-dir branch below and compile_model agree
    # even for lower-case input such as "gpu" or "cuda" (the original
    # compared case-sensitively and silently skipped caching for "gpu").
    device = device.upper()
    if device == "CUDA":
        device = "GPU"
    if device == "GPU":
        # Cache compiled blobs next to the IR to speed up repeated loads.
        core.set_property(
            {"CACHE_DIR": os.path.dirname(os.path.abspath(path))})
    # Swap only the extension: str.replace("xml", "bin") would also
    # rewrite an "xml" substring elsewhere in the path (e.g. a directory
    # named "xml_models"), pointing at a nonexistent weights file.
    weights = os.path.splitext(path)[0] + ".bin"
    model = core.read_model(model=path, weights=weights)
    compiled_model = core.compile_model(model=model, device_name=device)
    input_layer = compiled_model.inputs
    output_layer = compiled_model.outputs
    return compiled_model, input_layer, output_layer
# Copyright (C) 2018-2022 Intel Corporation # SPDX-License-Identifier: Apache-2.0 import numpy as np #! [import] import openvino.runtime as ov #! [import] #! [reshape_undefined] core = ov.Core() model = core.read_model("model.xml") # Set one static dimension (= 1) and another dynamic dimension (= Dimension()) model.reshape([1, ov.Dimension()]) # The same as above model.reshape([1, -1]) # The same as above model.reshape("1, ?") # Or set both dimensions as dynamic if both are going to be changed dynamically model.reshape([ov.Dimension(), ov.Dimension()]) # The same as above model.reshape([-1, -1]) # The same as above model.reshape("?, ?") #! [reshape_undefined]