def _(
    inputs: Union[np.number, float, int],
    request: InferRequestBase,
    key: Union[str, int, ConstOutput] = None,
) -> None:
    """Set a Python/NumPy scalar value as a zero-dimensional Tensor on the request.

    :param inputs: Scalar value (NumPy scalar, float or int).
    :param request: Inference request that receives the tensor.
    :param key: Optional port identifier (name, index or ConstOutput).
    """
    # np.array(inputs) creates a 0-d array that owns its data and keeps the
    # scalar's dtype (np.number dtypes are preserved; float/int map to the
    # NumPy defaults, exactly as type(inputs) did before). The previous
    # np.ndarray(shape, dtype, buffer) form created a *view* over a temporary
    # buffer, risking a dangling reference if Tensor shares the memory.
    set_scalar_tensor(request, Tensor(np.array(inputs)), key)
def normalize_inputs(py_dict: dict, py_types: dict) -> dict:
    """Normalize a dictionary of inputs to Tensors.

    Values that are not already Tensors are converted to NumPy arrays of the
    matching port's dtype and wrapped. The dictionary is updated in place and
    also returned.

    :raises TypeError: if a key is neither str nor int.
    :raises KeyError: if no port type is registered for a key.
    """
    for name in list(py_dict):
        if not isinstance(name, (str, int)):
            raise TypeError(
                "Incompatible key type for tensor named: {}".format(name))
        try:
            port_type = py_types[name]
        except KeyError:
            raise KeyError("Port for tensor named {} was not found!".format(name))
        value = py_dict[name]
        if not isinstance(value, Tensor):
            value = Tensor(np.array(value, get_dtype(port_type)))
        py_dict[name] = value
    return py_dict
def convert_dict_items(inputs: dict, py_types: dict) -> dict:
    """Helper function converting dictionary items to Tensors.

    Builds and returns a new dictionary so that the caller's ``inputs``
    mapping is never overwritten with Tensors.

    :param inputs: Mapping of port identifiers (str, int or ConstOutput) to values.
    :param py_types: Mapping of the same identifiers to port element types.
    :raises TypeError: if a key has an unsupported type.
    :raises KeyError: if a key has no corresponding port type.
    :return: New dictionary with every value wrapped in a Tensor.
    """
    # Create new temporary dictionary.
    # new_inputs will be used to transfer data to inference calls,
    # ensuring that original inputs are not overwritten with Tensors.
    new_inputs = {}
    for k, val in inputs.items():
        if not isinstance(k, (str, int, ConstOutput)):
            raise TypeError("Incompatible key type for tensor: {}".format(k))
        try:
            ov_type = py_types[k]
        except KeyError:
            raise KeyError("Port for tensor {} was not found!".format(k))
        # Convert numpy arrays or copy Tensors.
        # np.asarray skips the copy when dtype already matches, but -- unlike
        # np.array(..., copy=False) -- it does not raise under NumPy 2.0 when
        # a dtype conversion (and therefore a copy) is actually required.
        new_inputs[k] = (val if isinstance(val, Tensor) else Tensor(
            np.asarray(val, get_dtype(ov_type))))
    return new_inputs
def normalize_inputs(inputs: Union[dict, list], py_types: dict) -> dict:
    """Normalize a list or dictionary of inputs to Tensors.

    A list is first converted to a dict keyed by position. Non-Tensor values
    are converted to NumPy arrays of the matching port's dtype and wrapped in
    Tensors. Dict inputs are updated in place; the resulting dict is returned.

    :raises TypeError: if a key is neither str nor int.
    :raises KeyError: if no port type is registered for a key.
    """
    if isinstance(inputs, list):
        # Key list entries by their position to get a uniform dict form.
        inputs = dict(enumerate(inputs))
    for name in list(inputs):
        if not isinstance(name, (str, int)):
            raise TypeError("Incompatible key type for tensor named: {}".format(name))
        try:
            port_type = py_types[name]
        except KeyError:
            raise KeyError("Port for tensor named {} was not found!".format(name))
        value = inputs[name]
        if not isinstance(value, Tensor):
            value = Tensor(np.array(value, get_dtype(port_type)))
        inputs[name] = value
    return inputs
def _(
    inputs: np.ndarray,
    request: InferRequestBase,
    key: Union[str, int, ConstOutput] = None,
) -> None:
    """Copy a NumPy array into the request tensor selected by ``key``.

    A zero-rank array is treated as a scalar and dispatched to
    ``set_scalar_tensor``; otherwise the target tensor is looked up by key,
    reshaped on mismatch, and filled with the array's data.

    :raises TypeError: if ``key`` is not None, int, str or ConstOutput.
    """
    # An empty shape tuple means a 0-d array, i.e. a scalar value.
    if not inputs.shape:
        set_scalar_tensor(request, Tensor(inputs), key)
        return
    if key is None:
        target = request.get_input_tensor()
    elif isinstance(key, int):
        target = request.get_input_tensor(key)
    elif isinstance(key, (str, ConstOutput)):
        target = request.get_tensor(key)
    else:
        raise TypeError(
            "Unsupported key type: {} for Tensor under key: {}".format(
                type(key), key))
    # Resize the target tensor if its shape does not match the input.
    if target.shape != inputs.shape:
        target.shape = inputs.shape
    # Element-wise copy; type should be up/down-casted automatically.
    target.data[:] = inputs[:]
def tensor_from_file(path: str) -> Tensor:
    """Create Tensor from file.

    The file's raw bytes are read as-is with dtype of uint8.

    :param path: Path to the file to read.
    :return: One-dimensional uint8 Tensor holding the file contents.
    """
    return Tensor(np.fromfile(path, dtype=np.uint8))
def normalize_inputs(py_dict: dict) -> dict:
    """Wrap every NumPy array value of ``py_dict`` in a Tensor.

    Non-array values are passed through unchanged; a new dict is returned
    and the input dictionary is left untouched.
    """
    normalized = {}
    for name, value in py_dict.items():
        if isinstance(value, np.ndarray):
            value = Tensor(value)
        normalized[name] = value
    return normalized
def tensor_from_file(path: str) -> Tensor:
    """Create a Tensor from a file; the data will be read with dtype of uint8."""
    return Tensor(np.fromfile(path, dtype=np.uint8))