Example #1
    def __call__(self, *input_values: NumericData) -> List[NumericData]:
        """Run computation on input values and return result."""
        input_values = [np.array(input_value) for input_value in input_values]
        input_shapes = [get_shape(input_value) for input_value in input_values]

        param_names = [param.friendly_name for param in self.parameters]

        if self.network_cache.get(str(input_shapes)) is None:
            capsule = Function.to_capsule(self.function)
            cnn_network = IENetwork(capsule)
            if self.function.is_dynamic():
                cnn_network.reshape(dict(zip(param_names, input_shapes)))
            # Convert unsupported inputs of the network
            _convert_inputs(cnn_network)
            self.network_cache[str(input_shapes)] = cnn_network
        else:
            cnn_network = self.network_cache[str(input_shapes)]

        executable_network = self.runtime.backend.load_network(
            cnn_network, self.runtime.backend_name)

        # Input validation
        if len(input_values) != len(self.parameters):
            raise UserInputError("Expected %s parameters, received %s.",
                                 len(self.parameters), len(input_values))
        for parameter, input in zip(self.parameters, input_values):
            parameter_shape = parameter.get_output_partial_shape(0)
            input_shape = PartialShape(input.shape)
            if len(input.shape) > 0 and not parameter_shape.compatible(
                    input_shape):
                raise UserInputError(
                    "Provided tensor's shape: %s does not match the expected: %s.",
                    input_shape,
                    parameter_shape,
                )

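        # Run synchronous inference on the first infer request, feeding the
        # inputs by parameter friendly name.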
        request = executable_network.requests[0]
        request.infer(dict(zip(param_names, input_values)))

        # Order the output blobs to match the results of the nGraph Function
        result_buffers = [
            self.__get_ie_output_blob_buffer(request.output_blobs, result)
            for result in self.results
        ]

        # Since OV overwrites the result data type, convert the results back to their original dtypes.
        original_dtypes = [
            get_dtype(result.get_output_element_type(0))
            for result in self.results
        ]
        converted_buffers = [
            buffer.astype(original_dtype)
            for buffer, original_dtype in zip(result_buffers, original_dtypes)
        ]
        return converted_buffers
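
A minimal usage sketch for a Computation exposing this __call__, assuming the legacy ngraph Python API (ng.parameter, ng.runtime, Runtime.computation); the shapes, names, and backend below are illustrative only:

import numpy as np
import ngraph as ng

parameter_a = ng.parameter([2, 2], dtype=np.float32, name="A")
parameter_b = ng.parameter([2, 2], dtype=np.float32, name="B")
model = parameter_a + parameter_b

runtime = ng.runtime(backend_name="CPU")  # wraps the Inference Engine backend
computation = runtime.computation(model, parameter_a, parameter_b)

# __call__ converts the inputs to numpy arrays, validates their shapes against
# the parameters, runs inference, and returns the result buffers.
result = computation(np.ones((2, 2), np.float32), np.full((2, 2), 2.0, np.float32))
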
Example #2
    def __call__(self, *input_values: NumericData) -> List[NumericData]:
        """Run computation on input values and return result."""
        input_values = [np.array(input_value) for input_value in input_values]
        input_shapes = [get_shape(input_value) for input_value in input_values]

        if self.network_cache.get(str(input_shapes)) is None:
            capsule = Function.to_capsule(self.function)
            cnn_network = IENetwork(capsule)
            if self.function.is_dynamic():
                param_names = [
                    param.friendly_name for param in self.parameters
                ]
                cnn_network.reshape(dict(zip(param_names, input_shapes)))
            self.network_cache[str(input_shapes)] = cnn_network
        else:
            cnn_network = self.network_cache[str(input_shapes)]

        executable_network = self.runtime.backend.load_network(
            cnn_network, self.runtime.backend_name)

        # Input validation
        if len(input_values) != len(self.parameters):
            raise UserInputError("Expected %s parameters, received %s.",
                                 len(self.parameters), len(input_values))
        for parameter, input in zip(self.parameters, input_values):
            parameter_shape = parameter.get_output_partial_shape(0)
            input_shape = PartialShape(input.shape)
            if len(input.shape) > 0 and not parameter_shape.compatible(
                    input_shape):
                raise UserInputError(
                    "Provided tensor's shape: %s does not match the expected: %s.",
                    input_shape,
                    parameter_shape,
                )

        request = executable_network.requests[0]

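        # Feed inputs in the order reported by the request's private
        # _inputs_list and run synchronous inference; unlike Example #1, the
        # raw blob buffers are returned without reordering or dtype conversion.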
        request.infer(dict(zip(request._inputs_list, input_values)))
        return [blob.buffer for blob in request.output_blobs.values()]
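
Both examples cache the compiled IENetwork under the stringified list of input shapes, so a dynamic function is reshaped and converted only once per distinct shape combination (the network is still loaded onto the backend on every call). A minimal, self-contained sketch of that caching pattern, using plain dictionaries only; build_network is a hypothetical stand-in for the reshape/convert steps:

from typing import Callable, Dict, List, Tuple

def get_or_build(cache: Dict[str, object],
                 input_shapes: List[Tuple[int, ...]],
                 build_network: Callable[[List[Tuple[int, ...]]], object]) -> object:
    """Return the network cached for these shapes, building it on first use."""
    key = str(input_shapes)
    if cache.get(key) is None:
        cache[key] = build_network(input_shapes)  # reshape/convert happens only here
    return cache[key]

# The second call with identical shapes reuses the cached object.
cache: Dict[str, object] = {}
net_a = get_or_build(cache, [(1, 3, 224, 224)], lambda shapes: object())
net_b = get_or_build(cache, [(1, 3, 224, 224)], lambda shapes: object())
assert net_a is net_b
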