import glob
import math
import os
from pathlib import Path

import numpy
from onnx import TensorProto, numpy_helper


def post_processing(outputs_path, outputs_path_other):
    """Compare dumped node outputs between two runs, e.g. fp16 vs fp32."""
    record = {}
    if_close = {}

    for filename in glob.glob(os.path.join(outputs_path, '*.tensorproto')):
        filename_other = os.path.join(outputs_path_other, Path(filename).name)
        if not os.path.exists(filename_other):
            continue
        with open(filename, 'rb') as f:
            tensor = TensorProto()
            tensor.ParseFromString(f.read())
            array = numpy_helper.to_array(tensor)
        with open(filename_other, 'rb') as f:
            tensor_other = TensorProto()
            tensor_other.ParseFromString(f.read())
            array_other = numpy_helper.to_array(tensor_other)
        if array_other.size == 0:
            continue
        # Mean relative difference; the epsilon avoids division by zero.
        diff = numpy.average(
            numpy.abs(array_other - array) / (numpy.abs(array_other) + 1e-6))
        if math.isnan(diff):
            continue
        node_name = Path(filename).name.split(".")[0]
        record[node_name] = diff
        if_close[node_name] = numpy.allclose(
            array, array_other, rtol=1e-04, atol=1e-04)

    results = ["Node\tDiff\tClose"]
    for k, v in sorted(record.items(), key=lambda x: x[1], reverse=True):
        results.append(f"{k}\t{v}\t{if_close[k]}")
    for line in results:
        print(line)
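# A minimal usage sketch for post_processing, assuming the expected layout:
# one .tensorproto file per node output, with matching file names in both
# folders. dump_tensor is a hypothetical helper, shown only to illustrate
# the serialization that post_processing reads back.
def dump_tensor(folder, node_name, array):
    os.makedirs(folder, exist_ok=True)
    tensor = numpy_helper.from_array(array, name=node_name)
    with open(os.path.join(folder, node_name + ".tensorproto"), "wb") as f:
        f.write(tensor.SerializeToString())

dump_tensor("dump_fp32", "conv1", numpy.ones((2, 3), dtype=numpy.float32))
dump_tensor("dump_fp16", "conv1", numpy.ones((2, 3), dtype=numpy.float32) + 1e-5)
post_processing("dump_fp32", "dump_fp16")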
import os
import tarfile

from onnx import TensorProto, numpy_helper


def get_test_files(name):
    """Extract the tar file and return the model path and input/output data."""
    # download, URLS and CURR_PATH come from the surrounding test module.
    tar_name = download(URLS.get(name), dirname=CURR_PATH.__str__())
    # extract tar file
    tar_path = os.path.join(CURR_PATH, tar_name)
    tar = tarfile.open(tar_path.__str__(), "r:*")
    tar.extractall(path=CURR_PATH.__str__())
    tar.close()

    data_dir = os.path.join(CURR_PATH, name)
    model_path = os.path.join(data_dir, 'model.onnx')

    inputs = []
    outputs = []
    # get test files
    for test_file in os.listdir(data_dir):
        case_dir = os.path.join(data_dir, test_file)
        # skip the non-dir files
        if not os.path.isdir(case_dir):
            continue
        input_file = os.path.join(case_dir, 'input_0.pb')
        input_tensor = TensorProto()
        with open(input_file, 'rb') as proto_file:
            input_tensor.ParseFromString(proto_file.read())
        inputs.append(numpy_helper.to_array(input_tensor))

        output_tensor = TensorProto()
        output_file = os.path.join(case_dir, 'output_0.pb')
        with open(output_file, 'rb') as proto_file:
            output_tensor.ParseFromString(proto_file.read())
        outputs.append(numpy_helper.to_array(output_tensor))

    return model_path, inputs, outputs
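# A hedged usage sketch: run the extracted model and compare against the
# recorded outputs. onnxruntime is an assumption (any ONNX-capable runtime
# works), and "bvlc_googlenet" is a hypothetical test-case name.
import numpy
import onnxruntime

model_path, inputs, outputs = get_test_files("bvlc_googlenet")
sess = onnxruntime.InferenceSession(model_path, providers=["CPUExecutionProvider"])
input_name = sess.get_inputs()[0].name
for x, expected in zip(inputs, outputs):
    got = sess.run(None, {input_name: x})[0]
    numpy.testing.assert_allclose(got, expected, rtol=1e-3, atol=1e-5)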
import collections.abc

from onnx import NodeProto, TensorProto


@classmethod
def caffe2_op_to_onnx_node(cls, op_def, shapes):
    # C is Caffe2's C extension module, imported in the surrounding file.
    if C.support_onnx_export(op_def.type):
        shape_list = list(shapes.values())
        node_strs, tensor_strs = C.export_to_onnx(
            op_def.SerializeToString(), shapes)
        nodes = []
        for s in node_strs:
            node = NodeProto()
            node.ParseFromString(s)
            nodes.append(node)
        const_tensors = []
        for s in tensor_strs:
            tensor = TensorProto()
            tensor.ParseFromString(s)
            const_tensors.append(tensor)
        return nodes, const_tensors
    elif op_def.type in cls._special_operators:
        translator = getattr(cls, cls._special_operators[op_def.type])
    else:
        translator = cls._common_caffe2_op_to_onnx_node
    nodes = translator(op_def, shapes)
    const_tensors = []
    if isinstance(nodes, tuple):
        nodes, const_tensors = nodes
    # collections.Iterable was removed in Python 3.10; use collections.abc.
    if not isinstance(nodes, collections.abc.Iterable):
        nodes = [nodes]
    return nodes, const_tensors
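# The ParseFromString calls above are a plain protobuf round-trip across the
# C++/Python boundary. A self-contained sketch of the same pattern:
from onnx import helper

node = helper.make_node("Relu", inputs=["X"], outputs=["Y"])
data = node.SerializeToString()   # bytes, as returned by the C extension
parsed = NodeProto()
parsed.ParseFromString(data)      # reconstruct the NodeProto from bytes
assert parsed.op_type == "Relu"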
from onnx import TensorProto
from onnx.numpy_helper import to_array


def from_bytes(b):
    """
    Retrieves an array from bytes then protobuf.

    @param      b       bytes
    @return             array

    .. exref::
        :title: Converts bytes into an array (serialization)

        Useful to deserialize.

        .. runpython::
            :showcode:

            import numpy
            from mlprodict.onnxrt.onnx2py_helper import to_bytes, from_bytes

            data = numpy.array([[0, 1], [2, 3], [4, 5]], dtype=numpy.float32)
            pb = to_bytes(data)
            data2 = from_bytes(pb)
            print(data2)
    """
    if isinstance(b, bytes):
        pb = TensorProto()
        pb.ParseFromString(b)
    else:
        pb = b
    return to_array(pb)
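# For context, a minimal sketch of the inverse direction (mlprodict's actual
# to_bytes may differ; the hypothetical name avoids clashing with it):
from onnx.numpy_helper import from_array

def to_bytes_sketch(array):
    """Serialize a numpy array into TensorProto bytes."""
    return from_array(array).SerializeToString()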
import os
from collections import OrderedDict

from onnx import TensorProto, numpy_helper


def load_data(folder):
    """
    Restores protobuf data stored in a folder.

    :param folder: folder
    :return: dictionary
    """
    res = OrderedDict()
    res['in'] = OrderedDict()
    res['out'] = OrderedDict()
    files = os.listdir(folder)
    for name in files:
        noext, ext = os.path.splitext(name)
        if ext == '.pb':
            data = TensorProto()
            with open(os.path.join(folder, name), 'rb') as f:
                data.ParseFromString(f.read())
            if noext.startswith('input'):
                res['in'][noext] = numpy_helper.to_array(data)
            elif noext.startswith('output'):
                res['out'][noext] = numpy_helper.to_array(data)
            else:
                raise ValueError(  # pragma: no cover
                    "Unable to guess anything about %r." % noext)
    return res
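# Usage sketch: write one input/output pair in the layout load_data expects,
# then reload it. The folder name and values are illustrative only.
import numpy
from onnx.numpy_helper import from_array

os.makedirs("test_data_set_0", exist_ok=True)
for fname, arr in [("input_0.pb", numpy.arange(4, dtype=numpy.float32)),
                   ("output_0.pb", numpy.ones(4, dtype=numpy.float32))]:
    with open(os.path.join("test_data_set_0", fname), "wb") as f:
        f.write(from_array(arr).SerializeToString())

data = load_data("test_data_set_0")
print(data['in']['input_0'], data['out']['output_0'])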
import numpy as np
from onnx import TensorProto, helper, numpy_helper


def create_onnx_components():
    """Build the basic ONNX components (node, graph, model) and see how they
    map onto the definitions in the onnx.proto file.
    Reference: https://github.com/onnx/onnx/blob/master/onnx/examples/Protobufs.ipynb
    """
    # ------ Create attributes: just pass a description and a value ------
    arg1 = helper.make_attribute("this is INT", 64)
    arg2 = helper.make_attribute("this is float/1", 3.14)
    arg3 = helper.make_attribute("this is STRING", "helloworld")
    arg4 = helper.make_attribute("this is INTS", [1, 2, 3, 4])

    # ------ Create a TensorProto ------
    # make_tensor_value_info needs a name, a dtype and a shape; note it
    # returns a ValueInfoProto describing a tensor, not a TensorProto.
    tensor0 = helper.make_tensor_value_info("T0", TensorProto.FLOAT, [2, 3])
    array1 = np.array([[1, 2, 3], [4, 5, 6]])
    tensor1 = numpy_helper.from_array(array1)   # TensorProto from numpy
    with open('ts.pb', 'wb') as f:
        f.write(tensor1.SerializeToString())    # save the TensorProto
    tensor2 = TensorProto()
    with open('ts.pb', 'rb') as f:
        tensor2.ParseFromString(f.read())       # read the TensorProto back

    # ------ Create nodes ------
    node1 = helper.make_node("Relu", ["X"], ["Y"])  # op_type="Relu"
    # make_node takes inputs and outputs; extra kwargs become attributes.
    node2 = helper.make_node("Conv", ["X", "W"], ["Y"], kernel=3, stride=1, pad=1)
    print(node2)
    print(helper.printable_node(node2))  # the familiar ONNX text form: %Y = Conv[...](%X, %W)

    # ------ Create a graph ------
    graph1 = helper.make_graph(
        [
            helper.make_node("FC", ["X", "W1", "B1"], ["H1"]),
            helper.make_node("Relu", ["H1"], ["R1"]),
            helper.make_node("FC", ["R1", "W2", "B2"], ["Y"]),
        ],
        "MLP",
        [
            helper.make_tensor_value_info('X',  TensorProto.FLOAT, [1]),
            helper.make_tensor_value_info('W1', TensorProto.FLOAT, [1]),
            helper.make_tensor_value_info('B1', TensorProto.FLOAT, [1]),
            helper.make_tensor_value_info('W2', TensorProto.FLOAT, [1]),
            helper.make_tensor_value_info('B2', TensorProto.FLOAT, [1]),
        ],
        [
            helper.make_tensor_value_info('Y', TensorProto.FLOAT, [1]),
        ])
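# The docstring mentions a model, but the snippet stops at the graph. A
# minimal sketch of the missing last step, assuming graph1 is made available
# (e.g. returned by create_onnx_components):
model = helper.make_model(graph1, producer_name="onnx-example")
print(helper.printable_graph(model.graph))  # textual form of the graph
# Note: "FC" is not a standard ONNX operator, so onnx.checker.check_model
# would reject this toy model; it only illustrates the proto structure.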
from onnx import TensorProto


def load_checkpoint_to_model(path_to_checkpoint, model):
    """Loads the checkpoint to an onnx inference model."""
    # Load the parameters from the checkpoint;
    # _internal_load_checkpoint is defined in the surrounding module and
    # yields serialized TensorProto bytes, one per parameter.
    parameters = _internal_load_checkpoint(path_to_checkpoint)
    parameters_dict = {}
    for param in parameters:
        param_proto = TensorProto()
        param_proto.ParseFromString(param)
        parameters_dict[param_proto.name] = param_proto
    # Overwrite each graph initializer in place with the checkpoint value.
    for initializer in model.graph.initializer:
        initializer.CopyFrom(parameters_dict[initializer.name])
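# A self-contained sketch of the CopyFrom pattern above: replace a graph
# initializer with a freshly parsed TensorProto. Names and values are
# illustrative only.
import numpy
from onnx import helper, numpy_helper

init = numpy_helper.from_array(numpy.zeros(2, dtype=numpy.float32), name="W")
graph = helper.make_graph(
    [helper.make_node("Identity", ["W"], ["Y"])], "g", [],
    [helper.make_tensor_value_info("Y", TensorProto.FLOAT, [2])],
    initializer=[init])

new_bytes = numpy_helper.from_array(
    numpy.ones(2, dtype=numpy.float32), name="W").SerializeToString()
parsed = TensorProto()
parsed.ParseFromString(new_bytes)
graph.initializer[0].CopyFrom(parsed)  # in-place replacement, names match
print(numpy_helper.to_array(graph.initializer[0]))  # [1. 1.]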
from onnx import TensorProto, helper


def add_onnx_graph(scope, operator, container, onx):
    """
    Adds a whole ONNX graph to an existing one following
    :epkg:`skl2onnx` API assuming this ONNX graph implements an
    `operator <http://onnx.ai/sklearn-onnx/api_summary.html?
    highlight=operator#skl2onnx.common._topology.Operator>`_.

    :param scope: scope (to get unique names)
    :param operator: operator
    :param container: container
    :param onx: ONNX graph
    """
    # _clean_*_name and _copy_inout are helpers from the surrounding module.
    graph = onx.graph
    name_mapping = {}
    node_mapping = {}
    for node in graph.node:
        name = node.name
        if name is not None:
            node_mapping[node.name] = _clean_initializer_name(node.name, scope)
        for o in node.input:
            name_mapping[o] = _clean_variable_name(o, scope)
        for o in node.output:
            name_mapping[o] = _clean_variable_name(o, scope)
    for o in graph.initializer:
        name_mapping[o.name] = _clean_operator_name(o.name, scope)

    inputs = [_copy_inout(o, scope, name_mapping[o.name]) for o in graph.input]
    outputs = [_copy_inout(o, scope, name_mapping[o.name]) for o in graph.output]

    # Bridge the operator's inputs/outputs to the renamed graph with Identity nodes.
    for inp, to in zip(operator.inputs, inputs):
        n = helper.make_node('Identity', [inp.onnx_name], [to.name],
                             name=_clean_operator_name('Identity', scope))
        container.nodes.append(n)

    for inp, to in zip(outputs, operator.outputs):
        n = helper.make_node('Identity', [inp.name], [to.onnx_name],
                             name=_clean_operator_name('Identity', scope))
        container.nodes.append(n)

    for node in graph.node:
        n = helper.make_node(
            node.op_type,
            [name_mapping[o] for o in node.input],
            [name_mapping[o] for o in node.output],
            name=node_mapping[node.name] if node.name else None,
            domain=node.domain if node.domain else None)
        n.attribute.extend(node.attribute)  # pylint: disable=E1101
        container.nodes.append(n)

    for o in graph.initializer:
        # Serialize then parse to get an independent copy before renaming,
        # leaving the source graph untouched.
        as_str = o.SerializeToString()
        tensor = TensorProto()
        tensor.ParseFromString(as_str)
        tensor.name = name_mapping[o.name]
        container.initializers.append(tensor)

    # opset
    for oimp in onx.opset_import:
        container.node_domain_version_pair_sets.add(
            (oimp.domain, oimp.version))
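# Why serialize then parse instead of renaming directly? The round-trip
# yields a deep copy, so renaming the copy does not mutate the source graph.
# A self-contained demonstration:
import numpy
from onnx import numpy_helper

original = numpy_helper.from_array(numpy.zeros(3, dtype=numpy.float32), name="w")
copy = TensorProto()
copy.ParseFromString(original.SerializeToString())
copy.name = "renamed_w"
print(original.name, copy.name)  # w renamed_w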
import os

import mxnet as mx
import numpy as np
import onnx
from mxnet.contrib import onnx as onnx_mxnet
from onnx import TensorProto, numpy_helper

curr_dir = os.path.dirname(__file__)
sym, arg_params, aux_params = onnx_mxnet.import_model(curr_dir + "/model.onnx")

input_tensor = TensorProto()
with open(curr_dir + "/input_0.pb", 'rb') as proto_file:
    input_tensor.ParseFromString(proto_file.read())
input_array = numpy_helper.to_array(input_tensor)
x = mx.nd.array(input_array)

mod = mx.mod.Module(symbol=sym, data_names=['0'], context=mx.cpu(),
                    label_names=None)
mod.bind(for_training=False, data_shapes=[('0', (2, 4, 6, 6))],
         label_shapes=None)
mod.set_params(arg_params=arg_params, aux_params=aux_params)
mod.forward(mx.io.DataBatch([x]))
result = mod.get_outputs()[0].asnumpy()
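# A natural follow-up sketch, assuming the test case also ships an
# output_0.pb next to input_0.pb (as ONNX test archives usually do):
output_tensor = TensorProto()
with open(curr_dir + "/output_0.pb", 'rb') as proto_file:
    output_tensor.ParseFromString(proto_file.read())
expected = numpy_helper.to_array(output_tensor)
np.testing.assert_allclose(result, expected, rtol=1e-3, atol=1e-5)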