def create_floor_net(self, shape, ir_version): """ Tensorflow net IR net Input->Floor => Input->Floor """ # # Create Tensorflow model # import tensorflow as tf tf.compat.v1.reset_default_graph() # Create the graph and model with tf.compat.v1.Session() as sess: shapes = shape.copy() # reshaping if len(shapes) >= 3: shapes.append(shapes.pop(1)) input = tf.compat.v1.placeholder(tf.float32, shapes, 'Input') tf.floor(input, name='Operation') tf.compat.v1.global_variables_initializer() tf_net = sess.graph_def ref_net = None if check_ir_version(10, None, ir_version): nodes_attributes = { 'input': { 'kind': 'op', 'type': 'Parameter' }, 'input_data': { 'shape': shape, 'kind': 'data' }, 'Floor': { 'kind': 'op', 'type': 'Floor' }, 'Floor_data': { 'shape': shape, 'kind': 'data' }, 'result': { 'kind': 'op', 'type': 'Result' } } ref_net = build_graph(nodes_attributes, [('input', 'input_data'), ('input_data', 'Floor'), ('Floor', 'Floor_data'), ('Floor_data', 'result')]) return tf_net, ref_net
def create_swish_net(self, shape, ir_version, use_new_frontend):
    """
        Tensorflow net               IR net

        Input->Swish          =>     Input->Swish
    """

    #
    #   Create Tensorflow model
    #

    import tensorflow as tf

    tf.compat.v1.reset_default_graph()

    # Create the graph and model
    with tf.compat.v1.Session() as sess:
        tf_x_shape = shape.copy()

        tf_x_shape = permute_nchw_to_nhwc(tf_x_shape, use_new_frontend)

        input = tf.compat.v1.placeholder(tf.float32, tf_x_shape, 'Input')
        tf.nn.swish(input)

        tf.compat.v1.global_variables_initializer()
        tf_net = sess.graph_def

    #
    #   Create reference IR net
    #   Please, specify 'type': 'Input' for input node
    #   Moreover, do not forget to validate ALL layer attributes!!!
    #

    ref_net = None

    if check_ir_version(10, None, ir_version) and not use_new_frontend:
        nodes_attributes = {
            'input': {'kind': 'op', 'type': 'Parameter'},
            'input_data': {'shape': shape, 'kind': 'data'},
            'Swish': {'kind': 'op', 'type': 'Swish'},
            'Swish_data': {'shape': shape, 'kind': 'data'},
            'result': {'kind': 'op', 'type': 'Result'}
        }

        ref_net = build_graph(nodes_attributes,
                              [('input', 'input_data'),
                               ('input_data', 'Swish'),
                               ('Swish', 'Swish_data'),
                               ('Swish_data', 'result')])

    return tf_net, ref_net
def create_net_const(self, shape, precision, ir_version): """ ONNX net IR net Input->Concat(+Softplus const)->Output => Input->Concat(+const) """ # # Create ONNX model # import onnx from onnx import helper from onnx import TensorProto import numpy as np concat_axis = 0 output_shape = shape.copy() output_shape[concat_axis] *= 2 input = helper.make_tensor_value_info('input', TensorProto.FLOAT, shape) output = helper.make_tensor_value_info('output', TensorProto.FLOAT, output_shape) constant = np.random.rand(*shape).astype(np.float32) * 255 + 0.5 node_const_def = onnx.helper.make_node( 'Constant', inputs=[], outputs=['const1'], value=helper.make_tensor( name='const_tensor', data_type=TensorProto.FLOAT, dims=constant.shape, vals=constant.flatten(), ), ) node_def = onnx.helper.make_node( 'Softplus', inputs=['const1'], outputs=['Softplus1'], ) node_concat_def = onnx.helper.make_node( 'Concat', inputs=['input', 'Softplus1'], outputs=['output'], axis=concat_axis ) # Create the graph (GraphProto) graph_def = helper.make_graph( [node_const_def, node_def, node_concat_def], 'test_model', [input], [output], ) # Create the model (ModelProto) onnx_net = helper.make_model(graph_def, producer_name='test_model') # # Create reference IR net # constant = np.log(np.exp(constant) + 1.0) if precision == 'FP16': constant = constant.astype(np.float16) ref_net = None if check_ir_version(10, None, ir_version): nodes_attributes = { 'input': {'kind': 'op', 'type': 'Parameter'}, 'input_data': {'shape': shape, 'kind': 'data'}, 'input_const_data': {'kind': 'data', 'value': constant.flatten()}, 'const': {'kind': 'op', 'type': 'Const'}, 'const_data': {'shape': shape, 'kind': 'data'}, 'concat': {'kind': 'op', 'type': 'Concat', 'axis': concat_axis}, 'concat_data': {'shape': output_shape, 'kind': 'data'}, 'result': {'kind': 'op', 'type': 'Result'} } ref_net = build_graph(nodes_attributes, [('input', 'input_data'), ('input_const_data', 'const'), ('const', 'const_data'), ('input_data', 'concat'), ('const_data', 'concat'), ('concat', 'concat_data'), ('concat_data', 'result') ]) return onnx_net, ref_net
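# Illustrative sketch (not part of the original tests): the reference constant above is obtained by
# folding the ONNX Softplus node at model-conversion time, i.e. softplus(x) = log(exp(x) + 1).
# The helper name below is made up for this sketch; values are arbitrary but kept small enough
# that the naive formula does not overflow in float32.
def _softplus_reference_sketch():
    import numpy as np
    x = np.array([0.5, 2.0, 20.0], dtype=np.float32)
    folded = np.log(np.exp(x) + 1.0)       # same formula used for the reference IR constant
    stable = np.logaddexp(0.0, x)          # numerically stable equivalent
    return np.allclose(folded, stable)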
def create_net(self, shape, ir_version): """ ONNX net IR net Input->Sqrt->Output => Input->Power """ # # Create ONNX model # import onnx from onnx import helper from onnx import TensorProto input = helper.make_tensor_value_info('input', TensorProto.FLOAT, shape) output = helper.make_tensor_value_info('output', TensorProto.FLOAT, shape) node_def = onnx.helper.make_node('Sqrt', inputs=['input'], outputs=['output']) # Create the graph (GraphProto) graph_def = helper.make_graph( [node_def], 'test_model', [input], [output], ) # Create the model (ModelProto) onnx_net = helper.make_model(graph_def, producer_name='test_model') # # Create reference IR net # ref_net = None if check_ir_version(10, None, ir_version): nodes_attributes = { 'input': { 'kind': 'op', 'type': 'Parameter' }, 'input_data': { 'shape': shape, 'kind': 'data' }, 'const_indata': { 'shape': None, 'kind': 'data' }, 'const': { 'kind': 'op', 'type': 'Const' }, 'const_data': { 'shape': np.ones(len(shape)), 'kind': 'data' }, 'node': { 'kind': 'op', 'type': 'Power' }, 'node_data': { 'shape': shape, 'kind': 'data' }, 'result': { 'kind': 'op', 'type': 'Result' } } ref_net = build_graph(nodes_attributes, [('input', 'input_data'), ('const_indata', 'const'), ('const', 'const_data'), ('input_data', 'node'), ('const_data', 'node'), ('node', 'node_data'), ('node_data', 'result')]) return onnx_net, ref_net
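# Illustrative sketch (not part of the original tests): the docstring above maps ONNX Sqrt to an IR
# Power node; numerically that is pow(x, 0.5) with a scalar exponent constant, which is what the
# extra Const input of the Power node in the reference graph represents. The helper name is made up.
def _sqrt_as_power_sketch():
    import numpy as np
    x = np.array([0.0, 1.0, 4.0, 9.0], dtype=np.float32)
    return np.allclose(np.sqrt(x), np.power(x, 0.5))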
def create_net_const(self, shape1, shape2, ir_version): """ ONNX net IR net Input->Concat with const xor const->Output => Input->Concat """ # # Create ONNX model # from onnx import helper from onnx import TensorProto concat_axis = 0 output_shape = list(shape1) output_shape[concat_axis] *= 2 input = helper.make_tensor_value_info('input', TensorProto.BOOL, shape1) output = helper.make_tensor_value_info('output', TensorProto.BOOL, output_shape) const1 = np.random.randint(0, 2, shape1).astype(np.bool) const2 = np.random.randint(0, 2, shape2).astype(np.bool) node_const1_def = helper.make_node( 'Constant', inputs=[], outputs=['const1'], value=helper.make_tensor( name='const_tensor', data_type=TensorProto.BOOL, dims=const1.shape, vals=const1.flatten(), ), ) node_const2_def = helper.make_node( 'Constant', inputs=[], outputs=['const2'], value=helper.make_tensor( name='const_tensor', data_type=TensorProto.BOOL, dims=const2.shape, vals=const2.flatten(), ), ) node_def = helper.make_node( 'Xor', inputs=['const1', 'const2'], outputs=['node_out'] ) node_concat_def = helper.make_node( 'Concat', inputs=['input', 'node_out'], outputs=['output'], axis=concat_axis ) # Create the graph (GraphProto) graph_def = helper.make_graph( [node_const1_def, node_const2_def, node_def, node_concat_def], 'test_model', [input], [output], ) # Create the model (ModelProto) onnx_net = helper.make_model(graph_def, producer_name='test_model') # Create reference IR net constant_calculated = np.logical_xor(const1, const2) ref_net = None if check_ir_version(10, None, ir_version): nodes_attributes = { 'input': {'kind': 'op', 'type': 'Parameter'}, 'input_data': {'shape': const1.shape, 'kind': 'data'}, 'input_const_data': {'kind': 'data', 'value': constant_calculated.flatten()}, 'const': {'kind': 'op', 'type': 'Const'}, 'const_data': {'shape': const1.shape, 'kind': 'data'}, 'concat': {'kind': 'op', 'type': 'Concat', 'axis': concat_axis}, 'concat_data': {'shape': output_shape, 'kind': 'data'}, 'result': {'kind': 'op', 'type': 'Result'} } ref_net = build_graph(nodes_attributes, [('input', 'input_data'), ('input_const_data', 'const'), ('const', 'const_data'), ('input_data', 'concat'), ('const_data', 'concat'), ('concat', 'concat_data'), ('concat_data', 'result')]) return onnx_net, ref_net
def create_reduce_lp(self, shape, axes, keep_dims, reduce_p, ir_version): """ ONNX net IR net Input->ReduceLX(axes)->Output => Input->ReduceLX """ # # Create ONNX model # import onnx from onnx import helper from onnx import TensorProto output_shape = shape.copy() _axes = axes.copy() if axes is not None else list(range(len(shape))) for axis in _axes: output_shape[axis] = 1 if not keep_dims: output_shape = [dim for dim in output_shape if dim != 1] input = helper.make_tensor_value_info('input', TensorProto.FLOAT, shape) output = helper.make_tensor_value_info('output', TensorProto.FLOAT, output_shape) args = dict(keepdims=keep_dims) if axes: args['axes'] = axes node_def = onnx.helper.make_node( "ReduceL" + str(reduce_p), inputs=['input'], outputs=['output'], **args ) # Create the graph (GraphProto) graph_def = helper.make_graph( [node_def], 'test_model', [input], [output], ) # Create the model (ModelProto) onnx_net = helper.make_model(graph_def, producer_name='test_model') # # Create reference IR net # Please, specify 'type': 'Input' for input node # Moreover, do not forget to validate ALL layer attributes!!! # ref_net = None if check_ir_version(10, None, ir_version): nodes_attributes = { 'input': {'kind': 'op', 'type': 'Parameter'}, 'input_data': {'shape': shape, 'kind': 'data'}, 'input_data_1': {'shape': [len(_axes)], 'value': _axes, 'kind': 'data'}, 'const_1': {'kind': 'op', 'type': 'Const'}, 'const_data_1': {'shape': [len(_axes)], 'kind': 'data'}, 'reduce': {'kind': 'op', 'type': "ReduceL" + str(reduce_p), 'keep_dims': keep_dims}, 'reduce_data': {'shape': output_shape, 'kind': 'data'}, 'result': {'kind': 'op', 'type': 'Result'} } ref_net = build_graph(nodes_attributes, [('input', 'input_data'), ('input_data_1', 'const_1'), ('const_1', 'const_data_1'), ('input_data', 'reduce'), ('const_data_1', 'reduce'), ('reduce', 'reduce_data'), ('reduce_data', 'result') ]) return onnx_net, ref_net
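# Illustrative sketch (not part of the original tests): ReduceL1/ReduceL2 compute
# (sum(|x ** p|, axes)) ** (1 / p); the same formula is used further below in
# create_reduce_lp_const to pre-compute the reference constant. The helper name and
# example values here are arbitrary.
def _reduce_lp_sketch(p=2):
    import numpy as np
    x = np.random.randn(2, 3, 4).astype(np.float32)
    axes = (1, 2)
    return np.power(np.sum(np.abs(np.power(x, p)), axis=axes, keepdims=True), 1.0 / p)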
def create_net(self, shape, kernel_shape, pads, strides, op, ir_version, count_include_pad=None,
               auto_pad=None, storage_order=None, ceil=False, opset=None):
    """
        ONNX net                        IR net

        Input->Pooling->Output    =>    Input->Pooling
    """

    #
    #   Create ONNX model
    #

    import onnx
    from onnx import helper
    from onnx import TensorProto

    node_args = dict(kernel_shape=kernel_shape)
    if auto_pad is not None:
        node_args['auto_pad'] = auto_pad
        if auto_pad == 'VALID':
            pads = np.zeros(len(shape[2:]) * 2, dtype=int)
    else:
        auto_pad = 'NOTSET'

    if count_include_pad is not None:
        node_args['count_include_pad'] = count_include_pad
    else:
        count_include_pad = 0

    if storage_order is not None:
        node_args['storage_order'] = storage_order

    if pads is not None:
        if auto_pad == 'NOTSET':
            node_args['pads'] = pads
        _pads = np.transpose(np.array(pads).reshape([2, -1]))
    else:
        _pads = np.zeros([len(kernel_shape), 2])

    if strides is not None:
        node_args['strides'] = strides
    else:
        strides = np.ones(len(kernel_shape))

    if ceil:
        node_args['ceil_mode'] = 1

    if auto_pad in ['SAME_UPPER', 'SAME_LOWER']:
        out_spatial_shape = np.ceil(np.array(shape[2:], dtype=float) / strides)
    else:
        rounding = np.ceil if ceil else np.floor
        out_spatial_shape = rounding(
            (float_array(shape[2:]) + np.add(_pads[:, 0], _pads[:, 1]) - float_array(kernel_shape)) / strides + 1)

    out_shape = np.array(shape)
    out_shape[2:] = out_spatial_shape
    out_shape = out_shape.astype(int).tolist()
    concat_axis = 0
    out_concat_shape = out_shape.copy()
    out_concat_shape[concat_axis] *= 2

    input = helper.make_tensor_value_info('input', TensorProto.FLOAT, shape)
    output = helper.make_tensor_value_info('output', TensorProto.FLOAT, out_concat_shape)

    constant = np.random.randint(-127, 127, out_shape).astype(float)

    node_def = onnx.helper.make_node(op, inputs=['input'], outputs=['pool'], **node_args)

    node_const_def = onnx.helper.make_node(
        'Constant',
        inputs=[],
        outputs=['const1'],
        value=helper.make_tensor(
            name='const_tensor',
            data_type=TensorProto.FLOAT,
            dims=constant.shape,
            vals=constant.flatten(),
        ),
    )

    node_concat_def = onnx.helper.make_node('Concat',
                                            inputs=['pool', 'const1'],
                                            outputs=['output'],
                                            axis=concat_axis)

    graph_def = helper.make_graph(
        [node_def, node_const_def, node_concat_def],
        'test_model',
        [input],
        [output],
    )

    # Create the model (ModelProto)
    args = dict(producer_name='test_model')
    if opset:
        args['opset_imports'] = [helper.make_opsetid("", opset)]
    onnx_net = helper.make_model(graph_def, **args)

    #
    #   Create reference IR net
    #

    ref_net = None
    if check_ir_version(10, None, ir_version):
        nodes_attributes = {
            'input': {'kind': 'op', 'type': 'Parameter'},
            'input_data': {'shape': shape, 'kind': 'data'},
            'node': {'kind': 'op', 'type': None,
                     'pads_begin': _pads[:, 0] if len(shape) > 3 else _pads[0, 0],
                     'pads_end': _pads[:, 1] if len(shape) > 3 else _pads[0, 1],
                     'kernel': kernel_shape[0] if len(kernel_shape) == 1 else kernel_shape,
                     'rounding_type': 'ceil' if auto_pad != 'NOTSET' or ceil else 'floor',
                     'auto_pad': None},
            'node_data': {'shape': out_shape, 'kind': 'data'},
            'node_indices_data': {'shape': out_shape, 'kind': 'data'},
            'input_const_data': {'kind': 'data', 'value': constant.flatten()},
            'const': {'kind': 'op', 'type': 'Const'},
            'const_data': {'shape': out_shape, 'kind': 'data'},
            'concat': {'kind': 'op', 'type': 'Concat', 'axis': concat_axis},
            'concat_data': {'shape': out_concat_shape, 'kind': 'data'},
            'result': {'kind': 'op', 'type': 'Result'}
        }

        if op == 'AveragePool':
            nodes_attributes['node']['type'] = 'AvgPool'
            nodes_attributes['node']['exclude-pad'] = count_include_pad == 0
        else:
            nodes_attributes['node']['type'] = 'MaxPool'

        edges = [('input', 'input_data'),
                 ('input_data', 'node'),
                 ('node', 'node_data', {'out': 0}),
                 ('input_const_data', 'const'),
                 ('const', 'const_data'),
                 ('node_data', 'concat'),
                 ('const_data', 'concat'),
                 ('concat', 'concat_data'),
                 ('concat_data', 'result')]

        if op == "MaxPool":
            edges.append(('node', 'node_indices_data', {'out': 1}))

        ref_net = build_graph(nodes_attributes, edges, nodes_with_edges_only=True)

    return onnx_net, ref_net
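# Illustrative sketch (not part of the original tests): the out_spatial_shape computation above
# follows the usual pooling size formula, out = rounding((D + pad_begin + pad_end - K) / S + 1),
# with ceil or floor rounding depending on ceil_mode. The helper name and default values below
# are arbitrary.
def _pool_output_dim_sketch(dim=10, kernel=3, stride=2, pad_begin=1, pad_end=1, ceil_mode=False):
    import math
    rounding = math.ceil if ceil_mode else math.floor
    return rounding((dim + pad_begin + pad_end - kernel) / stride + 1)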
def create_elu_net(self, shape, ir_version): """ Tensorflow net IR net Input->ELU => Input->ELU """ # # Create Tensorflow model # import tensorflow as tf tf.compat.v1.reset_default_graph() # Create the graph and model with tf.compat.v1.Session() as sess: shapes = shape.copy() # reshaping if len(shapes) >= 4: shapes.append(shapes.pop(1)) input = tf.compat.v1.placeholder(tf.float32, shapes, 'Input') tf.nn.elu(input, name='Operation') tf.compat.v1.global_variables_initializer() tf_net = sess.graph_def # # Create reference IR net # Please, specify 'type': 'Input' for input node # Moreover, do not forget to validate ALL layer attributes!!! # ref_net = None if check_ir_version(10, None, ir_version): nodes_attributes = { 'input': { 'kind': 'op', 'type': 'Parameter' }, 'input_data': { 'shape': shape, 'kind': 'data' }, 'ELU': { 'kind': 'op', 'type': 'Elu' }, 'ELU_data': { 'shape': shape, 'kind': 'data' }, 'result': { 'kind': 'op', 'type': 'Result' } } ref_net = build_graph(nodes_attributes, [('input', 'input_data'), ('input_data', 'ELU'), ('ELU', 'ELU_data'), ('ELU_data', 'result')]) return tf_net, ref_net
def create_concat_net_const(self, input_shape, output_shape, axis, ir_version): """ ONNX net IR net Input(const)----->Concat--------->Concat->Output => Input--->Concat Input(const)-----' ' Const---' Input-' """ # # Create ONNX model # import onnx from onnx import helper from onnx import TensorProto import numpy as np concat_axis = 0 concat_output_shape = output_shape.copy() concat_output_shape[concat_axis] *= 2 const_number = np.prod(input_shape) constant = np.random.randint(-127, 127, const_number).astype(np.float) input = helper.make_tensor_value_info('input', TensorProto.FLOAT, output_shape) # Output for concat output = helper.make_tensor_value_info('output', TensorProto.FLOAT, concat_output_shape) node_const1_def = onnx.helper.make_node( 'Constant', inputs=[], outputs=['const1'], value=helper.make_tensor( name='const1_tensor', data_type=TensorProto.FLOAT, dims=input_shape, vals=constant, ), ) node_const2_def = onnx.helper.make_node( 'Constant', inputs=[], outputs=['const2'], value=helper.make_tensor( name='const2_tensor', data_type=TensorProto.FLOAT, dims=input_shape, vals=constant, ), ) node_concat_def = onnx.helper.make_node('Concat', inputs=['const1', 'const2'], outputs=['output_concat'], axis=axis) node_dyn_concat_def = onnx.helper.make_node( 'Concat', inputs=['input', 'output_concat'], outputs=['output'], axis=concat_axis) # Create the graph (GraphProto) graph_def = helper.make_graph( [ node_const1_def, node_const2_def, node_concat_def, node_dyn_concat_def ], 'test_concat_model', [input], [output], ) # Create the model (ModelProto) onnx_net = helper.make_model(graph_def, producer_name='test_split_model') # # Create reference IR net # Please, specify 'type': 'Input' for input node # Moreover, do not forget to validate ALL layer attributes!!! # constant_reshape = np.reshape(constant, input_shape) constant_reshape = np.concatenate([constant_reshape, constant_reshape], axis=axis) ref_net = None if check_ir_version(10, None, ir_version): nodes_attributes = { 'input': { 'kind': 'op', 'type': 'Parameter' }, 'input_data': { 'shape': output_shape, 'kind': 'data' }, 'input_const_data': { 'kind': 'data', 'value': constant_reshape.flatten() }, 'const': { 'kind': 'op', 'type': 'Const' }, 'const_data': { 'shape': output_shape, 'value': None, 'kind': 'data' }, 'concat': { 'kind': 'op', 'type': 'Concat', 'axis': concat_axis }, 'concat_data': { 'shape': concat_output_shape, 'kind': 'data' }, 'result': { 'kind': 'op', 'type': 'Result' } } ref_net = build_graph(nodes_attributes, [('input', 'input_data'), ('input_const_data', 'const'), ('const', 'const_data'), ('input_data', 'concat'), ('const_data', 'concat'), ('concat', 'concat_data'), ('concat_data', 'result')]) return onnx_net, ref_net
def create_net_const(self, shape, op, precision, ir_version): # # Create ONNX model # import onnx from onnx import helper from onnx import TensorProto assert op in [ 'Sin', 'Sinh', 'Asin', 'Cos', 'Cosh', 'Acos', 'Tan', 'Tanh', 'Atan' ] concat_axis = 0 output_shape = shape.copy() output_shape[concat_axis] *= 2 input = helper.make_tensor_value_info('input', TensorProto.FLOAT, shape) output = helper.make_tensor_value_info('output', TensorProto.FLOAT, output_shape) constant = np.random.rand(*shape).astype(np.float) node_const_def = onnx.helper.make_node( 'Constant', inputs=[], outputs=['const'], value=helper.make_tensor( name='const_tensor', data_type=TensorProto.FLOAT, dims=constant.shape, vals=constant.flatten(), ), ) node_def = onnx.helper.make_node(op, inputs=['const'], outputs=['res']) node_concat_def = onnx.helper.make_node('Concat', inputs=['input', 'res'], outputs=['output'], axis=concat_axis) # Create the graph (GraphProto) graph_def = helper.make_graph( [node_const_def, node_def, node_concat_def], 'test_model', [input], [output], ) # Create the model (ModelProto) onnx_net = helper.make_model(graph_def, producer_name='test_model') # # Create reference IR net # if op == 'Sin': constant = np.sin(constant) elif op == 'Sinh': constant = np.sinh(constant) elif op == 'Asin': constant = np.arcsin(constant) elif op == 'Cos': constant = np.cos(constant) elif op == 'Cosh': constant = np.cosh(constant) elif op == 'Acos': constant = np.arccos(constant) elif op == 'Tan': constant = np.tan(constant) elif op == 'Tanh': constant = np.tanh(constant) elif op == 'Atan': constant = np.arctan(constant) if precision == 'FP16': constant = constant.astype(np.float16) ref_net = None if check_ir_version(10, None, ir_version): nodes_attributes = { 'input': { 'kind': 'op', 'type': 'Parameter' }, 'input_data': { 'shape': shape, 'kind': 'data' }, 'input_const_data': { 'kind': 'data', 'value': constant.flatten() }, 'const': { 'kind': 'op', 'type': 'Const' }, 'const_data': { 'shape': shape, 'kind': 'data' }, 'concat': { 'kind': 'op', 'type': 'Concat', 'axis': concat_axis }, 'concat_data': { 'shape': output_shape, 'kind': 'data' }, 'result': { 'kind': 'op', 'type': 'Result' } } ref_net = build_graph(nodes_attributes, [('input', 'input_data'), ('input_const_data', 'const'), ('const', 'const_data'), ('input_data', 'concat'), ('const_data', 'concat'), ('concat', 'concat_data'), ('concat_data', 'result')]) return onnx_net, ref_net
def create_net_const(self, input_value, output_value, precision, ir_version): """ ONNX net IR net Input->Concat(+NonZero const)->Output => Input->Concat(+const)->Result """ # # Create ONNX model # import onnx from onnx import helper from onnx import TensorProto concat_axis = 0 output_shape = list(output_value.shape) output_shape[concat_axis] *= 2 input = helper.make_tensor_value_info('input', TensorProto.FLOAT, output_value.shape) output = helper.make_tensor_value_info('output', TensorProto.FLOAT, output_shape) node_const_def = onnx.helper.make_node( 'Constant', inputs=[], outputs=['const1'], value=helper.make_tensor( name='const_tensor', data_type=TensorProto.FLOAT, dims=input_value.shape, vals=input_value.flatten(), ), ) node_def = onnx.helper.make_node('NonZero', inputs=['const1'], outputs=['nonzero1']) node_concat_def = onnx.helper.make_node('Concat', inputs=['input', 'nonzero1'], outputs=['output'], axis=concat_axis) # Create the graph (GraphProto) graph_def = helper.make_graph( [node_const_def, node_def, node_concat_def], 'test_model', [input], [output], ) # Create the model (ModelProto) onnx_net = helper.make_model(graph_def, producer_name='test_model') # # Create reference IR net # ref_net = None if check_ir_version(10, None, ir_version): nodes_attributes = { 'input': { 'kind': 'op', 'type': 'Parameter' }, 'input_data': { 'shape': output_value.shape, 'kind': 'data' }, 'input_const_data': { 'kind': 'data', 'value': output_value.flatten() }, 'const': { 'kind': 'op', 'type': 'Const' }, 'const_data': { 'shape': output_value.shape, 'kind': 'data' }, 'concat': { 'kind': 'op', 'type': 'Concat', 'axis': concat_axis }, 'concat_data': { 'shape': output_shape, 'kind': 'data' }, 'result': { 'kind': 'op', 'type': 'Result' } } ref_net = build_graph(nodes_attributes, [('input', 'input_data'), ('input_const_data', 'const'), ('const', 'const_data'), ('input_data', 'concat'), ('const_data', 'concat'), ('concat', 'concat_data'), ('concat_data', 'result')]) return onnx_net, ref_net
def create_net(self, shape, op, ir_version): # # Create ONNX model # import onnx from onnx import helper from onnx import TensorProto assert op in [ 'Sin', 'Sinh', 'Asin', 'Cos', 'Cosh', 'Acos', 'Tan', 'Tanh', 'Atan' ] input = helper.make_tensor_value_info('input', TensorProto.FLOAT, shape) output = helper.make_tensor_value_info('output', TensorProto.FLOAT, shape) node_def = onnx.helper.make_node(op, inputs=['input'], outputs=['output']) # Create the graph (GraphProto) graph_def = helper.make_graph( [node_def], 'test_model', [input], [output], ) # Create the model (ModelProto) onnx_net = helper.make_model(graph_def, producer_name='test_model') # # Create reference IR net # ref_net = None if check_ir_version(10, None, ir_version): nodes_attributes = { 'input': { 'kind': 'op', 'type': 'Parameter' }, 'input_data': { 'shape': shape, 'kind': 'data' }, 'node': { 'kind': 'op', 'type': op }, 'node_data': { 'shape': shape, 'kind': 'data' }, 'result': { 'kind': 'op', 'type': 'Result' } } ref_net = build_graph(nodes_attributes, [('input', 'input_data'), ('input_data', 'node'), ('node', 'node_data'), ('node_data', 'result')]) return onnx_net, ref_net
def create_net(self, shape1, shape2, ir_version): """ ONNX net IR net Input->Or with 2nd input->Output => Input->LogicalOr """ # # Create ONNX model # from onnx import helper from onnx import TensorProto input1 = helper.make_tensor_value_info('input1', TensorProto.BOOL, shape1) input2 = helper.make_tensor_value_info('input2', TensorProto.BOOL, shape2) output = helper.make_tensor_value_info('output', TensorProto.BOOL, shape1) node_def = helper.make_node('Or', inputs=['input1', 'input2'], outputs=['output']) # Create the graph (GraphProto) graph_def = helper.make_graph( [node_def], 'test_model', [input1, input2], [output], ) # Create the model (ModelProto) onnx_net = helper.make_model(graph_def, producer_name='test_model') # Create reference IR net ref_net = None if check_ir_version(10, None, ir_version): nodes_attributes = { 'input1': { 'kind': 'op', 'type': 'Parameter' }, 'input1_data': { 'shape': shape1, 'kind': 'data' }, 'input2': { 'kind': 'op', 'type': 'Parameter' }, 'input2_data': { 'shape': shape2, 'kind': 'data' }, 'node': { 'kind': 'op', 'type': 'LogicalOr' }, 'node_data': { 'shape': shape1, 'kind': 'data' }, 'result': { 'kind': 'op', 'type': 'Result' } } ref_net = build_graph(nodes_attributes, [('input1', 'input1_data'), ('input2', 'input2_data'), ('input1_data', 'node'), ('input2_data', 'node'), ('node', 'node_data'), ('node_data', 'result')]) return onnx_net, ref_net
def create_tf_random_uniform_net(self, global_seed, op_seed, x_shape, min_val, max_val, input_type, precision, ir_version, use_new_frontend): tf.compat.v1.reset_default_graph() # Create the graph and model with tf.compat.v1.Session() as sess: tf_x_shape = x_shape.copy() tf_x_shape = permute_nchw_to_nhwc(tf_x_shape, use_new_frontend) x = tf.compat.v1.placeholder(input_type, tf_x_shape, 'Input') if global_seed is not None: tf.compat.v1.random.set_random_seed(global_seed) random_uniform = tf.random.uniform(tf_x_shape, seed=op_seed, dtype=input_type, minval=min_val, maxval=max_val) + x tf.compat.v1.global_variables_initializer() tf_net = sess.graph_def ref_net = None if check_ir_version(10, None, ir_version) and not use_new_frontend: const_for_layer_tests = lambda name, value, shape, shape1: { **{name + '_dd': {'kind': 'data', 'value': value, 'shape': shape1}}, **{name: {'kind': 'op', 'type': 'Const'}}, **shaped_data(name + '_d', shape)} connect_const_for_layer_tests = lambda first_tensor_name, second_tensor_name: [ *connect_front(first_tensor_name + '_dd', first_tensor_name), *connect(first_tensor_name, second_tensor_name)] nodes_attributes = { **regular_op_with_shaped_data('input', x_shape, {'type': 'Parameter'}), **const_for_layer_tests('shape', x_shape, int64_array([len(x_shape)]), int64_array([len(x_shape)])), **const_for_layer_tests('min_val', min_val, int64_array([]), int64_array([1])), **const_for_layer_tests('max_val', max_val, int64_array([]), int64_array([1])), **regular_op_with_shaped_data('random_uniform', x_shape, {'type': 'RandomUniform'}), **regular_op_with_shaped_data('convert', x_shape, {'type': 'Convert'}), **regular_op_with_shaped_data('add', x_shape, {'type': 'Add'}), **regular_op_with_shaped_data('result', x_shape, {'type': 'Result'}), } if precision == 'FP16' and input_type == tf.float32: ref_net = build_graph(nodes_attributes, [*connect_const_for_layer_tests('shape', '0:random_uniform'), *connect_const_for_layer_tests('min_val', '1:random_uniform'), *connect_const_for_layer_tests('max_val', '2:random_uniform'), *connect('random_uniform', 'convert'), *connect('convert', '0:add'), *connect('input', '1:add'), *connect('add', 'result')]) else: ref_net = build_graph(nodes_attributes, [*connect_const_for_layer_tests('shape', '0:random_uniform'), *connect_const_for_layer_tests('min_val', '1:random_uniform'), *connect_const_for_layer_tests('max_val', '2:random_uniform'), *connect('random_uniform', '0:add'), *connect('input', '1:add'), *connect('add', 'result')]) return tf_net, ref_net
def create_fake_quantize_net(self, il, ih, num_bits, narrow_range, nudged_il, nudged_ih, expected_step, ir_version, use_new_frontend): # original tf model import tensorflow as tf tf.compat.v1.reset_default_graph() with tf.compat.v1.Session() as sess: data = tf.compat.v1.placeholder(tf.float32, [11], 'parameter') input_min = tf.constant(il, name='input_min') input_max = tf.constant(ih, name='input_max') tf.quantization.fake_quant_with_min_max_vars( data, input_min, input_max, num_bits, narrow_range, 'fq') tf.compat.v1.global_variables_initializer() tf_net = sess.graph_def # reference graph to compare with IR ref_net = None if check_ir_version(10, None, ir_version) and not use_new_frontend: levels = 2**num_bits - int(narrow_range) # data (shape, value) -> const (shape, vale) -> data (shape, no value) const_for_layer_tests = lambda name, value: { **{ name + '_dd': { 'kind': 'data', 'value': value, 'shape': value.shape } }, **{ name: { 'kind': 'op', 'type': 'Const' } }, **shaped_data(name + '_d', int64_array(value.shape)) } connect_const_for_layer_tests = lambda first_tensor_name, second_tensor_name: [ *connect_front(first_tensor_name + '_dd', first_tensor_name), *connect(first_tensor_name, second_tensor_name) ] nodes = { **regular_op_with_shaped_data('parameter', [11], { 'type': 'Parameter' }), **const_for_layer_tests( 'il', np.array([nudged_il], dtype=np.float32)), **const_for_layer_tests( 'ih', np.array([nudged_ih], dtype=np.float32)), **const_for_layer_tests( 'ol', np.array([nudged_il], dtype=np.float32)), **const_for_layer_tests( 'oh', np.array([nudged_ih], dtype=np.float32)), **regular_op_with_shaped_data('fq', [11], { 'type': 'FakeQuantize', 'levels': levels }), **regular_op('result', {'type': 'Result'}), } edges = [ *connect('parameter', '0:fq'), *connect_const_for_layer_tests('il', '1:fq'), *connect_const_for_layer_tests('ih', '2:fq'), *connect_const_for_layer_tests('ol', '3:fq'), *connect_const_for_layer_tests('oh', '4:fq'), *connect('fq', 'result'), ] ref_net = build_graph(nodes, edges) return tf_net, ref_net
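# Illustrative sketch (not part of the original tests): with TF's fake_quant semantics used above,
# the number of quantization levels is 2 ** num_bits - int(narrow_range), and the step between the
# nudged limits is (nudged_ih - nudged_il) / (levels - 1); this is presumably what the test's
# expected_step parameter encodes. The helper name and defaults are made up for this sketch.
def _fake_quant_step_sketch(num_bits=8, narrow_range=False, nudged_il=0.0, nudged_ih=255.0):
    levels = 2 ** num_bits - int(narrow_range)
    return (nudged_ih - nudged_il) / (levels - 1)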
def create_reduce(self, shape, reshapped_shape, op, axes, keep_dims, ir_version):
    """
        ONNX net                                      IR net

        Input->Reduce Operation (axes)->Output   =>   Input->Reduce Operation
    """

    #
    #   Create ONNX model
    #

    import onnx
    from onnx import helper
    from onnx import TensorProto

    if op not in ['ReduceMin', 'ReduceMax', 'ReduceMean', 'ReduceProd', 'ReduceSum']:
        raise ValueError("Operation has to be one of ReduceMin, ReduceMax, ReduceMean, ReduceProd or ReduceSum")

    output_shape = shape.copy()
    for axis in axes:
        output_shape[axis] = 1
    if not keep_dims:
        output_shape = [dim for dim in output_shape if dim != 1]

    input = helper.make_tensor_value_info('input', TensorProto.FLOAT, shape)
    output = helper.make_tensor_value_info('output', TensorProto.FLOAT, output_shape)

    node_def = onnx.helper.make_node(
        op,
        inputs=['input'],
        outputs=['output'],
        axes=axes,
        keepdims=keep_dims
    )

    # Create the graph (GraphProto)
    graph_def = helper.make_graph(
        [node_def],
        'test_model',
        [input],
        [output],
    )

    # Create the model (ModelProto)
    onnx_net = helper.make_model(graph_def, producer_name='test_model')

    #
    #   Create reference IR net
    #   Please, specify 'type': 'Input' for input node
    #   Moreover, do not forget to validate ALL layer attributes!!!
    #

    ref_net = None

    if check_ir_version(10, None, ir_version):
        nodes_attributes = {
            'input': {'kind': 'op', 'type': 'Parameter'},
            'input_data': {'shape': shape, 'kind': 'data'},
            'input_data_1': {'shape': [len(axes)], 'value': axes, 'kind': 'data'},
            'const_1': {'kind': 'op', 'type': 'Const'},
            'const_data_1': {'shape': [len(axes)], 'kind': 'data'},
            'reduce': {'kind': 'op', 'type': op, 'keep_dims': keep_dims},
            'reduce_data': {'shape': output_shape, 'kind': 'data'},
            'result': {'kind': 'op', 'type': 'Result'}
        }

        ref_net = build_graph(nodes_attributes,
                              [('input', 'input_data'),
                               ('input_data_1', 'const_1'),
                               ('const_1', 'const_data_1'),
                               ('input_data', 'reduce'),
                               ('const_data_1', 'reduce'),
                               ('reduce', 'reduce_data'),
                               ('reduce_data', 'result')
                               ])

    return onnx_net, ref_net
def create_neg(self, shape, ir_version): """ ONNX net IR net Input->Neg->Output => Input->Power(scale=-1, shift=0, power=1) """ # # Create ONNX model # import onnx from onnx import helper from onnx import TensorProto input = helper.make_tensor_value_info('input', TensorProto.FLOAT, shape) output = helper.make_tensor_value_info('output', TensorProto.FLOAT, shape) node_reduce_mean_def = onnx.helper.make_node( 'Neg', inputs=['input'], outputs=['output'], ) # Create the graph (GraphProto) graph_def = helper.make_graph( [node_reduce_mean_def], 'test_neg_model', [input], [output], ) # Create the model (ModelProto) onnx_net = helper.make_model(graph_def, producer_name='test_neg_model') # # Create reference IR net # Please, specify 'type': 'Input' for input node # Moreover, do not forget to validate ALL layer attributes!!! # ref_net = None if check_ir_version(10, None, ir_version): nodes_attributes = { 'input': { 'kind': 'op', 'type': 'Parameter' }, 'input_data': { 'shape': shape, 'kind': 'data' }, 'neg': { 'kind': 'op', 'type': 'Negative' }, 'neg_data': { 'shape': shape, 'kind': 'data' }, 'result': { 'kind': 'op', 'type': 'Result' } } ref_net = build_graph(nodes_attributes, [('input', 'input_data'), ('input_data', 'neg'), ('neg', 'neg_data'), ('neg_data', 'result')]) return onnx_net, ref_net
def create_net(self, shape, alpha, beta, bias, size, ir_version): """ ONNX net IR net Input->LRN->Output => Input->Norm->Power """ # # Create ONNX model # import onnx from onnx import helper from onnx import TensorProto input = helper.make_tensor_value_info('input', TensorProto.FLOAT, shape) output = helper.make_tensor_value_info('output', TensorProto.FLOAT, shape) args = dict(size=size) if alpha: args['alpha'] = alpha if beta: args['beta'] = beta if bias: args['bias'] = bias node_def = onnx.helper.make_node('LRN', inputs=['input'], outputs=['output'], **args) # Create the graph (GraphProto) graph_def = helper.make_graph( [node_def], 'test_model', [input], [output], ) # Create the model (ModelProto) onnx_net = helper.make_model(graph_def, producer_name='test_model') # Create reference IR net if not alpha: alpha = 0.0001 if not beta: beta = 0.75 if not bias: bias = 1.0 ref_net = None if check_ir_version(10, None, ir_version): nodes_attributes = { 'input': { 'kind': 'op', 'type': 'Parameter' }, 'input_data': { 'shape': shape, 'kind': 'data' }, 'const_indata': { 'value': [1], 'kind': 'data' }, 'const': { 'kind': 'op', 'type': 'Const' }, 'const_data': { 'shape': [1], 'kind': 'data' }, 'norm': { 'kind': 'op', 'type': 'LRN', 'alpha': alpha / bias, 'beta': beta, 'bias': bias, 'size': size }, # 'region': 'across' 'norm_data': { 'shape': shape, 'kind': 'data' }, 'result': { 'kind': 'op', 'type': 'Result' } } edges = [('input', 'input_data'), ('input_data', 'norm'), ('const_indata', 'const'), ('const', 'const_data'), ('const_data', 'norm'), ('norm', 'norm_data'), ('norm_data', 'result')] ref_net = build_graph(nodes_attributes, edges) return onnx_net, ref_net
def create_reduce_lp_const(self, shape, axes, keep_dims, reduce_p, ir_version): """ ONNX net IR net Input->ReduceLX(axes)->Output => Input->ReduceLX """ # # Create ONNX model # import onnx from onnx import helper from onnx import TensorProto output_shape = shape.copy() _axes = axes.copy() if axes is not None else list(range(len(shape))) for axis in _axes: output_shape[axis] = 1 if not keep_dims: output_shape = [dim for dim in output_shape if dim != 1] if len(output_shape) == 0: output_shape = [1] concat_axis = 0 concat_output_shape = output_shape.copy() concat_output_shape[concat_axis] *= 2 input = helper.make_tensor_value_info('input', TensorProto.FLOAT, output_shape) output = helper.make_tensor_value_info('output', TensorProto.FLOAT, concat_output_shape) constant = np.random.randn(*shape).astype(np.float) node_const_def = onnx.helper.make_node( 'Constant', inputs=[], outputs=['const1'], value=helper.make_tensor( name='const_tensor', data_type=TensorProto.FLOAT, dims=constant.shape, vals=constant.flatten(), ), ) args = dict(keepdims=keep_dims) if axes: args['axes'] = axes node_def = onnx.helper.make_node( "ReduceL" + str(reduce_p), inputs=['const1'], outputs=['reduce'], **args ) node_concat_def = onnx.helper.make_node( 'Concat', inputs=['input', 'reduce'], outputs=['output'], axis=concat_axis ) # Create the graph (GraphProto) graph_def = helper.make_graph( [node_const_def, node_def, node_concat_def], 'test_model', [input], [output], ) # Create the model (ModelProto) onnx_net = helper.make_model(graph_def, producer_name='test_model') # # Create reference IR net # Please, specify 'type': 'Input' for input node # Moreover, do not forget to validate ALL layer attributes!!! # constant = np.power(np.sum(a=np.abs(np.power(constant, reduce_p)), axis=tuple(_axes), keepdims=keep_dims), 1 / reduce_p) ref_net = None if check_ir_version(10, None, ir_version): nodes_attributes = { 'input': {'kind': 'op', 'type': 'Parameter'}, 'input_data': {'shape': output_shape, 'kind': 'data'}, 'input_const_data': {'kind': 'data', 'value': constant.flatten()}, 'const': {'kind': 'op', 'type': 'Const'}, 'const_data': {'shape': constant.shape, 'kind': 'data'}, 'concat': {'kind': 'op', 'type': 'Concat', 'axis': concat_axis}, 'concat_data': {'shape': concat_output_shape, 'kind': 'data'}, 'result': {'kind': 'op', 'type': 'Result'} } ref_net = build_graph(nodes_attributes, [('input', 'input_data'), ('input_const_data', 'const'), ('const', 'const_data'), ('input_data', 'concat'), ('const_data', 'concat'), ('concat', 'concat_data'), ('concat_data', 'result') ]) return onnx_net, ref_net
def create_net_with_unary_op(self, shape, ir_version, op_type, use_new_frontend): """ Tensorflow net IR net Input->UnaryOp => Input->UnaryOp """ import tensorflow as tf self.current_op_type = op_type op_type_to_tf = { 'Abs': tf.math.abs, 'Acos': tf.math.acos, 'Acosh': tf.math.acosh, 'Asin': tf.math.asin, 'Asinh': tf.math.asinh, 'Atan': tf.math.atan, 'Atanh': tf.math.atanh, 'Ceiling': tf.math.ceil, 'Cos': tf.math.cos, 'Cosh': tf.math.cosh, 'Elu': tf.nn.elu, 'Exp': tf.math.exp, 'Floor': tf.math.floor, 'Log': tf.math.log, 'LogicalNot': tf.math.logical_not, 'Negative': tf.math.negative, 'Sigmoid': tf.nn.sigmoid, 'Sign': tf.math.sign, 'Sin': tf.math.sin, 'Sinh': tf.math.sinh, 'SoftPlus': tf.nn.softplus, 'Tan': tf.math.tan, 'Tanh': tf.math.tanh, 'ReLU': tf.nn.relu, } # # Create Tensorflow model # tf.compat.v1.reset_default_graph() type = tf.float32 if op_type == "LogicalNot": type = tf.bool # Create the graph and model with tf.compat.v1.Session() as sess: tf_x_shape = shape.copy() tf_x_shape = permute_nchw_to_nhwc(tf_x_shape, use_new_frontend) input = tf.compat.v1.placeholder(type, tf_x_shape, 'Input') op_type_to_tf[self.current_op_type](input, name='Operation') tf.compat.v1.global_variables_initializer() tf_net = sess.graph_def # # Create reference IR net # Please, specify 'type': 'Input' for input node # Moreover, do not forget to validate ALL layer attributes!!! # ref_net = None if check_ir_version(10, None, ir_version) and not use_new_frontend: nodes_attributes = { 'input': { 'kind': 'op', 'type': 'Parameter' }, 'input_data': { 'shape': shape, 'kind': 'data' }, 'testing_op': { 'kind': 'op', 'type': self.current_op_type }, 'testing_data': { 'shape': shape, 'kind': 'data' }, 'result': { 'kind': 'op', 'type': 'Result' } } ref_net = build_graph(nodes_attributes, [('input', 'input_data'), ('input_data', 'testing_op'), ('testing_op', 'testing_data'), ('testing_data', 'result')]) return tf_net, ref_net
def create_global_net(self, shape, op, ir_version): """ ONNX net IR net Input->GlobalPooling>Output => Input->Pooling """ # # Create ONNX model # import onnx from onnx import helper from onnx import TensorProto out_shape = np.ones(len(shape)) out_shape[:2] = np.array(shape)[:2] out_shape = out_shape.astype(np.int).tolist() input = helper.make_tensor_value_info('input', TensorProto.FLOAT, shape) output = helper.make_tensor_value_info('output', TensorProto.FLOAT, out_shape) node_def = onnx.helper.make_node(op, inputs=['input'], outputs=['output']) # Create the graph (GraphProto) graph_def = helper.make_graph( [node_def], 'test_model', [input], [output], ) # Create the model (ModelProto) onnx_net = helper.make_model(graph_def, producer_name='test_model') # # Create reference IR net # ref_net = None if check_ir_version(10, None, ir_version): nodes_attributes = { 'input': { 'kind': 'op', 'type': 'Parameter' }, 'input_data': { 'shape': shape, 'kind': 'data' }, 'input_axes_data': { 'kind': 'data', 'value': list(range(2, len(shape))) }, 'axes': { 'kind': 'op', 'type': 'Const' }, 'axes_data': { 'shape': [len(shape) - 2], 'kind': 'data' }, 'node': { 'kind': 'op', 'type': None }, 'node_data': { 'shape': out_shape, 'kind': 'data' }, 'result': { 'kind': 'op', 'type': 'Result' } } if op == 'GlobalAveragePool': nodes_attributes['node']['type'] = 'ReduceMean' else: nodes_attributes['node']['type'] = 'ReduceMax' ref_net = build_graph(nodes_attributes, [('input', 'input_data'), ('input_data', 'node'), ('input_axes_data', 'axes'), ('axes', 'axes_data'), ('axes_data', 'node'), ('node', 'node_data'), ('node_data', 'result')]) return onnx_net, ref_net
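# Illustrative sketch (not part of the original tests): GlobalAveragePool/GlobalMaxPool reduce all
# spatial dimensions, which is why the reference IR above uses ReduceMean/ReduceMax with the axes
# constant range(2, rank) and keeps N and C with size-1 spatial dims. The helper name and example
# values are arbitrary.
def _global_avg_pool_sketch():
    import numpy as np
    x = np.random.randn(1, 3, 5, 7).astype(np.float32)   # NCHW input
    spatial_axes = tuple(range(2, x.ndim))                # (2, 3)
    return np.mean(x, axis=spatial_axes, keepdims=True)   # shape (1, 3, 1, 1)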
def create_net(self, shape, axis, indices, output_shape, ir_version): """ ONNX net IR net Input->Gather->Output => Input->Gather """ # # Create ONNX model # import onnx from onnx import helper from onnx import TensorProto indices = np.array(indices) input = helper.make_tensor_value_info('input', TensorProto.FLOAT, shape) output = helper.make_tensor_value_info('output', TensorProto.FLOAT, output_shape) node_indices_def = onnx.helper.make_node( 'Constant', inputs=[], outputs=['indices'], value=helper.make_tensor( name='const_tensor', data_type=TensorProto.INT64, dims=indices.shape, vals=indices.flatten(), ), ) args = dict() if axis: args['axis'] = axis else: axis = 0 node_def = onnx.helper.make_node('Gather', inputs=['input', 'indices'], outputs=['output'], **args) # Create the graph (GraphProto) graph_def = helper.make_graph([node_indices_def, node_def], 'test_model', [input], [output]) # Create the model (ModelProto) onnx_net = helper.make_model(graph_def, producer_name='test_model') # # Create reference IR net # ref_net = None if check_ir_version(10, None, ir_version): nodes_attributes = { 'input': { 'kind': 'op', 'type': 'Parameter' }, 'input_data': { 'shape': shape, 'kind': 'data' }, 'input_const_data': { 'kind': 'data', 'value': indices.flatten() }, 'const': { 'kind': 'op', 'type': 'Const' }, 'const_data': { 'shape': indices.shape, 'kind': 'data', 'value': None }, 'input_axis_const_data': { 'kind': 'data', 'value': [axis] }, 'axis_const': { 'kind': 'op', 'type': 'Const' }, 'axis_const_data': { 'shape': [], 'kind': 'data', 'value': None }, 'node': { 'kind': 'op', 'type': 'Gather' }, 'node_data': { 'shape': output_shape, 'kind': 'data' }, 'result': { 'kind': 'op', 'type': 'Result' } } ref_net = build_graph(nodes_attributes, [('input', 'input_data'), ('input_const_data', 'const'), ('const', 'const_data'), ('input_axis_const_data', 'axis_const'), ('axis_const', 'axis_const_data'), ('input_data', 'node'), ('const_data', 'node'), ('axis_const_data', 'node'), ('node', 'node_data'), ('node_data', 'result')]) return onnx_net, ref_net
def create_net(self, shape, slope_shape, precision, ir_version, opset=None): """ ONNX net IR net Input->PRelu->Output => Input->PReLU """ # # Create ONNX model # import onnx from onnx import helper from onnx import TensorProto input = helper.make_tensor_value_info('input', TensorProto.FLOAT, shape) output = helper.make_tensor_value_info('output', TensorProto.FLOAT, shape) const = np.random.randn(*slope_shape).astype(np.float32) node_slope_def = onnx.helper.make_node( 'Constant', inputs=[], outputs=['slope'], value=helper.make_tensor( name='const_tensor', data_type=TensorProto.FLOAT, dims=const.shape, vals=const.flatten(), ), ) node_def = onnx.helper.make_node('PRelu', inputs=['input', 'slope'], outputs=['output']) # Create the graph (GraphProto) graph_def = helper.make_graph( [node_slope_def, node_def], 'test_model', [input], [output], ) # Create the model (ModelProto) args = dict(producer_name='test_model') if opset: args['opset_imports'] = [helper.make_opsetid("", opset)] onnx_net = helper.make_model(graph_def, **args) # # Create reference IR net # ref_net = None if check_ir_version(10, None, ir_version): nodes_attributes = { 'input': { 'kind': 'op', 'type': 'Parameter' }, 'input_data': { 'shape': shape, 'kind': 'data' }, 'weights_indata': { 'kind': 'data', 'value': const.flatten() }, 'weights': { 'kind': 'op', 'type': 'Const' }, 'weights_data': { 'kind': 'data', 'shape': [len(const.flatten())] }, 'node': { 'kind': 'op', 'type': 'PReLU' }, 'node_data': { 'shape': shape, 'kind': 'data' }, 'result': { 'kind': 'op', 'type': 'Result' } } ref_net = build_graph(nodes_attributes, [('input', 'input_data'), ('input_data', 'node'), ('weights_indata', 'weights'), ('weights', 'weights_data'), ('weights_data', 'node'), ('node', 'node_data'), ('node_data', 'result')]) return onnx_net, ref_net
def create_net_const(self, shape, axis, indices, output_shape, ir_version): """ ONNX net IR net Input->Concat(+gathered const)->Output => Input->Concat(+const) """ # # Create ONNX model # import onnx from onnx import helper from onnx import TensorProto indices = np.array(indices) concat_axis = 0 input_shape = output_shape.copy() concat_output_shape = output_shape.copy() concat_output_shape[concat_axis] = 2 * concat_output_shape[concat_axis] input = helper.make_tensor_value_info('input', TensorProto.FLOAT, input_shape) output = helper.make_tensor_value_info('output', TensorProto.FLOAT, concat_output_shape) constant = np.random.randint(-127, 127, shape).astype(np.float) node_const_def = onnx.helper.make_node( 'Constant', inputs=[], outputs=['const1'], value=helper.make_tensor( name='const_tensor', data_type=TensorProto.FLOAT, dims=constant.shape, vals=constant.flatten(), ), ) node_indices_def = onnx.helper.make_node( 'Constant', inputs=[], outputs=['indices'], value=helper.make_tensor( name='const_tensor', data_type=TensorProto.INT64, dims=indices.shape, vals=indices.flatten(), ), ) args = dict() if axis: args['axis'] = axis node_def = onnx.helper.make_node('Gather', inputs=['const1', 'indices'], outputs=['gather'], **args) node_concat_def = onnx.helper.make_node('Concat', inputs=['input', 'gather'], outputs=['output'], axis=concat_axis) # Create the graph (GraphProto) graph_def = helper.make_graph( [node_const_def, node_indices_def, node_def, node_concat_def], 'test_model', [input], [output], ) # Create the model (ModelProto) onnx_net = helper.make_model(graph_def, producer_name='test_model') # # Create reference IR net # constant = np.take(constant, indices, axis=axis if axis else 0) ref_net = None if check_ir_version(10, None, ir_version): nodes_attributes = { 'input': { 'kind': 'op', 'type': 'Parameter' }, 'input_data': { 'shape': input_shape, 'kind': 'data' }, 'input_const_data': { 'kind': 'data', 'value': constant.flatten() }, 'const': { 'kind': 'op', 'type': 'Const' }, 'const_data': { 'shape': constant.shape, 'kind': 'data', 'value': None }, 'concat': { 'kind': 'op', 'type': 'Concat', 'axis': concat_axis }, 'concat_data': { 'shape': concat_output_shape, 'kind': 'data' }, 'result': { 'kind': 'op', 'type': 'Result' } } ref_net = build_graph(nodes_attributes, [('input', 'input_data'), ('input_const_data', 'const'), ('const', 'const_data'), ('input_data', 'concat'), ('const_data', 'concat'), ('concat', 'concat_data'), ('concat_data', 'result')]) return onnx_net, ref_net
def create_net_one_const(self, shape1, shape2, ir_version): """ ONNX net IR net Input->Xor with const->Output => Input->LogicalXor """ # # Create ONNX model # from onnx import helper from onnx import TensorProto input = helper.make_tensor_value_info('input', TensorProto.BOOL, shape1) output = helper.make_tensor_value_info('output', TensorProto.BOOL, shape1) const = np.random.randint(0, 2, shape2).astype(np.bool) node_const_def = helper.make_node( 'Constant', inputs=[], outputs=['const'], value=helper.make_tensor( name='const_tensor', data_type=TensorProto.BOOL, dims=const.shape, vals=const.flatten(), ), ) node_def = helper.make_node( 'Xor', inputs=['input', 'const'], outputs=['output'] ) # Create the graph (GraphProto) graph_def = helper.make_graph( [node_const_def, node_def], 'test_model', [input], [output], ) # Create the model (ModelProto) onnx_net = helper.make_model(graph_def, producer_name='test_model') # Create reference IR net ref_net = None if check_ir_version(10, None, ir_version): nodes_attributes = { 'input': {'kind': 'op', 'type': 'Parameter'}, 'input_data': {'shape': shape1, 'kind': 'data'}, 'input_const_data': {'kind': 'data', 'value': const.flatten()}, 'const': {'kind': 'op', 'type': 'Const'}, 'const_data': {'shape': const.shape, 'kind': 'data'}, 'node': {'kind': 'op', 'type': 'LogicalXor'}, 'node_data': {'shape': shape1, 'kind': 'data'}, 'result': {'kind': 'op', 'type': 'Result'} } ref_net = build_graph(nodes_attributes, [('input', 'input_data'), ('input_const_data', 'const'), ('const', 'const_data'), ('input_data', 'node'), ('const_data', 'node'), ('node', 'node_data'), ('node_data', 'result')]) return onnx_net, ref_net
def create_net(self, shape, axis, keepdims, ir_version): """ ONNX net IR net Input->ArgMax->Output => Input->TopK """ # # Create ONNX model # import onnx from onnx import helper from onnx import TensorProto output_shape = shape.copy() output_shape[axis if axis is not None else 0] = 1 output_shape_squeeze = output_shape.copy() if keepdims == 0: output_shape_squeeze.remove(1) input = helper.make_tensor_value_info('input', TensorProto.FLOAT, shape) output = helper.make_tensor_value_info('output', TensorProto.INT64, output_shape_squeeze) const = np.random.randint(-10, 10, output_shape_squeeze).astype(np.int64) args = dict() if axis is not None: args['axis'] = axis else: axis = 0 if keepdims is not None: args['keepdims'] = keepdims node_def = onnx.helper.make_node( 'ArgMax', inputs=['input'], outputs=['argmax' if keepdims is None or keepdims == 1 else 'output'], **args ) edges = [node_def] if keepdims is None or keepdims == 1: node_flatten_def = onnx.helper.make_node( 'Flatten', inputs=['argmax'], outputs=['output'] ) edges.append(node_flatten_def) # Create the graph (GraphProto) graph_def = helper.make_graph( edges, 'test_model', [input], [output], ) # Create the model (ModelProto) onnx_net = helper.make_model(graph_def, producer_name='test_model') # # Create reference IR net # ref_net = None if check_ir_version(10, None, ir_version): nodes_attributes = { 'input': {'kind': 'op', 'type': 'Parameter'}, 'input_data': {'shape': shape, 'kind': 'data'}, 'const_indata': {'shape': [1], 'kind': 'data'}, 'const': {'kind': 'op', 'type': 'Const'}, 'const_data': {'shape': [], 'kind': 'data'}, # TODO shape [] or [1] ?? 'node': {'kind': 'op', 'type': 'TopK'}, 'node_data': {'shape': output_shape, 'kind': 'data'}, 'indices_data': {'shape': output_shape, 'kind': 'data'}, 'result1': {'kind': 'op', 'type': 'Result'}, 'result2': {'kind': 'op', 'type': 'Result'} } edges = [('input', 'input_data'), ('const_indata', 'const'), ('const', 'const_data'), ('input_data', 'node'), ('const_data', 'node'), ('node', 'node_data'), ('node', 'indices_data'), ('node_data', 'result1')] if keepdims == 0: nodes_attributes.update({'squeeze_const_indata': {'shape': [1], 'kind': 'data'}, 'squeeze_const': {'kind': 'op', 'type': 'Const'}, 'squeeze_const_data': {'shape': [1], 'kind': 'data'}, 'squeeze': {'kind': 'op', 'type': 'Squeeze'}, 'squeeze_data': {'shape': output_shape_squeeze, 'kind': 'data'} }) edges.extend([('squeeze_const_indata', 'squeeze_const'), ('squeeze_const', 'squeeze_const_data'), ('indices_data', 'squeeze'), ('squeeze_const_data', 'squeeze'), ('squeeze', 'squeeze_data'), ('squeeze_data', 'result2')]) else: nodes_attributes.update( {'flatten_const_indata': {'kind': 'data', 'value': [0, -1]}, 'flatten_const': {'kind': 'op', 'type': 'Const'}, 'flatten_const_data': {'shape': [2], 'kind': 'data'}, 'flatten': {'kind': 'op', 'type': 'Reshape'}, 'flatten_data': {'shape': [output_shape_squeeze[0], np.prod(output_shape_squeeze[1:])], 'kind': 'data'} }) edges.extend([('indices_data', 'flatten'), ('flatten_const_indata', 'flatten_const'), ('flatten_const', 'flatten_const_data'), ('flatten_const_data', 'flatten'), ('flatten', 'flatten_data'), ('flatten_data', 'result2')]) ref_net = build_graph(nodes_attributes, edges) return onnx_net, ref_net
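# Illustrative sketch (not part of the original tests): ArgMax along an axis is equivalent to taking
# the indices output of a top-1 selection along that axis, which is why the reference graph above is
# built around a TopK node whose indices pass through Squeeze (keepdims=0) or through a Reshape that
# mimics the ONNX Flatten (keepdims=1). The helper name is made up; ties are ignored since the
# example uses random floats.
def _argmax_as_top1_sketch(axis=1):
    import numpy as np
    x = np.random.randn(2, 5).astype(np.float32)
    top1_indices = np.argsort(-x, axis=axis).take([0], axis=axis)   # indices of the top-1 element
    argmax_indices = np.expand_dims(np.argmax(x, axis=axis), axis)
    return np.array_equal(argmax_indices, top1_indices)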
def create_net(self, condition_shape, shape_than, else_shape, ir_version): """ ONNX net IR net Input->Where->Output => Input->Select """ # # Create ONNX model # from onnx import helper from onnx import TensorProto input_cond = helper.make_tensor_value_info('input_cond', TensorProto.BOOL, condition_shape) input_than = helper.make_tensor_value_info('input_than', TensorProto.BOOL, shape_than) input_else = helper.make_tensor_value_info('input_else', TensorProto.BOOL, else_shape) output = helper.make_tensor_value_info('output', TensorProto.BOOL, condition_shape) node_def = helper.make_node( 'Where', inputs=['input_cond', 'input_than', 'input_else'], outputs=['output']) # Create the graph (GraphProto) graph_def = helper.make_graph( [node_def], 'test_model', [input_cond, input_than, input_else], [output], ) # Create the model (ModelProto) onnx_net = helper.make_model(graph_def, producer_name='test_model') # Create reference IR net ref_net = None if check_ir_version(10, None, ir_version): nodes_attributes = { 'input_cond': { 'kind': 'op', 'type': 'Parameter' }, 'input_cond_data': { 'shape': condition_shape, 'kind': 'data' }, 'input_than': { 'kind': 'op', 'type': 'Parameter' }, 'input_than_data': { 'shape': shape_than, 'kind': 'data' }, 'input_else': { 'kind': 'op', 'type': 'Parameter' }, 'input_else_data': { 'shape': else_shape, 'kind': 'data' }, 'node': { 'kind': 'op', 'type': 'Select' }, 'node_data': { 'shape': condition_shape, 'kind': 'data' }, 'result': { 'kind': 'op', 'type': 'Result' } } ref_net = build_graph(nodes_attributes, [('input_cond', 'input_cond_data'), ('input_than', 'input_than_data'), ('input_else', 'input_else_data'), ('input_cond_data', 'node'), ('input_than_data', 'node'), ('input_else_data', 'node'), ('node', 'node_data'), ('node_data', 'result')]) return onnx_net, ref_net
def create_log_softmax_net(self, shape, reduction_axis, ir_version, use_new_frontend):
    """
        Tensorflow net                 IR net

        Input->LogSoftmax       =>     Input->Softmax->Log
    """

    #
    #   Create Tensorflow model
    #

    import tensorflow as tf

    tf.compat.v1.reset_default_graph()

    # Create the graph and model
    with tf.compat.v1.Session() as sess:
        tf_x_shape = shape.copy()

        tf_x_shape = permute_nchw_to_nhwc(tf_x_shape, use_new_frontend)

        input = tf.compat.v1.placeholder(tf.float32, tf_x_shape, 'Input')
        if LooseVersion(tf.__version__) < LooseVersion('2.0.0'):
            tf.nn.log_softmax(input, name='Operation', axis=reduction_axis)
        else:
            tf.nn.log_softmax(input, axis=reduction_axis, name='Operation')

        tf.compat.v1.global_variables_initializer()
        tf_net = sess.graph_def

    ref_net = None

    reduce_sum_shape = np.copy(shape)
    rank = len(shape)
    if rank in {4, 5}:
        reduction_axis = reduction_axis if reduction_axis >= 0 else rank + reduction_axis
        if rank == 4:
            reduction_axis = {0: 0, 1: 2, 2: 3, 3: 1}[reduction_axis]
        else:
            reduction_axis = {0: 0, 1: 2, 2: 3, 3: 4, 4: 1}[reduction_axis]

    reduce_sum_shape[reduction_axis] = 1

    converted_shape = shape if rank != 1 else shape[0]
    if check_ir_version(10, None, ir_version) and not use_new_frontend:
        ref_nodes_attributes = {
            'input': {'kind': 'op', 'type': 'Parameter', 'shape': converted_shape},
            'input_data': {'shape': shape, 'kind': 'data', 'value': None},
            'reduce_max_axis_val': {'shape': int64_array([reduction_axis]).shape, 'kind': 'data',
                                    'value': int64_array([reduction_axis])},
            'reduce_max_axis': {'type': 'Const', 'kind': 'op', 'shape': 1},
            'reduce_max_axis_data': {'shape': int64_array([1]), 'kind': 'data', 'value': None},
            'reduce_max': {'type': 'ReduceMax', 'kind': 'op', 'keep_dims': True},
            'reduce_max_data': {'shape': reduce_sum_shape, 'kind': 'data', 'value': None},
            'sub_first': {'type': 'Subtract', 'kind': 'op'},
            'sub_first_data': {'shape': shape, 'kind': 'data', 'value': None},
            'reduce_sum_axis_val': {'shape': int64_array([reduction_axis]).shape, 'kind': 'data',
                                    'value': int64_array([reduction_axis])},
            'reduce_sum_axis': {'type': 'Const', 'kind': 'op', 'shape': 1},
            'reduce_sum_axis_data': {'shape': int64_array([1]), 'kind': 'data', 'value': None},
            'reduce_sum': {'type': 'ReduceSum', 'kind': 'op', 'keep_dims': True},
            'reduce_sum_data': {'shape': reduce_sum_shape, 'kind': 'data', 'value': None},
            'exp': {'type': 'Exp', 'kind': 'op'},
            'exp_data': {'shape': shape, 'kind': 'data', 'value': None},
            'log': {'type': 'Log', 'kind': 'op'},
            'log_data': {'shape': reduce_sum_shape, 'kind': 'data', 'value': None},
            'sub_second': {'type': 'Subtract', 'kind': 'op'},
            'sub_second_data': {'shape': shape, 'kind': 'data', 'value': None},
            'result': {'kind': 'op', 'type': 'Result'},
        }

        ref_edges = [
            ('input', 'input_data'),
            ('reduce_max_axis_val', 'reduce_max_axis'),
            ('reduce_max_axis', 'reduce_max_axis_data'),
            ('reduce_max_axis_data', 'reduce_max', {'in': 1}),
            ('reduce_max', 'reduce_max_data'),
            ('input_data', 'reduce_max', {'out': 0, 'in': 0}),
            ('input_data', 'sub_first', {'out': 0, 'in': 0}),
            ('reduce_max_data', 'sub_first', {'in': 1}),
            ('sub_first', 'sub_first_data'),
            ('reduce_sum_axis_val', 'reduce_sum_axis'),
            ('reduce_sum_axis', 'reduce_sum_axis_data'),
            ('reduce_sum_axis_data', 'reduce_sum', {'in': 1}),
            ('reduce_sum', 'reduce_sum_data'),
            ('sub_first_data', 'exp'),
            ('exp', 'exp_data'),
            ('exp_data', 'reduce_sum', {'in': 0}),
            ('reduce_sum_data', 'log'),
            ('log', 'log_data'),
            ('log_data', 'sub_second', {'in': 1}),
            ('sub_second', 'sub_second_data'),
            ('sub_first_data', 'sub_second', {'out': 0, 'in': 0}),
            ('sub_second_data', 'result'),
        ]

        ref_net = build_graph(ref_nodes_attributes, ref_edges)

    return tf_net, ref_net
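# Illustrative sketch (not part of the original tests): the reference graph above decomposes
# log_softmax(x) as (x - max(x)) - log(sum(exp(x - max(x)))) along the chosen axis; subtracting
# the maximum first keeps exp() from overflowing. The helper name and example values are arbitrary.
def _log_softmax_decomposition_sketch(axis=-1):
    import numpy as np
    x = np.random.randn(2, 5).astype(np.float32)
    shifted = x - np.max(x, axis=axis, keepdims=True)
    return shifted - np.log(np.sum(np.exp(shifted), axis=axis, keepdims=True))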
def create_net(self, shape, mode, pads, value, ir_version, opset=None): """ ONNX net IR net Input->Pad->Output => Input->Pad """ # # Create ONNX model # import onnx from onnx import helper from onnx import TensorProto _pads = np.array(pads).reshape([2, -1]) output_shape = (np.array(shape) + _pads[0, :] + _pads[1, :]).tolist() input = helper.make_tensor_value_info('input', TensorProto.FLOAT, shape) output = helper.make_tensor_value_info('output', TensorProto.FLOAT, output_shape) nodes = [] if opset is not None and opset < 11: args = dict(pads=pads) if mode: args['mode'] = mode if value: args['value'] = value node_def = onnx.helper.make_node('Pad', inputs=['input'], outputs=['pad'], **args) nodes.append(node_def) else: node_pads_def = helper.make_node( 'Constant', inputs=[], outputs=['pads'], value=helper.make_tensor( name='const_tensor', data_type=TensorProto.INT64, dims=[len(pads)], vals=pads, ), ) inputs = ['input', 'pads'] if value is not None: node_value_def = helper.make_node( 'Constant', inputs=[], outputs=['value'], value=helper.make_tensor( name='const_tensor', data_type=TensorProto.FLOAT, dims=[], vals=[value], ), ) inputs.append('value') nodes.append(node_value_def) args = dict() if mode: args['mode'] = mode node_def = onnx.helper.make_node('Pad', inputs=inputs, outputs=['pad'], **args) nodes.extend([node_pads_def, node_def]) sigmoid_def = onnx.helper.make_node('Elu', inputs=['pad'], outputs=['output']) nodes.append(sigmoid_def) # Create the graph (GraphProto) graph_def = helper.make_graph( nodes, 'test_model', [input], [output], ) # Create the model (ModelProto) args = dict(producer_name='test_model') if opset: args['opset_imports'] = [helper.make_opsetid("", opset)] onnx_net = helper.make_model(graph_def, **args) # # Create reference IR net # ref_net = None if check_ir_version(10, None, ir_version): nodes_attributes = { 'input': { 'kind': 'op', 'type': 'Parameter' }, 'input_data': { 'shape': shape, 'kind': 'data' }, 'pads_begin_indata': { 'value': _pads[0, :], 'kind': 'data' }, 'pads_begin': { 'kind': 'op', 'type': 'Const' }, 'pads_begin_data': { 'shape': [len(_pads[0, :])], 'kind': 'data' }, 'pads_end_indata': { 'value': _pads[1, :], 'kind': 'data' }, 'pads_end': { 'kind': 'op', 'type': 'Const' }, 'pads_end_data': { 'shape': [len(_pads[1, :])], 'kind': 'data' }, 'node': { 'kind': 'op', 'type': 'Pad', 'pad_mode': 'constant' if not mode else mode }, 'node_data': { 'shape': output_shape, 'kind': 'data' }, 'elu': { 'kind': 'op', 'type': 'Elu' }, 'elu_data': { 'shape': output_shape, 'kind': 'data' }, 'result': { 'kind': 'op', 'type': 'Result' } } edges = [('input', 'input_data'), ('input_data', 'node'), ('pads_begin_indata', 'pads_begin'), ('pads_begin', 'pads_begin_data'), ('pads_begin_data', 'node'), ('pads_end_indata', 'pads_end'), ('pads_end', 'pads_end_data'), ('pads_end_data', 'node'), ('node', 'node_data'), ('node_data', 'elu'), ('elu', 'elu_data'), ('elu_data', 'result')] if mode in (None, "constant"): nodes_attributes.update({ 'const_node_indata': { 'value': value, 'kind': 'data' }, 'const_node': { 'kind': 'op', 'type': 'Const' }, 'const_node_data': { 'shape': None, 'kind': 'data' } }) edges += [('const_node_indata', 'const_node'), ('const_node', 'const_node_data'), ('const_node_data', 'node')] ref_net = build_graph(nodes_attributes, edges) return onnx_net, ref_net
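# Illustrative sketch (not part of the original tests): ONNX stores pads as
# [x1_begin, x2_begin, ..., x1_end, x2_end, ...], so the padded output shape computed above is
# input_shape + pads_begin + pads_end per dimension. The helper name and defaults are arbitrary.
def _pad_output_shape_sketch(shape=(1, 3, 10, 10), pads=(0, 0, 1, 2, 0, 0, 3, 4)):
    import numpy as np
    _pads = np.array(pads).reshape([2, -1])   # row 0: per-dim begins, row 1: per-dim ends
    return (np.array(shape) + _pads[0, :] + _pads[1, :]).tolist()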
def create_topK_net(shape, k, ir_version, use_new_frontend): """ Tensorflow net: |-> Values Input -> TopK | |-> Indices IR net: |-> Values Input -> TopK | |-> Indices """ # # Create Tensorflow model # import tensorflow as tf tf.compat.v1.reset_default_graph() # Create the graph and model with tf.compat.v1.Session() as sess: shape_net = permute_nchw_to_nhwc(shape) input_tensor = tf.compat.v1.placeholder(tf.int32, shape=shape_net, name='Input') values, indices = tf.nn.top_k(input_tensor, k=k, sorted=True, name='Operation') tf.compat.v1.global_variables_initializer() tf_net = sess.graph_def # # Create reference IR net # topk_output_shape = shape.copy() inverse_nhwc_nchw = PermuteAttrs.get_nhwc_to_nchw_permutation(len(topk_output_shape)).inv topk_axis = permute_axis(len(topk_output_shape) - 1, inverse_nhwc_nchw) # we need to permute axis attribute topk_output_shape[topk_axis] = k ref_net = None if check_ir_version(10, None, ir_version) and not use_new_frontend: nodes_attributes = { 'input': {'kind': 'op', 'type': 'Parameter'}, 'input_data': {'shape': shape, 'kind': 'data'}, 'Const_k_input_data': {'shape': [], 'kind': 'data'}, 'Const_k': {'kind': 'op', 'type': 'Const'}, 'Const_k_data': {'shape': [], 'kind': 'data'}, 'TopK': {'kind': 'op', 'type': 'TopK', 'axis': topk_axis, 'mode': 'max', 'sort': 'value'}, 'TopK_data_1': {'shape': topk_output_shape, 'kind': 'data'}, 'TopK_data_2': {'shape': topk_output_shape, 'kind': 'data'}, 'result_1': {'kind': 'op', 'type': 'Result'}, 'result_2': {'kind': 'op', 'type': 'Result'}, } ref_net = build_graph(nodes_attributes, [('input', 'input_data'), ('input_data', 'TopK', {'in': 0}), ('Const_k_input_data', 'Const_k'), ('Const_k', 'Const_k_data'), ('Const_k_data', 'TopK', {'in': 1}), ('TopK', 'TopK_data_1', {'out': 0}), ('TopK', 'TopK_data_2', {'out': 1}), ('TopK_data_1', 'result_1'), ('TopK_data_2', 'result_2'), ]) return tf_net, ref_net
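# Illustrative sketch (not part of the original tests): when the test framework permutes the input
# from NHWC to NCHW, an axis index has to be remapped to the position that dimension occupies after
# the permutation; for rank 4 this is the {0: 0, 1: 2, 2: 3, 3: 1} mapping used in the log-softmax
# test above, and the TopK test applies the same idea through permute_axis. A minimal stand-in,
# not the framework's actual helper:
def _permute_axis_sketch(axis, rank=4):
    nhwc_to_nchw = [0, rank - 1] + list(range(1, rank - 1))   # e.g. [0, 3, 1, 2] for rank 4
    return nhwc_to_nchw.index(axis)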