def test_dequantize_with_axis(self, input_shape, scale_param_value,
                                  zero_param_value, target_shape, axis):
        graph = build_graph(nodes1_attributes, [
            ('input', 'input_data'),
            ('input_data', 'dequantize'),
            ('dequantize', 'dequantize_data'),
            ('scale_param_dq', 'scale_param_dq_data'),
            ('zerop_param_dq', 'zerop_param_dq_data'),
            ('scale_param_dq_data', 'dequantize'),
            ('zerop_param_dq_data', 'dequantize'),
            ('dequantize_data', 'out'),
            ('out', 'out_data'),
            ('out_data', 'result'),
        ], {
            'input_data': {
                'shape': input_shape
            },
            'dequantize': {
                'axis': axis
            },
            'scale_param_dq': {
                'shape': scale_param_value.shape,
                'value': scale_param_value
            },
            'scale_param_dq_data': {
                'shape': scale_param_value.shape,
                'value': scale_param_value
            },
            'zerop_param_dq': {
                'shape': zero_param_value.shape,
                'value': zero_param_value
            },
            'zerop_param_dq_data': {
                'shape': zero_param_value.shape,
                'value': zero_param_value
            },
        },
                            nodes_with_edges_only=True)

        graph_ref = build_graph(nodes_ref_attributes, [
            ('input', 'input_data'),
            ('input_data', 'cast'),
            ('cast', 'cast_data'),
            ('cast_data', 'sub'),
            ('zerop_param_dq', 'zerop_param_dq_data'),
            ('zerop_param_dq_data', 'sub_reshape'),
            ('sub_reshape_const', 'sub_reshape_const_data'),
            ('sub_reshape_const_data', 'sub_reshape'),
            ('sub_reshape', 'sub_reshape_data'),
            ('sub_reshape_data', 'sub'),
            ('sub', 'sub_data'),
            ('sub_data', 'mul'),
            ('scale_param_dq', 'scale_param_dq_data'),
            ('scale_param_dq_data', 'mul_reshape'),
            ('mul_reshape_const', 'mul_reshape_const_data'),
            ('mul_reshape_const_data', 'mul_reshape'),
            ('mul_reshape', 'mul_reshape_data'),
            ('mul_reshape_data', 'mul'),
            ('mul', 'mul_data'),
            ('mul_data', 'out'),
            ('out', 'out_data'),
            ('out_data', 'result'),
        ], {
            'input_data': {
                'shape': input_shape
            },
            'scale_param_dq': {
                'shape': scale_param_value.shape,
                'value': scale_param_value
            },
            'scale_param_dq_data': {
                'shape': scale_param_value.shape,
                'value': scale_param_value
            },
            'zerop_param_dq': {
                'shape': zero_param_value.shape,
                'value': zero_param_value
            },
            'zerop_param_dq_data': {
                'shape': zero_param_value.shape,
                'value': zero_param_value
            },
            'sub_reshape_const_data': {
                'shape': target_shape.shape,
                'value': target_shape
            },
            'mul_reshape_const_data': {
                'shape': target_shape.shape,
                'value': target_shape
            },
        },
                                nodes_with_edges_only=True)

        graph.stage = 'middle'
        DequantizeLinearResolver().find_and_replace_pattern(graph)

        (flag, resp) = compare_graphs(graph,
                                      graph_ref,
                                      'out',
                                      check_op_attrs=True)
        self.assertTrue(flag, resp)
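The reference graph above encodes the ONNX DequantizeLinear formula y = (x - zero_point) * scale, with the per-axis parameters reshaped so they broadcast along `axis`. A minimal NumPy sketch of that decomposition (an illustration only, not the resolver code; the helper name is made up):

import numpy as np

def dequantize_reference(x, scale, zero_point, axis):
    # Reshape the 1D per-axis parameters so they broadcast along `axis`,
    # mirroring the Reshape nodes in the reference graph above.
    target_shape = np.ones(x.ndim, dtype=np.int64)
    target_shape[axis] = -1
    x_f = x.astype(np.float32)                       # Cast
    x_f = x_f - zero_point.reshape(target_shape)     # Sub
    return x_f * scale.reshape(target_shape)         # Mul

x = np.array([[0, 128, 255]], dtype=np.uint8)
print(dequantize_reference(x, np.array([0.5, 1.0, 2.0]),
                           np.array([128, 128, 128]), axis=1))
# [[-64.   0. 254.]]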
Example 2
    def test_mega_hardcore(self):
        #   ORIGINAL GRAPH
        #
        #   data1(1,3,64,64)---,->Eltwise1->data(1,3,64,64)-----,->Eltwise2->data(1,3,64,64)---,->Eltwise4->data(1,3,64,64)
        #                     /\                               /\                             /\
        #   data2(64,1)-----,-'--------------------------------'------------------------------'
        #                  \/                                 /
        #   data3(64,1)----`-->Eltwise3->data(64,1)----------'
        #
        #   REFERENCE GRAPH AFTER TRANSFORMATION
        #
        #   data1(1,3,64,64)---------------------,->Eltwise1->data(1,3,64,64)-----,->Eltwise2->data(1,3,64,64)---,->Eltwise4->data(1,3,64,64)
        #                                       /\                               /\                              /\
        #   data2(64,1)-,- Reshape1(1,1,64,1)---'--------------------------------o-------------------------------'
        #               |                                                        |
        #               |                                                Reshape(1,1,64,1)
        #              \/                                                        |
        #   data3(64,1)----------->Eltwise3->data(64,1)--------------------------'
        #
        graph = build_graph(nodes_attributes, [
            ('placeholder_1', 'placeholder_1_data'),
            ('placeholder_2', 'placeholder_2_data'),
            ('placeholder_3', 'placeholder_3_data'),
            ('placeholder_1_data', 'eltwise_1'),
            ('placeholder_2_data', 'eltwise_1'),
            ('eltwise_1', 'eltwise_1_data'),
            ('eltwise_1_data', 'eltwise_2'),
            ('placeholder_2_data', 'eltwise_3'),
            ('placeholder_3_data', 'eltwise_3'),
            ('eltwise_3', 'eltwise_3_data'),
            ('eltwise_3_data', 'eltwise_2'),
            ('eltwise_2', 'eltwise_2_data'),
            ('eltwise_2_data', 'eltwise_4'),
            ('placeholder_2_data', 'eltwise_4'),
            ('eltwise_4', 'eltwise_4_data'),
        ], {
            'placeholder_1_data': {
                'shape': np.array([1, 3, 64, 64])
            },
            'placeholder_2_data': {
                'shape': np.array([64, 1]),
                'value': np.ones([64, 1])
            },
            'placeholder_3_data': {
                'shape': np.array([64, 1])
            },
            'eltwise_1_data': {
                'shape': np.array([1, 3, 64, 64])
            },
            'eltwise_2_data': {
                'shape': np.array([1, 3, 64, 64])
            },
            'eltwise_3_data': {
                'shape': np.array([64, 1])
            },
            'eltwise_4_data': {
                'shape': np.array([1, 3, 64, 64])
            }
        },
                            nodes_with_edges_only=True)

        graph_ref = build_graph(nodes_attributes, [
            ('placeholder_1', 'placeholder_1_data'),
            ('placeholder_2', 'placeholder_2_data'),
            ('placeholder_3', 'placeholder_3_data'),
            ('placeholder_1_data', 'eltwise_1'),
            ('placeholder_2_data', 'reshape_1'),
            ('reshape_1_const', 'reshape_1_const_data'),
            ('reshape_1_const_data', 'reshape_1'),
            ('reshape_1', 'reshape_1_data'),
            ('reshape_1_data', 'eltwise_1'),
            ('eltwise_1', 'eltwise_1_data'),
            ('eltwise_1_data', 'eltwise_2'),
            ('placeholder_2_data', 'eltwise_3'),
            ('placeholder_3_data', 'eltwise_3'),
            ('eltwise_3', 'eltwise_3_data'),
            ('eltwise_3_data', 'reshape_2'),
            ('reshape_2_const', 'reshape_2_const_data'),
            ('reshape_2_const_data', 'reshape_2'),
            ('reshape_2', 'reshape_2_data'),
            ('reshape_2_data', 'eltwise_2'),
            ('eltwise_2', 'eltwise_2_data'),
            ('eltwise_2_data', 'eltwise_4'),
            ('reshape_1_data', 'eltwise_4'),
            ('eltwise_4', 'eltwise_4_data'),
        ], {
            'placeholder_1_data': {
                'shape': np.array([1, 3, 64, 64])
            },
            'placeholder_2_data': {
                'shape': np.array([64, 1]),
                'value': np.ones([64, 1])
            },
            'placeholder_3_data': {
                'shape': np.array([64, 1])
            },
            'reshape_1_const': {
                'value': int64_array([0, 1]),
                'shape': int64_array([2])
            },
            'reshape_1_const_data': {
                'value': int64_array([0, 1]),
                'shape': int64_array([2])
            },
            'reshape_1_data': {
                'shape': np.array([1, 1, 64, 1])
            },
            'reshape_2_const': {
                'value': int64_array([0, 1]),
                'shape': int64_array([2])
            },
            'reshape_2_const_data': {
                'value': int64_array([0, 1]),
                'shape': int64_array([2])
            },
            'reshape_2_data': {
                'shape': np.array([1, 1, 64, 1])
            },
            'eltwise_1_data': {
                'shape': np.array([1, 3, 64, 64])
            },
            'eltwise_2_data': {
                'shape': np.array([1, 3, 64, 64])
            },
            'eltwise_3_data': {
                'shape': np.array([64, 1])
            },
            'eltwise_4_data': {
                'shape': np.array([1, 3, 64, 64])
            }
        },
                                nodes_with_edges_only=True)

        normalize_eltwise_inputs(graph)

        (flag, resp) = compare_graphs(graph,
                                      graph_ref,
                                      'eltwise_4',
                                      check_op_attrs=True)
        self.assertTrue(flag, resp)
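The inserted Reshape nodes only align ranks: once data2/data3 go from (64, 1) to (1, 1, 64, 1), plain NumPy-style broadcasting against the (1, 3, 64, 64) tensors yields the shapes asserted above. A tiny illustration (assuming NumPy broadcasting semantics for the eltwise):

import numpy as np

data1 = np.zeros((1, 3, 64, 64))
data2 = np.ones((64, 1))

aligned = data2.reshape(1, 1, 64, 1)   # what reshape_1 / reshape_2 produce
print((data1 + aligned).shape)         # (1, 3, 64, 64)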
Example 3
    def test7_axis1_not_constant(self):
        #
        #   data1(1,3,64,64)----.                                                    data(1,3,64,64)-------.
        #   data2(3,64,1)-------->Eltwise-->data(1,3,64,64)=> data(3,64,1)->Unsqueeze(0)->data(1,3,64,1)-->Eltwise->...
        #   data3(3,1)------'                                    data(3,1)->Unsqueeze(2, 0)->data(1,3,1,1)-'
        #
        graph = build_graph(nodes_attributes,
                            [('placeholder_1', 'placeholder_1_data'),
                             ('placeholder_2', 'placeholder_2_data'),
                             ('placeholder_3', 'placeholder_3_data'),
                             ('placeholder_1_data', 'eltwise_1'),
                             ('placeholder_2_data', 'eltwise_1'),
                             ('placeholder_3_data', 'eltwise_1'),
                             ('eltwise_1', 'eltwise_1_data')], {
                                 'placeholder_1_data': {
                                     'shape': np.array([1, 3, 64, 64])
                                 },
                                 'placeholder_2_data': {
                                     'shape': np.array([3, 64, 1])
                                 },
                                 'placeholder_3_data': {
                                     'shape': np.array([3, 1])
                                 },
                                 'eltwise_1_data': {
                                     'shape': np.array([1, 3, 64, 64])
                                 },
                                 'eltwise_1': {
                                     'axis': 1
                                 }
                             },
                            nodes_with_edges_only=True)

        graph_ref = build_graph(nodes_attributes,
                                [('placeholder_1', 'placeholder_1_data'),
                                 ('placeholder_2', 'placeholder_2_data'),
                                 ('placeholder_3', 'placeholder_3_data'),
                                 ('placeholder_1_data', 'eltwise_1'),
                                 ('placeholder_2_data', 'reshape_1'),
                                 ('reshape_1_const', 'reshape_1_const_data'),
                                 ('reshape_1_const_data', 'reshape_1'),
                                 ('placeholder_3_data', 'reshape_2'),
                                 ('reshape_2_const', 'reshape_2_const_data'),
                                 ('reshape_2_const_data', 'reshape_2'),
                                 ('reshape_1', 'reshape_1_data'),
                                 ('reshape_2', 'reshape_2_data'),
                                 ('reshape_1_data', 'eltwise_1'),
                                 ('reshape_2_data', 'eltwise_1'),
                                 ('eltwise_1', 'eltwise_1_data')], {
                                     'placeholder_1_data': {
                                         'shape': np.array([1, 3, 64, 64])
                                     },
                                     'placeholder_2_data': {
                                         'shape': np.array([3, 64, 1])
                                     },
                                     'placeholder_3_data': {
                                         'shape': np.array([3, 1])
                                     },
                                     'reshape_1_const': {
                                         'value': int64_array([0]),
                                         'shape': int64_array([1])
                                     },
                                     'reshape_1_const_data': {
                                         'value': int64_array([0]),
                                         'shape': int64_array([1])
                                     },
                                     'reshape_1_data': {
                                         'shape': np.array([1, 3, 64, 1])
                                     },
                                     'reshape_2_const': {
                                         'value': int64_array([2, 0]),
                                         'shape': int64_array([2])
                                     },
                                     'reshape_2_const_data': {
                                         'value': int64_array([2, 0]),
                                         'shape': int64_array([2])
                                     },
                                     'reshape_2_data': {
                                         'shape': np.array([1, 3, 1, 1])
                                     },
                                     'eltwise_1_data': {
                                         'shape': np.array([1, 3, 64, 64])
                                     }
                                 },
                                nodes_with_edges_only=True)

        normalize_eltwise_inputs(graph)

        (flag, resp) = compare_graphs(graph,
                                      graph_ref,
                                      'eltwise_1',
                                      check_op_attrs=True)
        self.assertTrue(flag, resp)
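The Unsqueeze axes in the diagram follow from the eltwise axis attribute: with axis=1 the (3, 64, 1) input already lines up with dimension 1 and only needs a leading 1, while the (3, 1) input also needs a trailing 1 to reach rank 4. A short NumPy illustration of that alignment (assuming NumPy broadcasting for the eltwise):

import numpy as np

full = np.zeros((1, 3, 64, 64))
b = np.zeros((3, 64, 1))                              # data2
c = np.zeros((3, 1))                                  # data3

b_aligned = np.expand_dims(b, 0)                      # Unsqueeze(0)    -> (1, 3, 64, 1)
c_aligned = np.expand_dims(np.expand_dims(c, 2), 0)   # Unsqueeze(2, 0) -> (1, 3, 1, 1)
print((full + b_aligned + c_aligned).shape)           # (1, 3, 64, 64)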
Example 4
    def create_net(self, shape, ir_version):
        """
            ONNX net                   IR net

            Input->Not->Output   =>    Input->LogicalNot

        """

        #
        #   Create ONNX model
        #

        import onnx
        from onnx import helper
        from onnx import TensorProto

        input = helper.make_tensor_value_info('input', TensorProto.BOOL, shape)
        output = helper.make_tensor_value_info('output', TensorProto.BOOL,
                                               shape)

        node_def = onnx.helper.make_node('Not',
                                         inputs=['input'],
                                         outputs=['output'])

        # Create the graph (GraphProto)
        graph_def = helper.make_graph(
            [node_def],
            'test_model',
            [input],
            [output],
        )

        # Create the model (ModelProto)
        onnx_net = helper.make_model(graph_def, producer_name='test_model')

        #
        #   Create reference IR net
        #

        ref_net = None
        if check_ir_version(10, None, ir_version):
            nodes_attributes = {
                'input': {
                    'kind': 'op',
                    'type': 'Parameter'
                },
                'input_data': {
                    'shape': shape,
                    'kind': 'data'
                },
                'node': {
                    'kind': 'op',
                    'type': 'LogicalNot'
                },
                'node_data': {
                    'shape': shape,
                    'kind': 'data'
                },
                'result': {
                    'kind': 'op',
                    'type': 'Result'
                }
            }
            ref_net = build_graph(nodes_attributes, [('input', 'input_data'),
                                                     ('input_data', 'node'),
                                                     ('node', 'node_data'),
                                                     ('node_data', 'result')])

        return onnx_net, ref_net
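A hedged usage sketch, not part of the layer-test harness: the kind of ONNX model create_net builds can be validated with onnx.checker and, if onnxruntime is installed, executed to confirm the element-wise NOT behaviour. The shape and graph name below are illustrative.

import numpy as np
import onnx
import onnxruntime as rt
from onnx import helper, TensorProto

inp = helper.make_tensor_value_info('input', TensorProto.BOOL, [2, 3])
out = helper.make_tensor_value_info('output', TensorProto.BOOL, [2, 3])
node = helper.make_node('Not', inputs=['input'], outputs=['output'])
model = helper.make_model(helper.make_graph([node], 'not_model', [inp], [out]),
                          producer_name='test_model')
onnx.checker.check_model(model)

sess = rt.InferenceSession(model.SerializeToString(),
                           providers=['CPUExecutionProvider'])
data = np.random.rand(2, 3) > 0.5
print(sess.run(None, {'input': data})[0])   # element-wise logical NOT of `data`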
Example 5
    def test_conv_infer_3D_convolution(self):
        graph = build_graph(
            nodes_attributes, [('conv_input', 'conv_node'),
                               ('conv_weights', 'conv_node'),
                               ('conv_node', 'conv_output'),
                               ('conv_output', 'op_output')],
            {
                'conv_output': {
                    'shape': None
                },
                'conv_input': {
                    'shape': int64_array([1, 3, 16, 224, 224])
                },
                'conv_weights': {
                    'shape':
                    int64_array([3, 64, 1, 7, 7]),
                    'dim_attrs':
                    ['spatial_dims', 'channel_dims', 'batch_dims', 'axis']
                },
                'conv_node': {
                    'type': 'Convolution',
                    'bias_term': None,
                    'stride': None,
                    'dilation': None,
                    'batch_dims': int64_array([0]),
                    'channel_dims': int64_array([1]),
                    'output_spatial_shape': None,
                    'input_feature_channel': 0,
                    'output_feature_channel': 1,
                    'group': 1,
                    'output_shape': None,
                    'layout': 'NCHW'
                }
            })

        conv_node = Node(graph, 'conv_node')
        conv_output = Node(graph, 'conv_output')

        Convolution.infer(conv_node)

        # Check bias_term attribute
        self.assertTrue(conv_node.has_valid('bias_term'))
        self.assertTrue(not conv_node.bias_term)
        # Check kernel_spatial_idx attr detection
        self.assertTrue(conv_node.has_valid('kernel_spatial_idx'))
        self.assertTrue(
            np.array_equal(int64_array([2, 3, 4]),
                           conv_node.kernel_spatial_idx))
        # Check spatial_dims attr detection
        self.assertTrue(conv_node.has_valid('spatial_dims'))
        self.assertTrue(
            np.array_equal(int64_array([2, 3, 4]), conv_node.spatial_dims))
        # Check kernel_spatial attr detection
        self.assertTrue(conv_node.has_valid('kernel_spatial'))
        self.assertTrue(
            np.array_equal(int64_array([1, 7, 7]), conv_node.kernel_spatial))
        # Check output attribute
        self.assertTrue(conv_node.has_valid('output'))
        self.assertEqual(64, conv_node.output)
        # Check dilation value. Should be set to default
        self.assertTrue(conv_node.has_valid('dilation'))
        self.assertTrue(
            np.array_equal(int64_array([1, 1, 1, 1, 1]), conv_node.dilation))
        # Check stride value. Should be set to default
        self.assertTrue(conv_node.has_valid('stride'))
        self.assertTrue(
            np.array_equal(int64_array([1, 1, 1, 1, 1]), conv_node.stride))
        # Check pad value. Should be set to default
        self.assertTrue(conv_node.has_valid('pad'))
        self.assertTrue(
            np.array_equal(
                int64_array([[0, 0], [0, 0], [0, 0], [0, 0], [0, 0]]),
                conv_node.pad))
        # Check pad_spatial_shape
        self.assertTrue(conv_node.has_valid('pad_spatial_shape'))
        self.assertTrue(
            np.array_equal(int64_array([[0, 0], [0, 0], [0, 0]]),
                           conv_node.pad_spatial_shape))
        # Check resulting output shape
        self.assertTrue(
            np.array_equal(int64_array([1, 64, 16, 218, 218]),
                           conv_output.shape))
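The asserted shape [1, 64, 16, 218, 218] follows from the standard convolution size formula applied per spatial dimension; a worked check (not the inference code itself):

def conv_out_size(i, k, stride=1, dilation=1, pad_begin=0, pad_end=0):
    # out = (in + pads - dilation * (kernel - 1) - 1) // stride + 1
    return (i + pad_begin + pad_end - dilation * (k - 1) - 1) // stride + 1

# kernel_spatial is [1, 7, 7] for the D, H, W dims of the [1, 3, 16, 224, 224] input
print([conv_out_size(i, k) for i, k in [(16, 1), (224, 7), (224, 7)]])   # [16, 218, 218]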
Example 6
    def create_net(self, shape, softmax_axis, ir_version):
        """
            ONNX net                       IR net

            Input->Softmax->Output   =>    Input->Reshape->SoftMax->Reshape

        """

        #
        #   Create ONNX model
        #

        import onnx
        from onnx import helper
        from onnx import TensorProto

        input = helper.make_tensor_value_info('input', TensorProto.FLOAT, shape)
        output = helper.make_tensor_value_info('output', TensorProto.FLOAT, shape)

        node_def = onnx.helper.make_node(
            'Softmax',
            inputs=['input'],
            outputs=['output'],
            axis=softmax_axis
        )

        # Create the graph (GraphProto)
        graph_def = helper.make_graph(
            [node_def],
            'test_model',
            [input],
            [output],
        )

        # Create the model (ModelProto)
        onnx_net = helper.make_model(graph_def, producer_name='test_model')

        #
        #   Create reference IR net
        #

        ref_net = None

        converted_shape = shape if len(shape) != 1 else shape[0]
        flatten_shape = get_flatten_shape(shape, softmax_axis)
        reshape_data_val = second_input_data_of_reshape(shape, softmax_axis)

        if check_ir_version(10, None, ir_version):
            if len(shape) == 2 and shape == flatten_shape:
                ref_nodes_attributes = {
                    'input': {'kind': 'op', 'type': 'Parameter', 'shape': converted_shape},
                    'input_data': {'shape': shape, 'kind': 'data', 'value': None},
                    'flatten_shape_val': {'shape': int64_array(reshape_data_val).shape,
                                          'kind': 'data',
                                          'value': int64_array(reshape_data_val)},
                    'flatten_shape': {'type': 'Const', 'kind': 'op', 'shape': 2},
                    'flatten_shape_data': {'shape': int64_array([2]), 'kind': 'data', 'value': None},
                    'reshape': {'kind': 'op', 'type': 'Reshape'},
                    'reshape_data': {'kind': 'data', 'shape': flatten_shape, 'value': None},
                    'softmax': {'type': 'SoftMax', 'kind': 'op', 'axis': 1},
                    'softmax_data': {'shape': flatten_shape, 'kind': 'data', 'value': None},
                    'result': {'kind': 'op', 'type': 'Result'},
                }

                ref_edges = [
                    ('input', 'input_data'),
                    ('flatten_shape_val', 'flatten_shape'),
                    ('flatten_shape', 'flatten_shape_data'),
                    ('flatten_shape_data', 'reshape', {'in': 1}),
                    ('input_data', 'reshape', {'in': 0}),
                    ('reshape', 'reshape_data'),
                    ('reshape_data', 'softmax'),
                    ('softmax', 'softmax_data'),
                    ('softmax_data', 'result'),
                ]
            else:
                ref_nodes_attributes = {
                    'input': {'kind': 'op', 'type': 'Parameter', 'shape': converted_shape},
                    'input_data': {'shape': shape, 'kind': 'data', 'value': None},
                    'flatten_shape_val': {'shape': int64_array(reshape_data_val).shape,
                                          'kind': 'data',
                                          'value': int64_array(reshape_data_val)},
                    'flatten_shape': {'type': 'Const', 'kind': 'op', 'shape': 2},
                    'flatten_shape_data': {'shape': int64_array([2]), 'kind': 'data', 'value': None},
                    'reshape': {'kind': 'op', 'type': 'Reshape'},
                    'reshape_data': {'kind': 'data', 'shape': flatten_shape, 'value': None},
                    'softmax': {'type': 'SoftMax', 'kind': 'op', 'axis': 1},
                    'softmax_data': {'shape': flatten_shape, 'kind': 'data', 'value': None},
                    'last_shape_val': {'shape': int64_array(shape).shape, 'kind': 'data', 'value': int64_array(shape)},
                    'last_shape': {'type': 'Const', 'kind': 'op', 'shape': len(shape)},
                    'last_shape_data': {'shape': int64_array([len(shape)]), 'kind': 'data', 'value': None},
                    'last_reshape': {'kind': 'op', 'type': 'Reshape'},
                    'last_reshape_data': {'kind': 'data', 'shape': shape, 'value': None},
                    'result': {'kind': 'op', 'type': 'Result'},
                }

                ref_edges = [
                    ('input', 'input_data'),
                    ('flatten_shape_val', 'flatten_shape'),
                    ('flatten_shape', 'flatten_shape_data'),
                    ('flatten_shape_data', 'reshape', {'in': 1}),
                    ('input_data', 'reshape', {'in': 0}),
                    ('reshape', 'reshape_data'),
                    ('reshape_data', 'softmax'),
                    ('softmax', 'softmax_data'),
                    ('last_shape_val', 'last_shape'),
                    ('last_shape', 'last_shape_data'),
                    ('last_shape_data', 'last_reshape', {'in': 1}),
                    ('softmax_data', 'last_reshape', {'in': 0}),
                    ('last_reshape', 'last_reshape_data'),
                    ('last_reshape_data', 'result'),
                ]

            ref_net = build_graph(ref_nodes_attributes, ref_edges)

        return onnx_net, ref_net
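The helpers used above are assumed to implement the usual ONNX Softmax (opset < 13) coercion to 2D: dimensions before axis are flattened into the first dimension, the rest into the second, SoftMax is applied with axis=1, and the result is reshaped back. A hypothetical re-implementation of get_flatten_shape, for illustration only:

import numpy as np

def get_flatten_shape(shape, axis):
    # Collapse dims [0, axis) and [axis, rank) into a 2D shape.
    return [int(np.prod(shape[:axis], dtype=np.int64)),
            int(np.prod(shape[axis:], dtype=np.int64))]

print(get_flatten_shape([2, 3, 4, 5], 2))   # [6, 20]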
Example 7
    def create_reshape_net(self, input_shape, output_shape, ir_version):
        """
            ONNX net                                  IR net

            Input->Reshape->Output   =>    Input->Reshape

        """

        #
        #   Create ONNX model
        #

        import onnx
        from onnx import helper
        from onnx import TensorProto

        input = helper.make_tensor_value_info('input', TensorProto.FLOAT,
                                              input_shape)
        output = helper.make_tensor_value_info('output', TensorProto.FLOAT,
                                               output_shape)

        node_shape_def = onnx.helper.make_node(
            'Constant',
            inputs=[],
            outputs=['shape'],
            value=helper.make_tensor(
                name='const_tensor',
                data_type=TensorProto.INT64,
                dims=[len(output_shape)],
                vals=output_shape,
            ),
        )

        node_reshape_def = onnx.helper.make_node('Reshape',
                                                 inputs=['input', 'shape'],
                                                 outputs=['output'])

        # Create the graph (GraphProto)
        graph_def = helper.make_graph(
            [node_shape_def, node_reshape_def],
            'test_reshape_model',
            [input],
            [output],
        )

        # Create the model (ModelProto)
        onnx_net = helper.make_model(graph_def,
                                     producer_name='test_reshape_model')

        #
        #   Create reference IR net
        #   Please, specify 'type': 'Input' for input node
        #   Moreover, do not forget to validate ALL layer attributes!!!
        #

        ref_net = None

        if check_ir_version(10, None, ir_version):
            nodes_attributes = {
                'input': {
                    'kind': 'op',
                    'type': 'Parameter'
                },
                'input_data': {
                    'shape': input_shape,
                    'kind': 'data'
                },
                'input_data_1': {
                    'shape': [len(output_shape)],
                    'value': output_shape,
                    'kind': 'data'
                },
                'const_1': {
                    'kind': 'op',
                    'type': 'Const'
                },
                'const_data_1': {
                    'shape': [len(output_shape)],
                    'value': None,
                    'kind': 'data'
                },  # 'value': output_shape,
                'reshape': {
                    'kind': 'op',
                    'type': 'Reshape'
                },
                'reshape_data': {
                    'shape': output_shape,
                    'kind': 'data'
                },
                'result': {
                    'kind': 'op',
                    'type': 'Result'
                }
            }

            ref_net = build_graph(nodes_attributes,
                                  [('input', 'input_data'),
                                   ('input_data_1', 'const_1'),
                                   ('const_1', 'const_data_1'),
                                   ('const_data_1', 'reshape'),
                                   ('input_data', 'reshape'),
                                   ('reshape', 'reshape_data'),
                                   ('reshape_data', 'result')])

        return onnx_net, ref_net
Example 8
 def test_error_3(self):
     graph = build_graph(self.nodes, self.edges)
     self.assertRaises(Error, input_user_data_repack, graph, ['Bcb'], None)
Example 9
 def test_output_user_data_repack(self):
     graph = build_graph(self.nodes, self.edges)
     output = output_user_data_repack(graph, ['Cc'])
     self.assertDictEqual(output, {'C': [{'port': None}]})
Example 10
 def test_lstm_nonlinearity_dropout(self):
     graph = build_graph(
         {
             'in': {
                 'kind': 'op',
                 'op': 'Parameter'
             },
             'lstm': {
                 'kind': 'op',
                 'op': 'LstmNonLinearity',
                 'use_dropout': True,
                 'i_weights': np.array([]),
                 'f_weights': np.array([]),
                 'o_weights': np.array([]),
             },
             'out': {
                 'kind': 'op',
                 'op': 'Placeholder'
             }
         }, [('in', 'lstm'), ('lstm', 'out')],
         nodes_with_edges_only=True)
     graph.stage = 'front'
     # split input into (i_part, f_part, c_part, o_part, ct_1)
     ref_graph = build_graph(self.nodes_attributes, [
         ('in', 'split_dropout'),
         ('split_dropout', 'split', {
             'out': 0
         }),
         ('split', 'scale_i_c', {
             'out': 4
         }),
         ('scale_i_c', 'i_plus_c'),
         ('split', 'i_plus_c', {
             'out': 0
         }),
         ('i_plus_c', 'sigmoid_i'),
         ('sigmoid_i', 'scaled_i', {
             'in': 0
         }),
         ('split_dropout', 'scaled_i', {
             'out': 1,
             'in': 1
         }),
         ('split', 'scale_f_c', {
             'out': 4
         }),
         ('scale_f_c', 'f_plus_c'),
         ('split', 'f_plus_c', {
             'out': 1
         }),
         ('f_plus_c', 'sigmoid_f'),
         ('sigmoid_f', 'scaled_f', {
             'in': 0
         }),
         ('split_dropout', 'scaled_f', {
             'out': 2,
             'in': 1
         }),
         ('split', 'tanhcp', {
             'out': 2
         }),
         ('tanhcp', 'i_mul_tanhc'),
         ('scaled_i', 'i_mul_tanhc'),
         ('scaled_f', 'f_mul_c'),
         ('split', 'f_mul_c', {
             'out': 4
         }),
         ('f_mul_c', 'fc_plus_itanhc'),
         ('i_mul_tanhc', 'fc_plus_itanhc'),
         ('split', 'scale_o_c', {
             'out': 4
         }),
         ('scale_o_c', 'o_plus_c'),
         ('split', 'o_plus_c', {
             'out': 3
         }),
         ('o_plus_c', 'sigmoid_o'),
         ('sigmoid_o', 'scaled_o', {
             'in': 0
         }),
         ('split_dropout', 'scaled_o', {
             'out': 3,
             'in': 1
         }),
         ('fc_plus_itanhc', 'tanhc'),
         ('scaled_o', 'o_mul_tanhc'),
         ('tanhc', 'o_mul_tanhc'),
         ('fc_plus_itanhc', 'concat'),
         ('o_mul_tanhc', 'concat'),
         ('lstm', 'out'),
     ],
                             nodes_with_edges_only=True)
     ReplaceLstmNonLinearityPattern().replace_op(graph, Node(graph, 'lstm'))
     (flag, resp) = compare_graphs(graph,
                                   ref_graph,
                                   'out',
                                   check_op_attrs=True)
     self.assertTrue(flag, resp)
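The reference graph spells out the Kaldi-style LSTM nonlinearity with dropout. A hedged NumPy sketch of the computation it encodes (illustration only; the peephole weights w_ic/w_fc/w_oc and the dropout mask parts d_i/d_f/d_o are taken as given):

import numpy as np

def sigmoid(x):
    return 1.0 / (1.0 + np.exp(-x))

def lstm_nonlinearity(i_part, f_part, c_part, o_part, c_prev,
                      w_ic, w_fc, w_oc, d_i, d_f, d_o):
    i = sigmoid(i_part + w_ic * c_prev) * d_i   # scale_i_c, i_plus_c, sigmoid_i, scaled_i
    f = sigmoid(f_part + w_fc * c_prev) * d_f   # scale_f_c, f_plus_c, sigmoid_f, scaled_f
    c = f * c_prev + i * np.tanh(c_part)        # f_mul_c, i_mul_tanhc, fc_plus_itanhc
    o = sigmoid(o_part + w_oc * c_prev) * d_o   # scale_o_c, o_plus_c, sigmoid_o, scaled_o
    m = o * np.tanh(c)                          # tanhc, o_mul_tanhc
    return np.concatenate([c, m], axis=-1)      # concat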
Example 11
 def test_error_2(self):
     graph = build_graph(self.nodes, self.edges)
     self.assertRaises(Error, input_user_data_repack, graph,
                       np.array([1, 227, 227, 3]), None)
Example 12
    def create_swish_net(self, shape, ir_version, use_new_frontend):
        """
            Tensorflow net                 IR net

            Input->Swish       =>       Input->Swish

        """

        #
        #   Create Tensorflow model
        #

        import tensorflow as tf

        tf.compat.v1.reset_default_graph()

        # Create the graph and model
        with tf.compat.v1.Session() as sess:
            tf_x_shape = shape.copy()

            tf_x_shape = permute_nchw_to_nhwc(tf_x_shape, use_new_frontend)
            input = tf.compat.v1.placeholder(tf.float32, tf_x_shape, 'Input')

            tf.nn.swish(input)

            tf.compat.v1.global_variables_initializer()
            tf_net = sess.graph_def

        #
        #   Create reference IR net
        #   Please, specify 'type': 'Input' for input node
        #   Moreover, do not forget to validate ALL layer attributes!!!
        #

        ref_net = None

        if check_ir_version(10, None, ir_version) and not use_new_frontend:
            nodes_attributes = {
                'input': {
                    'kind': 'op',
                    'type': 'Parameter'
                },
                'input_data': {
                    'shape': shape,
                    'kind': 'data'
                },
                'Swish': {
                    'kind': 'op',
                    'type': 'Swish'
                },
                'Swish_data': {
                    'shape': shape,
                    'kind': 'data'
                },
                'result': {
                    'kind': 'op',
                    'type': 'Result'
                }
            }

            ref_net = build_graph(nodes_attributes, [('input', 'input_data'),
                                                     ('input_data', 'Swish'),
                                                     ('Swish', 'Swish_data'),
                                                     ('Swish_data', 'result')])

        return tf_net, ref_net
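For reference, tf.nn.swish computes swish(x) = x * sigmoid(x), which is what the single Swish layer in the reference IR stands for; a quick numerical check:

import numpy as np

x = np.array([-1.0, 0.0, 1.0], dtype=np.float32)
print(x / (1.0 + np.exp(-x)))   # approx. [-0.269  0.     0.731]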
Example 13
    def test(self):
        nodes = {
            **regular_op('input', {'type': 'Parameter'}),
            **regular_op('shape', {
                'type': 'ShapeOf',
                'kind': 'op',
                'op': 'ShapeOf'
            }),
            **regular_op(
                'random_uniform', {
                    'type': 'RandomUniform',
                    'kind': 'op',
                    'op': 'RandomUniform',
                    'name': 'dropout/RU'
                }),
            **regular_op('mul', {
                'type': 'Mul',
                'kind': 'op',
                'op': 'Mul'
            }),
            **regular_op('add', {
                'type': 'Add',
                'kind': 'op',
                'op': 'Add'
            }),
            **regular_op('add2', {
                'type': 'Add',
                'kind': 'op',
                'op': 'Add'
            }),
            **regular_op('floor', {
                'type': 'Floor',
                'kind': 'op',
                'op': 'Floor'
            }),
            'add_const': {
                'kind': 'op',
                'op': 'Const',
                'value': np.array(0.0),
                'data_type': np.float32
            },
            **result('result'),

            # new nodes to be added
            'broadcast_const': {
                'kind': 'op',
                'op': 'Const',
                'value': np.array(0.5),
                'data_type': np.float32
            },
            **regular_op('broadcast', {
                'type': 'Broadcast',
                'kind': 'op',
                'op': 'Broadcast'
            }),
        }
        edges = [('input', 'shape'), ('shape', 'random_uniform'),
                 ('random_uniform', 'mul'), ('mul', 'add'),
                 ('add_const', 'add'), ('add', 'add2'), ('add2', 'floor'),
                 ('floor', 'result')]
        graph = build_graph(nodes, edges, nodes_with_edges_only=True)

        graph.graph['layout'] = 'NCHW'
        graph.stage = 'front'

        DropoutWithRandomUniformReplacer().find_and_replace_pattern(graph)

        edges_ref = [('input', 'shape'), ('broadcast_const', 'broadcast'),
                     ('shape', 'broadcast'), ('broadcast', 'mul'),
                     ('mul', 'add'), ('add_const', 'add'), ('add', 'add2'),
                     ('add2', 'floor'), ('floor', 'result')]
        graph_ref = build_graph(nodes, edges_ref, nodes_with_edges_only=True)

        # check graph structure after the transformation and output name
        (flag, resp) = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)
        self.assertTrue(graph.node[graph.get_nodes_with_attributes(
            op='Broadcast')[0]]['name'] == 'dropout/RU')
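The replacement is safe at inference time because the TF dropout mask is floor(keep_prob + U(0, 1)); with the RandomUniform tensor replaced by a broadcast constant 0.5, the mask is identically 1 for any keep_prob >= 0.5. A small hedged illustration (keep_prob below is an assumed value):

import numpy as np

keep_prob = 0.9
mask_train = np.floor(keep_prob + np.random.uniform(0.0, 1.0, size=4))   # each element 1.0 with prob 0.9, else 0.0
mask_infer = np.floor(keep_prob + np.full(4, 0.5))                       # always [1. 1. 1. 1.]
print(mask_train, mask_infer)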
Example 14
 def test_conversion(self, input_shape, scales, axes):
     input_shape_as_array = int64_array(input_shape)
     scales_as_array = float32_array(scales)
     graph = build_graph(
         graph_node_attrs, graph_edges, {
             'placeholder_data': {
                 'shape': input_shape_as_array
             },
             'scales': {
                 'value': scales_as_array,
                 'shape': scales_as_array.shape
             },
             'scales_data': {
                 'value': scales_as_array,
                 'shape': scales_as_array.shape
             },
             'upsample_data': {
                 'shape':
                 ((input_shape_as_array + 1.e-5) * scales_as_array).astype(
                     np.int64)
             }
         })
     graph.graph['layout'] = 'NCHW'
     ref_graph = build_graph(
         new_ref_graph_node_attr, new_ref_graph_edges, {
             'placeholder_data': {
                 'shape': int64_array(input_shape)
             },
             'ss_begin': {
                 'value': int64_array([axes[0]])
             },
             'ss_end': {
                 'value': int64_array([axes[-1] + 1])
             },
             'ss_begin_data': {
                 'value': int64_array([axes[0]])
             },
             'ss_end_data': {
                 'value': int64_array([axes[-1] + 1])
             },
             'factor': {
                 'value': scales_as_array[2:],
                 'shape': scales_as_array[2:].shape
             },
             'factor_data': {
                 'value': scales_as_array[2:],
                 'shape': scales_as_array[2:].shape
             },
             'axes_const': {
                 'value': int64_array(axes),
                 'shape': int64_array(axes).shape
             },
             'interpolate_data': {
                 'shape':
                 (input_shape_as_array * scales_as_array + 1e-5).astype(
                     np.int64)
             },
         })
     UpsampleToResample().find_and_replace_pattern(graph)
     (flag, resp) = compare_graphs(graph, ref_graph, 'output')
     self.assertTrue(flag, resp)
Example 15
    def create_fake_quantize_net(self, il, ih, num_bits, narrow_range,
                                 nudged_il, nudged_ih, expected_step,
                                 ir_version, use_new_frontend):
        # original tf model
        import tensorflow as tf
        tf.compat.v1.reset_default_graph()
        with tf.compat.v1.Session() as sess:
            data = tf.compat.v1.placeholder(tf.float32, [11], 'parameter')
            input_min = tf.constant(il, name='input_min')
            input_max = tf.constant(ih, name='input_max')
            tf.quantization.fake_quant_with_min_max_vars(
                data, input_min, input_max, num_bits, narrow_range, 'fq')

            tf.compat.v1.global_variables_initializer()
            tf_net = sess.graph_def

        # reference graph to compare with IR
        ref_net = None
        if check_ir_version(10, None, ir_version) and not use_new_frontend:
            levels = 2**num_bits - int(narrow_range)

            # data (shape, value) -> const (shape, value) -> data (shape, no value)
            const_for_layer_tests = lambda name, value: {
                **{
                    name + '_dd': {
                        'kind': 'data',
                        'value': value,
                        'shape': value.shape
                    }
                },
                **{
                    name: {
                        'kind': 'op',
                        'type': 'Const'
                    }
                },
                **shaped_data(name + '_d', int64_array(value.shape))
            }

            connect_const_for_layer_tests = lambda first_tensor_name, second_tensor_name: [
                *connect_front(first_tensor_name + '_dd', first_tensor_name),
                *connect(first_tensor_name, second_tensor_name)
            ]

            nodes = {
                **regular_op_with_shaped_data('parameter', [11], {
                                                  'type': 'Parameter'
                                              }),
                **const_for_layer_tests(
                    'il', np.array([nudged_il], dtype=np.float32)),
                **const_for_layer_tests(
                    'ih', np.array([nudged_ih], dtype=np.float32)),
                **const_for_layer_tests(
                    'ol', np.array([nudged_il], dtype=np.float32)),
                **const_for_layer_tests(
                    'oh', np.array([nudged_ih], dtype=np.float32)),
                **regular_op_with_shaped_data('fq', [11], {
                                                  'type': 'FakeQuantize',
                                                  'levels': levels
                                              }),
                **regular_op('result', {'type': 'Result'}),
            }
            edges = [
                *connect('parameter', '0:fq'),
                *connect_const_for_layer_tests('il', '1:fq'),
                *connect_const_for_layer_tests('ih', '2:fq'),
                *connect_const_for_layer_tests('ol', '3:fq'),
                *connect_const_for_layer_tests('oh', '4:fq'),
                *connect('fq', 'result'),
            ]
            ref_net = build_graph(nodes, edges)

        return tf_net, ref_net
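A hedged arithmetic check of the attributes the reference graph expects: TF's fake_quant_with_min_max_vars uses 2**num_bits quantization levels (one fewer when narrow_range is set), and the quantization step is the nudged range divided by (levels - 1). The concrete numbers below are illustrative:

num_bits, narrow_range = 8, False
levels = 2 ** num_bits - int(narrow_range)              # 256, matching the computation above
nudged_il, nudged_ih = -0.5, 0.5                        # assumed nudged borders
expected_step = (nudged_ih - nudged_il) / (levels - 1)  # 1 / 255 ≈ 0.0039216
print(levels, expected_step)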
Example 16
 def test_output_user_data_repack_ports(self):
     graph = build_graph(self.nodes, self.edges)
     output = output_user_data_repack(graph, ['Cc:1', '0:Cc'])
     self.assertDictEqual(output, {'C': [{'out': 1}, {'in': 0}]})
Example 17
    def create_net_const(self, shape, precision, ir_version):
        """
            ONNX net                                       IR net

            Input->Concat(+Softplus const)->Output   =>    Input->Concat(+const)

        """

        #
        #   Create ONNX model
        #

        import onnx
        from onnx import helper
        from onnx import TensorProto
        import numpy as np

        concat_axis = 0
        output_shape = shape.copy()
        output_shape[concat_axis] *= 2

        input = helper.make_tensor_value_info('input', TensorProto.FLOAT,
                                              shape)
        output = helper.make_tensor_value_info('output', TensorProto.FLOAT,
                                               output_shape)

        constant = np.random.rand(*shape).astype(np.float32) * 255 + 0.5

        node_const_def = onnx.helper.make_node(
            'Constant',
            inputs=[],
            outputs=['const1'],
            value=helper.make_tensor(
                name='const_tensor',
                data_type=TensorProto.FLOAT,
                dims=constant.shape,
                vals=constant.flatten(),
            ),
        )

        node_def = onnx.helper.make_node(
            'Softplus',
            inputs=['const1'],
            outputs=['Softplus1'],
        )

        node_concat_def = onnx.helper.make_node('Concat',
                                                inputs=['input', 'Softplus1'],
                                                outputs=['output'],
                                                axis=concat_axis)

        # Create the graph (GraphProto)
        graph_def = helper.make_graph(
            [node_const_def, node_def, node_concat_def],
            'test_model',
            [input],
            [output],
        )

        # Create the model (ModelProto)
        onnx_net = helper.make_model(graph_def, producer_name='test_model')

        #
        #   Create reference IR net
        #
        constant = np.log(np.exp(constant) + 1.0)
        if precision == 'FP16':
            constant = constant.astype(np.float16)

        ref_net = None

        if check_ir_version(10, None, ir_version):
            nodes_attributes = {
                'input': {
                    'kind': 'op',
                    'type': 'Parameter'
                },
                'input_data': {
                    'shape': shape,
                    'kind': 'data'
                },
                'input_const_data': {
                    'kind': 'data',
                    'value': constant.flatten()
                },
                'const': {
                    'kind': 'op',
                    'type': 'Const'
                },
                'const_data': {
                    'shape': shape,
                    'kind': 'data'
                },
                'concat': {
                    'kind': 'op',
                    'type': 'Concat',
                    'axis': concat_axis
                },
                'concat_data': {
                    'shape': output_shape,
                    'kind': 'data'
                },
                'result': {
                    'kind': 'op',
                    'type': 'Result'
                }
            }

            ref_net = build_graph(nodes_attributes,
                                  [('input', 'input_data'),
                                   ('input_const_data', 'const'),
                                   ('const', 'const_data'),
                                   ('input_data', 'concat'),
                                   ('const_data', 'concat'),
                                   ('concat', 'concat_data'),
                                   ('concat_data', 'result')])

        return onnx_net, ref_net
Example 18
 def test_output_user_data_repack_none(self):
     graph = build_graph(self.nodes, self.edges)
     output = output_user_data_repack(graph, None)
     self.assertEqual(output, None)
Example 19
    def test_add_output_1(self):
        sub_graph_2 = build_graph(nodes_attrs=sub_graph_2_nodes,
                                  edges=[
                                      *connect('cond_2_int', 'cond_2_int_out'),
                                      *connect('in_2_int', 'OUT_2'),
                                      *connect('ones', 'OUT_2'),
                                      *connect('OUT_2', 'OUT_2_out'),
                                      *connect('in_2_int', 'in_2_int_out')
                                  ],
                                  nodes_with_edges_only=True)

        sub_graph_1 = build_graph(nodes_attrs=sub_graph_1_nodes,
                                  edges=[
                                      *connect('M_2', '0:Loop_2'),
                                      *connect('cond_2', '1:Loop_2'),
                                      *connect('IN_2', '2:Loop_2'),
                                      *connect('Loop_2:0', 'Loop_2_out'),
                                      *connect('in_1_int', 'in_1_int_out'),
                                      *connect('cond_1_int', 'cond_1_int_out')
                                  ],
                                  nodes_with_edges_only=True)
        loop_node_1 = Node(sub_graph_1, 'Loop_2')
        loop_node_1.body = sub_graph_2

        main_graph = build_graph(nodes_attrs=main_graph_nodes,
                                 edges=[
                                     *connect('M', '0:Loop'),
                                     *connect('cond', '1:Loop'),
                                     *connect('IN_2', '2:Loop'),
                                     *connect('IN_1', "3:Loop"),
                                     *connect('Loop:0', 'OUT_1')
                                 ],
                                 nodes_with_edges_only=True)
        loop_node = Node(main_graph, 'Loop')
        loop_node.body = sub_graph_1
        main_graph.graph['additional_outputs'] = ['Loop', 'Loop_2']
        loop_node_output_port_map_len = len(loop_node.output_port_map)
        loop_node_out_ports_len = len(loop_node.out_ports())
        loop_2_out_ports_len = len(loop_node_1.out_ports())
        max_layer_id = 5

        AddOutputRecursive().find_and_replace_pattern(main_graph)

        loop_node = Node(main_graph, 'Loop')
        self.assertEqual(len(loop_node.output_port_map),
                         loop_node_output_port_map_len + 1)
        self.assertEqual(len(loop_node.out_ports()),
                         loop_node_out_ports_len + 1)
        self.assertEqual(
            loop_node.out_port(1).get_destination().node.op, 'Result')
        self.assertTrue(
            np.all(
                loop_node.out_port(1).data.get_shape() == int64_array(
                    [5, 10, 4, 64, 54])))
        last_node = Node(sub_graph_1, 'Loop_2')
        self.assertEqual(len(last_node.out_ports()), loop_2_out_ports_len)
        unsq_node = last_node.out_port(0).get_destinations()[1].node
        self.assertEqual(unsq_node.op, 'Unsqueeze')
        self.assertEqual(
            unsq_node.out_port(0).get_destination().node.op, 'Result')
        self.assertEqual(
            unsq_node.out_port(0).get_destination().node.internal_layer_id,
            max_layer_id + 3)
        self.assertTrue(
            np.all(
                unsq_node.out_port(0).data.get_shape() == int64_array(
                    [1, 10, 4, 64, 54])))
Example 20
 def test_one_input_no_shape(self):
     shape = None
     inputs = {'conv_1': [{'shape': shape}]}
     nodes = {
         'old_input': {
             'type': 'Parameter',
             'kind': 'op',
             'op': 'Parameter'
         },
         'old_input_data': {
             'kind': 'data',
             'value': None,
             'shape': np.array([-1, 224, 224, 3])
         },
         'conv_1': {
             'type': 'Convolution',
             'kind': 'op',
             'op': 'NotPlaceholder'
         },
         'conv_1_data': {
             'kind': 'data',
             'value': True,
             'shape': np.array([-1, 224, 224, 3])
         },
         'relu_1': {
             'type': 'ReLU',
             'kind': 'op',
             'op': 'NotPlaceholder'
         },
         'relu_1_data': {
             'kind': 'data',
             'value': None,
             'shape': np.array([-1, 112, 112, 64])
         },
         'output': {
             'type': 'SoftMax',
             'kind': 'op',
             'op': 'NotPlaceholder'
         },
         'output_data': {
             'name': 'output_data',
             'kind': 'data',
             'shape': np.array([-1, 112, 112, 64])
         },
         'op_output': {
             'kind': 'op',
             'op': 'Result'
         }
     }
     edges = [('old_input', 'old_input_data'), ('old_input_data', 'conv_1'),
              ('conv_1', 'conv_1_data'), ('conv_1_data', 'relu_1'),
              ('relu_1', 'relu_1_data'), ('relu_1_data', 'output'),
              ('output', 'output_data'), ('output_data', 'op_output')]
     graph = build_graph(nodes, edges)
     graph.stage = 'middle'
     add_input_ops(graph=graph,
                   user_defined_inputs=inputs,
                   before_infer=False)
     new_input = list(graph.in_edges(list(
         graph.in_edges('conv_1'))[0][0]))[0][0]
     new_input_data = list(graph.in_edges('conv_1'))[0][0]
     self.assertFalse(graph.node['old_input']['is_input'])
     self.assertTrue(graph.node[new_input]['is_input'])
     self.assertTrue((new_input_data, 'conv_1') in graph.edges())
     self.assertTrue(('old_input_data', 'conv_1') not in graph.edges())
     self.assertIsNotNone(graph.node[new_input_data]['shape'])
Example 21
    def create_reshape_net_const(self, input_shape, output_shape, ir_version):
        """
            ONNX net                                         IR net

            Input->Concat(+reshaped const)->Output   =>    Input->Concat(+const)

        """

        #
        #   Create ONNX model
        #

        import onnx
        from onnx import helper
        from onnx import TensorProto
        import numpy as np

        concat_axis = 0
        concat_output_shape = output_shape.copy()
        concat_output_shape[concat_axis] *= 2

        input = helper.make_tensor_value_info('input', TensorProto.FLOAT,
                                              output_shape)
        output = helper.make_tensor_value_info('output', TensorProto.FLOAT,
                                               concat_output_shape)

        const_number = np.prod(input_shape)
        constant = np.random.randint(-127, 127, const_number).astype(np.float32)

        node_const_def = onnx.helper.make_node(
            'Constant',
            inputs=[],
            outputs=['const1'],
            value=helper.make_tensor(
                name='const_tensor',
                data_type=TensorProto.FLOAT,
                dims=input_shape,
                vals=constant,
            ),
        )

        node_shape_def = onnx.helper.make_node(
            'Constant',
            inputs=[],
            outputs=['shape'],
            value=helper.make_tensor(
                name='const_tensor',
                data_type=TensorProto.INT64,
                dims=[len(output_shape)],
                vals=output_shape,
            ),
        )

        node_reshape_def = onnx.helper.make_node('Reshape',
                                                 inputs=['const1', 'shape'],
                                                 outputs=['reshape1'])

        node_concat_def = onnx.helper.make_node('Concat',
                                                inputs=['input', 'reshape1'],
                                                outputs=['output'],
                                                axis=concat_axis)

        # Create the graph (GraphProto)
        graph_def = helper.make_graph(
            [
                node_const_def, node_shape_def, node_reshape_def,
                node_concat_def
            ],
            'test_reshape_model',
            [input],
            [output],
        )

        # Create the model (ModelProto)
        onnx_net = helper.make_model(graph_def,
                                     producer_name='test_reshape_model')

        #
        #   Create reference IR net
        #   Please, specify 'type': 'Input' for input node
        #   Moreover, do not forget to validate ALL layer attributes!!!
        #

        ref_net = None

        if check_ir_version(10, None, ir_version):
            nodes_attributes = {
                'input': {
                    'kind': 'op',
                    'type': 'Parameter'
                },
                'input_data': {
                    'shape': output_shape,
                    'kind': 'data'
                },
                'input_const_data': {
                    'kind': 'data',
                    'value': constant
                },
                'const': {
                    'kind': 'op',
                    'type': 'Const'
                },
                'const_data': {
                    'shape': output_shape,
                    'value': None,
                    'kind': 'data'
                },
                'concat': {
                    'kind': 'op',
                    'type': 'Concat',
                    'axis': concat_axis
                },
                'concat_data': {
                    'shape': concat_output_shape,
                    'kind': 'data'
                },
                'result': {
                    'kind': 'op',
                    'type': 'Result'
                }
            }

            ref_net = build_graph(nodes_attributes, [
                ('input', 'input_data'),
                ('input_const_data', 'const'),
                ('const', 'const_data'),
                ('input_data', 'concat'),
                ('const_data', 'concat'),
                ('concat', 'concat_data'),
                ('concat_data', 'result'),
            ])

        return onnx_net, ref_net
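
The reference graph above encodes the expectation that the Reshape of 'const1' is folded away at conversion time. A minimal numpy sketch of that folding (hypothetical values, not the test's randomly generated constant):

    import numpy as np

    constant = np.arange(6, dtype=np.float32)  # hypothetical flat constant
    output_shape = [2, 3]                      # hypothetical target shape
    folded = constant.reshape(output_shape)    # what the folded IR Const holds
    assert folded.shape == tuple(output_shape)
    assert np.array_equal(folded.ravel(), constant)  # values unchanged, only the shape differs
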
Example 22
    def test_two_inputs_two_shapes_positive_1(self):
        shape_1 = [1, 2, 3, 4]
        shape_2 = [4, 3, 2, 1]
        inputs = {
            'node_1': [{
                'shape': shape_1
            }],
            'node_4': [{
                'shape': shape_2
            }]
        }
        nodes = {
            'input_1': {
                'type': 'Identity',
                'kind': 'op',
                'op': 'Parameter'
            },
            'input_2': {
                'type': 'Identity',
                'kind': 'op',
                'op': 'Parameter'
            },
            'node_1': {
                'type': 'Identity',
                'kind': 'op',
                'op': 'NotPlaceholder'
            },
            'node_2': {
                'type': 'Identity',
                'kind': 'op',
                'op': 'NotPlaceholder'
            },
            'node_3': {
                'type': 'Identity',
                'kind': 'op',
                'op': 'NotPlaceholder'
            },
            'node_4': {
                'type': 'Identity',
                'kind': 'op',
                'op': 'NotPlaceholder'
            },
            'output': {
                'kind': 'op',
                'op': 'Result'
            }
        }
        edges = [('input_1', 'node_1'), ('node_1', 'node_2'),
                 ('node_3', 'output'), ('input_2', 'node_4'),
                 ('node_4', 'output')]
        graph = build_graph(nodes, edges)
        add_input_ops(graph=graph,
                      user_defined_inputs=inputs,
                      before_infer=True)
        new_input_1 = list(graph.in_edges('node_1'))[0][0]
        new_input_2 = list(graph.in_edges('node_4'))[0][0]
        self.assertFalse(graph.node['input_1']['is_input'])
        self.assertTrue(graph.node[new_input_1]['is_input'])
        self.assertTrue(graph.node[new_input_2]['is_input'])
        self.assertTrue((new_input_1, 'node_1') in graph.edges())
        self.assertTrue((new_input_2, 'node_4') in graph.edges())
        self.assertListEqual(shape_1, graph.node[new_input_1]['shape'])
        self.assertListEqual(shape_2, graph.node[new_input_2]['shape'])
Example 23
    def create_net_const(self, shape, ir_version):
        """
            ONNX net                                  IR net

            Input->Concat(+not const)->Output   =>    Input->Concat(+const)

        """

        #
        #   Create ONNX model
        #

        import onnx
        from onnx import helper
        from onnx import TensorProto

        concat_axis = 0
        output_shape = shape.copy()
        output_shape[concat_axis] *= 2

        input = helper.make_tensor_value_info('input', TensorProto.BOOL, shape)
        output = helper.make_tensor_value_info('output', TensorProto.BOOL,
                                               output_shape)

        constant = np.random.randint(0, 2, shape).astype(bool)  # np.bool was removed from NumPy

        node_const_def = onnx.helper.make_node(
            'Constant',
            inputs=[],
            outputs=['const1'],
            value=helper.make_tensor(
                name='const_tensor',
                data_type=TensorProto.BOOL,
                dims=constant.shape,
                vals=constant.flatten(),
            ),
        )

        node_def = onnx.helper.make_node('Not',
                                         inputs=['const1'],
                                         outputs=['not'])

        node_concat_def = onnx.helper.make_node('Concat',
                                                inputs=['input', 'not'],
                                                outputs=['output'],
                                                axis=concat_axis)

        # Create the graph (GraphProto)
        graph_def = helper.make_graph(
            [node_const_def, node_def, node_concat_def],
            'test_model',
            [input],
            [output],
        )

        # Create the model (ModelProto)
        onnx_net = helper.make_model(graph_def, producer_name='test_model')

        #
        #   Create reference IR net
        #
        constant = np.logical_not(constant)

        ref_net = None
        if check_ir_version(10, None, ir_version):
            nodes_attributes = {
                'input': {
                    'kind': 'op',
                    'type': 'Parameter'
                },
                'input_data': {
                    'shape': shape,
                    'kind': 'data'
                },
                'input_const_data': {
                    'kind': 'data',
                    'value': constant.flatten()
                },
                'const': {
                    'kind': 'op',
                    'type': 'Const'
                },
                'const_data': {
                    'shape': shape,
                    'kind': 'data'
                },
                'concat': {
                    'kind': 'op',
                    'type': 'Concat',
                    'axis': concat_axis
                },
                'concat_data': {
                    'shape': output_shape,
                    'kind': 'data'
                },
                'result': {
                    'kind': 'op',
                    'type': 'Result'
                }
            }
            ref_net = build_graph(nodes_attributes,
                                  [('input', 'input_data'),
                                   ('input_const_data', 'const'),
                                   ('const', 'const_data'),
                                   ('input_data', 'concat'),
                                   ('const_data', 'concat'),
                                   ('concat', 'concat_data'),
                                   ('concat_data', 'result')])

        return onnx_net, ref_net
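
Note that onnx.helper.make_tensor expects a flat sequence of values together with explicit dims, which is why constant.flatten() is passed above. A minimal sketch with hypothetical values:

    from onnx import helper, TensorProto

    t = helper.make_tensor(name='t', data_type=TensorProto.BOOL,
                           dims=[2, 2], vals=[True, False, False, True])
    assert list(t.dims) == [2, 2]
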
Example 24
    def test_add_input_with_output_port_before_infer(self):
        shape = np.array([1, 2, 3, 4])
        inputs = {'conv_1': [{'shape': shape, 'out': 0}]}
        nodes = {
            'old_input': {
                'type': 'Identity',
                'kind': 'op',
                'op': 'Parameter'
            },
            'conv_1': {
                'type': 'Convolution',
                'kind': 'op',
                'op': 'NotPlaceholder'
            },
            'conv_2': {
                'type': 'Convolution',
                'kind': 'op',
                'op': 'NotPlaceholder'
            },
            'relu_1': {
                'type': 'ReLU',
                'kind': 'op',
                'op': 'NotPlaceholder'
            },
            'output': {
                'type': 'SoftMax',
                'kind': 'op',
                'op': 'NotPlaceholder'
            }
        }
        edges = [('old_input', 'conv_1'), ('conv_1', 'relu_1'),
                 ('conv_2', 'relu_1'), ('relu_1', 'output')]
        graph = build_graph(nodes, edges)
        add_input_ops(graph=graph,
                      user_defined_inputs=inputs,
                      before_infer=True)

        graph_ref = build_graph(
            nodes_attrs={
                'new_input': {
                    'kind': 'op',
                    'op': 'Parameter',
                    'shape': shape
                },
                **nodes
            },
            edges=[
                ('new_input', 'relu_1'),
                ('relu_1', 'output'),
                ('conv_2', 'relu_1'),
                ('old_input', 'conv_1'),
            ],
        )
        # Check that the new input is added correctly (to the correct ports)
        (flag, resp) = compare_graphs(graph, graph_ref, last_node='output')
        self.assertTrue(flag, resp)

        # Check that the rest of the graph is not damaged
        (flag, resp) = compare_graphs(graph, graph_ref, last_node='conv_1')
        self.assertTrue(flag, resp)

        # Checks for new input and edges
        self.assertTrue('conv_1/placeholder_out_port_0' in graph.nodes())
        new_input = 'conv_1/placeholder_out_port_0'
        self.assertTrue(graph.node[new_input]['is_input'])
        self.assertTrue((new_input, 'relu_1') in graph.edges())
        self.assertTrue(('old_input', 'relu_1') not in graph.edges())
    def test_useless_crops_without_concat(self):
        graph = build_graph(
            {
                'placeholder_in': {
                    'kind': 'op',
                    'op': 'Parameter'
                },
                'in_node': {
                    'kind': 'data',
                    'shape': [1, 130]
                },
                'crop1': {
                    'kind': 'op',
                    'op': 'Crop',
                    'offset': 0,
                    'dim': 26,
                    'axis': -1
                },
                'crop_data_1': {
                    'kind': 'data',
                    'shape': [1, 26]
                },
                'crop2': {
                    'kind': 'op',
                    'op': 'Crop',
                    'offset': 26,
                    'dim': 26,
                    'axis': -1
                },
                'crop_data_2': {
                    'kind': 'data',
                    'shape': [1, 26]
                },
                'crop3': {
                    'kind': 'op',
                    'op': 'Crop',
                    'offset': 52,
                    'dim': 26,
                    'axis': -1
                },
                'crop_data_3': {
                    'kind': 'data',
                    'shape': [1, 26]
                },
                'crop4': {
                    'kind': 'op',
                    'op': 'Crop',
                    'offset': 78,
                    'dim': 26,
                    'axis': -1
                },
                'crop_data_4': {
                    'kind': 'data',
                    'shape': [1, 26]
                },
                'crop5': {
                    'kind': 'op',
                    'op': 'Crop',
                    'offset': 104,
                    'dim': 26,
                    'axis': -1
                },
                'crop_data_5': {
                    'kind': 'data',
                    'shape': [1, 26]
                },
                'placeholder_concat': {
                    'kind': 'op',
                    'op': None
                },
                'placeholder_concat_data': {
                    'kind': 'data',
                    'shape': [1, 100]
                },
                'concat': {
                    'kind': 'op',
                    'op': 'Concat'
                },
                'concat_data': {
                    'kind': 'data',
                    'shape': [1, 230]
                },
                'placeholder': {
                    'kind': 'op',
                    'op': None
                },
            }, [('placeholder_in', 'in_node'), ('in_node', 'crop1'),
                ('crop1', 'crop_data_1'), ('in_node', 'crop2'),
                ('crop2', 'crop_data_2'), ('in_node', 'crop3'),
                ('crop3', 'crop_data_3'), ('in_node', 'crop4'),
                ('crop4', 'crop_data_4'), ('in_node', 'crop5'),
                ('crop5', 'crop_data_5'),
                ('placeholder_concat', 'placeholder_concat_data'),
                ('crop_data_1', 'concat', {
                    'in': 0
                }), ('crop_data_2', 'concat', {
                    'in': 1
                }), ('crop_data_3', 'concat', {
                    'in': 2
                }), ('crop_data_4', 'concat', {
                    'in': 3
                }), ('crop_data_5', 'concat', {
                    'in': 4
                }), ('placeholder_concat_data', 'concat', {
                    'in': 5
                }), ('concat', 'concat_data'), ('concat_data', 'placeholder')])
        RemoveUselessCropsPattern().find_and_replace_pattern(graph)
        ref_graph = build_graph(
            {
                'placeholder_in': {
                    'kind': 'op',
                    'op': 'Parameter'
                },
                'in_node': {
                    'kind': 'data',
                    'shape': [1, 130]
                },
                'crop1': {
                    'kind': 'op',
                    'op': 'Crop',
                    'offset': 0,
                    'dim': 26,
                    'axis': -1
                },
                'crop_data_1': {
                    'kind': 'data',
                    'shape': [1, 26]
                },
                'crop2': {
                    'kind': 'op',
                    'op': 'Crop',
                    'offset': 26,
                    'dim': 26,
                    'axis': -1
                },
                'crop_data_2': {
                    'kind': 'data',
                    'shape': [1, 26]
                },
                'crop3': {
                    'kind': 'op',
                    'op': 'Crop',
                    'offset': 52,
                    'dim': 26,
                    'axis': -1
                },
                'crop_data_3': {
                    'kind': 'data',
                    'shape': [1, 26]
                },
                'crop4': {
                    'kind': 'op',
                    'op': 'Crop',
                    'offset': 78,
                    'dim': 26,
                    'axis': -1
                },
                'crop_data_4': {
                    'kind': 'data',
                    'shape': [1, 26]
                },
                'crop5': {
                    'kind': 'op',
                    'op': 'Crop',
                    'offset': 104,
                    'dim': 26,
                    'axis': -1
                },
                'crop_data_5': {
                    'kind': 'data',
                    'shape': [1, 26]
                },
                'placeholder_concat': {
                    'kind': 'op',
                    'op': None
                },
                'placeholder_concat_data': {
                    'kind': 'data',
                    'shape': [1, 100]
                },
                'concat': {
                    'kind': 'op',
                    'op': 'Concat'
                },
                'concat_data': {
                    'kind': 'data',
                    'shape': [1, 230]
                },
                'placeholder': {
                    'kind': 'op',
                    'op': 'Parameter'
                },
            }, [('placeholder_in', 'in_node'), ('in_node', 'crop1'),
                ('crop1', 'crop_data_1'), ('in_node', 'crop2'),
                ('crop2', 'crop_data_2'), ('in_node', 'crop3'),
                ('crop3', 'crop_data_3'), ('in_node', 'crop4'),
                ('crop4', 'crop_data_4'), ('in_node', 'crop5'),
                ('crop5', 'crop_data_5'),
                ('placeholder_concat', 'placeholder_concat_data'),
                ('in_node', 'concat', {
                    'in': 4
                }), ('placeholder_concat_data', 'concat', {
                    'in': 5
                }), ('concat', 'concat_data'), ('concat_data', 'placeholder')])

        (flag, resp) = compare_graphs(graph, ref_graph, 'placeholder')
        self.assertTrue(flag, resp)
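
A minimal numpy sketch (not part of the test) of why these crops are useless: the five 26-element slices are contiguous and cover the whole [1, 130] input, so concatenating them along the last axis reproduces the input, and the pattern can connect 'in_node' to the Concat directly:

    import numpy as np

    x = np.arange(130).reshape(1, 130)
    crops = [x[:, off:off + 26] for off in (0, 26, 52, 78, 104)]
    assert np.array_equal(np.concatenate(crops, axis=-1), x)
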
Example 26
    def test_add_input_with_output_port_after_infer(self):
        shape = np.array([1, 2, 3, 4])
        inputs = {'conv_1': [{'shape': shape, 'out': 0}]}
        nodes = {
            'old_input': {
                'type': 'Parameter',
                'kind': 'op',
                'op': 'Parameter'
            },
            'inp_data': {
                'kind': 'data',
                'shape': shape + 1
            },
            'conv_1': {
                'type': 'Convolution',
                'kind': 'op',
                'op': 'NotPlaceholder'
            },
            'conv_data': {
                'kind': 'data',
                'shape': shape,
                'value': None,
                'data_attr': 'data_attr_value'
            },
            'relu_1': {
                'type': 'ReLU',
                'kind': 'op',
                'op': 'NotPlaceholder'
            },
        }
        edges = [
            ('old_input', 'inp_data'),
            ('inp_data', 'conv_1'),
            ('conv_1', 'conv_data'),
            ('conv_data', 'relu_1', {
                'edge_attr': 'edge_value'
            }),
        ]
        graph = build_graph(nodes, edges)
        graph.stage = 'middle'
        add_input_ops(graph=graph,
                      user_defined_inputs=inputs,
                      before_infer=False)

        graph_ref = build_graph(
            nodes_attrs={
                'new_input': {
                    'kind': 'op',
                    'op': 'Parameter',
                    'shape': shape
                },
                **nodes
            },
            edges=[
                ('old_input', 'inp_data'),
                ('inp_data', 'conv_1'),
                ('new_input', 'conv_data'),
                ('conv_data', 'relu_1', {
                    'edge_attr': 'edge_value'
                }),
            ],
        )
        # Check that the new input is added correctly (to the correct ports)
        (flag, resp) = compare_graphs(graph, graph_ref, last_node='relu_1')
        self.assertTrue(flag, resp)

        # Check that the rest of the graph is not damaged
        (flag, resp) = compare_graphs(graph, graph_ref, last_node='conv_1')
        self.assertTrue(flag, resp)

        # Checks for new input and edges
        self.assertTrue('conv_1/placeholder_out_port_0' in graph.nodes())
        new_input = 'conv_1/placeholder_out_port_0'

        self.assertTrue(graph.node[new_input]['is_input'])

        self.assertTrue(
            Node(graph, 'relu_1').in_node(0)['data_attr'] == 'data_attr_value')
        self.assertTrue(
            Node(graph, 'relu_1').in_edge(0)['edge_attr'] == 'edge_value')
Example 27
    def test5_constant(self):
        #        ,-(new_shape)-->consumer3                           ,-->consumer3
        #   data---(new_shape)-->consumer1      =>    data-->reshape---->consumer1
        #        `-(new_shape)-->consumer2                           `-->consumer2
        #
        graph = build_graph(nodes_attributes, [
            ('placeholder_1', 'placeholder_1_data'),
            ('placeholder_1_data', 'eltwise_1'),
            ('placeholder_1_data', 'eltwise_2'),
            ('placeholder_1_data', 'eltwise_3'),
            ('eltwise_1', 'eltwise_1_data'),
            ('eltwise_2', 'eltwise_2_data'),
            ('eltwise_3', 'eltwise_3_data'),
            ('eltwise_1_data', 'concat'),
            ('eltwise_2_data', 'concat'),
            ('eltwise_3_data', 'concat'),
        ], {
            'placeholder_1_data': {
                'shape': int64_array([1, 3]),
                'value': np.ones([1, 3])
            },
            'eltwise_1_data': {
                'shape': int64_array([1, 1, 3])
            },
            'eltwise_2_data': {
                'shape': int64_array([1, 1, 3])
            },
            'eltwise_3_data': {
                'shape': int64_array([1, 1, 3])
            },
        },
                            nodes_with_edges_only=True)

        graph_ref = build_graph(nodes_attributes, [
            ('placeholder_1', 'placeholder_1_data'),
            ('placeholder_1_data', 'reshape_1'),
            ('reshape_1_const', 'reshape_1_const_data'),
            ('reshape_1_const_data', 'reshape_1'),
            ('reshape_1', 'reshape_1_data'),
            ('reshape_1_data', 'eltwise_1'),
            ('reshape_1_data', 'eltwise_2'),
            ('reshape_1_data', 'eltwise_3'),
            ('eltwise_1', 'eltwise_1_data'),
            ('eltwise_2', 'eltwise_2_data'),
            ('eltwise_3', 'eltwise_3_data'),
            ('eltwise_1_data', 'concat'),
            ('eltwise_2_data', 'concat'),
            ('eltwise_3_data', 'concat'),
        ], {
            'placeholder_1_data': {
                'shape': int64_array([1, 3]),
                'value': np.ones([1, 3])
            },
            'reshape_1_const': {
                'value': int64_array([0]),
                'shape': int64_array([1])
            },
            'reshape_1_const_data': {
                'value': int64_array([0]),
                'shape': int64_array([1])
            },
            'reshape_1_data': {
                'shape': int64_array([1, 1, 3])
            },
        },
                                nodes_with_edges_only=True)

        normalize_eltwise_inputs(graph)

        (flag, resp) = compare_graphs(graph,
                                      graph_ref,
                                      'concat',
                                      check_op_attrs=True)
        self.assertTrue(flag, resp)
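
A minimal numpy sketch of the rank alignment the reference graph above describes; the assumption here is that the inserted node expands the [1, 3] constant data with a leading unit axis so that it matches the [1, 1, 3] eltwise inputs:

    import numpy as np

    data = np.ones([1, 3])
    expanded = np.expand_dims(data, axis=0)  # shape becomes (1, 1, 3)
    assert expanded.shape == (1, 1, 3)
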
Example 28
    def test_input_user_data_repack_none(self):
        graph = build_graph(self.nodes, self.edges)
        input, freeze_placeholder = input_user_data_repack(graph, None, None)
        self.assertEqual(input, None)
        self.assertEqual(freeze_placeholder, None)
Example 29
    def test_bn_decomposition_1(self):
        graph = build_graph(nodes_attributes,
                            [('placeholder_1', 'placeholder_1_data'),
                             ('placeholder_1_data', 'bn_op'),
                             ('const_bn_const', 'bn_const'),
                             ('const_bn_beta', 'bn_beta'),
                             ('const_bn_mean', 'bn_mean'),
                             ('const_bn_var', 'bn_var'),
                             ('bn_const', 'bn_op'),
                             ('bn_beta', 'bn_op'),
                             ('bn_mean', 'bn_op'),
                             ('bn_var', 'bn_op'),
                             ('bn_op', 'bn_data'),
                             ('concat', 'concat_data'),
                             ('bn_data', 'concat'),
                             ('concat_data', 'op_output')
                             ],
                            {'placeholder_1_data': {'shape': np.array([1, 227, 227, 3])},
                             'bn_op': {'eps': 1.2},
                             'bn_const': {'shape': np.array([3]), 'value': np.array([1, 2, 3])},
                             'bn_beta': {'shape': np.array([3]), 'value': np.array([1, 2, 3])},
                             'bn_mean': {'shape': np.array([3]), 'value': np.array([1, 2, 3])},
                             'bn_var': {'shape': np.array([3]), 'value': np.array([1, 2, 3])},
                             'bn_data': {'shape': np.array([1, 227, 227, 3])},
                             'concat_data': {}
                             }, nodes_with_edges_only=True)

        graph_ref = build_graph(nodes_attributes,
                                [('placeholder_1', 'placeholder_1_data'),
                                 ('placeholder_1_data', 'mul_1'),
                                 ('const_mul_1_w', 'mul_1_w'),
                                 ('mul_1_w', 'mul_1'),
                                 ('mul_1', 'mul_1_data'),
                                 ('mul_1_data', 'add_1'),
                                 ('const_add_1_w', 'add_1_w'),
                                 ('add_1_w', 'add_1'),
                                 ('add_1', 'add_1_data'),
                                 ('add_1_data', 'mul_2'),
                                 ('const_mul_2_w', 'mul_2_w'),
                                 ('mul_2_w', 'mul_2'),
                                 ('mul_2', 'mul_2_data'),
                                 ('mul_2_data', 'add_2'),
                                 ('const_add_2_w', 'add_2_w'),
                                 ('add_2_w', 'add_2'),
                                 ('add_2', 'add_2_data'),
                                 ('concat', 'concat_data'),
                                 ('add_2_data', 'concat'),
                                 ('concat_data', 'op_output')
                                 ],
                                {'placeholder_1_data': {'shape': np.array([1, 227, 227, 3])},
                                 'const_mul_1_w': {'shape': np.array([3]),
                                             'value': np.array([0.67419986, 0.55901699, 0.48795004])},
                                 'mul_1_w': {'shape': np.array([3]),
                                             'value': np.array([0.67419986, 0.55901699, 0.48795004])},
                                 'const_mul_2_w': {'shape': np.array([3]), 'value': np.array([1, 2, 3])},
                                 'mul_2_w': {'shape': np.array([3]), 'value': np.array([1, 2, 3])},
                                 'const_add_1_w': {'shape': np.array([3]),
                                             'value': np.array([-0.67419986, -1.11803399, -1.46385011])},
                                 'add_1_w': {'shape': np.array([3]),
                                             'value': np.array([-0.67419986, -1.11803399, -1.46385011])},
                                 'const_add_2_w': {'shape': np.array([3]), 'value': np.array([1, 2, 3])},
                                 'add_2_w': {'shape': np.array([3]), 'value': np.array([1, 2, 3])},
                                 'add_2_data': {'shape': np.array([1, 227, 227, 3])},
                                 'mul_1': {'can_be_fused': True},
                                 'mul_2': {'can_be_fused': True},
                                 'add_1': {'can_be_fused': True},
                                 'add_2': {'can_be_fused': True},
                                 'concat_data': {}
                                 }, nodes_with_edges_only=True)

        graph.graph['layout'] = 'NHWC'
        convert_batch_norm(graph)
        graph.clean_up()

        (flag, resp) = compare_graphs(graph, graph_ref, 'concat_data')
        self.assertTrue(flag, resp)
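
A minimal sketch, consistent with the expected constants in the reference graph above, of the arithmetic behind the decomposition: BatchNorm is rewritten as one Mul/Add pair applying 1/sqrt(var + eps) and -mean/sqrt(var + eps), followed by a second Mul/Add pair applying gamma and beta:

    import numpy as np

    gamma = beta = mean = var = np.array([1.0, 2.0, 3.0])
    eps = 1.2
    mul_1_w = 1.0 / np.sqrt(var + eps)    # ~[0.6742, 0.5590, 0.4880]
    add_1_w = -mean / np.sqrt(var + eps)  # ~[-0.6742, -1.1180, -1.4639]
    mul_2_w, add_2_w = gamma, beta        # [1, 2, 3] each
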
    def test_dequantize(self):
        graph = build_graph(nodes1_attributes, [
            ('input', 'input_data'),
            ('input_data', 'dequantize'),
            ('dequantize', 'dequantize_data'),
            ('scale_param_dq', 'scale_param_dq_data'),
            ('zerop_param_dq', 'zerop_param_dq_data'),
            ('scale_param_dq_data', 'dequantize'),
            ('zerop_param_dq_data', 'dequantize'),
            ('dequantize_data', 'out'),
            ('out', 'out_data'),
            ('out_data', 'result'),
        ], {
            'input_data': {
                'shape': int64_array([1, 3, 224, 224])
            },
            'scale_param_dq': {
                'shape': np.array([]),
                'value': np.float32(1.0 / 255)
            },
            'scale_param_dq_data': {
                'shape': np.array([]),
                'value': np.float32(1.0 / 255)
            },
            'zerop_param_dq': {
                'shape': np.array([]),
                'value': np.uint8(0)
            },
            'zerop_param_dq_data': {
                'shape': np.array([]),
                'value': np.uint8(0)
            },
        },
                            nodes_with_edges_only=True)

        graph_ref = build_graph(nodes_ref_attributes, [
            ('input', 'input_data'),
            ('input_data', 'cast'),
            ('cast', 'cast_data'),
            ('cast_data', 'sub'),
            ('zerop_param_dq', 'zerop_param_dq_data'),
            ('zerop_param_dq_data', 'sub'),
            ('sub', 'sub_data'),
            ('sub_data', 'mul'),
            ('scale_param_dq', 'scale_param_dq_data'),
            ('scale_param_dq_data', 'mul'),
            ('mul', 'mul_data'),
            ('mul_data', 'out'),
            ('out', 'out_data'),
            ('out_data', 'result'),
        ], {
            'input_data': {
                'shape': int64_array([1, 3, 224, 224])
            },
            'scale_param_dq': {
                'shape': np.array([]),
                'value': np.float32(1.0 / 255)
            },
            'scale_param_dq_data': {
                'shape': np.array([]),
                'value': np.float32(1.0 / 255)
            },
            'zerop_param_dq': {
                'shape': np.array([]),
                'value': np.uint8(0)
            },
            'zerop_param_dq_data': {
                'shape': np.array([]),
                'value': np.uint8(0)
            },
        },
                                nodes_with_edges_only=True)

        graph.stage = 'middle'
        DequantizeLinearResolver().find_and_replace_pattern(graph)

        (flag, resp) = compare_graphs(graph,
                                      graph_ref,
                                      'out',
                                      check_op_attrs=True)
        self.assertTrue(flag, resp)
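
A minimal numpy sketch of what the replacement subgraph computes, matching the ONNX DequantizeLinear definition y = (x - x_zero_point) * x_scale, with the Cast providing the integer-to-float conversion:

    import numpy as np

    x = np.array([[0, 128, 255]], dtype=np.uint8)            # hypothetical quantized input
    scale, zero_point = np.float32(1.0 / 255), np.uint8(0)   # the scalar values used in the test
    y = (x.astype(np.float32) - np.float32(zero_point)) * scale
    assert y.dtype == np.float32 and np.isclose(y[0, -1], 1.0)
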