def test1(self):
        nodes_attributes = {
            'input_indices': {'shape': int64_array([5, 2]), 'type': 'Parameter', 'kind': 'op', 'op': 'Parameter'},
            'input_values': {'shape': int64_array([5]), 'type': 'Parameter', 'kind': 'op', 'op': 'Parameter'},

            'sparse_to_dense': {'kind': 'op', 'op': 'SparseToDense'},
            'broadcast': {'kind': 'op', 'op': 'Broadcast'},
            'scatternd': {'kind': 'op', 'op': 'ScatterNDUpdate'},

            'last': {'type': None, 'value': None, 'kind': 'op', 'op': 'Result'},

            **const('input_dense_shape', int64_array([50, 40])),
            **const('input_default_value', int64_array(0))}

        graph = build_graph(nodes_attributes,
                            [('input_indices', 'sparse_to_dense', {'out': 0, 'in': 0}),
                             ('input_dense_shape', 'sparse_to_dense', {'out': 0, 'in': 1}),
                             ('input_values', 'sparse_to_dense', {'out': 0, 'in': 2}),
                             ('input_default_value', 'sparse_to_dense', {'out': 0, 'in': 3}),
                             ('sparse_to_dense', 'last', {'out': 0, 'in': 0})],
                            nodes_with_edges_only=True)
        graph.stage = 'front'
        SparseToDenseReplacer().find_and_replace_pattern(graph)

        graph_ref = build_graph(nodes_attributes,
                                [('input_default_value', 'broadcast', {'in': 0}),
                                 ('input_dense_shape', 'broadcast', {'in': 1}),
                                 ('broadcast', 'scatternd', {'in': 0}),
                                 ('input_indices', 'scatternd', {'in': 1}),
                                 ('input_values', 'scatternd', {'in': 2}),
                                 ('scatternd', 'last', {'in': 0})],
                                nodes_with_edges_only=True)

        (flag, resp) = compare_graphs(graph, graph_ref, 'last', check_op_attrs=True)
        self.assertTrue(flag, resp)
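
    # Fixture for the TF Object Detection API preprocessor replacement tests: a Mul/Sub
    # pre-processing pair around the Preprocessor unstack/stack Identity nodes and a Loop
    # whose body graph is produced by build_body_graph further below.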
    def setUp(self):
        self.start_node_name = 'StatefulPartitionedCall/Preprocessor/unstack'
        self.end_node_name = 'StatefulPartitionedCall/Preprocessor/stack'
        self.end_node_name2 = 'StatefulPartitionedCall/Preprocessor/stack2'
        self.loop_start_node_name = 'prefix/map/while/Preprocessor/unstack'
        self.loop_end_node_name = 'prefix/map/while/Preprocessor/stack'
        self.mul_const = float32_array([0.025, 0.374, -0.45])
        self.sub_const = float32_array([2.0, 3.0, 4.0])

        self.nodes = {
            **regular_op('input', {'type': 'Parameter'}),

            **regular_op('mul', {'op': 'Mul', 'type': 'Multiply', 'name': 'my_mul'}),
            **regular_op('sub', {'op': 'Sub', 'type': 'Subtract', 'name': 'my_sub'}),
            **const('mul_const', self.mul_const),
            **const('sub_const', self.sub_const),

            **regular_op(self.start_node_name, {'op': 'Identity'}),
            **regular_op(self.end_node_name, {'op': 'Identity'}),
            **regular_op(self.end_node_name2, {'op': 'Identity'}),

            **regular_op('loop', {'op': 'Loop', 'body': None}),

            **regular_op('resize', {'type': 'Interpolate'}),
            **result('result'),
        }
        self.replacement_desc = {'start_nodes': [self.start_node_name],
                                 'end_nodes': [self.end_node_name, self.end_node_name2]}
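
    # Negative case: the final Mul consumes a second Parameter instead of the HSigmoid
    # subgraph output, so HSigmoidWithReluMul must leave the graph unchanged
    # (the reference graph is a plain copy).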
    def test_hsigmoid_with_relu_mul_different_tensors(self):
        graph = build_graph_with_edge_attrs(
            {
                **regular_op('input', {'type': 'Parameter'}),
                **regular_op('input_2', {'type': 'Parameter'}),
                **regular_op('add', {'op': 'Add'}),
                **regular_op('max', {'op': 'Maximum'}),
                **regular_op('min', {'op': 'Minimum'}),
                **regular_op('mul', {'op': 'Mul'}),
                **regular_op('mul_2', {
                    'op': 'Mul',
                    'name': 'final_mul'
                }),
                **const('const_0', float_array([0.0])),
                **const('const_3', float_array([3.0])),
                **const('const_6', float_array([6.0])),
                **const('const_1_6', float_array([1.0 / 6.0])),
                **result('result'),
            }, [('input_2', 'mul', {
                'in': 1,
                'out': 0
            }), ('input', 'add', {
                'in': 0,
                'out': 0
            }), ('const_3', 'add', {
                'in': 1,
                'out': 0
            }), ('add', 'max', {
                'in': 0,
                'out': 0
            }), ('const_0', 'max', {
                'in': 1,
                'out': 0
            }), ('max', 'min', {
                'in': 0,
                'out': 0
            }), ('const_6', 'min', {
                'in': 1,
                'out': 0
            }), ('min', 'mul', {
                'in': 0,
                'out': 0
            }), ('mul', 'mul_2', {
                'in': 0,
                'out': 0
            }), ('const_1_6', 'mul_2', {
                'in': 1,
                'out': 0
            }), ('mul_2', 'result', {
                'in': 0,
                'out': 0
            })])

        graph_ref = graph.copy()
        graph.stage = 'front'

        HSigmoidWithReluMul().find_and_replace_pattern(graph)

        (flag, resp) = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)
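
    # Helper building the Loop body graph; depending on 'pre_processing' the Mul/Sub pair
    # is absent ('no'), placed after the resize ('trailing'), or placed before it.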
    def build_body_graph(self, pre_processing: str):
        nodes = {
            **regular_op('input', {
                'type': 'Parameter',
                'op': 'Parameter'
            }),
            **regular_op('mul', {
                'op': 'Mul',
                'type': 'Multiply',
                'name': 'my_body_mul'
            }),
            **regular_op('sub', {
                'op': 'Sub',
                'type': 'Subtract',
                'name': 'my_body_sub'
            }),
            **const('body_mul_const', self.mul_const),
            **const('body_sub_const', self.sub_const),
            **regular_op(self.loop_start_node_name, {'op': 'Identity'}),
            **regular_op(self.loop_end_node_name, {'op': 'Identity'}),
            **regular_op('resize', {'type': 'Interpolate'}),
            **result('result'),
        }
        if pre_processing == 'no':
            edges = [
                *connect_front('input', self.loop_start_node_name),
                *connect_front(self.loop_start_node_name, 'resize'),
                *connect_front('resize', self.loop_end_node_name),
                *connect_front(self.loop_end_node_name, 'result'),
            ]
        elif pre_processing == 'trailing':
            edges = [
                *connect_front('input', self.loop_start_node_name),
                *connect_front(self.loop_start_node_name, 'resize'),
                *connect_front('resize', self.loop_end_node_name),
                *connect_front(self.loop_end_node_name, '0:mul'),
                *connect_front('body_mul_const', '1:mul'),
                *connect_front('body_sub_const', '0:sub'),
                *connect_front('mul', '1:sub'),
                *connect_front('sub', 'result'),
            ]
        else:
            edges = [
                *connect_front('input', '0:mul'),
                *connect_front('body_mul_const', '1:mul'),
                *connect_front('body_sub_const', '0:sub'),
                *connect_front('mul', '1:sub'),
                *connect_front('sub', self.loop_start_node_name),
                *connect_front(self.loop_start_node_name, 'resize'),
                *connect_front('resize', self.loop_end_node_name),
                *connect_front(self.loop_end_node_name, 'result'),
            ]
        graph = build_graph(nodes, edges, nodes_with_edges_only=True)
        graph.stage = 'front'
        return graph
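
    # ATen 'embedding_bag' in packed mode (mode=0, two inputs) should be converted to
    # EmbeddingBagPackedSum by AtenToEmbeddingBag.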
    def test_packed(self):
        nodes = {
            **const('weights_inp', np.random.randn(100, 4)),
            **regular_op('indices_inp', {'type': 'Parameter'}),
            **regular_op('aten', {'type': None, 'kind': 'op', 'op': 'ATen', 'operator': 'embedding_bag', 'mode': 0,
                                  'name': 'my_aten'}),

            **regular_op('emb_bag', {'type': 'EmbeddingBagPackedSum', 'kind': 'op',
                                     'op': 'EmbeddingBagPackedSum'}),
            **result('result'),
        }
        edges = [('weights_inp', 'aten'),
                 ('indices_inp', 'aten'),
                 ('aten', 'result'),
                 ]
        graph = build_graph(nodes, edges)

        graph.graph['layout'] = 'NCHW'
        graph.stage = 'front'

        edges_ref = [('weights_inp', 'emb_bag'),
                     ('indices_inp', 'emb_bag'),
                     ('emb_bag', 'result'),
                     ]

        graph_ref = build_graph(nodes, edges_ref)

        AtenToEmbeddingBag().find_and_replace_pattern(graph)

        (flag, resp) = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)
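
    # OneHotDepthNormalizer should insert a Reshape that turns the 1-D 'depth' constant
    # into a scalar input of OneHot.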
    def test(self):
        nodes = {
            **regular_op('input', {'type': 'Parameter'}),
            **const('depth', int64_array([2])),
            **regular_op('onehot', {
                'type': 'OneHot',
                'kind': 'op',
                'op': 'OneHot'
            }),
            **regular_op('reshape', {
                'type': 'Reshape',
                'kind': 'op',
                'op': 'Reshape'
            }),
            **const('reshape_dims', int64_array([])),
            **result('result'),
        }
        edges = [
            ('input', 'onehot'),
            ('depth', 'onehot'),
            ('onehot', 'result'),
        ]
        graph = build_graph(nodes, edges)

        graph.graph['layout'] = 'NCHW'
        graph.stage = 'front'

        edges_ref = [
            ('input', 'onehot'),
            ('depth', 'reshape'),
            ('reshape_dims', 'reshape'),
            ('reshape', 'onehot'),
            ('onehot', 'result'),
        ]

        graph_ref = build_graph(nodes, edges_ref)

        OneHotDepthNormalizer().find_and_replace_pattern(graph)

        (flag, resp) = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)
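    # AttributedSliceToSliceReplacer should rewrite AttributedSlice into a Slice node fed
    # by the 'start', 'end' and 'axis' constants.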
    def test_attributed_slice_replacer(self, attributed_slice_attrs):
        nodes = {
            **regular_op_with_empty_data('input', {'type': 'Parameter'}),
            **regular_op_with_empty_data('attributed_slice', attributed_slice_attrs),
            **result(),

            # nodes after replacement
            **const('start', np.array([0, 0])),
            **const('end', np.array([1, -1])),
                **const('axis', np.array([0, 1])),
            **regular_op_with_empty_data('slice', {
                'op': 'Slice',
                'type': None
            }),
        }

        graph = build_graph(nodes_attrs=nodes,
                            edges=[
                                ('input', 'attributed_slice'),
                                ('attributed_slice', 'output'),
                            ],
                            nodes_with_edges_only=True)
        graph.stage = 'front'

        AttributedSliceToSliceReplacer().find_and_replace_pattern(graph)

        graph_ref = build_graph(nodes_attrs=nodes,
                                edges=[
                                    ('input', 'slice'),
                                    *connect_front('start', '1:slice'),
                                    *connect_front('end', '2:slice'),
                                    *connect_front('axis', '3:slice'),
                                    ('slice', 'output'),
                                ],
                                nodes_with_edges_only=True)

        (flag, resp) = compare_graphs(graph,
                                      graph_ref,
                                      'output',
                                      check_op_attrs=True)
        self.assertTrue(flag, resp)
    def test2_not_applied_transform(self):
        # the transformation is not applied if begin and end are constant
        nodes_attributes = {
            # nodes from original graph
            'input': {'type': 'Parameter', 'kind': 'op', 'op': 'Parameter'},
            'begin': {'type': 'Pack', 'kind': 'op', 'op': 'Pack'},
            **const('begin_1', int64_array(0)),
            **const('begin_2', int64_array(0)),
            **const('begin_3', int64_array(0)),
            'end': {'type': 'Pack', 'kind': 'op', 'op': 'Pack'},
            **const('end_1', int64_array(0)),
            **const('end_2', int64_array(3)),
            **const('end_3', int64_array(0)),
            **const('step', int64_array([1, 1, 1])),
            'strided_slice': {'type': 'StridedSlice', 'kind': 'op', 'op': 'StridedSlice',
                              'begin_mask': int64_array([0, 1, 0]), 'end_mask': int64_array([0, 1, 0]),
                              'shrink_axis_mask': int64_array([0, 1, 0]), 'name': 'non_const_begin_strided_slice'},
            'result': {'type': 'Result', 'kind': 'op', 'op': 'Result'},
        }

        graph = build_graph(nodes_attributes,
                            [('input', 'strided_slice', {'out': 0, 'in': 0}),
                             ('begin_1', 'begin', {'out': 0, 'in': 0}),
                             ('begin_2', 'begin', {'out': 0, 'in': 1}),
                             ('begin_3', 'begin', {'out': 0, 'in': 2}),
                             ('begin', 'strided_slice', {'out': 0, 'in': 1}),
                             ('end_1', 'end', {'out': 0, 'in': 0}),
                             ('end_2', 'end', {'out': 0, 'in': 1}),
                             ('end_3', 'end', {'out': 0, 'in': 2}),
                             ('end', 'strided_slice', {'out': 0, 'in': 2}),
                             ('step', 'strided_slice', {'out': 0, 'in': 3}),
                             ('strided_slice', 'result', {'out': 0, 'in': 0}),
                             ], nodes_with_edges_only=True)
        graph.stage = 'front'
        NonConstBeginStridedSliceReplacement().find_and_replace_pattern(graph)

        graph_ref = build_graph(nodes_attributes,
                                [('input', 'strided_slice', {'out': 0, 'in': 0}),
                                 ('begin_1', 'begin', {'out': 0, 'in': 0}),
                                 ('begin_2', 'begin', {'out': 0, 'in': 1}),
                                 ('begin_3', 'begin', {'out': 0, 'in': 2}),
                                 ('begin', 'strided_slice', {'out': 0, 'in': 1}),
                                 ('end_1', 'end', {'out': 0, 'in': 0}),
                                 ('end_2', 'end', {'out': 0, 'in': 1}),
                                 ('end_3', 'end', {'out': 0, 'in': 2}),
                                 ('end', 'strided_slice', {'out': 0, 'in': 2}),
                                 ('step', 'strided_slice', {'out': 0, 'in': 3}),
                                 ('strided_slice', 'result', {'out': 0, 'in': 0})],
                                nodes_with_edges_only=True)

        (flag, resp) = compare_graphs(graph, graph_ref, 'result', check_op_attrs=True)
        self.assertTrue(flag, resp)
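
    # Helper building a minimal NHWC Conv2D graph (Parameter + weights -> Conv2D -> Result)
    # already switched to the 'front' stage.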
    def build_conv_graph():
        nodes = {
            **const('weights', np.random.randn(1, 1, 1, 1)),
            **regular_op('input', {'op': 'Parameter'}),
            **regular_op('conv', {'op': 'Conv2D', 'layout': 'NHWC'}),
            **result('result'),
        }
        edges = [*connect_front('input', '0:conv'),
                 *connect_front('weights', '1:conv'),
                 *connect_front('conv:0', 'result'),
                 ]
        graph = build_graph(nodes, edges)

        graph.stage = 'front'
        return graph
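
# The ln(1 + exp(x)) pattern should be fused into a single SoftPlus operation; the fusion
# must not trigger when the added constant differs from 1.0.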
class SoftplusFusionTest(unittest.TestCase):
    nodes = {
        **regular_op('input', {'type': 'Parameter'}),
        **regular_op('exp', {'op': 'Exp'}),
        **const('const_1', float_array([1.0])),
        **regular_op('add', {'op': 'Add'}),
        **regular_op('ln', {'op': 'Log', 'name': 'final_log'}),
        **result('result'),
    }

    edges = [('input', 'exp', {'in': 0, 'out': 0}),
             ('const_1', 'add', {'in': 0, 'out': 0}),
             ('exp', 'add', {'in': 1, 'out': 0}),
             ('add', 'ln', {'in': 0, 'out': 0}),
             ('ln', 'result', {'in': 0, 'out': 0})]

    def test_softplus_fusion_test(self):
        graph = build_graph_with_edge_attrs(self.nodes, self.edges, {})

        graph_ref = build_graph(ref_nodes, ref_edges)
        graph.stage = 'front'

        SoftplusFusion().find_and_replace_pattern(graph)

        (flag, resp) = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)
        self.assertTrue(len(graph.get_op_nodes(name='final_log')) == 1 and
                        graph.get_op_nodes(name='final_log')[0].op == 'SoftPlus')

    def test_softplus_fusion_test_wrong_const(self):
        graph = build_graph_with_edge_attrs(self.nodes, self.edges, {'const_1': {'value': float_array([0.9999])}})

        graph_ref = graph.copy()
        graph.stage = 'front'

        SoftplusFusion().find_and_replace_pattern(graph)

        (flag, resp) = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)
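
# Kaldi 'timeheightconvolutioncomponent' is expanded into MemoryOffset/Concat nodes feeding
# a regular Convolution; the tests below vary time offsets, dilation, padding, stride and
# channel counts and check the resulting Convolution attributes and reordered weights.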
class TimeheightconvolutionReplacerTest(unittest.TestCase):
    nodes = {
        **regular_op('placeholder', {}),
        **regular_op('timeheightconv', {
            'op': 'timeheightconvolutioncomponent'
        }),
        **const('weights', int64_array([])),
        **const('biases', int64_array([])),
        **regular_op('placeholder_out', {}),
        **regular_op('concat', {
            'type': 'Concat',
            'axis': 1
        }),
        **regular_op('memoryoffset_0', {
            'type': None,
            'op': 'MemoryOffset',
            't': -1,
            'has_default': False
        }),
        **regular_op('memoryoffset_1', {
            'type': None,
            'op': 'MemoryOffset',
            't': 0,
            'has_default': False
        }),
        **regular_op('memoryoffset_2', {
            'type': None,
            'op': 'MemoryOffset',
            't': 1,
            'has_default': True
        }),
        **regular_op(
            'conv', {
                'op': 'Convolution',
                'type': 'Convolution',
                'output': 12,
                'height_in': 80
            }),
    }

    def test_timeheightconvolution_1offset(self):
        graph = build_graph(self.nodes, [
            *connect_front('placeholder', '0:timeheightconv'),
            *connect_front('weights', '1:timeheightconv'),
            *connect_front('biases', '2:timeheightconv'),
            *connect_front('timeheightconv', 'placeholder_out')
        ],
                            nodes_with_edges_only=True)

        graph.stage = 'front'

        conv = graph.nodes['timeheightconv']
        conv['height_subsample'] = 1
        conv['height_in'] = 80
        conv['height_out'] = 80
        conv['in_channels'] = 1
        conv['out_channels'] = 12
        conv['offsets'] = int64_array([[-1, -1], [-1, 0], [-1, 1]])
        conv['time_offsets'] = [-1]
        graph.nodes['weights']['value'] = np.zeros([36])

        ref_graph = build_graph(self.nodes, [
            *connect_front('placeholder', 'memoryoffset_0'),
            *connect_front('memoryoffset_0', '0:concat'),
            *connect_front('concat', '0:conv'),
            *connect_front('weights', '1:conv'),
            *connect_front('biases', '2:conv'),
            *connect_front('conv', 'placeholder_out')
        ],
                                nodes_with_edges_only=True)
        ref_graph.nodes['weights']['value'] = np.zeros([36])
        new_conv = ref_graph.nodes['conv']
        new_conv['pad'] = int64_array([[0, 0], [0, 0], [0, 0], [1, 1]])
        new_conv['dilation'] = int64_array([1, 1, 1, 1])
        new_conv['kernel'] = int64_array([12, 1, 1, 3])
        new_conv['stride'] = int64_array([1, 1, 1, 1])

        ReplaceTimeHeightConvolutionPattern().find_and_replace_pattern(graph)

        (flag, resp) = compare_graphs(graph,
                                      ref_graph,
                                      'placeholder_out',
                                      check_op_attrs=True)
        self.assertTrue(flag, resp)

    def test_timeheightconvolution_2_offsets(self):
        graph = build_graph(self.nodes, [
            *connect_front('placeholder', '0:timeheightconv'),
            *connect_front('weights', '1:timeheightconv'),
            *connect_front('biases', '2:timeheightconv'),
            *connect_front('timeheightconv', 'placeholder_out')
        ],
                            nodes_with_edges_only=True)

        graph.stage = 'front'

        conv = graph.nodes['timeheightconv']
        conv['height_subsample'] = 1
        conv['height_in'] = 80
        conv['height_out'] = 80
        conv['in_channels'] = 1
        conv['out_channels'] = 12
        conv['offsets'] = int64_array([[-1, -1], [-1, 0], [-1, 1], [0, -1],
                                       [0, 0], [0, 1]])
        conv['time_offsets'] = int64_array([-1, 0])
        graph.nodes['weights']['value'] = np.zeros([72])

        ref_graph = build_graph(self.nodes, [
            *connect_front('placeholder', 'memoryoffset_0'),
            *connect_front('placeholder', 'memoryoffset_1'),
            *connect_front('memoryoffset_0', '0:concat'),
            *connect_front('memoryoffset_1', '1:concat'),
            *connect_front('concat', '0:conv'),
            *connect_front('weights', '1:conv'),
            *connect_front('biases', '2:conv'),
            *connect_front('conv', 'placeholder_out')
        ],
                                nodes_with_edges_only=True)
        ref_graph.nodes['weights']['value'] = np.zeros([72])
        new_conv = ref_graph.nodes['conv']
        new_conv['pad'] = int64_array([[0, 0], [0, 0], [0, 0], [1, 1]])
        new_conv['dilation'] = int64_array([1, 1, 1, 1])
        new_conv['kernel'] = int64_array([12, 1, 2, 3])
        new_conv['stride'] = int64_array([1, 1, 1, 1])

        ReplaceTimeHeightConvolutionPattern().find_and_replace_pattern(graph)

        (flag, resp) = compare_graphs(graph,
                                      ref_graph,
                                      'placeholder_out',
                                      check_op_attrs=True)
        self.assertTrue(flag, resp)

    def test_timeheightconvolution_2_offsets_def(self):
        graph = build_graph(self.nodes, [
            *connect_front('placeholder', '0:timeheightconv'),
            *connect_front('weights', '1:timeheightconv'),
            *connect_front('biases', '2:timeheightconv'),
            *connect_front('timeheightconv', 'placeholder_out')
        ],
                            nodes_with_edges_only=True)

        graph.stage = 'front'

        conv = graph.nodes['timeheightconv']
        conv['height_subsample'] = 1
        conv['height_in'] = 80
        conv['height_out'] = 80
        conv['in_channels'] = 1
        conv['out_channels'] = 12
        conv['offsets'] = int64_array([[0, -1], [0, 0], [0, 1], [1, -1],
                                       [1, 0], [1, 1]])
        conv['time_offsets'] = int64_array([0])
        graph.nodes['weights']['value'] = np.zeros([72])

        ref_graph = build_graph(self.nodes, [
            *connect_front('placeholder', 'memoryoffset_1'),
            *connect_front('placeholder', 'memoryoffset_2'),
            *connect_front('memoryoffset_1', '0:concat'),
            *connect_front('memoryoffset_2', '1:concat'),
            *connect_front('concat', '0:conv'),
            *connect_front('weights', '1:conv'),
            *connect_front('biases', '2:conv'),
            *connect_front('conv', 'placeholder_out')
        ],
                                nodes_with_edges_only=True)
        ref_graph.nodes['weights']['value'] = np.zeros([72])
        new_conv = ref_graph.nodes['conv']
        new_conv['pad'] = int64_array([[0, 0], [0, 0], [0, 0], [1, 1]])
        new_conv['dilation'] = int64_array([1, 1, 1, 1])
        new_conv['kernel'] = int64_array([12, 1, 2, 3])
        new_conv['stride'] = int64_array([1, 1, 1, 1])

        ReplaceTimeHeightConvolutionPattern().find_and_replace_pattern(graph)

        (flag, resp) = compare_graphs(graph,
                                      ref_graph,
                                      'placeholder_out',
                                      check_op_attrs=True)
        self.assertTrue(flag, resp)

    def test_timeheightconvolution_2_offsets_dilation(self):
        graph = build_graph(self.nodes, [
            *connect_front('placeholder', '0:timeheightconv'),
            *connect_front('weights', '1:timeheightconv'),
            *connect_front('biases', '2:timeheightconv'),
            *connect_front('timeheightconv', 'placeholder_out')
        ],
                            nodes_with_edges_only=True)

        graph.stage = 'front'

        conv = graph.nodes['timeheightconv']
        conv['height_subsample'] = 1
        conv['height_in'] = 80
        conv['height_out'] = 80
        conv['in_channels'] = 1
        conv['out_channels'] = 12
        conv['offsets'] = int64_array([[-1, -3], [-1, 0], [-1, 3], [1, -3],
                                       [1, 0], [1, 3]])
        conv['time_offsets'] = int64_array([-1])
        graph.nodes['weights']['value'] = np.zeros([72])

        ref_graph = build_graph(self.nodes, [
            *connect_front('placeholder', 'memoryoffset_0'),
            *connect_front('placeholder', 'memoryoffset_2'),
            *connect_front('memoryoffset_0', '0:concat'),
            *connect_front('memoryoffset_2', '1:concat'),
            *connect_front('concat', '0:conv'),
            *connect_front('weights', '1:conv'),
            *connect_front('biases', '2:conv'),
            *connect_front('conv', 'placeholder_out')
        ],
                                nodes_with_edges_only=True)
        ref_graph.nodes['weights']['value'] = np.zeros([72])
        new_conv = ref_graph.nodes['conv']
        new_conv['pad'] = int64_array([[0, 0], [0, 0], [0, 0], [3, 3]])
        new_conv['dilation'] = int64_array([1, 1, 2, 3])
        new_conv['kernel'] = int64_array([12, 1, 2, 3])
        new_conv['stride'] = int64_array([1, 1, 1, 1])

        ReplaceTimeHeightConvolutionPattern().find_and_replace_pattern(graph)

        (flag, resp) = compare_graphs(graph,
                                      ref_graph,
                                      'placeholder_out',
                                      check_op_attrs=True)
        self.assertTrue(flag, resp)

    def test_timeheightconvolution_2_offsets_pad(self):
        graph = build_graph(self.nodes, [
            *connect_front('placeholder', '0:timeheightconv'),
            *connect_front('weights', '1:timeheightconv'),
            *connect_front('biases', '2:timeheightconv'),
            *connect_front('timeheightconv', 'placeholder_out')
        ],
                            nodes_with_edges_only=True)

        graph.stage = 'front'
        conv = graph.nodes['timeheightconv']
        conv['height_subsample'] = 1
        conv['height_in'] = 80
        conv['height_out'] = 74
        conv['in_channels'] = 1
        conv['out_channels'] = 12
        conv['offsets'] = int64_array([[-1, 0], [-1, 3], [-1, 6], [1, 0],
                                       [1, 3], [1, 6]])
        conv['time_offsets'] = int64_array([-1])
        graph.nodes['weights']['value'] = np.zeros([72])

        ref_graph = build_graph(self.nodes, [
            *connect_front('placeholder', 'memoryoffset_0'),
            *connect_front('placeholder', 'memoryoffset_2'),
            *connect_front('memoryoffset_0', '0:concat'),
            *connect_front('memoryoffset_2', '1:concat'),
            *connect_front('concat', '0:conv'),
            *connect_front('weights', '1:conv'),
            *connect_front('biases', '2:conv'),
            *connect_front('conv', 'placeholder_out')
        ],
                                nodes_with_edges_only=True)
        ref_graph.nodes['weights']['value'] = np.zeros([72])
        new_conv = ref_graph.nodes['conv']
        new_conv['pad'] = int64_array([[0, 0], [0, 0], [0, 0], [0, 0]])
        new_conv['dilation'] = int64_array([1, 1, 2, 3])
        new_conv['kernel'] = int64_array([12, 1, 2, 3])
        new_conv['stride'] = int64_array([1, 1, 1, 1])

        ReplaceTimeHeightConvolutionPattern().find_and_replace_pattern(graph)

        (flag, resp) = compare_graphs(graph,
                                      ref_graph,
                                      'placeholder_out',
                                      check_op_attrs=True)
        self.assertTrue(flag, resp)

    def test_timeheightconvolution_out_channels(self):
        graph = build_graph(self.nodes, [
            *connect_front('placeholder', '0:timeheightconv'),
            *connect_front('weights', '1:timeheightconv'),
            *connect_front('biases', '2:timeheightconv'),
            *connect_front('timeheightconv', 'placeholder_out')
        ],
                            nodes_with_edges_only=True)

        graph.stage = 'front'
        conv = graph.nodes['timeheightconv']
        conv['height_subsample'] = 1
        conv['height_in'] = 80
        conv['height_out'] = 74
        conv['in_channels'] = 3
        conv['out_channels'] = 4
        conv['offsets'] = int64_array([[-1, 0], [-1, 3], [-1, 6], [1, 0],
                                       [1, 3], [1, 6]])
        conv['time_offsets'] = int64_array([-1])
        graph.nodes['weights']['value'] = np.array([
            1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
            20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36,
            37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53,
            54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70,
            71, 72
        ])

        ref_graph = build_graph(self.nodes, [
            *connect_front('placeholder', 'memoryoffset_0'),
            *connect_front('placeholder', 'memoryoffset_2'),
            *connect_front('memoryoffset_0', '0:concat'),
            *connect_front('memoryoffset_2', '1:concat'),
            *connect_front('concat', '0:conv'),
            *connect_front('weights', '1:conv'),
            *connect_front('biases', '2:conv'),
            *connect_front('conv', 'placeholder_out')
        ],
                                nodes_with_edges_only=True)
        ref_graph.nodes['weights']['value'] = np.array([
            1, 4, 7, 10, 13, 16, 2, 5, 8, 11, 14, 17, 3, 6, 9, 12, 15, 18, 19,
            22, 25, 28, 31, 34, 20, 23, 26, 29, 32, 35, 21, 24, 27, 30, 33, 36,
            37, 40, 43, 46, 49, 52, 38, 41, 44, 47, 50, 53, 39, 42, 45, 48, 51,
            54, 55, 58, 61, 64, 67, 70, 56, 59, 62, 65, 68, 71, 57, 60, 63, 66,
            69, 72
        ])
        new_conv = ref_graph.nodes['conv']
        new_conv['output'] = 4
        new_conv['pad'] = int64_array([[0, 0], [0, 0], [0, 0], [0, 0]])
        new_conv['dilation'] = int64_array([1, 1, 2, 3])
        new_conv['kernel'] = int64_array([4, 3, 2, 3])
        new_conv['stride'] = int64_array([1, 1, 1, 1])

        ReplaceTimeHeightConvolutionPattern().find_and_replace_pattern(graph)

        (flag, resp) = compare_graphs(graph,
                                      ref_graph,
                                      'placeholder_out',
                                      check_op_attrs=True)
        self.assertTrue(flag, resp)

    def test_timeheightconvolution_2_offsets_stride(self):
        graph = build_graph(self.nodes, [
            *connect_front('placeholder', '0:timeheightconv'),
            *connect_front('weights', '1:timeheightconv'),
            *connect_front('biases', '2:timeheightconv'),
            *connect_front('timeheightconv', 'placeholder_out')
        ],
                            nodes_with_edges_only=True)

        graph.stage = 'front'
        conv = graph.nodes['timeheightconv']
        conv['height_subsample'] = 2
        conv['height_in'] = 80
        conv['height_out'] = 37
        conv['in_channels'] = 1
        conv['out_channels'] = 12
        conv['offsets'] = int64_array([[-1, 0], [-1, 3], [-1, 6], [1, 0],
                                       [1, 3], [1, 6]])
        conv['time_offsets'] = int64_array([-1])
        graph.nodes['weights']['value'] = np.zeros([72])

        ref_graph = build_graph(self.nodes, [
            *connect_front('placeholder', 'memoryoffset_0'),
            *connect_front('placeholder', 'memoryoffset_2'),
            *connect_front('memoryoffset_0', '0:concat'),
            *connect_front('memoryoffset_2', '1:concat'),
            *connect_front('concat', '0:conv'),
            *connect_front('weights', '1:conv'),
            *connect_front('biases', '2:conv'),
            *connect_front('conv', 'placeholder_out')
        ],
                                nodes_with_edges_only=True)
        ref_graph.nodes['weights']['value'] = np.zeros([72])
        new_conv = ref_graph.nodes['conv']
        new_conv['pad'] = int64_array([[0, 0], [0, 0], [0, 0], [0, 0]])
        new_conv['dilation'] = int64_array([1, 1, 2, 3])
        new_conv['kernel'] = int64_array([12, 1, 2, 3])
        new_conv['stride'] = int64_array([1, 1, 1, 2])

        ReplaceTimeHeightConvolutionPattern().find_and_replace_pattern(graph)

        (flag, resp) = compare_graphs(graph,
                                      ref_graph,
                                      'placeholder_out',
                                      check_op_attrs=True)
        self.assertTrue(flag, resp)
import unittest

from extensions.front.onnx.MvnOnnxToMvn import MvnOnnxToMvn
from mo.front.common.partial_infer.utils import int64_array
from mo.utils.ir_engine.compare_graphs import compare_graphs
from unit_tests.utils.graph import build_graph, regular_op_with_empty_data, result, const, connect_front

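# MvnOnnxToMvn should replace MVNOnnx (axes stored as an attribute) with an MVN node that
# takes the axes as a second, constant input.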
nodes = {
    **regular_op_with_empty_data('input', {'type': 'Parameter'}),
    **regular_op_with_empty_data('mvn_onnx', {'op': 'MVNOnnx',
                                              'axes': int64_array([2, 3]),
                                              'eps': 1e-9,
                                              'eps_mode': 'outside_sqrt',
                                              'normalize_variance': 1}),
    **result(),

    # nodes after replacement
    **const('axes', int64_array([2, 3])),
    **regular_op_with_empty_data('mvn', {'op': 'MVN', 'type': None}),
}


class MvnOnnxToMvnTest(unittest.TestCase):
    def test_mvn_normalize(self):
        graph = build_graph(nodes, [('input', 'mvn_onnx'),
                                    ('mvn_onnx', 'output')],
                            nodes_with_edges_only=True)
        graph.stage = 'front'

        MvnOnnxToMvn().find_and_replace_pattern(graph)

        graph_ref = build_graph(nodes, [('input', 'mvn'),
                                        *connect_front('axes', '1:mvn'),
                                        ('mvn', 'output')],
                                nodes_with_edges_only=True)

        (flag, resp) = compare_graphs(graph, graph_ref, 'output', check_op_attrs=True)
        self.assertTrue(flag, resp)
from unit_tests.utils.graph import build_graph, const, result, regular_op

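# AttributedRoll (axes and shift stored as attributes) is expected to become a Roll node
# with the 'roll_axes' and 'roll_shift' constants as inputs.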
nodes_attributes = {
    **regular_op('placeholder', {'type': 'Parameter'}),
    **regular_op(
        'attr_roll', {
            'type': 'AttributedRoll',
            'op': 'AttributedRoll',
            'axes': int64_array([-1, 2, 3]),
            'shift': int64_array([5, -2, 3])
        }),
    **result('result'),

    # new Roll node and inputs
    **regular_op('roll', {'type': 'Roll'}),
    **const('roll_axes', int64_array([-1, 2, 3])),
    **const('roll_shift', int64_array([5, -2, 3]))
}


class AttributedRollToRollTest(unittest.TestCase):
    def test_axes_shift(self):
        graph = build_graph(nodes_attributes, [('placeholder', 'attr_roll', {
            'in': 0,
            'out': 0
        }), ('attr_roll', 'result', {
            'in': 0,
            'out': 0
        })], {},
                            nodes_with_edges_only=True)
import unittest

import numpy as np

from extensions.front.AttributedRandomUniformToRandomUniform import AttributedRandomUniformToRandomUniform
from mo.front.common.partial_infer.utils import int64_array, float32_array
from mo.utils.ir_engine.compare_graphs import compare_graphs
from unit_tests.utils.graph import build_graph, const, result, regular_op

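# AttributedRandomUniform (output_type/min_val/max_val/shape stored as attributes) is
# expected to become a RandomUniform node with constant min/max inputs.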
nodes = {
    **regular_op('placeholder', {'type': 'Parameter'}),
    **regular_op('attr_random_uniform', {'type': 'AttributedRandomUniform', 'op': 'AttributedRandomUniform',
                                         'output_type': np.float32,
                                         'min_val': float32_array([-1.5]), 'max_val': float32_array([10.7]),
                                         'shape': int64_array([5, 4, 3])}),
    **result('result'),

    # new RandomUniform node and inputs
    **regular_op('random_uniform', {'type': 'RandomUniform'}),
    **const('min_val', float32_array([-1.5])),
    **const('max_val', float32_array([10.7])),
    **const('shape', int64_array([5, 4, 3])),
}


class AttributedRandomUniformToRandomUniformTest(unittest.TestCase):
    def test_min_max(self):
        graph = build_graph(nodes,
                            [('placeholder', 'attr_random_uniform', {'in': 0, 'out': 0}),
                             ('attr_random_uniform', 'result', {'in': 0, 'out': 0})], {}, nodes_with_edges_only=True)

        graph_ref = build_graph(nodes,
                                [('placeholder', 'random_uniform', {'in': 0, 'out': 0}),
                                 ('min_val', 'random_uniform', {'in': 1, 'out': 0}),
                                 ('max_val', 'random_uniform', {'in': 2, 'out': 0}),
                                 ('random_uniform', 'result', {'in': 0, 'out': 0})],
                                {}, nodes_with_edges_only=True)

        graph.stage = 'front'
        AttributedRandomUniformToRandomUniform().find_and_replace_pattern(graph)

        (flag, resp) = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)
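
# Kaldi Convolution/Pooling on spliced 2-D data get Reshape + Transpose wrappers: the 4-D
# shape is assembled from ShapeOf/Gather/Div/Concat before the op and the original layout
# is restored with Transpose + Reshape afterwards.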
class AddReshapeTransposeAroundConvPoolTests(unittest.TestCase):
    nodes = {
        **shaped_parameter('input', [1, 30]),
        **regular_op('splice', {
            'op': 'Splice',
            'context': [-5, -4, -3, -2, -1, 0, 1, 2, 3, 4, 5]
        }),
        **regular_op(
            'conv', {
                'kind': 'op',
                'op': 'Convolution',
                'kernel': [1, 11, 1, 5],
                'patch_stride': 5,
                'kernel_spatial': [1, 5]
            }),
        **regular_op(
            'pool', {
                'kind': 'op',
                'op': 'Pooling',
                'pool_stride': 5,
                'pool_step': [1, 1, 1, 1]
            }),
        **regular_op('out_op', {'op': "SomeOp"}),
    }

    ref_nodes = {
        **shaped_parameter('input', [1, 30]),
        **regular_op('splice', {
            'op': 'Splice',
            'context': [-5, -4, -3, -2, -1, 0, 1, 2, 3, 4, 5]
        }),
        **regular_op('shapeof', {
            'op': 'ShapeOf',
            'type': 'ShapeOf'
        }),
        **const('ind', int64_array([0])),
        **const('axis', int64_array(0)),
        **regular_op('gather_batch', {
            'op': 'Gather',
            'type': 'Gather'
        }),
        **const('t', int64_array([11])),
        **const('h', int64_array([5])),
        **const('ind_h', int64_array([1])),
        **regular_op('gather_h', {
            'op': "Gather",
            'type': 'Gather'
        }),
        **const('th', int64_array([55])),
        **regular_op('div', {
            'op': 'Div',
            'type': 'Divide'
        }),
        **regular_op('concat', {
            'op': 'Concat',
            'type': 'Concat'
        }),
        **regular_op('reshape_in', {
            'op': 'Reshape',
            'type': 'Reshape'
        }),
        **const('transpose_in_order', int64_array([0, 3, 1, 2])),
        **regular_op('transpose_in', {
            'op': 'Transpose',
            'type': 'Transpose'
        }),
        **regular_op('conv', {
            'kind': 'op',
            'op': 'Convolution',
            'kernel': [1, 1, 11, 5]
        }),
        **regular_op(
            'pool', {
                'kind': 'op',
                'op': 'Pooling',
                'pool_stride': 5,
                'pool_step': [1, 1, 1, 1]
            }),
        **const('transpose_out_order', int64_array([0, 2, 3, 1])),
        **regular_op('transpose_out', {
            'op': 'Transpose',
            'type': 'Transpose'
        }),
        **const('reshape_out_shape', int64_array([0, -1])),
        **regular_op('reshape_out', {
            'op': 'Reshape',
            'type': 'Reshape'
        }),
        **regular_op('out_op', {'op': "SomeOp"})
    }

    def test_simple_convolution(self):
        graph = build_graph(self.nodes, [
            *connect_front('input', 'splice'),
            *connect_front('splice', 'conv'), *connect_front('conv', 'out_op')
        ],
                            nodes_with_edges_only=True)
        graph.stage = 'front'
        AddReshapeTransposeAroundConvPool().find_and_replace_pattern(graph)

        ref_graph = build_graph(self.ref_nodes, [
            *connect_front('input', 'splice'),
            *connect_front('splice', '0:reshape_in'),
            *connect_front('splice', 'shapeof'),
            *connect_front('shapeof:0', '0:gather_batch'),
            *connect_front('ind', '1:gather_batch'),
            *connect_front('axis', '2:gather_batch'),
            *connect_front('shapeof:0', '0:gather_h'),
            *connect_front('ind_h', '1:gather_h'),
            *connect_front('axis', '2:gather_h'),
            *connect_front('gather_h', '0:div'), *connect_front('th', '1:div'),
            *connect_front('gather_batch', '0:concat'),
            *connect_front('t', '1:concat'), *connect_front('h', '2:concat'),
            *connect_front('div', '3:concat'),
            *connect_front('concat', '1:reshape_in'),
            *connect_front('reshape_in', '0:transpose_in'),
            *connect_front('transpose_in_order', "1:transpose_in"),
            *connect_front('transpose_in', 'conv'),
            *connect_front('conv', '0:transpose_out'),
            *connect_front('transpose_out_order', '1:transpose_out'),
            *connect_front('transpose_out', '0:reshape_out'),
            *connect_front('reshape_out_shape', '1:reshape_out'),
            *connect_front('reshape_out', 'out_op')
        ])

        (flag, resp) = compare_graphs(graph,
                                      ref_graph,
                                      'out_op',
                                      check_op_attrs=True)
        self.assertTrue(flag, resp)

    def test_simple_pooling(self):
        graph = build_graph(self.nodes, [
            *connect_front('input', 'splice'),
            *connect_front('splice', 'pool'), *connect_front('pool', 'out_op')
        ],
                            nodes_with_edges_only=True)
        graph.stage = 'front'
        AddReshapeTransposeAroundConvPool().find_and_replace_pattern(graph)

        ref_graph = build_graph(self.ref_nodes, [
            *connect_front('input', 'splice'),
            *connect_front('splice', '0:reshape_in'),
            *connect_front('splice', 'shapeof'),
            *connect_front('shapeof:0', '0:gather_batch'),
            *connect_front('ind', '1:gather_batch'),
            *connect_front('axis', '2:gather_batch'),
            *connect_front('shapeof:0', '0:gather_h'),
            *connect_front('ind_h', '1:gather_h'),
            *connect_front('axis', '2:gather_h'),
            *connect_front('gather_h', '0:div'), *connect_front('th', '1:div'),
            *connect_front('gather_batch', '0:concat'),
            *connect_front('t', '1:concat'), *connect_front('h', '3:concat'),
            *connect_front('div', '2:concat'),
            *connect_front('concat', '1:reshape_in'),
            *connect_front('reshape_in', '0:transpose_in'),
            *connect_front('transpose_in_order', "1:transpose_in"),
            *connect_front('transpose_in', 'pool'),
            *connect_front('pool', '0:transpose_out'),
            *connect_front('transpose_out_order', '1:transpose_out'),
            *connect_front('transpose_out', '0:reshape_out'),
            *connect_front('reshape_out_shape', '1:reshape_out'),
            *connect_front('reshape_out', 'out_op')
        ])

        (flag, resp) = compare_graphs(graph,
                                      ref_graph,
                                      'out_op',
                                      check_op_attrs=True)
        self.assertTrue(flag, resp)
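
    # The TF sparse-feature subgraph ending in SparseSegmentMean/Select should be fused into
    # a single EmbeddingSegmentsMean by EmbeddingSegmentsOperationMultipleFeaturesFusing.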
    def test2(self):
        nodes_attributes = {
            'input_indices': {
                'shape': int64_array([5, 2]),
                'type': 'Parameter',
                'kind': 'op',
                'op': 'Parameter'
            },
            'input_values': {
                'shape': int64_array([5]),
                'type': 'Parameter',
                'kind': 'op',
                'op': 'Parameter'
            },
            'input_dense_shape': {
                'shape': int64_array([2]),
                'type': 'Parameter',
                'kind': 'op',
                'op': 'Parameter'
            },
            'input_params_table': {
                'shape': int64_array([10, 3, 4]),
                'type': 'Parameter',
                'kind': 'op',
                'op': 'Parameter'
            },
            'input_default_value': {
                'shape': int64_array([]),
                'type': 'Parameter',
                'kind': 'op',
                'op': 'Parameter'
            },
            'identity_spw': {
                'kind': 'op',
                'op': 'Identity'
            },
            'gather0_1': {
                'kind': 'op',
                'op': 'Gather',
                'type': 'Gather'
            },
            'gather0_2': {
                'kind': 'op',
                'op': 'Gather',
                'type': 'Gather'
            },
            'reshape0': {
                'kind': 'op',
                'op': 'Reshape'
            },
            'where0': {
                'kind': 'op',
                'op': 'Where'
            },
            'greaterequal0': {
                'kind': 'op',
                'op': 'GreaterEqual'
            },
            'sparse_fill_empty_rows': {
                'kind': 'op',
                'op': 'SparseFillEmptyRows'
            },
            'unique': {
                'kind': 'op',
                'op': 'Unique'
            },
            'strided_slice': {
                'kind': 'op',
                'op': 'StridedSlice'
            },
            'cast': {
                'kind': 'op',
                'op': 'Cast'
            },
            'gather': {
                'kind': 'op',
                'op': 'Gather',
                'type': 'Gather'
            },
            'identity': {
                'kind': 'op',
                'op': 'Identity'
            },
            'identity_1': {
                'kind': 'op',
                'op': 'Identity'
            },
            'sparse_segment_mean': {
                'kind': 'op',
                'op': 'SparseSegmentMean'
            },
            'reshape': {
                'kind': 'op',
                'op': 'Reshape'
            },
            'tile': {
                'kind': 'op',
                'op': 'Tile',
                'type': 'Tile'
            },
            'select': {
                'kind': 'op',
                'op': 'Select'
            },
            'split_for_indices': {
                'kind': 'op',
                'op': 'Split'
            },
            'squeeze_for_indices': {
                'kind': 'op',
                'op': 'Squeeze'
            },
            'split_for_dense_shape': {
                'kind': 'op',
                'op': 'Split'
            },
            'squeeze_to_scalar': {
                'kind': 'op',
                'op': 'Squeeze'
            },
            'cast_indices': {
                'kind': 'op',
                'op': 'Cast'
            },
            'cast_segment_ids': {
                'kind': 'op',
                'op': 'Cast'
            },
            'cast_default_value': {
                'kind': 'op',
                'op': 'Cast'
            },
            'cast_number_segments': {
                'kind': 'op',
                'op': 'Cast'
            },
            'embedding_segments_mean': {
                'kind': 'op',
                'op': 'EmbeddingSegmentsMean'
            },
            **const('split_for_indices_axis', int64_array(1)),
            **const('split_for_dense_shape_axis', int64_array(0)),
            **const('squeeze_axis', int64_array([0])),
            **const('squeeze_for_indices_axis', int64_array([1])),
            'last': {
                'type': None,
                'value': None,
                'kind': 'op',
                'op': 'Result'
            },
        }

        graph = build_graph(
            nodes_attributes,
            [('input_indices', 'gather0_1', {
                'out': 0,
                'in': 0
            }), ('input_dense_shape', 'identity_spw', {
                'out': 0,
                'in': 0
            }), ('input_values', 'greaterequal0', {
                'out': 0,
                'in': 0
            }),
             ('input_values', 'gather0_2', {
                 'out': 0,
                 'in': 0
             }), ('input_params_table', 'gather', {
                 'out': 0,
                 'in': 0
             }),
             ('input_default_value', 'sparse_fill_empty_rows', {
                 'out': 0,
                 'in': 3
             }), ('identity_spw', 'sparse_fill_empty_rows', {
                 'out': 0,
                 'in': 2
             }), ('gather0_1', 'sparse_fill_empty_rows', {
                 'out': 0,
                 'in': 0
             }), ('gather0_2', 'sparse_fill_empty_rows', {
                 'out': 0,
                 'in': 1
             }), ('reshape0', 'gather0_1', {
                 'out': 0,
                 'in': 1
             }), ('reshape0', 'gather0_2', {
                 'out': 0,
                 'in': 1
             }), ('where0', 'reshape0', {
                 'out': 0,
                 'in': 0
             }), ('greaterequal0', 'where0', {
                 'out': 0,
                 'in': 0
             }), ('sparse_fill_empty_rows', 'unique', {
                 'out': 1,
                 'in': 0
             }),
             ('sparse_fill_empty_rows', 'strided_slice', {
                 'out': 0,
                 'in': 0
             }), ('sparse_fill_empty_rows', 'reshape', {
                 'out': 2,
                 'in': 0
             }), ('unique', 'sparse_segment_mean', {
                 'out': 1,
                 'in': 1
             }), ('unique', 'gather', {
                 'out': 0,
                 'in': 1
             }), ('strided_slice', 'cast', {
                 'out': 0,
                 'in': 0
             }), ('gather', 'identity', {
                 'out': 0,
                 'in': 0
             }), ('identity', 'identity_1', {
                 'out': 0,
                 'in': 0
             }), ('identity_1', 'sparse_segment_mean', {
                 'out': 0,
                 'in': 0
             }), ('cast', 'sparse_segment_mean', {
                 'out': 0,
                 'in': 2
             }), ('sparse_segment_mean', 'select', {
                 'out': 0,
                 'in': 2
             }), ('reshape', 'tile', {
                 'out': 0,
                 'in': 0
             }), ('tile', 'select', {
                 'out': 0,
                 'in': 0
             }), ('select', 'last', {
                 'out': 0,
                 'in': 0
             })],
            nodes_with_edges_only=True)
        graph.stage = 'front'
        EmbeddingSegmentsOperationMultipleFeaturesFusing(
        ).find_and_replace_pattern(graph)

        graph_ref = build_graph(nodes_attributes, [
            ('input_indices', 'split_for_indices', {
                'in': 0
            }),
            ('split_for_indices_axis', 'split_for_indices', {
                'in': 1
            }),
            ('split_for_indices', 'squeeze_for_indices', {
                'in': 0
            }),
            ('squeeze_for_indices_axis', 'squeeze_for_indices', {
                'in': 1
            }),
            ('squeeze_for_indices', 'cast_segment_ids', {
                'in': 0
            }),
            ('cast_segment_ids', 'embedding_segments_mean', {
                'in': 2,
                'out': 0
            }),
            ('input_values', 'cast_indices', {
                'in': 0
            }),
            ('cast_indices', 'embedding_segments_mean', {
                'in': 1
            }),
            ('input_dense_shape', 'split_for_dense_shape', {
                'in': 0
            }),
            ('split_for_dense_shape_axis', 'split_for_dense_shape', {
                'in': 1
            }),
            ('split_for_dense_shape', 'squeeze_to_scalar', {
                'in': 0
            }),
            ('squeeze_axis', 'squeeze_to_scalar', {
                'in': 1
            }),
            ('squeeze_to_scalar', 'cast_number_segments', {
                'in': 0
            }),
            ('cast_number_segments', 'embedding_segments_mean', {
                'in': 3,
                'out': 0
            }),
            ('input_params_table', 'embedding_segments_mean', {
                'in': 0
            }),
            ('input_default_value', 'cast_default_value', {
                'in': 0
            }),
            ('cast_default_value', 'embedding_segments_mean', {
                'in': 4
            }),
            ('embedding_segments_mean', 'last', {
                'in': 0
            }),
        ],
                                nodes_with_edges_only=True)

        (flag, resp) = compare_graphs(graph,
                                      graph_ref,
                                      'last',
                                      check_op_attrs=True)
        self.assertTrue(flag, resp)
from openvino.tools.mo.utils.ir_engine.compare_graphs import compare_graphs
from unit_tests.utils.graph import build_graph, regular_op, result, build_graph_with_edge_attrs, const

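# Nodes for a global-average-Pooling test: the reference graph replaces the Pooling with a
# ReduceMean whose axes are built from the Rank and Range nodes.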
nodes = {
    **regular_op('input', {'type': 'Parameter'}),
    **regular_op('relu', {'type': 'Relu'}),
    **regular_op('pooling', {
        'type': 'Pooling',
        'global_pool': True,
        'pool_method': 'avg'
    }),
    **result('result'),
    **regular_op('rank', {'type': 'Rank'}),
    **regular_op('reduce_mean', {'type': 'ReduceMean'}),
    **regular_op('range', {'type': 'Range'}),
    **const('const_1', int64_array(2)),
    **const('const_2', int64_array(1)),
}
edges = [('input', 'relu', {
    'in': 0,
    'out': 0
}), ('relu', 'pooling', {
    'in': 0,
    'out': 0
}), ('pooling', 'result', {
    'in': 0,
    'out': 0
})]
ref_edges = [('input', 'relu', {
    'in': 0,
    'out': 0
import numpy as np

from generator import generator, generate

from openvino.tools.mo.front.tf.CorrectPaddingsForPadAfterComplex import CorrectPaddingsForPadAfterComplex
from openvino.tools.mo.front.common.partial_infer.utils import int64_array
from openvino.tools.mo.utils.ir_engine.compare_graphs import compare_graphs
from unit_tests.utils.graph import build_graph, const


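# Graph with Complex -> Pad -> Abs used by the CorrectPaddingsForPadAfterComplex tests
# (a Pad applied after Complex needs its paddings adjusted for the extra last dimension).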
graph_node_attrs = {
    'placeholder_real': {'shape': int64_array([3, 100, 67]), 'type': 'Parameter', 'kind': 'op', 'op': 'Parameter'},
    'placeholder_imag': {'shape': int64_array([3, 100, 67]), 'type': 'Parameter', 'kind': 'op', 'op': 'Parameter'},
    'complex': {'kind': 'op', 'op': 'Complex'},
    'pad': {'type': 'Pad', 'kind': 'op', 'op': 'Pad', 'mode': 'constant'},
    **const('pad_begin', int64_array([1, 3, 5])),
    **const('pad_end', int64_array([2, 4, 6])),
    'abs': {'type': 'Abs', 'kind': 'op', 'op': 'Abs'},
    'output': {'type': None, 'value': None, 'kind': 'op', 'op': 'Result'},
}

graph_edges = [
    ('placeholder_real', 'complex', {'in': 0}),
    ('placeholder_imag', 'complex', {'in': 1}),
    ('complex', 'pad', {'in': 0, 'out': 0}),
    ('pad_begin', 'pad', {'in': 1, 'out': 0}),
    ('pad_end', 'pad', {'in': 2, 'out': 0}),
    ('pad', 'abs'),
    ('abs', 'output'),
]
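
    # A StridedSlice whose non-constant 'begin' is assembled via Pack from a scalar index
    # should be rewritten by NonConstBeginStridedSliceReplacement as Unsqueeze + Gather + Squeeze.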
    def test1(self):
        nodes_attributes = {
            # nodes from original graph
            'input': {
                'type': 'Parameter',
                'kind': 'op',
                'op': 'Parameter'
            },
            'index': {
                'type': 'Parameter',
                'kind': 'op',
                'op': 'Parameter'
            },
            'add': {
                'type': 'Add',
                'kind': 'op',
                'op': 'Add'
            },
            **const('slice_size', int64_array(1)),
            'begin': {
                'type': 'Pack',
                'kind': 'op',
                'op': 'Pack'
            },
            **const('begin_1', int64_array(0)),
            **const('begin_3', int64_array(0)),
            'end': {
                'type': 'Pack',
                'kind': 'op',
                'op': 'Pack'
            },
            **const('end_1', int64_array(0)),
            **const('end_3', int64_array(0)),
            **const('step', int64_array([1, 1, 1])),
            'strided_slice': {
                'type': 'StridedSlice',
                'kind': 'op',
                'op': 'StridedSlice',
                'begin_mask': int64_array([0, 1, 0]),
                'end_mask': int64_array([0, 1, 0]),
                'shrink_axis_mask': int64_array([0, 1, 0]),
                'name': 'non_const_begin_strided_slice'
            },
            'result': {
                'type': 'Result',
                'kind': 'op',
                'op': 'Result'
            },

            # nodes from the reference graph
            'unsqueeze': {
                'type': 'Unsqueeze',
                'kind': 'op',
                'op': 'Unsqueeze'
            },
            **const('unsqueeze_axis', int64_array(0)),
            'gather': {
                'type': 'Gather',
                'kind': 'op',
                'op': 'Gather'
            },
            **const('gather_axis', int64_array(1)),
            'squeeze': {
                'type': 'Squeeze',
                'kind': 'op',
                'op': 'Squeeze'
            },
            **const('squeeze_axis', int64_array(1)),
        }

        graph = build_graph(nodes_attributes, [
            ('input', 'strided_slice', {
                'out': 0,
                'in': 0
            }),
            ('begin_1', 'begin', {
                'out': 0,
                'in': 0
            }),
            ('index', 'begin', {
                'out': 0,
                'in': 1
            }),
            ('begin_3', 'begin', {
                'out': 0,
                'in': 2
            }),
            ('begin', 'strided_slice', {
                'out': 0,
                'in': 1
            }),
            ('end_1', 'end', {
                'out': 0,
                'in': 0
            }),
            ('index', 'add', {
                'out': 0,
                'in': 0
            }),
            ('slice_size', 'add', {
                'out': 0,
                'in': 1
            }),
            ('add', 'end', {
                'out': 0,
                'in': 1
            }),
            ('end_3', 'end', {
                'out': 0,
                'in': 2
            }),
            ('end', 'strided_slice', {
                'out': 0,
                'in': 2
            }),
            ('step', 'strided_slice', {
                'out': 0,
                'in': 3
            }),
            ('strided_slice', 'result', {
                'out': 0,
                'in': 0
            }),
        ],
                            nodes_with_edges_only=True)
        graph.stage = 'front'
        NonConstBeginStridedSliceReplacement().find_and_replace_pattern(graph)

        graph_ref = build_graph(nodes_attributes, [
            ('input', 'gather', {
                'out': 0,
                'in': 0
            }),
            ('gather_axis', 'gather', {
                'out': 0,
                'in': 2
            }),
            ('index', 'unsqueeze', {
                'out': 0,
                'in': 0
            }),
            ('unsqueeze_axis', 'unsqueeze', {
                'out': 0,
                'in': 1
            }),
            ('unsqueeze', 'gather', {
                'out': 0,
                'in': 1
            }),
            ('gather', 'squeeze', {
                'out': 0,
                'in': 0
            }),
            ('squeeze_axis', 'squeeze', {
                'out': 0,
                'in': 1
            }),
            ('squeeze', 'result', {
                'out': 0,
                'in': 0
            }),
        ],
                                nodes_with_edges_only=True)

        (flag, resp) = compare_graphs(graph,
                                      graph_ref,
                                      'result',
                                      check_op_attrs=True)
        self.assertTrue(flag, resp)
        self.assertTrue(graph.node[graph.get_nodes_with_attributes(
            op='Squeeze')[0]]['name'] == 'non_const_begin_strided_slice')
    def test2(self):
        nodes_attributes = {
            # nodes from original graph
            'logits': {
                'type': 'Parameter',
                'kind': 'op',
                'op': 'Parameter'
            },
            'seq_len': {
                'type': 'Parameter',
                'kind': 'op',
                'op': 'Parameter'
            },
            'order_arr': {
                'kind': 'op',
                'op': 'Const'
            },
            'transpose': {
                'type': 'Transpose',
                'kind': 'op',
                'op': 'Transpose'
            },
            'decoder': {
                'kind': 'op',
                'op': 'CTCGreedyDecoderSeqLen',
                'merge_repeated': True
            },
            'cast': {
                'kind': 'op',
                'op': 'Cast'
            },
            'sparse_to_dense': {
                'kind': 'op',
                'op': 'SparseToDense'
            },
            'last': {
                'type': None,
                'value': None,
                'kind': 'op',
                'op': 'Result'
            },

            # new nodes
            'new_decoder': {
                'kind': 'op',
                'op': 'CTCGreedyDecoderSeqLen',
                'use_mask_format': True
            },
            **const('squeeze_axes', int64_array([2, 3])),
            'squeeze_dec_seq': {
                'kind': 'op',
                'op': 'Squeeze'
            },
            'cast_to_int': {
                'kind': 'op',
                'op': 'Cast'
            },
        }

        graph = build_graph(nodes_attributes, [
            ('logits', 'decoder', {
                'out': 0,
                'in': 0
            }),
            ('seq_len', 'decoder', {
                'out': 0,
                'in': 1
            }),
            ('decoder', 'sparse_to_dense', {
                'out': 0,
                'in': 0
            }),
            ('decoder', 'cast', {
                'out': 1,
                'in': 0
            }),
            ('cast', 'sparse_to_dense', {
                'out': 0
            }),
            ('sparse_to_dense', 'last', {
                'out': 0,
                'in': 0
            }),
        ],
                            nodes_with_edges_only=True)
        graph.stage = 'front'
        CTCGreedyDecoderReplacement().find_and_replace_pattern(graph)

        graph_ref = build_graph(nodes_attributes, [
            ('logits', 'transpose', {
                'out': 0,
                'in': 0
            }),
            ('order_arr', 'transpose', {
                'out': 0,
                'in': 1
            }),
            ('transpose', 'decoder', {
                'out': 0,
                'in': 0
            }),
            ('seq_len', 'decoder', {
                'out': 0,
                'in': 1
            }),
            ('decoder', 'last', {
                'out': 0,
                'in': 0
            }),
        ],
                                nodes_with_edges_only=True)

        (flag, resp) = compare_graphs(graph,
                                      graph_ref,
                                      'last',
                                      check_op_attrs=True)
        self.assertTrue(flag, resp)
# Imports needed by the CTCLoss replacement tests below; the transformation import path is an
# assumption, the remaining paths match the imports used by the other snippets here.
import unittest
from argparse import Namespace

import numpy as np

from openvino.tools.mo.front.tf.CTCLossReplacement import CTCLossReplacement
from openvino.tools.mo.front.common.partial_infer.utils import int64_array
from openvino.tools.mo.utils.ir_engine.compare_graphs import compare_graphs
from unit_tests.utils.graph import build_graph, const

class CTCLossFrontReplacementTest(unittest.TestCase):
    nodes_attributes = {
        'logits': {
            'shape': int64_array([2, 6, 100]),
            'type': 'Parameter',
            'kind': 'op',
            'op': 'Parameter'
        },
        'seq_mask': {
            'shape': int64_array([2]),
            'data_type': np.int32,
            'kind': 'op',
            'op': 'Parameter'
        },
        'transpose': {
            'kind': 'op',
            'op': 'Transpose'
        },
        'ctc_greedy_decoder': {
            'kind': 'op',
            'op': 'CTCGreedyDecoderSeqLen',
            'merge_repeated': True,
            'output_sparse_format': True
        },
        'cast': {
            'kind': 'op',
            'op': 'Cast'
        },
        'sparse_to_dense': {
            'kind': 'op',
            'op': 'SparseToDense'
        },
        'tf_ctc_loss_true_logits': {
            'kind': 'op',
            'op': 'CTCLoss',
            'preprocess_collapse_repeated': False,
            'ctc_merge_repeated': True,
            'unique': False,
            'logits_time_major': True
        },
        'tf_ctc_loss_false_logits': {
            'kind': 'op',
            'op': 'CTCLoss',
            'preprocess_collapse_repeated': False,
            'ctc_merge_repeated': True,
            'unique': False,
            'logits_time_major': False
        },
        'ctc_loss': {
            'kind': 'op',
            'op': 'CTCLoss',
            'preprocess_collapse_repeated': False,
            'ctc_merge_repeated': True,
            'unique': False
        },
        **const('default_value', int64_array(-1)),
        'last': {
            'type': None,
            'value': None,
            'kind': 'op',
            'op': 'Result'
        },
        'transpose2': {
            'kind': 'op',
            'op': 'Transpose'
        },
        **const('transpose2_axis', int64_array([1, 0, 2])),
        'new_ctc_greedy_decoder': {
            'kind': 'op',
            'op': 'CTCGreedyDecoderSeqLen',
            'merge_repeated': True
        },
    }

    def test_CTCLossReplacement_true_logits(self):
        graph = build_graph(
            self.nodes_attributes,
            [('logits', 'transpose', {
                'out': 0,
                'in': 0
            }), ('transpose', 'ctc_greedy_decoder', {
                'out': 0,
                'in': 0
            }), ('seq_mask', 'ctc_greedy_decoder', {
                'out': 0,
                'in': 1
            }),
             ('transpose', 'tf_ctc_loss_true_logits', {
                 'out': 0,
                 'in': 0
             }), ('seq_mask', 'tf_ctc_loss_true_logits', {
                 'out': 0,
                 'in': 3
             }), ('ctc_greedy_decoder', 'sparse_to_dense', {
                 'out': 0,
                 'in': 0
             }), ('ctc_greedy_decoder', 'sparse_to_dense', {
                 'out': 2,
                 'in': 1
             }), ('ctc_greedy_decoder', 'sparse_to_dense', {
                 'out': 1,
                 'in': 2
             }), ('default_value', 'sparse_to_dense', {
                 'out': 0,
                 'in': 3
             }), ('ctc_greedy_decoder', 'cast', {
                 'out': 1,
                 'in': 0
             }),
             ('ctc_greedy_decoder', 'tf_ctc_loss_true_logits', {
                 'out': 0,
                 'in': 1
             }), ('cast', 'tf_ctc_loss_true_logits', {
                 'out': 0,
                 'in': 2
             }), ('tf_ctc_loss_true_logits', 'last', {
                 'out': 0,
                 'in': 0
             })],
            nodes_with_edges_only=True)
        graph.graph['cmd_params'] = Namespace(data_type='FP32')
        graph.stage = 'front'
        CTCLossReplacement().find_and_replace_pattern(graph)

        graph_ref = build_graph(self.nodes_attributes,
                                [('logits', 'transpose', {
                                    'out': 0,
                                    'in': 0
                                }),
                                 ('transpose', 'transpose2', {
                                     'out': 0,
                                     'in': 0
                                 }),
                                 ('transpose2_axis', 'transpose2', {
                                     'out': 0,
                                     'in': 1
                                 }),
                                 ('transpose2', 'new_ctc_greedy_decoder', {
                                     'out': 0,
                                     'in': 0
                                 }),
                                 ('seq_mask', 'new_ctc_greedy_decoder', {
                                     'out': 0,
                                     'in': 1
                                 }),
                                 ('transpose2', 'ctc_loss', {
                                     'out': 0,
                                     'in': 0
                                 }),
                                 ('new_ctc_greedy_decoder', 'ctc_loss', {
                                     'out': 0,
                                     'in': 2
                                 }),
                                 ('new_ctc_greedy_decoder', 'ctc_loss', {
                                     'out': 1,
                                     'in': 3
                                 }),
                                 ('seq_mask', 'ctc_loss', {
                                     'out': 0,
                                     'in': 1
                                 }), ('ctc_loss', 'last', {
                                     'out': 0,
                                     'in': 0
                                 })],
                                nodes_with_edges_only=True)

        (flag, resp) = compare_graphs(graph,
                                      graph_ref,
                                      'last',
                                      check_op_attrs=True)
        self.assertTrue(flag, resp)

    def test_CTCLossReplacement_false_logits(self):
        graph = build_graph(
            self.nodes_attributes,
            [('logits', 'transpose', {
                'out': 0,
                'in': 0
            }), ('transpose', 'ctc_greedy_decoder', {
                'out': 0,
                'in': 0
            }), ('seq_mask', 'ctc_greedy_decoder', {
                'out': 0,
                'in': 1
            }),
             ('transpose', 'tf_ctc_loss_false_logits', {
                 'out': 0,
                 'in': 0
             }), ('seq_mask', 'tf_ctc_loss_false_logits', {
                 'out': 0,
                 'in': 3
             }), ('ctc_greedy_decoder', 'sparse_to_dense', {
                 'out': 0,
                 'in': 0
             }), ('ctc_greedy_decoder', 'sparse_to_dense', {
                 'out': 2,
                 'in': 1
             }), ('ctc_greedy_decoder', 'sparse_to_dense', {
                 'out': 1,
                 'in': 2
             }), ('default_value', 'sparse_to_dense', {
                 'out': 0,
                 'in': 3
             }), ('ctc_greedy_decoder', 'cast', {
                 'out': 1,
                 'in': 0
             }),
             ('ctc_greedy_decoder', 'tf_ctc_loss_false_logits', {
                 'out': 0,
                 'in': 1
             }), ('cast', 'tf_ctc_loss_false_logits', {
                 'out': 0,
                 'in': 2
             }), ('tf_ctc_loss_false_logits', 'last', {
                 'out': 0,
                 'in': 0
             })],
            nodes_with_edges_only=True)
        graph.graph['cmd_params'] = Namespace(data_type='FP32')
        graph.stage = 'front'
        CTCLossReplacement().find_and_replace_pattern(graph)

        graph_ref = build_graph(self.nodes_attributes,
                                [('logits', 'transpose', {
                                    'out': 0,
                                    'in': 0
                                }),
                                 ('transpose', 'transpose2', {
                                     'out': 0,
                                     'in': 0
                                 }),
                                 ('transpose2_axis', 'transpose2', {
                                     'out': 0,
                                     'in': 1
                                 }),
                                 ('transpose2', 'new_ctc_greedy_decoder', {
                                     'out': 0,
                                     'in': 0
                                 }),
                                 ('seq_mask', 'new_ctc_greedy_decoder', {
                                     'out': 0,
                                     'in': 1
                                 }),
                                 ('transpose', 'ctc_loss', {
                                     'out': 0,
                                     'in': 0
                                 }),
                                 ('new_ctc_greedy_decoder', 'ctc_loss', {
                                     'out': 0,
                                     'in': 2
                                 }),
                                 ('new_ctc_greedy_decoder', 'ctc_loss', {
                                     'out': 1,
                                     'in': 3
                                 }),
                                 ('seq_mask', 'ctc_loss', {
                                     'out': 0,
                                     'in': 1
                                 }), ('ctc_loss', 'last', {
                                     'out': 0,
                                     'in': 0
                                 })],
                                nodes_with_edges_only=True)

        (flag, resp) = compare_graphs(graph,
                                      graph_ref,
                                      'last',
                                      check_op_attrs=True)
        self.assertTrue(flag, resp)
        'type': 'Greater',
        'kind': 'op',
        'op': 'Greater'
    },
    'mul': {
        'type': 'Multiply',
        'kind': 'op',
        'op': 'Mul',
        'name': 'my_trelu'
    },
    'squeeze2': {
        'type': 'Squeeze',
        'kind': 'op',
        'op': 'Squeeze'
    },
    **const('alpha', float_array([0.75])),
}


class ThresholdedReluDecompositionTest(unittest.TestCase):
    def test_trelu(self):
        graph = build_graph(nodes_attributes, [
            ('parameter', 'trelu', {
                'in': 0,
                'out': 0
            }),
            ('trelu', 'result', {
                'in': 0,
                'out': 0
            }),
        ],
class CTCGreedyDecoderReplacementTests(unittest.TestCase):
    nodes_attributes = {
        # nodes from original graph
        'logits': {
            'type': 'Parameter',
            'kind': 'op',
            'op': 'Parameter'
        },
        'seq_len': {
            'type': 'Parameter',
            'kind': 'op',
            'op': 'Parameter'
        },
        'order_arr': {
            'kind': 'op',
            'op': 'Const'
        },
        'transpose': {
            'type': 'Transpose',
            'kind': 'op',
            'op': 'Transpose'
        },
        'decoder': {
            'kind': 'op',
            'op': 'CTCGreedyDecoderSeqLen',
            'merge_repeated': True,
            'output_sparse_format': True
        },
        'cast': {
            'kind': 'op',
            'op': 'Cast'
        },
        'sparse_to_dense': {
            'kind': 'op',
            'op': 'SparseToDense'
        },
        'last': {
            'type': None,
            'value': None,
            'kind': 'op',
            'op': 'Result'
        },
        'last_1': {
            'type': None,
            'value': None,
            'kind': 'op',
            'op': 'Result'
        },

        # new nodes
        'new_decoder': {
            'kind': 'op',
            'op': 'CTCGreedyDecoderSeqLen',
            'merge_repeated': True
        },
        **const('squeeze_axes', int64_array([2, 3])),
        'squeeze_dec_seq': {
            'kind': 'op',
            'op': 'Squeeze'
        },
        'cast_to_int': {
            'kind': 'op',
            'op': 'Cast'
        },
        'out_seq_len': {
            'type': None,
            'value': None,
            'kind': 'op',
            'op': 'Result'
        },
    }

    def test_CTCGreedyDecoderWithSparseToDenseShape(self):
        graph = build_graph(self.nodes_attributes, [
            ('logits', 'decoder', {
                'out': 0,
                'in': 0
            }),
            ('seq_len', 'decoder', {
                'out': 0,
                'in': 1
            }),
            ('decoder', 'sparse_to_dense', {
                'out': 0,
                'in': 0
            }),
            ('decoder', 'sparse_to_dense', {
                'out': 2,
                'in': 1
            }),
            ('decoder', 'cast', {
                'out': 1,
                'in': 0
            }),
            ('cast', 'sparse_to_dense', {
                'out': 0
            }),
            ('sparse_to_dense', 'last', {
                'out': 0,
                'in': 0
            }),
        ],
                            nodes_with_edges_only=True)
        graph.stage = 'front'
        CTCGreedyDecoderWithSparseToDenseShapeReplacement().find_and_replace_pattern(graph)

        graph_ref = build_graph(self.nodes_attributes, [
            ('logits', 'transpose', {
                'out': 0,
                'in': 0
            }),
            ('order_arr', 'transpose', {
                'out': 0,
                'in': 1
            }),
            ('transpose', 'new_decoder', {
                'out': 0,
                'in': 0
            }),
            ('seq_len', 'new_decoder', {
                'out': 0,
                'in': 1
            }),
            ('new_decoder', 'last', {
                'out': 0,
                'in': 0
            }),
        ],
                                nodes_with_edges_only=True)

        (flag, resp) = compare_graphs(graph,
                                      graph_ref,
                                      'last',
                                      check_op_attrs=True)
        self.assertTrue(flag, resp)

    def test_CTCGreedyDecoderReplacement(self):
        graph = build_graph(self.nodes_attributes, [
            ('logits', 'decoder', {
                'out': 0,
                'in': 0
            }),
            ('seq_len', 'decoder', {
                'out': 0,
                'in': 1
            }),
            ('decoder', 'sparse_to_dense', {
                'out': 0,
                'in': 0
            }),
            ('decoder', 'cast', {
                'out': 1,
                'in': 0
            }),
            ('cast', 'sparse_to_dense', {
                'out': 0
            }),
            ('sparse_to_dense', 'last', {
                'out': 0,
                'in': 0
            }),
        ],
                            nodes_with_edges_only=True)
        graph.stage = 'front'
        CTCGreedyDecoderReplacement().find_and_replace_pattern(graph)

        graph_ref = build_graph(self.nodes_attributes, [
            ('logits', 'transpose', {
                'out': 0,
                'in': 0
            }),
            ('order_arr', 'transpose', {
                'out': 0,
                'in': 1
            }),
            ('transpose', 'new_decoder', {
                'out': 0,
                'in': 0
            }),
            ('seq_len', 'new_decoder', {
                'out': 0,
                'in': 1
            }),
            ('new_decoder', 'last', {
                'out': 0,
                'in': 0
            }),
        ],
                                nodes_with_edges_only=True)

        (flag, resp) = compare_graphs(graph,
                                      graph_ref,
                                      'last',
                                      check_op_attrs=True)
        self.assertTrue(flag, resp)

    def test_CTCGreedyDecoderSingle(self):
        graph = build_graph(self.nodes_attributes, [
            ('logits', 'decoder', {
                'out': 0,
                'in': 0
            }),
            ('seq_len', 'decoder', {
                'out': 0,
                'in': 1
            }),
            ('decoder', 'last', {
                'out': 0,
                'in': 0
            }),
            ('decoder', 'last_1', {
                'out': 1,
                'in': 0
            }),
        ],
                            nodes_with_edges_only=True)
        graph.stage = 'front'
        CTCGreedyDecoderSingleReplacement().find_and_replace_pattern(graph)

        graph_ref = build_graph(self.nodes_attributes, [
            ('logits', 'transpose', {
                'out': 0,
                'in': 0
            }),
            ('order_arr', 'transpose', {
                'out': 0,
                'in': 1
            }),
            ('transpose', 'new_decoder', {
                'out': 0,
                'in': 0
            }),
            ('seq_len', 'new_decoder', {
                'out': 0,
                'in': 1
            }),
            ('new_decoder', 'last', {
                'out': 0,
                'in': 0
            }),
            ('new_decoder', 'last_1', {
                'out': 1,
                'in': 0
            }),
        ],
                                nodes_with_edges_only=True)

        (flag, resp) = compare_graphs(graph,
                                      graph_ref,
                                      'last',
                                      check_op_attrs=True)
        self.assertTrue(flag, resp)

    def test_CTCGreedyDecoderSingle_negative(self):
        edges = [
            ('logits', 'decoder', {
                'out': 0,
                'in': 0
            }),
            ('seq_len', 'decoder', {
                'out': 0,
                'in': 1
            }),
            ('decoder', 'sparse_to_dense', {
                'out': 0,
                'in': 0
            }),
            ('decoder', 'cast', {
                'out': 1,
                'in': 0
            }),
            ('cast', 'sparse_to_dense', {
                'out': 0
            }),
            ('sparse_to_dense', 'last', {
                'out': 0,
                'in': 0
            }),
        ]
        graph = build_graph(self.nodes_attributes,
                            edges,
                            nodes_with_edges_only=True)
        graph.stage = 'front'
        CTCGreedyDecoderSingleReplacement().find_and_replace_pattern(graph)

        graph_ref = build_graph(self.nodes_attributes,
                                edges,
                                nodes_with_edges_only=True)

        (flag, resp) = compare_graphs(graph,
                                      graph_ref,
                                      'last',
                                      check_op_attrs=True)
        self.assertTrue(flag, resp)

    def test_CTCGreedyDecoder_no_consequent_transforms(self):
        graph = build_graph(self.nodes_attributes, [
            ('logits', 'decoder', {
                'out': 0,
                'in': 0
            }),
            ('seq_len', 'decoder', {
                'out': 0,
                'in': 1
            }),
            ('decoder', 'sparse_to_dense', {
                'out': 0,
                'in': 0
            }),
            ('decoder', 'sparse_to_dense', {
                'out': 2,
                'in': 1
            }),
            ('decoder', 'cast', {
                'out': 1,
                'in': 0
            }),
            ('cast', 'sparse_to_dense', {
                'out': 0
            }),
            ('sparse_to_dense', 'last', {
                'out': 0,
                'in': 0
            }),
        ],
                            nodes_with_edges_only=True)
        graph.stage = 'front'
        CTCGreedyDecoderWithSparseToDenseShapeReplacement().find_and_replace_pattern(graph)
        CTCGreedyDecoderSingleReplacement().find_and_replace_pattern(graph)

        graph_ref = build_graph(self.nodes_attributes, [
            ('logits', 'transpose', {
                'out': 0,
                'in': 0
            }),
            ('order_arr', 'transpose', {
                'out': 0,
                'in': 1
            }),
            ('transpose', 'new_decoder', {
                'out': 0,
                'in': 0
            }),
            ('seq_len', 'new_decoder', {
                'out': 0,
                'in': 1
            }),
            ('new_decoder', 'last', {
                'out': 0,
                'in': 0
            }),
        ],
                                nodes_with_edges_only=True)

        (flag, resp) = compare_graphs(graph,
                                      graph_ref,
                                      'last',
                                      check_op_attrs=True)
        self.assertTrue(flag, resp)
        'op': 'Result'
    },

    # new Pad layer and inputs
    'pad': {
        'type': 'Pad',
        'kind': 'op',
        'op': 'Pad',
        'mode': 'constant'
    },
    'convert_like': {
        'type': 'ConvertLike',
        'kind': 'op',
        'op': 'ConvertLike'
    },
    **const('pad_begin', int64_array([1, 3, 5])),
    **const('pad_end', int64_array([2, 4, 6])),
    **const('pad_fill', np.array(0.75)),
}


class AttributedPadToPadTest(unittest.TestCase):
    def test_mode_constant(self):
        graph = build_graph(nodes_attributes, [
            ('placeholder', 'attr_pad', {
                'in': 0,
                'out': 0
            }),
            ('attr_pad', 'result', {
                'in': 0,
                'out': 0
nodes_attributes = {
    'placeholder': {
        'shape': None,
        'type': 'Parameter',
        'kind': 'op',
        'op': 'Parameter'
    },
    'tfpad': {
        'type': None,
        'kind': 'op',
        'op': 'TFPad',
        'mode': 'constant',
        'name': 'tfpad_name'
    },
    **const('paddings',
            int64_array([1, 2, 3, 4, 5, 6]).reshape([3, 2])),
    **const('fill', float_array(5.75)),
    'result': {
        'type': 'Result',
        'value': None,
        'kind': 'op',
        'op': 'Result'
    },

    # new Pad layer and sub-graph
    'pad': {
        'type': 'Pad',
        'kind': 'op',
        'op': 'Pad',
        'mode': 'constant'
    },
    def test_per_sample_weights(self):
        nodes = {
            **const('weights_inp', np.random.randn(100, 2)),
            **regular_op('indices_inp', {'type': 'Parameter'}),
            **regular_op('offsets_inp', {'type': 'Parameter'}),
            **regular_op('per_sample_weights', {'type': 'Parameter'}),
            **regular_op(
                'aten', {
                    'type': None,
                    'kind': 'op',
                    'op': 'ATen',
                    'operator': 'embedding_bag',
                    'mode': 0,
                    'name': 'my_aten'
                }),
            **regular_op(
                'emb_bag', {
                    'type': 'EmbeddingBagOffsetsSum',
                    'kind': 'op',
                    'op': 'EmbeddingBagOffsetsSum'
                }),
            **regular_op('WeightsRank', {
                'type': None,
                'kind': 'op',
                'op': 'Rank'
            }),
            **regular_op('WeightsRank/axis', {
                'type': 'Add',
                'kind': 'op',
                'op': 'Add'
            }),
            **regular_op('gather1', {
                'type': 'Gather',
                'kind': 'op',
                'op': 'Gather'
            }),
            **regular_op('gather2', {
                'type': 'Gather',
                'kind': 'op',
                'op': 'Gather'
            }),
            **regular_op('WeightsShape', {
                'type': 'ShapeOf',
                'kind': 'op',
                'op': 'ShapeOf'
            }),
            **regular_op('Broadcast', {
                'type': 'Broadcast',
                'kind': 'op',
                'op': 'Broadcast'
            }),
            **regular_op('Unsqueeze', {
                'type': 'Unsqueeze',
                'kind': 'op',
                'op': 'Unsqueeze'
            }),
            **const('WeightsShape/Axis', int64_array(0)),
            **const('zero1', int64_array(0)),
            **const('zero2', int64_array(0)),
            **const('Unsqueeze/value', int64_array(0)),
            **const('Broadcast/value', int64_array(0)),
            **const('neg', int64_array(-1)),
            **regular_op('Concat', {
                'type': 'Concat',
                'kind': 'op',
                'op': 'Concat'
            }),
            **result('result'),
        }
        edges = [
            ('weights_inp', 'aten'),
            ('indices_inp', 'aten'),
            ('offsets_inp', 'aten'),
            ('per_sample_weights', 'aten'),
            ('aten', 'result'),
        ]
        graph = build_graph(nodes, edges, nodes_with_edges_only=True)

        graph.graph['layout'] = 'NCHW'
        graph.stage = 'front'

        edges_ref = [
            ('weights_inp', 'Concat', {
                'in': 0,
                'out': 0
            }),
            ('weights_inp', 'WeightsShape', {
                'in': 0,
                'out': 0
            }),
            ('weights_inp', 'WeightsRank', {
                'in': 0,
                'out': 0
            }),
            ('WeightsRank', 'WeightsRank/axis'),
            ('neg', 'WeightsRank/axis'),
            ('WeightsShape', 'gather1', {
                'in': 0,
                'out': 0
            }),
            ('WeightsRank/axis', 'gather1'),
            ('WeightsShape/Axis', 'gather1'),
            ('WeightsShape', 'gather2', {
                'in': 0,
                'out': 0
            }),
            ('zero1', 'gather2'),
            ('zero2', 'gather2'),
            ('Broadcast/value', 'Broadcast'),
            ('gather1', 'Broadcast'),
            ('Broadcast', 'Unsqueeze'),
            ('Unsqueeze/value', 'Unsqueeze'),
            ('Unsqueeze', 'Concat'),
            ('Concat', 'emb_bag'),
            ('indices_inp', 'emb_bag'),
            ('offsets_inp', 'emb_bag'),
            ('gather2', 'emb_bag'),
            ('per_sample_weights', 'emb_bag'),
            ('emb_bag', 'result'),
        ]

        graph_ref = build_graph(nodes, edges_ref, nodes_with_edges_only=True)

        AtenToEmbeddingBag().find_and_replace_pattern(graph)

        (flag, resp) = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)
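# The reference graph (ref_nodes/ref_edges) used by HSigmoidWithReluMulTest below is not part of
# this fragment. A minimal sketch, assuming the fusion collapses the Add/ReLU/Minimum/Mul chain
# into a single HSigmoid node (consistent with the assertions in test_hsigmoid_with_relu_mul):
from unit_tests.utils.graph import regular_op, result

ref_nodes = {
    **regular_op('input', {'type': 'Parameter'}),
    **regular_op('hsigmoid', {'op': 'HSigmoid', 'type': 'HSigmoid', 'name': 'final_mul'}),
    **result('result'),
}
ref_edges = [('input', 'hsigmoid', {'in': 0, 'out': 0}),
             ('hsigmoid', 'result', {'in': 0, 'out': 0})]
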
class HSigmoidWithReluMulTest(unittest.TestCase):
    nodes = {
        **regular_op('input', {'type': 'Parameter'}),
        **regular_op('add', {'op': 'Add'}),
        **regular_op('relu', {'op': 'ReLU'}),
        **regular_op('min', {'op': 'Minimum'}),
        **regular_op('mul', {
            'op': 'Mul',
            'name': 'final_mul'
        }),
        **const('add_const', float_array([3.0])),
        **const('min_const', float_array([6.0])),
        **const('mul_const', float_array([1.0 / 6.0])),
        **result('result'),
    }

    edges = [('input', 'add', {
        'in': 0,
        'out': 0
    }), ('add_const', 'add', {
        'in': 1,
        'out': 0
    }), ('add', 'relu', {
        'in': 0,
        'out': 0
    }), ('relu', 'min', {
        'in': 0,
        'out': 0
    }), ('min_const', 'min', {
        'in': 1,
        'out': 0
    }), ('min', 'mul', {
        'in': 0,
        'out': 0
    }), ('mul_const', 'mul', {
        'in': 1,
        'out': 0
    }), ('mul', 'result', {
        'in': 0,
        'out': 0
    })]

    def test_hsigmoid_with_relu_mul(self):
        graph = build_graph_with_edge_attrs(self.nodes, self.edges, {})

        graph_ref = build_graph(ref_nodes, ref_edges)
        graph.stage = 'front'

        HSigmoidWithReluMul().find_and_replace_pattern(graph)

        (flag, resp) = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)
        self.assertTrue(
            len(graph.get_op_nodes(name='final_mul')) == 1
            and graph.get_op_nodes(name='final_mul')[0].op == 'HSigmoid')
        self.assertTrue(
            graph.get_op_nodes(
                name='final_mul')[0].out_nodes()[0].node == 'result')

    def test_hsigmoid_with_relu_mul_wrong_constant(self):
        graph = build_graph_with_edge_attrs(
            self.nodes, self.edges,
            {'add_const': {
                'value': float_array([0.00001])
            }})

        graph_ref = graph.copy()
        graph.stage = 'front'

        HSigmoidWithReluMul().find_and_replace_pattern(graph)

        (flag, resp) = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)

    def test_hsigmoid_with_relu_mul_different_tensors(self):
        graph = build_graph_with_edge_attrs(
            {
                **regular_op('input', {'type': 'Parameter'}),
                **regular_op('input_2', {'type': 'Parameter'}),
                **regular_op('add', {'op': 'Add'}),
                **regular_op('max', {'op': 'Maximum'}),
                **regular_op('min', {'op': 'Minimum'}),
                **regular_op('mul', {'op': 'Mul'}),
                **regular_op('mul_2', {
                    'op': 'Mul',
                    'name': 'final_mul'
                }),
                **const('const_0', float_array([0.0])),
                **const('const_3', float_array([3.0])),
                **const('const_6', float_array([6.0])),
                **const('const_1_6', float_array([1.0 / 6.0])),
                **result('result'),
            }, [('input_2', 'mul', {
                'in': 1,
                'out': 0
            }), ('input', 'add', {
                'in': 0,
                'out': 0
            }), ('const_3', 'add', {
                'in': 1,
                'out': 0
            }), ('add', 'max', {
                'in': 0,
                'out': 0
            }), ('const_0', 'max', {
                'in': 1,
                'out': 0
            }), ('max', 'min', {
                'in': 0,
                'out': 0
            }), ('const_6', 'min', {
                'in': 1,
                'out': 0
            }), ('min', 'mul', {
                'in': 0,
                'out': 0
            }), ('mul', 'mul_2', {
                'in': 0,
                'out': 0
            }), ('const_1_6', 'mul_2', {
                'in': 1,
                'out': 0
            }), ('mul_2', 'result', {
                'in': 0,
                'out': 0
            })])

        graph_ref = graph.copy()
        graph.stage = 'front'

        HSigmoidWithReluMul().find_and_replace_pattern(graph)

        (flag, resp) = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)
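# The replacement tests in these snippets all follow the same flow: build a graph, run the
# transformation's find_and_replace_pattern(), and compare the result against a reference graph.
# A minimal helper sketch factoring out that boilerplate; the name run_front_replacement_check
# and its signature are hypothetical, not part of the original test suites.
from openvino.tools.mo.utils.ir_engine.compare_graphs import compare_graphs
from unit_tests.utils.graph import build_graph


def run_front_replacement_check(test_case, nodes, edges, ref_edges, replacer, out_node='result'):
    # Build the graph under test, mark it as a front-phase graph and apply the replacement pass.
    graph = build_graph(nodes, edges, nodes_with_edges_only=True)
    graph.stage = 'front'
    replacer.find_and_replace_pattern(graph)

    # Build the expected graph from the same node attributes and compare both graphs,
    # starting from the output node.
    graph_ref = build_graph(nodes, ref_edges, nodes_with_edges_only=True)
    flag, resp = compare_graphs(graph, graph_ref, out_node, check_op_attrs=True)
    test_case.assertTrue(flag, resp)


# Hypothetical usage inside one of the CTCGreedyDecoder tests above:
#   run_front_replacement_check(self, self.nodes_attributes, edges, ref_edges,
#                               CTCGreedyDecoderReplacement(), out_node='last')
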
import unittest

import numpy as np

from openvino.tools.mo.front.onnx.pad_converter import ONNXPadToPad
from openvino.tools.mo.utils.ir_engine.compare_graphs import compare_graphs
from unit_tests.utils.graph import build_graph, const

nodes_attributes = {
    'placeholder': {
        'shape': None,
        'type': 'Parameter',
        'kind': 'op',
        'op': 'Parameter'
    },
    **const('pads', np.array([1, 2, 3, 4], dtype=np.int64)),
    **const('value', np.array(0.5, dtype=np.float32)),
    'onnx_pad': {
        'type': None,
        'kind': 'op',
        'op': 'ONNXPad',
        'name': 'my_pad',
        'mode': 'constant'
    },
    'result': {
        'type': 'Result',
        'value': None,
        'kind': 'op',
        'op': 'Result'
    },
    'pad': {
import unittest

import numpy as np

from openvino.tools.mo.front.tf.TFSliceToSlice import TFSliceToSliceReplacer
from openvino.tools.mo.utils.ir_engine.compare_graphs import compare_graphs
from unit_tests.utils.graph import build_graph, regular_op_with_empty_data, result, const, connect_front

nodes = {
    **regular_op_with_empty_data('input', {'type': 'Parameter'}),
    **regular_op_with_empty_data('tfslice', {
        'op': 'TFSlice',
        'type': None
    }),
    **const('begin', np.array(0)),
    **const('size', np.array([-1])),
    **regular_op_with_empty_data('john_doe', {
        'op': 'Sum',
        'type': None
    }),
    **result(),

    # nodes after replacement
    **const('minus_one', np.array(-1)),
    **regular_op_with_empty_data('shapeof', {
        'op': 'ShapeOf',
        'type': 'ShapeOf'
    }),
    **regular_op_with_empty_data('end_const', {
        'op': 'Add',
    'attr_split': {
        'type': None,
        'kind': 'op',
        'op': 'AttributedSplit',
        'axis': 0,
        'num_splits': 2,
        'squeeze_axis': True
    },
    'split': {
        'type': 'Split',
        'kind': 'op',
        'op': 'Split',
        'num_splits': 2,
        'squeeze_axis': True
    },
    **const('split_axis', int64_array(0)),
    'concat': {
        'type': 'Concat',
        'kind': 'op',
        'op': 'Concat',
        'axis': 0
    },
    'result': {
        'type': 'Result',
        'value': None,
        'kind': 'op',
        'op': 'Result'
    },
    'squeeze1': {
        'type': 'Squeeze',
        'kind': 'op',