def setUp(self):
        self.start_node_name = 'StatefulPartitionedCall/Preprocessor/unstack'
        self.end_node_name = 'StatefulPartitionedCall/Preprocessor/stack'
        self.end_node_name2 = 'StatefulPartitionedCall/Preprocessor/stack2'
        self.loop_start_node_name = 'prefix/map/while/Preprocessor/unstack'
        self.loop_end_node_name = 'prefix/map/while/Preprocessor/stack'
        self.mul_const = float32_array([0.025, 0.374, -0.45])
        self.sub_const = float32_array([2.0, 3.0, 4.0])

        self.nodes = {
            **regular_op('input', {'type': 'Parameter'}),

            **regular_op('mul', {'op': 'Mul', 'type': 'Multiply', 'name': 'my_mul'}),
            **regular_op('sub', {'op': 'Sub', 'type': 'Subtract', 'name': 'my_sub'}),
            **const('mul_const', self.mul_const),
            **const('sub_const', self.sub_const),

            **regular_op(self.start_node_name, {'op': 'Identity'}),
            **regular_op(self.end_node_name, {'op': 'Identity'}),
            **regular_op(self.end_node_name2, {'op': 'Identity'}),

            **regular_op('loop', {'op': 'Loop', 'body': None}),

            **regular_op('resize', {'type': 'Interpolate'}),
            **result('result'),
        }
        self.replacement_desc = {'start_nodes': [self.start_node_name],
                                 'end_nodes': [self.end_node_name, self.end_node_name2]}
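Example #2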
    def test(self):
        nodes = {
            **regular_op('input', {'type': 'Parameter'}),
            **const('depth', int64_array([2])),
            **regular_op('onehot', {'type': 'OneHot', 'kind': 'op', 'op': 'OneHot'}),

            **regular_op('reshape', {'type': 'Reshape', 'kind': 'op', 'op': 'Reshape'}),
            **const('reshape_dims', int64_array([])),
            **result('result'),
        }
        edges = [('input', 'onehot'),
                 ('depth', 'onehot'),
                 ('onehot', 'result'),
                 ]
        graph = build_graph(nodes, edges)

        graph.graph['layout'] = 'NCHW'
        graph.stage = 'front'

        edges_ref = [('input', 'onehot'),
                     ('depth', 'reshape'),
                     ('reshape_dims', 'reshape'),
                     ('reshape', 'onehot'),
                     ('onehot', 'result'),
                     ]

        graph_ref = build_graph(nodes, edges_ref)

        OneHotDepthNormalizer().find_and_replace_pattern(graph)

        (flag, resp) = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)
Example #3
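    # Negative case: 'mul' gets its second input from a different tensor ('input_2'),
    # so HSigmoidWithReluMul must not fuse anything and the graph stays equal to its copy.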
    def test_hsigmoid_with_relu_mul_different_tensors(self):
        graph = build_graph_with_edge_attrs(
            {
                **regular_op('input', {'type': 'Parameter'}),
                **regular_op('input_2', {'type': 'Parameter'}),
                **regular_op('add', {'op': 'Add'}),
                **regular_op('max', {'op': 'Maximum'}),
                **regular_op('min', {'op': 'Minimum'}),
                **regular_op('mul', {'op': 'Mul'}),
                **regular_op('mul_2', {
                    'op': 'Mul',
                    'name': 'final_mul'
                }),
                **const('const_0', float_array([0.0])),
                **const('const_3', float_array([3.0])),
                **const('const_6', float_array([6.0])),
                **const('const_1_6', float_array([1.0 / 6.0])),
                **result('result'),
            }, [('input_2', 'mul', {
                'in': 1,
                'out': 0
            }), ('input', 'add', {
                'in': 0,
                'out': 0
            }), ('const_3', 'add', {
                'in': 1,
                'out': 0
            }), ('add', 'max', {
                'in': 0,
                'out': 0
            }), ('const_0', 'max', {
                'in': 1,
                'out': 0
            }), ('max', 'min', {
                'in': 0,
                'out': 0
            }), ('const_6', 'min', {
                'in': 1,
                'out': 0
            }), ('min', 'mul', {
                'in': 0,
                'out': 0
            }), ('mul', 'mul_2', {
                'in': 0,
                'out': 0
            }), ('const_1_6', 'mul_2', {
                'in': 1,
                'out': 0
            }), ('mul_2', 'result', {
                'in': 0,
                'out': 0
            })])

        graph_ref = graph.copy()
        graph.stage = 'front'

        HSigmoidWithReluMul().find_and_replace_pattern(graph)

        (flag, resp) = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)
Example #4
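# SoftplusFusion should rewrite ln(1 + exp(x)) into a single SoftPlus node; the second test
# checks that a constant of 0.9999 instead of 1.0 prevents the fusion.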
class SoftplusFusionTest(unittest.TestCase):
    nodes = {
        **regular_op('input', {'type': 'Parameter'}),
        **regular_op('exp', {'op': 'Exp'}),
        **const('const_1', float_array([1.0])),
        **regular_op('add', {'op': 'Add'}),
        **regular_op('ln', {
            'op': 'Log',
            'name': 'final_log'
        }),
        **result('result'),
    }

    edges = [('input', 'exp', {
        'in': 0,
        'out': 0
    }), ('const_1', 'add', {
        'in': 0,
        'out': 0
    }), ('exp', 'add', {
        'in': 1,
        'out': 0
    }), ('add', 'ln', {
        'in': 0,
        'out': 0
    }), ('ln', 'result', {
        'in': 0,
        'out': 0
    })]

    def test_softplus_fusion_test(self):
        graph = build_graph_with_edge_attrs(self.nodes, self.edges, {})

        graph_ref = build_graph(ref_nodes, ref_edges)
        graph.stage = 'front'

        SoftplusFusion().find_and_replace_pattern(graph)

        (flag, resp) = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)
        self.assertTrue(
            len(graph.get_op_nodes(name='final_log')) == 1
            and graph.get_op_nodes(name='final_log')[0].op == 'SoftPlus')

    def test_softplus_fusion_test_wrong_const(self):
        graph = build_graph_with_edge_attrs(
            self.nodes, self.edges,
            {'const_1': {
                'value': float_array([0.9999])
            }})

        graph_ref = graph.copy()
        graph.stage = 'front'

        SoftplusFusion().find_and_replace_pattern(graph)

        (flag, resp) = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)
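Example #5
        # Helper used by the pre-processing tests set up above: builds the Loop body graph with the
        # Mul/Sub pre-processing either absent, placed after the resize ('trailing'), or placed before it.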
        def build_body_graph(pre_processing: str):
            nodes = {
                **regular_op('input', {'type': 'Parameter'}),

                **regular_op('mul', {'op': 'Mul', 'type': 'Multiply', 'name': 'my_body_mul'}),
                **regular_op('sub', {'op': 'Sub', 'type': 'Subtract', 'name': 'my_body_sub'}),
                **const('body_mul_const', self.mul_const),
                **const('body_sub_const', self.sub_const),

                **regular_op(self.loop_start_node_name, {'op': 'Identity'}),
                **regular_op(self.loop_end_node_name, {'op': 'Identity'}),

                **regular_op('resize', {'type': 'Interpolate'}),
                **result('result'),
            }
            edges = None
            if pre_processing == 'no':
                edges = [*connect_front('input', self.loop_start_node_name),
                         *connect_front(self.loop_start_node_name, 'resize'),
                         *connect_front('resize', self.loop_end_node_name),
                         *connect_front(self.loop_end_node_name, 'result'),
                         ]
            elif pre_processing == 'trailing':
                edges = [*connect_front('input', self.loop_start_node_name),
                         *connect_front(self.loop_start_node_name, 'resize'),
                         *connect_front('resize', self.loop_end_node_name),
                         *connect_front(self.loop_end_node_name, '0:mul'),
                         *connect_front('body_mul_const', '1:mul'),
                         *connect_front('body_sub_const', '0:sub'),
                         *connect_front('mul', '1:sub'),
                         *connect_front('sub', 'result'),
                         ]
            else:
                edges = [*connect_front('input', '0:mul'),
                         *connect_front('body_mul_const', '1:mul'),
                         *connect_front('body_sub_const', '0:sub'),
                         *connect_front('mul', '1:sub'),
                         *connect_front('sub', self.loop_start_node_name),
                         *connect_front(self.loop_start_node_name, 'resize'),
                         *connect_front('resize', self.loop_end_node_name),
                         *connect_front(self.loop_end_node_name, 'result'),
                         ]
            graph = build_graph(nodes, edges, nodes_with_edges_only=True)
            graph.stage = 'front'
            return graph
Example #6
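    # The test expects AttributedSliceToSliceReplacer to turn AttributedSlice into a Slice op
    # fed by the 'start', 'end' and 'axis' constants.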
    def test_attributed_slice_replacer(self, attributed_slice_attrs):
        nodes = {
            **regular_op_with_empty_data('input', {'type': 'Parameter'}),
            **regular_op_with_empty_data('attributed_slice', attributed_slice_attrs),
            **result(),

            # nodes after replacement
            **const('start', np.array([0, 0])),
            **const('end', np.array([1, -1])),
            **const('axis', np.array([0, 1])),
            **regular_op_with_empty_data('slice', {
                'op': 'Slice',
                'type': None
            }),
        }

        graph = build_graph(nodes_attrs=nodes,
                            edges=[
                                ('input', 'attributed_slice'),
                                ('attributed_slice', 'output'),
                            ],
                            nodes_with_edges_only=True)
        graph.stage = 'front'

        AttributedSliceToSliceReplacer().find_and_replace_pattern(graph)

        graph_ref = build_graph(nodes_attrs=nodes,
                                edges=[
                                    ('input', 'slice'),
                                    *connect_front('start', '1:slice'),
                                    *connect_front('end', '2:slice'),
                                    *connect_front('axis', '3:slice'),
                                    ('slice', 'output'),
                                ],
                                nodes_with_edges_only=True)

        (flag, resp) = compare_graphs(graph,
                                      graph_ref,
                                      'output',
                                      check_op_attrs=True)
        self.assertTrue(flag, resp)
Example #7
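    # FakeOutputResolver is expected to replace each FakeOutput with an Add of the producer output
    # and a zero constant, the Add taking over the FakeOutput's name.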
    def test_multi(self):
        nodes = {
            **regular_op('input', {'type': 'Parameter'}),
            **regular_op('some_op', {'type': 'SomeOp', 'name': 'some_op_name'}),
            **regular_op('fake_output1', {'type': None, 'kind': 'op', 'op': 'FakeOutput', 'name': 'my_output_name1'}),
            **regular_op('fake_output2', {'type': None, 'kind': 'op', 'op': 'FakeOutput', 'name': 'my_output_name2'}),

            **const('const1', int64_array(0)),
            **const('const2', int64_array(0)),
            **regular_op('add1', {'type': None, 'kind': 'op', 'op': 'Add', 'name': 'my_output_name1'}),
            **regular_op('add2', {'type': None, 'kind': 'op', 'op': 'Add', 'name': 'my_output_name2'}),
            **result('result1'),
            **result('result2'),
        }
        edges = [('input', 'some_op'),
                 ('some_op', 'fake_output1'),
                 ('some_op', 'fake_output2'),
                 ('fake_output1', 'result1'),
                 ('fake_output2', 'result2'),
                 ]
        graph = build_graph(nodes, edges)

        graph.graph['layout'] = 'NCHW'
        graph.stage = 'front'

        edges_ref = [('input', 'some_op'),
                     ('some_op', 'add1'),
                     ('const1', 'add1'),
                     ('some_op', 'add2'),
                     ('const2', 'add2'),
                     ('add1', 'result1'),
                     ('add2', 'result2'),
                     ]

        graph_ref = build_graph(nodes, edges_ref)

        FakeOutputResolver().find_and_replace_pattern(graph)

        (flag, resp) = compare_graphs(graph, graph_ref, 'result1')
        self.assertTrue(flag, resp)
Example #8
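    # SparseToDenseReplacer is expected to rewrite SparseToDense as a Broadcast of the cast default
    # value over the dense shape followed by ScatterNDUpdate with the sparse indices and values.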
    def test1(self):
        nodes_attributes = {
            'input_indices': {'shape': int64_array([5, 2]), 'type': 'Parameter', 'kind': 'op', 'op': 'Parameter'},
            'input_values': {'shape': int64_array([5]), 'type': 'Parameter', 'kind': 'op', 'op': 'Parameter'},

            'sparse_to_dense': {'kind': 'op', 'op': 'SparseToDense'},
            'broadcast': {'kind': 'op', 'op': 'Broadcast'},
            'scatternd': {'kind': 'op', 'op': 'ScatterNDUpdate'},
            'cast_default_value': {'kind': 'op', 'op': 'Cast'},

            'last': {'type': None, 'value': None, 'kind': 'op', 'op': 'Result'},

            **const('input_dense_shape', int64_array([50, 40])),
            **const('input_default_value', int64_array(0))}

        graph = build_graph(nodes_attributes,
                            [('input_indices', 'sparse_to_dense', {'out': 0, 'in': 0}),
                             ('input_dense_shape', 'sparse_to_dense', {'out': 0, 'in': 1}),
                             ('input_values', 'sparse_to_dense', {'out': 0, 'in': 2}),
                             ('input_default_value', 'sparse_to_dense', {'out': 0, 'in': 3}),
                             ('sparse_to_dense', 'last', {'out': 0, 'in': 0})],
                             nodes_with_edges_only=True)
        graph.stage = 'front'
        SparseToDenseReplacer().find_and_replace_pattern(graph)

        graph_ref = build_graph(nodes_attributes,
                                [('input_default_value', 'cast_default_value', {'in': 0}),
                                 ('cast_default_value', 'broadcast', {'in': 0}),
                                 ('input_dense_shape', 'broadcast', {'in': 1}),
                                 ('broadcast', 'scatternd', {'in': 0}),
                                 ('input_indices', 'scatternd', {'in': 1}),
                                 ('input_values', 'scatternd', {'in': 2}),
                                 ('scatternd', 'last', {'in': 0})],
                                 nodes_with_edges_only=True)

        (flag, resp) = compare_graphs(graph, graph_ref, 'last', check_op_attrs=True)
        self.assertTrue(flag, resp)
Example #9
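    # AtenToEmbeddingBag should convert the ATen 'embedding_bag' call into an EmbeddingBagOffsetsSum
    # with the same weights/indices/offsets inputs.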
    def test(self):
        nodes = {
            **const('weights_inp', np.random.randn(100, 2)),
            **regular_op('indices_inp', {'type': 'Parameter'}),
            **regular_op('offsets_inp', {'type': 'Parameter'}),
            **regular_op(
                'aten', {
                    'type': None,
                    'kind': 'op',
                    'op': 'ATen',
                    'operator': 'embedding_bag',
                    'mode': 0,
                    'name': 'my_aten'
                }),
            **regular_op(
                'emb_bag', {
                    'type': 'EmbeddingBagOffsetsSum',
                    'kind': 'op',
                    'op': 'EmbeddingBagOffsetsSum'
                }),
            **result('result'),
        }
        edges = [
            ('weights_inp', 'aten'),
            ('indices_inp', 'aten'),
            ('offsets_inp', 'aten'),
            ('aten', 'result'),
        ]
        graph = build_graph(nodes, edges)

        graph.graph['layout'] = 'NCHW'
        graph.stage = 'front'

        edges_ref = [
            ('weights_inp', 'emb_bag'),
            ('indices_inp', 'emb_bag'),
            ('offsets_inp', 'emb_bag'),
            ('emb_bag', 'result'),
        ]

        graph_ref = build_graph(nodes, edges_ref)

        AtenToEmbeddingBag().find_and_replace_pattern(graph)

        (flag, resp) = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)
Example #10
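    # CTCGreedyDecoderReplacement is expected to drop the SparseToDense/Cast consumers and feed the
    # decoder through a Transpose of the logits.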
    def test1(self):
        nodes_attributes = {
            # nodes from original graph
            'logits': {'type': 'Parameter', 'kind': 'op', 'op': 'Parameter'},
            'seq_len': {'type': 'Parameter', 'kind': 'op', 'op': 'Parameter'},
            'order_arr': {'kind': 'op', 'op': 'Const'},
            'transpose': {'type': 'Transpose', 'kind': 'op', 'op': 'Transpose'},
            'decoder': {'kind': 'op', 'op': 'CTCGreedyDecoderSeqLen', 'merge_repeated': True},
            'cast': {'kind': 'op', 'op': 'Cast'},
            'sparse_to_dense': {'kind': 'op', 'op': 'SparseToDense'},
            'last': {'type': None, 'value': None, 'kind': 'op', 'op': 'Result'},

            # new nodes
            'new_decoder': {'kind': 'op', 'op': 'CTCGreedyDecoderSeqLen', 'use_mask_format': True},
            **const('squeeze_axes', int64_array([2, 3])),
            'squeeze_dec_seq': {'kind': 'op', 'op': 'Squeeze'},
            'cast_to_int': {'kind': 'op', 'op': 'Cast'},
        }

        graph = build_graph(nodes_attributes,
                            [('logits', 'decoder', {'out': 0, 'in': 0}),
                             ('seq_len', 'decoder', {'out': 0, 'in': 1}),
                             ('decoder', 'sparse_to_dense', {'out': 0, 'in': 0}),
                             ('decoder', 'sparse_to_dense', {'out': 2, 'in': 1}),
                             ('decoder', 'cast', {'out': 1, 'in': 0}),
                             ('cast', 'sparse_to_dense', {'out': 0}),
                             ('sparse_to_dense', 'last', {'out': 0, 'in': 0}),
                             ], nodes_with_edges_only=True)
        graph.stage = 'front'
        CTCGreedyDecoderReplacement().find_and_replace_pattern(graph)

        graph_ref = build_graph(nodes_attributes,
                                [('logits', 'transpose', {'out': 0, 'in': 0}),
                                 ('order_arr', 'transpose', {'out': 0, 'in': 1}),
                                 ('transpose', 'decoder', {'out': 0, 'in': 0}),
                                 ('seq_len', 'decoder', {'out': 0, 'in': 1}),
                                 ('decoder', 'last', {'out': 0, 'in': 0}),
                                 ],
                                nodes_with_edges_only=True)

        (flag, resp) = compare_graphs(graph, graph_ref, 'last', check_op_attrs=True)
        self.assertTrue(flag, resp)
Example #11
import unittest

import numpy as np

from extensions.front.tf.TFSliceToSlice import TFSliceToSliceReplacer
from mo.utils.ir_engine.compare_graphs import compare_graphs
from mo.utils.unittest.graph import build_graph, regular_op_with_empty_data, result, const, connect_front

nodes = {
    **regular_op_with_empty_data('input', {'type': 'Parameter'}),
    **regular_op_with_empty_data('tfslice', {
        'op': 'TFSlice',
        'type': None
    }),
    **const('begin', np.array(0)),
    **const('size', np.array([-1])),
    **regular_op_with_empty_data('john_doe', {
        'op': 'Sum',
        'type': None
    }),
    **result(),

    # nodes after replacement
    **const('minus_one', np.array(-1)),
    **const('int32_max', np.array(np.iinfo(np.int32).max)),
    **regular_op_with_empty_data('end_const', {
        'op': 'Add',
        'type': 'Add'
    }),
    **regular_op_with_empty_data('equal', {
Example #12
        'max': np.array(3.5, dtype=np.float32)
    },
    'result': {
        'type': 'Result',
        'value': None,
        'kind': 'op',
        'op': 'Result'
    },

    # new Clamp layer and inputs
    'clamp': {
        'type': None,
        'kind': 'op',
        'op': 'Clamp'
    },
    **const('min', np.array(-3.5, dtype=np.float32)),
    **const('max', np.array(3.5, dtype=np.float32)),
}


class AttributedClampNormalizerTest(unittest.TestCase):
    def test_1(self):
        graph = build_graph(nodes_attributes, [
            ('placeholder', 'attr_clamp', {
                'in': 0,
                'out': 0
            }),
            ('attr_clamp', 'result', {
                'in': 0,
                'out': 0
            }),
Example #13
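    # CTCLossReplacement reference graph: the sequence mask is reduced and transposed, the decoder
    # output is squeezed and cast, label lengths are derived via Equal/Select/ReduceSum, and all of
    # it feeds a single CTCLoss.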
    def test1(self):
        nodes_attributes = {
            'logits': {
                'shape': int64_array([2, 6, 100]),
                'type': 'Parameter',
                'kind': 'op',
                'op': 'Parameter'
            },
            'seq_mask': {
                'shape': int64_array([2, 100]),
                'data_type': np.int32,
                'kind': 'op',
                'op': 'Parameter'
            },
            'reduce_seq_mask': {
                'kind': 'op',
                'op': 'ReduceSum'
            },
            's_cast_seq_mask': {
                'kind': 'op',
                'op': 'Cast'
            },
            'transpose_cast_seq_mask': {
                'kind': 'op',
                'op': 'Transpose'
            },
            'transpose': {
                'kind': 'op',
                'op': 'Transpose'
            },
            'ctc_greedy_decoder': {
                'kind': 'op',
                'op': 'CTCGreedyDecoder'
            },
            'cast': {
                'kind': 'op',
                'op': 'Cast'
            },
            'sparse_to_dense': {
                'kind': 'op',
                'op': 'SparseToDense'
            },
            'const': {
                'kind': 'op',
                'op': 'Const'
            },
            'ctc_loss': {
                'kind': 'op',
                'op': 'CTCLoss',
                'preprocess_collapse_repeated': False,
                'ctc_merge_repeated': True,
                'unique': False
            },
            'equal_op': {
                'kind': 'op',
                'op': 'Equal'
            },
            'ctc_greedy_decoder_op': {
                'kind': 'op',
                'op': 'CTCGreedyDecoder'
            },
            'ctc_loss_op': {
                'kind': 'op',
                'op': 'CTCLoss'
            },
            'squeeze_op': {
                'kind': 'op',
                'op': 'Squeeze'
            },
            'cast_labels_op': {
                'kind': 'op',
                'op': 'Cast',
                'type': 'Convert'
            },
            'labels_shape_op': {
                'kind': 'op',
                'op': 'ShapeOf'
            },
            'broadcast_one_op': {
                'kind': 'op',
                'op': 'Broadcast'
            },
            'broadcast_zero_op': {
                'kind': 'op',
                'op': 'Broadcast'
            },
            'select_op': {
                'kind': 'op',
                'op': 'Select'
            },
            'label_length_op': {
                'kind': 'op',
                'op': 'ReduceSum'
            },
            **const('reduce_indices', int64_array(1)),
            **const('permute_order', int64_array([1, 0])),
            **const('default_value', int64_array(-1)),
            **const('squeeze_axis', int64_array([2, 3])),
            **const('minus_one', np.array([-1], dtype=np.int32)),
            **const('one', np.array([1], dtype=np.int32)),
            **const('zero', np.array([0], dtype=np.int32)),
            **const('reduce_sum_axis', int64_array([1])),
            'last': {
                'type': None,
                'value': None,
                'kind': 'op',
                'op': 'Result'
            },
        }

        graph = build_graph(nodes_attributes, [
            ('logits', 'transpose', {
                'out': 0,
                'in': 0
            }),
            ('transpose', 'ctc_greedy_decoder', {
                'out': 0,
                'in': 0
            }),
            ('seq_mask', 'ctc_greedy_decoder', {
                'out': 0,
                'in': 1
            }),
            ('transpose', 'ctc_loss', {
                'out': 0,
                'in': 0
            }),
            ('seq_mask', 'ctc_loss', {
                'out': 0,
                'in': 3
            }),
            ('ctc_greedy_decoder', 'sparse_to_dense', {
                'out': 0,
                'in': 0
            }),
            ('ctc_greedy_decoder', 'sparse_to_dense', {
                'out': 2,
                'in': 1
            }),
            ('ctc_greedy_decoder', 'sparse_to_dense', {
                'out': 1,
                'in': 2
            }),
            ('default_value', 'sparse_to_dense', {
                'out': 0,
                'in': 3
            }),
            ('ctc_greedy_decoder', 'cast', {
                'out': 1,
                'in': 0
            }),
            ('ctc_greedy_decoder', 'ctc_loss', {
                'out': 0,
                'in': 1
            }),
            ('cast', 'ctc_loss', {
                'out': 0,
                'in': 2
            }),
            ('ctc_loss', 'last', {
                'out': 0,
                'in': 0
            }),
        ],
                            nodes_with_edges_only=True)
        graph.graph['cmd_params'] = Namespace(data_type='FP32')
        graph.stage = 'front'
        CTCLossReplacement().find_and_replace_pattern(graph)

        graph_ref = build_graph(
            nodes_attributes,
            [('seq_mask', 'reduce_seq_mask', {
                'out': 0,
                'in': 0
            }), ('reduce_indices', 'reduce_seq_mask', {
                'out': 0,
                'in': 1
            }), ('seq_mask', 's_cast_seq_mask', {
                'out': 0,
                'in': 0
            }),
             ('s_cast_seq_mask', 'transpose_cast_seq_mask', {
                 'out': 0,
                 'in': 0
             }),
             ('permute_order', 'transpose_cast_seq_mask', {
                 'out': 0,
                 'in': 1
             }), ('logits', 'transpose', {
                 'out': 0,
                 'in': 0
             }), ('transpose', 'ctc_greedy_decoder_op', {
                 'out': 0,
                 'in': 0
             }),
             ('transpose_cast_seq_mask', 'ctc_greedy_decoder_op', {
                 'out': 0,
                 'in': 1
             }), ('ctc_greedy_decoder_op', 'squeeze_op', {
                 'out': 0,
                 'in': 0
             }), ('squeeze_axis', 'squeeze_op', {
                 'out': 0,
                 'in': 1
             }), ('squeeze_op', 'cast_labels_op', {
                 'in': 0
             }), ('minus_one', 'equal_op', {
                 'out': 0,
                 'in': 1
             }), ('equal_op', 'labels_shape_op', {
                 'out': 0,
                 'in': 0
             }), ('one', 'broadcast_one_op', {
                 'out': 0,
                 'in': 0
             }), ('labels_shape_op', 'broadcast_one_op', {
                 'out': 0,
                 'in': 1
             }), ('zero', 'broadcast_zero_op', {
                 'out': 0,
                 'in': 0
             }), ('labels_shape_op', 'broadcast_zero_op', {
                 'out': 0,
                 'in': 1
             }), ('equal_op', 'select_op', {
                 'out': 0,
                 'in': 0
             }), ('broadcast_zero_op', 'select_op', {
                 'out': 0,
                 'in': 1
             }), ('broadcast_one_op', 'select_op', {
                 'out': 0,
                 'in': 2
             }), ('select_op', 'label_length_op', {
                 'out': 0,
                 'in': 0
             }), ('reduce_sum_axis', 'label_length_op', {
                 'out': 0,
                 'in': 1
             }), ('logits', 'ctc_loss_op', {
                 'out': 0,
                 'in': 0
             }), ('reduce_seq_mask', 'ctc_loss_op', {
                 'out': 0,
                 'in': 1
             }), ('cast_labels_op', 'ctc_loss_op', {
                 'out': 0,
                 'in': 2
             }), ('label_length_op', 'ctc_loss_op', {
                 'out': 0,
                 'in': 3
             }), ('cast_labels_op', 'equal_op', {
                 'out': 0,
                 'in': 0
             }), ('ctc_loss_op', 'last', {
                 'out': 0,
                 'in': 0
             })],
            nodes_with_edges_only=True)

        (flag, resp) = compare_graphs(graph,
                                      graph_ref,
                                      'last',
                                      check_op_attrs=True)
        self.assertTrue(flag, resp)
Example #14
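    # TdnnComponentReplacer should expand tdnncomponent into MemoryOffset nodes (one per non-zero
    # time offset) concatenated and passed to a FullyConnected with the component's weights and biases.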
    def test_tdnnreplacer(self, weights, biases, time_offsets):
        def generate_offsets():
            offset_edges = []
            offset_nodes = {}

            for i, t in enumerate(time_offsets):
                offset_nodes.update(**regular_op('memoryoffset_' +
                                                 str(i), {'type': None}))

                if t != 0:
                    offset_edges.append(
                        ('placeholder', 'memoryoffset_' + str(i), {
                            'out': 0,
                            'in': 0
                        }))
                    offset_edges.append(('memoryoffset_' + str(i), 'concat', {
                        'out': 0,
                        'in': i
                    }))
                else:
                    offset_edges.append(('placeholder', 'concat', {
                        'out': 0,
                        'in': i
                    }))

            return offset_nodes, offset_edges

        offset_nodes, ref_offset_edges = generate_offsets()

        nodes = {
            **offset_nodes,
            **regular_op('placeholder', {'type': 'Parameter'}),
            **regular_op(
                'tdnncomponent', {
                    'op': 'tdnncomponent',
                    'weights': np.array(weights),
                    'biases': np.array(biases),
                    'time_offsets': np.array(time_offsets)
                }),
            **const('weights', np.array(weights)),
            **const('biases', np.array(biases)),
            **regular_op('concat', {
                'type': 'Concat',
                'axis': 1
            }),
            **regular_op('memoryoffset_0', {'type': None}),
            **regular_op('memoryoffset_1', {'type': None}),
            **regular_op('memoryoffset_2', {'type': None}),
            **regular_op('fully_connected', {'type': 'FullyConnected'}),
            **result('result'),
        }

        graph = build_graph(nodes, [
            *connect_front('placeholder', 'tdnncomponent'),
            *connect_front('tdnncomponent', 'result')
        ],
                            nodes_with_edges_only=True)

        graph.stage = 'front'

        ref_graph = build_graph(nodes, [
            *ref_offset_edges, *connect_front('concat', '0:fully_connected'),
            *connect_front('weights', '1:fully_connected'),
            *connect_front('biases', '2:fully_connected'),
            *connect_front('fully_connected', 'result')
        ],
                                nodes_with_edges_only=True)

        TdnnComponentReplacer().find_and_replace_pattern(graph)

        (flag, resp) = compare_graphs(graph,
                                      ref_graph,
                                      'result',
                                      check_op_attrs=True)
        self.assertTrue(flag, resp)
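Example #15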
 def test_convert_slice_to_strided_slice(self, input_shape, start, end,
                                         axes, steps, ss_begin_parts: tuple,
                                         ss_end_parts: tuple, ss_steps,
                                         ss_begin_mask, ss_end_mask):
     graph = build_graph(
         nodes_attrs={
             **regular_op_with_shaped_data('input', input_shape, {
                 'type': 'Parameter'
             }),
             **valued_const_with_data('start', start),
             **valued_const_with_data('end', end),
             **valued_const_with_data('axes', axes),
             **valued_const_with_data('steps', steps),
             **regular_op_with_empty_data('slice', {
                 'type': None,
                 'op': 'Slice'
             }),
             **result('result')
         },
         edges=[
             *connect('input', 'slice'), *connect('start', '1:slice'),
             *connect('end', '2:slice'), *connect('axes', '3:slice'),
             *connect('steps', '4:slice'), *connect('slice', 'result')
         ])
     ref_graph = build_graph(nodes_attrs={
         **regular_op_with_shaped_data('input', input_shape, {
             'type': 'Parameter'
         }),
         **valued_const_with_data('start', start),
         **valued_const_with_data('begin_first_part', ss_begin_parts[0]),
         **valued_const_with_data('begin_last_part', ss_begin_parts[1]),
         **regular_op_with_empty_data('convert_start', {
             'op': 'Cast',
             'type': 'Convert',
             'dst_type': np.int64
         }),
         **regular_op_with_empty_data('ss_begin', {
             'type': 'Concat',
             'op': 'Concat',
             'axis': 0
         }),
         **valued_const_with_data('end', end),
         **valued_const_with_data('end_first_part', ss_end_parts[0]),
         **valued_const_with_data('end_last_part', ss_end_parts[1]),
         **regular_op_with_empty_data('convert_end', {
             'op': 'Cast',
             'type': 'Convert',
             'dst_type': np.int64
         }),
         **regular_op_with_empty_data('ss_end', {
             'type': 'Concat',
             'op': 'Concat',
             'axis': 0
         }),
         **const('ss_steps', ss_steps),
         **empty_data('ss_steps_d'),
         **regular_op_with_empty_data(
             'ss', {
                 'op': 'StridedSlice',
                 'type': 'StridedSlice',
                 'begin_mask': ss_begin_mask,
                 'end_mask': ss_end_mask,
                 'new_axis_mask': np.zeros(len(input_shape), dtype=np.int64),
                 'shrink_axis_mask': np.zeros(len(input_shape),
                                              dtype=np.int64),
                 'ellipsis_mask': np.zeros(len(input_shape), dtype=np.int64)
             }),
         **result('result')
     },
                             edges=[
                                 *connect('input', 'ss'),
                                 *connect('begin_first_part', 'ss_begin'),
                                 *connect('start', 'convert_start'),
                                 *connect('convert_start', '1:ss_begin'),
                                 *connect('begin_last_part', '2:ss_begin'),
                                 *connect('ss_begin', '1:ss'),
                                 *connect('end_first_part', 'ss_end'),
                                 *connect('end', 'convert_end'),
                                 *connect('convert_end', '1:ss_end'),
                                 *connect('end_last_part', '2:ss_end'),
                                 *connect('ss_end', '2:ss'),
                                 *connect('ss_steps', '3:ss'),
                                 *connect('ss', 'result')
                             ])
     ConvertSlice().find_and_replace_pattern(graph)
     (flag, resp) = compare_graphs(graph,
                                   ref_graph,
                                   'result',
                                   check_op_attrs=True)
     self.assertTrue(flag, resp)
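Example #16
# AttributedPadToPad replaces AttributedPad with a Pad op whose 'pads' attribute is split into
# the 'pad_begin'/'pad_end' constants, plus a fill-value constant.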
import unittest

import numpy as np

from extensions.front.AttributedPadToPad import AttributedPadToPad
from mo.front.common.partial_infer.utils import int64_array
from mo.utils.ir_engine.compare_graphs import compare_graphs
from mo.utils.unittest.graph import build_graph, const

nodes_attributes = {
    'placeholder': {'shape': None, 'type': 'Parameter', 'kind': 'op', 'op': 'Parameter'},
    'attr_pad': {'type': None, 'kind': 'op', 'op': 'AttributedPad', 'mode': 'constant', 'name': 'attr_pad',
                 'pads': int64_array([1, 2, 3, 4, 5, 6]).reshape([3, 2]), 'fill_value': 0.75},
    'result': {'type': 'Result', 'value': None, 'kind': 'op', 'op': 'Result'},

    # new Pad layer and inputs
    'pad': {'type': 'Pad', 'kind': 'op', 'op': 'Pad', 'mode': 'constant'},
    **const('pad_begin', int64_array([1, 3, 5])),
    **const('pad_end', int64_array([2, 4, 6])),
    **const('pad_fill', np.array(0.75)),
}


class AttributedPadToPadTest(unittest.TestCase):
    def test_mode_constant(self):
        graph = build_graph(nodes_attributes,
                            [('placeholder', 'attr_pad', {'in': 0, 'out': 0}),
                             ('attr_pad', 'result', {'in': 0, 'out': 0}),
                             ],
                            {}, nodes_with_edges_only=True)

        graph_ref = build_graph(nodes_attributes,
                                [('placeholder', 'pad', {'in': 0, 'out': 0}),
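Example #17
# MvnOnnxToMvn is expected to replace MVNOnnx with an MVN op fed by an explicit 'axes' constant.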
from mo.utils.unittest.graph import build_graph, regular_op_with_empty_data, result, const, connect_front

nodes = {
    **regular_op_with_empty_data('input', {'type': 'Parameter'}),
    **regular_op_with_empty_data(
        'mvn_onnx', {
            'op': 'MVNOnnx',
            'axes': int64_array([2, 3]),
            'eps': 1e-9,
            'eps_mode': 'outside_sqrt',
            'normalize_variance': 1
        }),
    **result(),

    # nodes after replacement
    **const('axes', int64_array([2, 3])),
    **regular_op_with_empty_data('mvn', {
        'op': 'MVN',
        'type': None
    }),
}


class MvnOnnxToMvnTest(unittest.TestCase):
    def test_mvn_normalize(self):
        graph = build_graph(nodes, [('input', 'mvn_onnx'),
                                    ('mvn_onnx', 'output')],
                            nodes_with_edges_only=True)
        graph.stage = 'front'

        MvnOnnxToMvn().find_and_replace_pattern(graph)
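Example #18
# Node attributes for a TFPad-to-Pad test: the TFPad op with 'paddings'/'fill' constants and the
# Pad layer expected after replacement.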
nodes_attributes = {
    'placeholder': {
        'shape': None,
        'type': 'Parameter',
        'kind': 'op',
        'op': 'Parameter'
    },
    'tfpad': {
        'type': None,
        'kind': 'op',
        'op': 'TFPad',
        'mode': 'constant',
        'name': 'tfpad_name'
    },
    **const('paddings',
            int64_array([1, 2, 3, 4, 5, 6]).reshape([3, 2])),
    **const('fill', float_array(5.75)),
    'result': {
        'type': 'Result',
        'value': None,
        'kind': 'op',
        'op': 'Result'
    },

    # new Pad layer and sub-graph
    'pad': {
        'type': 'Pad',
        'kind': 'op',
        'op': 'Pad',
        'mode': 'constant'
    },
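Example #19
# Node attributes for an AttributedSplit-to-Split test: the Split op, its axis constant, and
# Concat/Squeeze helpers.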
 'attr_split': {
     'type': None,
     'kind': 'op',
     'op': 'AttributedSplit',
     'axis': 0,
     'num_splits': 2,
     'squeeze_axis': True
 },
 'split': {
     'type': 'Split',
     'kind': 'op',
     'op': 'Split',
     'num_splits': 2,
     'squeeze_axis': True
 },
 **const('split_axis', int64_array(0)),
 'concat': {
     'type': 'Concat',
     'kind': 'op',
     'op': 'Concat',
     'axis': 0
 },
 'result': {
     'type': 'Result',
     'value': None,
     'kind': 'op',
     'op': 'Result'
 },
 'squeeze1': {
     'type': 'Squeeze',
     'kind': 'op',
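Example #20
# MVNCaffeToMVN is expected to replace MVNCaffe with an MVN op whose axes are produced by a
# Rank/Range sub-graph.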
import unittest

import numpy as np

from extensions.front.caffe.MVNCaffeToMVN import MVNCaffeToMVN
from mo.utils.ir_engine.compare_graphs import compare_graphs
from mo.utils.unittest.graph import build_graph, regular_op_with_empty_data, result, const, connect_front

nodes = {
    **regular_op_with_empty_data('input', {'type': 'Parameter'}),
    **regular_op_with_empty_data('mvn_caffe', {'op': 'MVNCaffe'}),
    **result(),

    # nodes after replacement
    **const('start_1', np.array(1)),
    **const('start_2', np.array(2)),
    **const('step', np.array(1)),
    **regular_op_with_empty_data('rank', {
        'op': 'Rank',
        'type': None
    }),
    **regular_op_with_empty_data('range', {
        'op': 'Range',
        'type': None
    }),
    **regular_op_with_empty_data('mvn', {
        'op': 'MVN',
        'type': None
    }),
}
Example #21
# Copyright (C) 2018-2021 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

import unittest

import numpy as np

from extensions.front.onnx.pad_converter import ONNXPadToPad
from mo.utils.ir_engine.compare_graphs import compare_graphs
from mo.utils.unittest.graph import build_graph, const

nodes_attributes = {
    'placeholder': {'shape': None, 'type': 'Parameter', 'kind': 'op', 'op': 'Parameter'},
    **const('pads', np.array([1, 2, 3, 4], dtype=np.int64)),
    **const('value', np.array(0.5, dtype=np.float32)),
    'onnx_pad': {'type': None, 'kind': 'op', 'op': 'ONNXPad', 'name': 'my_pad', 'mode': 'constant'},
    'result': {'type': 'Result', 'value': None, 'kind': 'op', 'op': 'Result'},

    'pad': {'type': 'Pad', 'kind': 'op', 'op': 'Pad'},
    'split': {'type': 'Split', 'kind': 'op', 'op': 'Split', 'num_splits': 2},
    **const('split_axis', np.array(0, dtype=np.int32)),
}


class AttributedClampNormalizerTest(unittest.TestCase):
    def test_1(self):
        graph = build_graph(nodes_attributes,
                            [('placeholder', 'onnx_pad', {'in': 0, 'out': 0}),
                             ('pads', 'onnx_pad', {'in': 1, 'out': 0}),
                             ('value', 'onnx_pad', {'in': 2, 'out': 0}),
                             ('onnx_pad', 'result', {'in': 0, 'out': 0}),
Example #22
import unittest

import numpy as np

from extensions.front.tf.TFSliceToSlice import TFSliceToSliceReplacer
from mo.utils.ir_engine.compare_graphs import compare_graphs
from mo.utils.unittest.graph import build_graph, regular_op_with_empty_data, result, const, connect_front

nodes = {
    **regular_op_with_empty_data('input', {'type': 'Parameter'}),
    **regular_op_with_empty_data('tfslice', {
        'op': 'TFSlice',
        'type': None
    }),
    **const('begin', np.array(0)),
    **const('size', np.array([-1])),
    **regular_op_with_empty_data('john_doe', {
        'op': 'Sum',
        'type': None
    }),
    **result(),

    # nodes after replacement
    **const('minus_one', np.array(-1)),
    **regular_op_with_empty_data('shapeof', {
        'op': 'ShapeOf',
        'type': 'ShapeOf'
    }),
    **regular_op_with_empty_data('end_const', {
        'op': 'Add',
Example #23
    def test1(self):
        nodes_attributes = {
            'logits': {
                'shape': int64_array([2, 6, 100]),
                'type': 'Parameter',
                'kind': 'op',
                'op': 'Parameter'
            },
            'seq_mask': {
                'shape': int64_array([2]),
                'data_type': np.int32,
                'kind': 'op',
                'op': 'Parameter'
            },
            'transpose': {
                'kind': 'op',
                'op': 'Transpose'
            },
            'ctc_greedy_decoder': {
                'kind': 'op',
                'op': 'CTCGreedyDecoderSeqLen',
                'merge_repeated': True
            },
            'cast': {
                'kind': 'op',
                'op': 'Cast'
            },
            'sparse_to_dense': {
                'kind': 'op',
                'op': 'SparseToDense'
            },
            'tf_ctc_loss': {
                'kind': 'op',
                'op': 'CTCLoss',
                'preprocess_collapse_repeated': False,
                'ctc_merge_repeated': True,
                'unique': False,
                'logits_time_major': True
            },
            'ctc_loss': {
                'kind': 'op',
                'op': 'CTCLoss',
                'preprocess_collapse_repeated': False,
                'ctc_merge_repeated': True,
                'unique': False
            },
            **const('default_value', int64_array(-1)),
            'last': {
                'type': None,
                'value': None,
                'kind': 'op',
                'op': 'Result'
            },
            'transpose2': {
                'kind': 'op',
                'op': 'Transpose'
            },
            **const('transpose2_axis', int64_array([1, 0, 2])),
        }
        graph = build_graph(nodes_attributes, [('logits', 'transpose', {
            'out': 0,
            'in': 0
        }), ('transpose', 'ctc_greedy_decoder', {
            'out': 0,
            'in': 0
        }), ('seq_mask', 'ctc_greedy_decoder', {
            'out': 0,
            'in': 1
        }), ('transpose', 'tf_ctc_loss', {
            'out': 0,
            'in': 0
        }), ('seq_mask', 'tf_ctc_loss', {
            'out': 0,
            'in': 3
        }), ('ctc_greedy_decoder', 'sparse_to_dense', {
            'out': 0,
            'in': 0
        }), ('ctc_greedy_decoder', 'sparse_to_dense', {
            'out': 2,
            'in': 1
        }), ('ctc_greedy_decoder', 'sparse_to_dense', {
            'out': 1,
            'in': 2
        }), ('default_value', 'sparse_to_dense', {
            'out': 0,
            'in': 3
        }), ('ctc_greedy_decoder', 'cast', {
            'out': 1,
            'in': 0
        }), ('ctc_greedy_decoder', 'tf_ctc_loss', {
            'out': 0,
            'in': 1
        }), ('cast', 'tf_ctc_loss', {
            'out': 0,
            'in': 2
        }), ('tf_ctc_loss', 'last', {
            'out': 0,
            'in': 0
        })],
                            nodes_with_edges_only=True)
        graph.graph['cmd_params'] = Namespace(data_type='FP32')
        graph.stage = 'front'
        CTCLossReplacement().find_and_replace_pattern(graph)

        graph_ref = build_graph(nodes_attributes, [('logits', 'transpose', {
            'out': 0,
            'in': 0
        }), ('transpose', 'transpose2', {
            'out': 0,
            'in': 0
        }), ('transpose2_axis', 'transpose2', {
            'out': 0,
            'in': 1
        }), ('transpose2', 'ctc_greedy_decoder', {
            'out': 0,
            'in': 0
        }), ('seq_mask', 'ctc_greedy_decoder', {
            'out': 0,
            'in': 1
        }), ('transpose2', 'ctc_loss', {
            'out': 0,
            'in': 0
        }), ('ctc_greedy_decoder', 'ctc_loss', {
            'out': 0,
            'in': 2
        }), ('ctc_greedy_decoder', 'ctc_loss', {
            'out': 1,
            'in': 3
        }), ('seq_mask', 'ctc_loss', {
            'out': 0,
            'in': 1
        }), ('ctc_loss', 'last', {
            'out': 0,
            'in': 0
        })],
                                nodes_with_edges_only=True)

        (flag, resp) = compare_graphs(graph,
                                      graph_ref,
                                      'last',
                                      check_op_attrs=True)
        self.assertTrue(flag, resp)
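Example #24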
 def test_convert_slice_to_strided_slice_without_axes_and_steps(self):
     graph = build_graph(nodes_attrs={
         **regular_op_with_shaped_data('input', int64_array([2, 5, 10]), {
                                           'type': 'Parameter'
                                       }),
         **valued_const_with_data('start', np.array([0, 0, 0])),
         **valued_const_with_data('end', np.array([1, 3, 5])),
         **regular_op_with_empty_data('slice', {
             'type': None,
             'op': 'Slice'
         }),
         **result('result')
     },
                         edges=[
                             *connect('input', 'slice'),
                             *connect('start', '1:slice'),
                             *connect('end', '2:slice'),
                             *connect('slice', 'result')
                         ])
     ref_graph = build_graph(nodes_attrs={
         **regular_op_with_shaped_data('input', int64_array([2, 5, 10]), {
                                           'type': 'Parameter'
                                       }),
         **valued_const_with_data('start', np.array([0, 0, 0])),
         **valued_const_with_data('begin_first_part', int64_array([])),
         **valued_const_with_data('begin_last_part', int64_array([])),
         **regular_op_with_empty_data('convert_start', {
             'op': 'Cast',
             'type': 'Convert',
             'dst_type': np.int64
         }),
         **regular_op_with_empty_data('ss_begin', {
             'type': 'Concat',
             'op': 'Concat',
             'axis': 0
         }),
         **valued_const_with_data('end', np.array([1, 3, 5])),
         **valued_const_with_data('end_first_part', int64_array([])),
         **valued_const_with_data('end_last_part', int64_array([])),
         **regular_op_with_empty_data('convert_end', {
             'op': 'Cast',
             'type': 'Convert',
             'dst_type': np.int64
         }),
         **regular_op_with_empty_data('ss_end', {
             'type': 'Concat',
             'op': 'Concat',
             'axis': 0
         }),
         **const('ss_steps', int64_array([1, 1, 1])),
         **empty_data('ss_steps_d'),
         **regular_op_with_empty_data(
             'ss', {
                 'op': 'StridedSlice',
                 'type': 'StridedSlice',
                 'begin_mask': int64_array([1, 1, 1]),
                 'end_mask': int64_array([1, 1, 1]),
                 'new_axis_mask': np.zeros(3, dtype=np.int64),
                 'shrink_axis_mask': np.zeros(3, dtype=np.int64),
                 'ellipsis_mask': np.zeros(3, dtype=np.int64)
             }),
         **result('result')
     },
                             edges=[
                                 *connect('input', 'ss'),
                                 *connect('begin_first_part', 'ss_begin'),
                                 *connect('start', 'convert_start'),
                                 *connect('convert_start', '1:ss_begin'),
                                 *connect('begin_last_part', '2:ss_begin'),
                                 *connect('ss_begin', '1:ss'),
                                 *connect('end_first_part', 'ss_end'),
                                 *connect('end', 'convert_end'),
                                 *connect('convert_end', '1:ss_end'),
                                 *connect('end_last_part', '2:ss_end'),
                                 *connect('ss_end', '2:ss'),
                                 *connect('ss_steps', '3:ss'),
                                 *connect('ss', 'result')
                             ])
     ConvertSlice().find_and_replace_pattern(graph)
     (flag, resp) = compare_graphs(graph,
                                   ref_graph,
                                   'result',
                                   check_op_attrs=True)
     self.assertTrue(flag, resp)
Example #25
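# HSigmoidWithReluMul should fuse Add(+3) -> ReLU -> Min(6) -> Mul(1/6) into a single HSigmoid
# that keeps the final Mul's name; a wrong constant or a Mul fed from another tensor must leave
# the graph unchanged.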
class HSigmoidWithReluMulTest(unittest.TestCase):
    nodes = {
        **regular_op('input', {'type': 'Parameter'}),
        **regular_op('add', {'op': 'Add'}),
        **regular_op('relu', {'op': 'ReLU'}),
        **regular_op('min', {'op': 'Minimum'}),
        **regular_op('mul', {
            'op': 'Mul',
            'name': 'final_mul'
        }),
        **const('add_const', float_array([3.0])),
        **const('min_const', float_array([6.0])),
        **const('mul_const', float_array([1.0 / 6.0])),
        **result('result'),
    }

    edges = [('input', 'add', {
        'in': 0,
        'out': 0
    }), ('add_const', 'add', {
        'in': 1,
        'out': 0
    }), ('add', 'relu', {
        'in': 0,
        'out': 0
    }), ('relu', 'min', {
        'in': 0,
        'out': 0
    }), ('min_const', 'min', {
        'in': 1,
        'out': 0
    }), ('min', 'mul', {
        'in': 0,
        'out': 0
    }), ('mul_const', 'mul', {
        'in': 1,
        'out': 0
    }), ('mul', 'result', {
        'in': 0,
        'out': 0
    })]

    def test_hsigmoid_with_relu_mul(self):
        graph = build_graph_with_edge_attrs(self.nodes, self.edges, {})

        graph_ref = build_graph(ref_nodes, ref_edges)
        graph.stage = 'front'

        HSigmoidWithReluMul().find_and_replace_pattern(graph)

        (flag, resp) = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)
        self.assertTrue(
            len(graph.get_op_nodes(name='final_mul')) == 1
            and graph.get_op_nodes(name='final_mul')[0].op == 'HSigmoid')
        self.assertTrue(
            graph.get_op_nodes(
                name='final_mul')[0].out_nodes()[0].node == 'result')

    def test_hsigmoid_with_relu_mul_wrong_constant(self):
        graph = build_graph_with_edge_attrs(
            self.nodes, self.edges,
            {'add_const': {
                'value': float_array([0.00001])
            }})

        graph_ref = graph.copy()
        graph.stage = 'front'

        HSigmoidWithReluMul().find_and_replace_pattern(graph)

        (flag, resp) = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)

    def test_hsigmoid_with_relu_mul_different_tensors(self):
        graph = build_graph_with_edge_attrs(
            {
                **regular_op('input', {'type': 'Parameter'}),
                **regular_op('input_2', {'type': 'Parameter'}),
                **regular_op('add', {'op': 'Add'}),
                **regular_op('max', {'op': 'Maximum'}),
                **regular_op('min', {'op': 'Minimum'}),
                **regular_op('mul', {'op': 'Mul'}),
                **regular_op('mul_2', {
                    'op': 'Mul',
                    'name': 'final_mul'
                }),
                **const('const_0', float_array([0.0])),
                **const('const_3', float_array([3.0])),
                **const('const_6', float_array([6.0])),
                **const('const_1_6', float_array([1.0 / 6.0])),
                **result('result'),
            }, [('input_2', 'mul', {
                'in': 1,
                'out': 0
            }), ('input', 'add', {
                'in': 0,
                'out': 0
            }), ('const_3', 'add', {
                'in': 1,
                'out': 0
            }), ('add', 'max', {
                'in': 0,
                'out': 0
            }), ('const_0', 'max', {
                'in': 1,
                'out': 0
            }), ('max', 'min', {
                'in': 0,
                'out': 0
            }), ('const_6', 'min', {
                'in': 1,
                'out': 0
            }), ('min', 'mul', {
                'in': 0,
                'out': 0
            }), ('mul', 'mul_2', {
                'in': 0,
                'out': 0
            }), ('const_1_6', 'mul_2', {
                'in': 1,
                'out': 0
            }), ('mul_2', 'result', {
                'in': 0,
                'out': 0
            })])

        graph_ref = graph.copy()
        graph.stage = 'front'

        HSigmoidWithReluMul().find_and_replace_pattern(graph)

        (flag, resp) = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)
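Example #26
# GeLUMergerErf should fuse the x * 0.5 * (1 + erf(x / sqrt(2))) pattern into a single Gelu node
# with the 'erf' approximation; the three tests cover different orderings of the Mul inputs.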
class GeLUMergerErfTest(unittest.TestCase):
    nodes = {
        **regular_op('input', {
            'op': 'Parameter',
            'type': 'Parameter'
        }),
        **regular_op('mul', {'op': 'Mul'}),
        **regular_op('mul0', {
            'op': 'Mul',
            'name': 'final_mul'
        }),
        **regular_op('div', {'op': 'Div'}),
        **regular_op('erf', {'op': 'Erf'}),
        **regular_op('add', {'op': 'Add'}),
        **const('mul_param', float_array([0.5])),
        **const('div_param', float_array([sqrt(2.)])),
        **const('add_param', int64_array([1])),
        **result('result'),
    }

    def test_gelu_p1(self):
        edges = [('input', 'mul'), ('mul', 'mul0'), ('input', 'div'),
                 ('div', 'erf'), ('erf', 'add'), ('add', 'mul0'),
                 ('mul_param', 'mul'), ('div_param', 'div'),
                 ('add_param', 'add'), ('mul0', 'result')]

        graph = build_graph(self.nodes, edges)

        graph_ref = build_graph(ref_nodes, ref_edges)
        graph.stage = 'front'

        GeLUMergerErf().find_and_replace_pattern(graph)
        graph.clean_up()

        (flag, resp) = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)
        self.assertTrue(
            graph.get_op_nodes(op='Gelu')[0].approximation == 'erf')
        self.assertTrue(
            len(graph.get_op_nodes(name='final_mul')) == 1
            and graph.get_op_nodes(name='final_mul')[0].op == 'Gelu')

    def test_gelu_p2(self):
        edges = [('input', 'mul'), ('div', 'erf'), ('erf', 'add'),
                 ('add', 'mul'), ('mul', 'mul0'), ('mul_param', 'mul0'),
                 ('div_param', 'div'), ('add_param', 'add'),
                 ('mul0', 'result')]

        graph = build_graph(self.nodes, edges)

        graph_ref = build_graph(ref_nodes, ref_edges)
        graph.stage = 'front'

        GeLUMergerErf().find_and_replace_pattern(graph)
        graph.clean_up()

        (flag, resp) = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)
        self.assertTrue(
            graph.get_op_nodes(op='Gelu')[0].approximation == 'erf')
        self.assertTrue(
            len(graph.get_op_nodes(name='final_mul')) == 1
            and graph.get_op_nodes(name='final_mul')[0].op == 'Gelu')

    def test_gelu_p3(self):
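        # A third wiring of the same pattern, with the 0.5 constant applied on
        # the inner Mul ('mul') while 'mul0' produces the final product.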
        edges = [('input', 'mul'), ('div', 'erf'), ('erf', 'add'),
                 ('add', 'mul'), ('mul', 'mul0'), ('mul_param', 'mul'),
                 ('div_param', 'div'), ('add_param', 'add'),
                 ('mul0', 'result')]

        graph = build_graph(self.nodes, edges)

        graph_ref = build_graph(ref_nodes, ref_edges)
        graph.stage = 'front'

        GeLUMergerErf().find_and_replace_pattern(graph)
        graph.clean_up()

        (flag, resp) = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)
        self.assertTrue(
            graph.get_op_nodes(op='Gelu')[0].approximation == 'erf')
        self.assertTrue(
            len(graph.get_op_nodes(name='final_mul')) == 1
            and graph.get_op_nodes(name='final_mul')[0].op == 'Gelu')
Example #27
    def test_per_sample_weights(self):
        nodes = {
            **const('weights_inp', np.random.randn(100, 2)),
            **regular_op('indices_inp', {'type': 'Parameter'}),
            **regular_op('offsets_inp', {'type': 'Parameter'}),
            **regular_op('per_sample_weights', {'type': 'Parameter'}),
            **regular_op(
                'aten', {
                    'type': None,
                    'kind': 'op',
                    'op': 'ATen',
                    'operator': 'embedding_bag',
                    'mode': 0,
                    'name': 'my_aten'
                }),
            **regular_op(
                'emb_bag', {
                    'type': 'EmbeddingBagOffsetsSum',
                    'kind': 'op',
                    'op': 'EmbeddingBagOffsetsSum'
                }),
            **regular_op('WeightsRank', {
                'type': None,
                'kind': 'op',
                'op': 'Rank'
            }),
            **regular_op('WeightsRank/axis', {
                'type': 'Add',
                'kind': 'op',
                'op': 'Add'
            }),
            **regular_op('gather1', {
                'type': 'Gather',
                'kind': 'op',
                'op': 'Gather'
            }),
            **regular_op('gather2', {
                'type': 'Gather',
                'kind': 'op',
                'op': 'Gather'
            }),
            **regular_op('WeightsShape', {
                'type': 'ShapeOf',
                'kind': 'op',
                'op': 'ShapeOf'
            }),
            **regular_op('Broadcast', {
                'type': 'Broadcast',
                'kind': 'op',
                'op': 'Broadcast'
            }),
            **regular_op('Unsqueeze', {
                'type': 'Unsqueeze',
                'kind': 'op',
                'op': 'Unsqueeze'
            }),
            **const('WeightsShape/Axis', int64_array(0)),
            **const('zero1', int64_array(0)),
            **const('zero2', int64_array(0)),
            **const('Unsqueeze/value', int64_array(0)),
            **const('Broadcast/value', int64_array(0)),
            **const('neg', int64_array(-1)),
            **regular_op('Concat', {
                'type': 'Concat',
                'kind': 'op',
                'op': 'Concat'
            }),
            **result('result'),
        }
        edges = [
            ('weights_inp', 'aten'),
            ('indices_inp', 'aten'),
            ('offsets_inp', 'aten'),
            ('per_sample_weights', 'aten'),
            ('aten', 'result'),
        ]
        graph = build_graph(nodes, edges, nodes_with_edges_only=True)

        graph.graph['layout'] = 'NCHW'
        graph.stage = 'front'

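        # The reference graph below models the expected replacement: the weights
        # matrix is padded with one extra zero row (a zero vector of length
        # weights_shape[rank - 1], built via Broadcast/Unsqueeze and appended by
        # Concat), and the index of that row, i.e. the original row count taken
        # by 'gather2' from ShapeOf, is passed to EmbeddingBagOffsetsSum as the
        # default index together with the per-sample weights.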
        edges_ref = [
            ('weights_inp', 'Concat', {
                'in': 0,
                'out': 0
            }),
            ('weights_inp', 'WeightsShape', {
                'in': 0,
                'out': 0
            }),
            ('weights_inp', 'WeightsRank', {
                'in': 0,
                'out': 0
            }),
            ('WeightsRank', 'WeightsRank/axis'),
            ('neg', 'WeightsRank/axis'),
            ('WeightsShape', 'gather1', {
                'in': 0,
                'out': 0
            }),
            ('WeightsRank/axis', 'gather1'),
            ('WeightsShape/Axis', 'gather1'),
            ('WeightsShape', 'gather2', {
                'in': 0,
                'out': 0
            }),
            ('zero1', 'gather2'),
            ('zero2', 'gather2'),
            ('Broadcast/value', 'Broadcast'),
            ('gather1', 'Broadcast'),
            ('Broadcast', 'Unsqueeze'),
            ('Unsqueeze/value', 'Unsqueeze'),
            ('Unsqueeze', 'Concat'),
            ('Concat', 'emb_bag'),
            ('indices_inp', 'emb_bag'),
            ('offsets_inp', 'emb_bag'),
            ('gather2', 'emb_bag'),
            ('per_sample_weights', 'emb_bag'),
            ('emb_bag', 'result'),
        ]

        graph_ref = build_graph(nodes, edges_ref, nodes_with_edges_only=True)

        AtenToEmbeddingBag().find_and_replace_pattern(graph)

        (flag, resp) = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)

    def test1(self):
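        # A StridedSlice whose begin/end are packed from a non-constant index
        # (begin = [0, index, 0], end = [0, index + 1, 0], shrink_axis_mask on
        # axis 1) is expected to be replaced by Unsqueeze(index) -> Gather along
        # axis 1 -> Squeeze, as encoded in graph_ref below.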
        nodes_attributes = {
            # nodes from original graph
            'input': {
                'type': 'Parameter',
                'kind': 'op',
                'op': 'Parameter'
            },
            'index': {
                'type': 'Parameter',
                'kind': 'op',
                'op': 'Parameter'
            },
            'add': {
                'type': 'Add',
                'kind': 'op',
                'op': 'Add'
            },
            **const('slice_size', int64_array(1)),
            'begin': {
                'type': 'Pack',
                'kind': 'op',
                'op': 'Pack'
            },
            **const('begin_1', int64_array(0)),
            **const('begin_3', int64_array(0)),
            'end': {
                'type': 'Pack',
                'kind': 'op',
                'op': 'Pack'
            },
            **const('end_1', int64_array(0)),
            **const('end_3', int64_array(0)),
            **const('step', int64_array([1, 1, 1])),
            'strided_slice': {
                'type': 'StridedSlice',
                'kind': 'op',
                'op': 'StridedSlice',
                'begin_mask': int64_array([0, 1, 0]),
                'end_mask': int64_array([0, 1, 0]),
                'shrink_axis_mask': int64_array([0, 1, 0]),
                'name': 'non_const_begin_strided_slice'
            },
            'result': {
                'type': 'Result',
                'kind': 'op',
                'op': 'Result'
            },

            # nodes from the reference graph
            'unsqueeze': {
                'type': 'Unsqueeze',
                'kind': 'op',
                'op': 'Unsqueeze'
            },
            **const('unsqueeze_axis', int64_array(0)),
            'gather': {
                'type': 'Gather',
                'kind': 'op',
                'op': 'Gather'
            },
            **const('gather_axis', int64_array(1)),
            'squeeze': {
                'type': 'Squeeze',
                'kind': 'op',
                'op': 'Squeeze'
            },
            **const('squeeze_axis', int64_array(1)),
        }

        graph = build_graph(nodes_attributes, [
            ('input', 'strided_slice', {
                'out': 0,
                'in': 0
            }),
            ('begin_1', 'begin', {
                'out': 0,
                'in': 0
            }),
            ('index', 'begin', {
                'out': 0,
                'in': 1
            }),
            ('begin_3', 'begin', {
                'out': 0,
                'in': 2
            }),
            ('begin', 'strided_slice', {
                'out': 0,
                'in': 1
            }),
            ('end_1', 'end', {
                'out': 0,
                'in': 0
            }),
            ('index', 'add', {
                'out': 0,
                'in': 0
            }),
            ('slice_size', 'add', {
                'out': 0,
                'in': 1
            }),
            ('add', 'end', {
                'out': 0,
                'in': 1
            }),
            ('end_3', 'end', {
                'out': 0,
                'in': 2
            }),
            ('end', 'strided_slice', {
                'out': 0,
                'in': 2
            }),
            ('step', 'strided_slice', {
                'out': 0,
                'in': 3
            }),
            ('strided_slice', 'result', {
                'out': 0,
                'in': 0
            }),
        ],
                            nodes_with_edges_only=True)
        graph.stage = 'front'
        NonConstBeginStridedSliceReplacement().find_and_replace_pattern(graph)

        graph_ref = build_graph(nodes_attributes, [
            ('input', 'gather', {
                'out': 0,
                'in': 0
            }),
            ('gather_axis', 'gather', {
                'out': 0,
                'in': 2
            }),
            ('index', 'unsqueeze', {
                'out': 0,
                'in': 0
            }),
            ('unsqueeze_axis', 'unsqueeze', {
                'out': 0,
                'in': 1
            }),
            ('unsqueeze', 'gather', {
                'out': 0,
                'in': 1
            }),
            ('gather', 'squeeze', {
                'out': 0,
                'in': 0
            }),
            ('squeeze_axis', 'squeeze', {
                'out': 0,
                'in': 1
            }),
            ('squeeze', 'result', {
                'out': 0,
                'in': 0
            }),
        ],
                                nodes_with_edges_only=True)

        (flag, resp) = compare_graphs(graph,
                                      graph_ref,
                                      'result',
                                      check_op_attrs=True)
        self.assertTrue(flag, resp)
        self.assertTrue(graph.node[graph.get_nodes_with_attributes(
            op='Squeeze')[0]]['name'] == 'non_const_begin_strided_slice')

    def test2_not_applied_transform(self):
        # the transformation is not applied if begin and end are constant
        nodes_attributes = {
            # nodes from original graph
            'input': {
                'type': 'Parameter',
                'kind': 'op',
                'op': 'Parameter'
            },
            'begin': {
                'type': 'Pack',
                'kind': 'op',
                'op': 'Pack'
            },
            **const('begin_1', int64_array(0)),
            **const('begin_2', int64_array(0)),
            **const('begin_3', int64_array(0)),
            'end': {
                'type': 'Pack',
                'kind': 'op',
                'op': 'Pack'
            },
            **const('end_1', int64_array(0)),
            **const('end_2', int64_array(3)),
            **const('end_3', int64_array(0)),
            **const('step', int64_array([1, 1, 1])),
            'strided_slice': {
                'type': 'StridedSlice',
                'kind': 'op',
                'op': 'StridedSlice',
                'begin_mask': int64_array([0, 1, 0]),
                'end_mask': int64_array([0, 1, 0]),
                'shrink_axis_mask': int64_array([0, 1, 0]),
                'name': 'non_const_begin_strided_slice'
            },
            'result': {
                'type': 'Result',
                'kind': 'op',
                'op': 'Result'
            },
        }

        graph = build_graph(nodes_attributes, [
            ('input', 'strided_slice', {
                'out': 0,
                'in': 0
            }),
            ('begin_1', 'begin', {
                'out': 0,
                'in': 0
            }),
            ('begin_2', 'begin', {
                'out': 0,
                'in': 1
            }),
            ('begin_3', 'begin', {
                'out': 0,
                'in': 2
            }),
            ('begin', 'strided_slice', {
                'out': 0,
                'in': 1
            }),
            ('end_1', 'end', {
                'out': 0,
                'in': 0
            }),
            ('end_2', 'end', {
                'out': 0,
                'in': 1
            }),
            ('end_3', 'end', {
                'out': 0,
                'in': 2
            }),
            ('end', 'strided_slice', {
                'out': 0,
                'in': 2
            }),
            ('step', 'strided_slice', {
                'out': 0,
                'in': 3
            }),
            ('strided_slice', 'result', {
                'out': 0,
                'in': 0
            }),
        ],
                            nodes_with_edges_only=True)
        graph.stage = 'front'
        NonConstBeginStridedSliceReplacement().find_and_replace_pattern(graph)

        graph_ref = build_graph(nodes_attributes, [('input', 'strided_slice', {
            'out': 0,
            'in': 0
        }), ('begin_1', 'begin', {
            'out': 0,
            'in': 0
        }), ('begin_2', 'begin', {
            'out': 0,
            'in': 1
        }), ('begin_3', 'begin', {
            'out': 0,
            'in': 2
        }), ('begin', 'strided_slice', {
            'out': 0,
            'in': 1
        }), ('end_1', 'end', {
            'out': 0,
            'in': 0
        }), ('end_2', 'end', {
            'out': 0,
            'in': 1
        }), ('end_3', 'end', {
            'out': 0,
            'in': 2
        }), ('end', 'strided_slice', {
            'out': 0,
            'in': 2
        }), ('step', 'strided_slice', {
            'out': 0,
            'in': 3
        }), ('strided_slice', 'result', {
            'out': 0,
            'in': 0
        })],
                                nodes_with_edges_only=True)

        (flag, resp) = compare_graphs(graph,
                                      graph_ref,
                                      'result',
                                      check_op_attrs=True)
        self.assertTrue(flag, resp)


# The opening entries of this nodes_attributes dict ('parameter', 'trelu',
# 'result') and the 'greater' key are reconstructed here as assumed by the
# tests below; only the attributes from 'Greater' onwards survive in the
# original snippet.
nodes_attributes = {
    'parameter': {'type': 'Parameter', 'kind': 'op', 'op': 'Parameter'},
    'trelu': {'kind': 'op', 'op': 'ThresholdedRelu', 'alpha': 0.75,
              'name': 'my_trelu'},
    'result': {'type': 'Result', 'kind': 'op', 'op': 'Result'},
    'greater': {
        'type': 'Greater',
        'kind': 'op',
        'op': 'Greater'
    },
    'mul': {
        'type': 'Multiply',
        'kind': 'op',
        'op': 'Mul',
        'name': 'my_trelu'
    },
    'squeeze2': {
        'type': 'Squeeze',
        'kind': 'op',
        'op': 'Squeeze'
    },
    **const('alpha', float_array([0.75])),
}


class ThresholdedReluDecompositionTest(unittest.TestCase):
    def test_trelu(self):
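        # ThresholdedRelu(x) is expected to be decomposed into x * (x > alpha),
        # which is what the Greater/Mul nodes in nodes_attributes model; the
        # original node name 'my_trelu' should end up on the final Mul.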
        graph = build_graph(nodes_attributes, [
            ('parameter', 'trelu', {
                'in': 0,
                'out': 0
            }),
            ('trelu', 'result', {
                'in': 0,
                'out': 0
            }),
        ],