def test(self):
    """OneHotDepthNormalizer must route the 1D 'depth' const through a Reshape to a scalar."""
    nodes = {
        **regular_op('input', {'type': 'Parameter'}),
        **const('depth', int64_array([2])),
        **regular_op('onehot', {'type': 'OneHot', 'kind': 'op', 'op': 'OneHot'}),
        **regular_op('reshape', {'type': 'Reshape', 'kind': 'op', 'op': 'Reshape'}),
        **const('reshape_dims', int64_array([])),
        **result('result'),
    }
    edges = [
        ('input', 'onehot'),
        ('depth', 'onehot'),
        ('onehot', 'result'),
    ]

    graph = build_graph(nodes, edges)
    graph.graph['layout'] = 'NCHW'
    graph.stage = 'front'

    # Reference: 'depth' passes through a Reshape (empty target shape) before OneHot.
    edges_ref = [
        ('input', 'onehot'),
        ('depth', 'reshape'),
        ('reshape_dims', 'reshape'),
        ('reshape', 'onehot'),
        ('onehot', 'result'),
    ]
    graph_ref = build_graph(nodes, edges_ref)

    OneHotDepthNormalizer().find_and_replace_pattern(graph)

    flag, resp = compare_graphs(graph, graph_ref, 'result')
    self.assertTrue(flag, resp)
def test_one(self):
    """A single FakeOutput is removed; its producer inherits the FakeOutput's name."""
    nodes = {
        **regular_op('input', {'type': 'Parameter'}),
        **regular_op('some_op', {'type': 'SomeOp', 'name': 'some_op_name'}),
        **regular_op('fake_output',
                     {'type': None, 'kind': 'op', 'op': 'FakeOutput', 'name': 'my_output_name'}),
        **result('result'),
    }
    edges = [
        ('input', 'some_op'),
        ('some_op', 'fake_output'),
        ('fake_output', 'result'),
    ]

    graph = build_graph(nodes, edges)
    graph.graph['layout'] = 'NCHW'
    graph.stage = 'front'

    # Reference: FakeOutput dropped, 'some_op' renamed to the FakeOutput's name.
    edges_ref = [
        ('input', 'some_op'),
        ('some_op', 'result'),
    ]
    graph_ref = build_graph(nodes, edges_ref, {'some_op': {'name': 'my_output_name'}})

    FakeOutputResolver().find_and_replace_pattern(graph)

    flag, resp = compare_graphs(graph, graph_ref, 'result')
    self.assertTrue(flag, resp)
def test_swish_with_sigmoid_without_beta_different_tensors(self):
    """No fusion must happen when Mul and Sigmoid consume different source tensors."""
    graph = build_graph_with_edge_attrs(
        {
            **regular_op('input', {'type': 'Parameter'}),
            **regular_op('input_2', {'type': 'Parameter'}),
            **regular_op('sigmoid', {'op': 'Sigmoid'}),
            **regular_op('mul', {'op': 'Mul', 'name': 'final_mul'}),
            **result('result'),
        },
        [('input_2', 'mul', {'in': 0, 'out': 0}),
         ('input', 'sigmoid', {'in': 0, 'out': 0}),
         ('sigmoid', 'mul', {'in': 1, 'out': 0}),
         ('mul', 'result', {'in': 0, 'out': 0})],
        {})

    # The transformation must leave the graph untouched.
    graph_ref = graph.copy()
    graph.stage = 'front'

    SwishWithSigmoidWithoutBeta().find_and_replace_pattern(graph)

    flag, resp = compare_graphs(graph, graph_ref, 'result')
    self.assertTrue(flag, resp)
class SoftplusFusionTest(unittest.TestCase):
    """Tests for SoftplusFusion: log(1 + exp(x)) -> SoftPlus(x)."""

    nodes = {
        **regular_op('input', {'type': 'Parameter'}),
        **regular_op('exp', {'op': 'Exp'}),
        **const('const_1', float_array([1.0])),
        **regular_op('add', {'op': 'Add'}),
        **regular_op('ln', {'op': 'Log', 'name': 'final_log'}),
        **result('result'),
    }

    edges = [('input', 'exp', {'in': 0, 'out': 0}),
             ('const_1', 'add', {'in': 0, 'out': 0}),
             ('exp', 'add', {'in': 1, 'out': 0}),
             ('add', 'ln', {'in': 0, 'out': 0}),
             ('ln', 'result', {'in': 0, 'out': 0})]

    def test_softplus_fusion_test(self):
        """The pattern collapses into a single SoftPlus keeping the Log node's name."""
        graph = build_graph_with_edge_attrs(self.nodes, self.edges, {})
        graph_ref = build_graph(ref_nodes, ref_edges)
        graph.stage = 'front'

        SoftplusFusion().find_and_replace_pattern(graph)

        flag, resp = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)
        self.assertTrue(len(graph.get_op_nodes(name='final_log')) == 1 and
                        graph.get_op_nodes(name='final_log')[0].op == 'SoftPlus')

    def test_softplus_fusion_test_wrong_const(self):
        """No fusion when the added constant is not exactly 1.0."""
        graph = build_graph_with_edge_attrs(self.nodes, self.edges,
                                            {'const_1': {'value': float_array([0.9999])}})
        graph_ref = graph.copy()
        graph.stage = 'front'

        SoftplusFusion().find_and_replace_pattern(graph)

        flag, resp = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)
class SwishWithSigmoidWithBetaTest(unittest.TestCase):
    """Tests for SwishWithSigmoidWithBeta: x * sigmoid(beta * x) -> Swish(x, beta)."""

    nodes = {
        **regular_op('input', {'type': 'Parameter'}),
        **regular_op('beta', {'type': 'Parameter'}),
        **regular_op('mul_beta', {'op': 'Mul'}),
        **regular_op('sigmoid', {'op': 'Sigmoid'}),
        **regular_op('mul_2', {'op': 'Mul', 'name': 'final_mul'}),
        **result('result'),
    }

    edges = [('input', 'mul_beta', {'in': 0, 'out': 0}),
             ('input', 'mul_2', {'in': 0, 'out': 0}),
             ('beta', 'mul_beta', {'in': 1, 'out': 0}),
             ('mul_beta', 'sigmoid', {'in': 0, 'out': 0}),
             ('sigmoid', 'mul_2', {'in': 1, 'out': 0}),
             ('mul_2', 'result', {'in': 0, 'out': 0})]

    def test_swish_with_sigmoid_with_beta_test(self):
        """The pattern is fused into a single Swish with 'beta' as its extra input."""
        graph = build_graph_with_edge_attrs(self.nodes, self.edges, {})

        new_ref_nodes = ref_nodes.copy()
        new_ref_nodes.update(**regular_op('beta', {'type': 'Parameter'}))

        graph_ref = build_graph(new_ref_nodes, ref_edges + [('beta', 'swish')])
        graph.stage = 'front'

        SwishWithSigmoidWithBeta().find_and_replace_pattern(graph)

        flag, resp = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)
        self.assertTrue(len(graph.get_op_nodes(name='final_mul')) == 1 and
                        graph.get_op_nodes(name='final_mul')[0].op == 'Swish')

    def test_swish_with_sigmoid_with_beta_different_tensors(self):
        """No fusion when the outer Mul and the sigmoid branch consume different tensors."""
        graph = build_graph_with_edge_attrs(
            {
                **regular_op('input', {'type': 'Parameter'}),
                **regular_op('input_2', {'type': 'Parameter'}),
                **regular_op('beta', {'type': 'Parameter'}),
                **regular_op('mul_beta', {'op': 'Mul'}),
                **regular_op('sigmoid', {'op': 'Sigmoid'}),
                **regular_op('mul_2', {'op': 'Mul', 'name': 'final_mul'}),
                **result('result'),
            },
            [('input', 'mul_beta', {'in': 0, 'out': 0}),
             ('input_2', 'mul_2', {'in': 0, 'out': 0}),
             ('beta', 'mul_beta', {'in': 1, 'out': 0}),
             ('mul_beta', 'sigmoid', {'in': 0, 'out': 0}),
             ('sigmoid', 'mul_2', {'in': 1, 'out': 0}),
             ('mul_2', 'result', {'in': 0, 'out': 0})],
            {})

        graph_ref = graph.copy()
        graph.stage = 'front'

        SwishWithSigmoidWithBeta().find_and_replace_pattern(graph)

        flag, resp = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)
def setUp(self):
    """Prepare common node names, pre-processing constants and the replacement description."""
    self.start_node_name = 'StatefulPartitionedCall/Preprocessor/unstack'
    self.end_node_name = 'StatefulPartitionedCall/Preprocessor/stack'
    self.end_node_name2 = 'StatefulPartitionedCall/Preprocessor/stack2'
    self.loop_start_node_name = 'prefix/map/while/Preprocessor/unstack'
    self.loop_end_node_name = 'prefix/map/while/Preprocessor/stack'
    self.mul_const = float32_array([0.025, 0.374, -0.45])
    self.sub_const = float32_array([2.0, 3.0, 4.0])

    self.nodes = {
        **regular_op('input', {'type': 'Parameter'}),
        **regular_op('mul', {'op': 'Mul', 'type': 'Multiply', 'name': 'my_mul'}),
        **regular_op('sub', {'op': 'Sub', 'type': 'Subtract', 'name': 'my_sub'}),
        **const('mul_const', self.mul_const),
        **const('sub_const', self.sub_const),
        **regular_op(self.start_node_name, {'op': 'Identity'}),
        **regular_op(self.end_node_name, {'op': 'Identity'}),
        **regular_op(self.end_node_name2, {'op': 'Identity'}),
        **regular_op('loop', {'op': 'Loop', 'body': None}),
        **regular_op('resize', {'type': 'Interpolate'}),
        **result('result'),
    }
    self.replacement_desc = {
        'start_nodes': [self.start_node_name],
        'end_nodes': [self.end_node_name, self.end_node_name2],
    }
def generate_offsets():
    """Build MemoryOffset nodes/edges for the enclosing scope's ``time_offsets``.

    A zero offset wires 'placeholder' straight into the matching 'concat'
    input port; every other offset goes through its own 'memoryoffset_<i>' node.
    """
    offset_edges = []
    offset_nodes = {}
    for i, t in enumerate(time_offsets):
        node_name = 'memoryoffset_' + str(i)
        offset_nodes.update(**regular_op(node_name, {'type': None}))
        if t != 0:
            offset_edges.append(('placeholder', node_name, {'out': 0, 'in': 0}))
            offset_edges.append((node_name, 'concat', {'out': 0, 'in': i}))
        else:
            offset_edges.append(('placeholder', 'concat', {'out': 0, 'in': i}))
    return offset_nodes, offset_edges
def test(self):
    """An ATen 'embedding_bag' without per-sample weights maps 1:1 to EmbeddingBagOffsetsSum."""
    nodes = {
        **const('weights_inp', np.random.randn(100, 2)),
        **regular_op('indices_inp', {'type': 'Parameter'}),
        **regular_op('offsets_inp', {'type': 'Parameter'}),
        **regular_op('aten', {'type': None, 'kind': 'op', 'op': 'ATen',
                              'operator': 'embedding_bag', 'mode': 0, 'name': 'my_aten'}),
        **regular_op('emb_bag', {'type': 'EmbeddingBagOffsetsSum', 'kind': 'op',
                                 'op': 'EmbeddingBagOffsetsSum'}),
        **result('result'),
    }
    edges = [
        ('weights_inp', 'aten'),
        ('indices_inp', 'aten'),
        ('offsets_inp', 'aten'),
        ('aten', 'result'),
    ]

    graph = build_graph(nodes, edges)
    graph.graph['layout'] = 'NCHW'
    graph.stage = 'front'

    # Reference: the three inputs are re-wired directly into the new op.
    edges_ref = [
        ('weights_inp', 'emb_bag'),
        ('indices_inp', 'emb_bag'),
        ('offsets_inp', 'emb_bag'),
        ('emb_bag', 'result'),
    ]
    graph_ref = build_graph(nodes, edges_ref)

    AtenToEmbeddingBag().find_and_replace_pattern(graph)

    flag, resp = compare_graphs(graph, graph_ref, 'result')
    self.assertTrue(flag, resp)
def test_hsigmoid_with_relu_mul_different_tensors(self):
    """No fusion when the two Mul branches originate from different tensors."""
    graph = build_graph_with_edge_attrs(
        {
            **regular_op('input', {'type': 'Parameter'}),
            **regular_op('input_2', {'type': 'Parameter'}),
            **regular_op('add', {'op': 'Add'}),
            **regular_op('max', {'op': 'Maximum'}),
            **regular_op('min', {'op': 'Minimum'}),
            **regular_op('mul', {'op': 'Mul'}),
            **regular_op('mul_2', {'op': 'Mul', 'name': 'final_mul'}),
            **const('const_0', float_array([0.0])),
            **const('const_3', float_array([3.0])),
            **const('const_6', float_array([6.0])),
            **const('const_1_6', float_array([1.0 / 6.0])),
            **result('result'),
        },
        [('input_2', 'mul', {'in': 1, 'out': 0}),
         ('input', 'add', {'in': 0, 'out': 0}),
         ('const_3', 'add', {'in': 1, 'out': 0}),
         ('add', 'max', {'in': 0, 'out': 0}),
         ('const_0', 'max', {'in': 1, 'out': 0}),
         ('max', 'min', {'in': 0, 'out': 0}),
         ('const_6', 'min', {'in': 1, 'out': 0}),
         ('min', 'mul', {'in': 0, 'out': 0}),
         ('mul', 'mul_2', {'in': 0, 'out': 0}),
         ('const_1_6', 'mul_2', {'in': 1, 'out': 0}),
         ('mul_2', 'result', {'in': 0, 'out': 0})])

    # The transformation must leave the graph untouched.
    graph_ref = graph.copy()
    graph.stage = 'front'

    HSigmoidWithReluMul().find_and_replace_pattern(graph)

    flag, resp = compare_graphs(graph, graph_ref, 'result')
    self.assertTrue(flag, resp)
def test_reduce_axis_is_None(self):
    """When 'axis' is None the normalizer inserts Rank -> Range to cover all axes."""
    graph = build_graph(nodes, edges, nodes_with_edges_only=True)
    graph.stage = 'front'

    ReduceAxisNormalizer().find_and_replace_pattern(graph)

    ref_nodes = nodes.copy()
    ref_nodes.update({
        **regular_op('rank', {'op': 'Rank', 'type': None}),
        **regular_op('range', {'op': 'Range', 'type': 'Range'}),
        **regular_op('begin', {'type': 'Const', 'value': int64_array([0])}),
        **regular_op('step', {'type': 'Const', 'value': int64_array([1])}),
    })
    # Reference: range(0, rank(parameter), 1) becomes the reduce's axis input.
    graph_ref = build_graph(ref_nodes, [
        *edges,
        *connect_front('parameter:0', 'rank'),
        *connect_front('begin:0', '0:range'),
        *connect_front('rank:0', '1:range'),
        *connect_front('step:0', '2:range'),
        *connect_front('range:0', '1:reduce'),
    ], nodes_with_edges_only=True)

    flag, resp = compare_graphs(graph, graph_ref, 'output', check_op_attrs=True)
    self.assertTrue(flag, resp)
def test(self):
    """DropoutWithRandomUniformReplacer swaps RandomUniform for a Broadcast of 0.5, keeping its name."""
    nodes = {
        **regular_op('input', {'type': 'Parameter'}),
        **regular_op('shape', {'type': 'ShapeOf', 'kind': 'op', 'op': 'ShapeOf'}),
        **regular_op('random_uniform', {'type': 'RandomUniform', 'kind': 'op',
                                        'op': 'RandomUniform', 'name': 'dropout/RU'}),
        **regular_op('mul', {'type': 'Mul', 'kind': 'op', 'op': 'Mul'}),
        **regular_op('add', {'type': 'Add', 'kind': 'op', 'op': 'Add'}),
        **regular_op('add2', {'type': 'Add', 'kind': 'op', 'op': 'Add'}),
        **regular_op('floor', {'type': 'Floor', 'kind': 'op', 'op': 'Floor'}),
        'add_const': {'kind': 'op', 'op': 'Const', 'value': np.array(0.0), 'data_type': np.float32},
        **result('result'),
        # nodes expected to appear after the replacement
        'broadcast_const': {'kind': 'op', 'op': 'Const', 'value': np.array(0.5),
                            'data_type': np.float32},
        **regular_op('broadcast', {'type': 'Broadcast', 'kind': 'op', 'op': 'Broadcast'}),
    }
    edges = [('input', 'shape'), ('shape', 'random_uniform'), ('random_uniform', 'mul'),
             ('mul', 'add'), ('add_const', 'add'), ('add', 'add2'),
             ('add2', 'floor'), ('floor', 'result')]

    graph = build_graph(nodes, edges, nodes_with_edges_only=True)
    graph.graph['layout'] = 'NCHW'
    graph.stage = 'front'

    DropoutWithRandomUniformReplacer().find_and_replace_pattern(graph)

    edges_ref = [('input', 'shape'), ('broadcast_const', 'broadcast'), ('shape', 'broadcast'),
                 ('broadcast', 'mul'), ('mul', 'add'), ('add_const', 'add'),
                 ('add', 'add2'), ('add2', 'floor'), ('floor', 'result')]
    graph_ref = build_graph(nodes, edges_ref, nodes_with_edges_only=True)

    # check graph structure after the transformation and the preserved output name
    flag, resp = compare_graphs(graph, graph_ref, 'result')
    self.assertTrue(flag, resp)
    self.assertTrue(graph.node[graph.get_nodes_with_attributes(op='Broadcast')[0]]['name'] == 'dropout/RU')
def test_mish_fusion_different_source(self):
    """No fusion when Mul and SoftPlus consume different source tensors."""
    graph = build_graph_with_edge_attrs(
        {
            **regular_op('input', {'type': 'Parameter'}),
            **regular_op('input_2', {'type': 'Parameter'}),
            **regular_op('softplus', {'op': 'SoftPlus'}),
            **regular_op('tanh', {'op': 'Tanh'}),
            **regular_op('mul', {'op': 'Mul', 'name': 'final_mul'}),
            **result('result'),
        },
        [('input', 'softplus', {'in': 0, 'out': 0}),
         ('input_2', 'mul', {'in': 0, 'out': 0}),
         ('softplus', 'tanh', {'in': 0, 'out': 0}),
         ('tanh', 'mul', {'in': 1, 'out': 0}),
         ('mul', 'result', {'in': 0, 'out': 0})],
        {})

    # The transformation must leave the graph untouched.
    graph_ref = graph.copy()
    graph.stage = 'front'

    MishFusion().find_and_replace_pattern(graph)

    flag, resp = compare_graphs(graph, graph_ref, 'result')
    self.assertTrue(flag, resp)
def test_swish_with_sigmoid_with_beta_test(self):
    """x * sigmoid(beta * x) is fused into a single Swish with 'beta' as an extra input."""
    graph = build_graph_with_edge_attrs(self.nodes, self.edges, {})

    new_ref_nodes = ref_nodes.copy()
    new_ref_nodes.update(**regular_op('beta', {'type': 'Parameter'}))

    graph_ref = build_graph(new_ref_nodes, ref_edges + [('beta', 'swish')])
    graph.stage = 'front'

    SwishWithSigmoidWithBeta().find_and_replace_pattern(graph)

    flag, resp = compare_graphs(graph, graph_ref, 'result')
    self.assertTrue(flag, resp)
    self.assertTrue(len(graph.get_op_nodes(name='final_mul')) == 1 and
                    graph.get_op_nodes(name='final_mul')[0].op == 'Swish')
def build_body_graph(pre_processing: str):
    """Build the Loop body graph with mean/scale pre-processing placed per ``pre_processing``:
    'no' (absent), 'trailing' (after resize), anything else (before resize).
    Uses the enclosing test's ``self`` for node names and constants."""
    nodes = {
        **regular_op('input', {'type': 'Parameter'}),
        **regular_op('mul', {'op': 'Mul', 'type': 'Multiply', 'name': 'my_body_mul'}),
        **regular_op('sub', {'op': 'Sub', 'type': 'Subtract', 'name': 'my_body_sub'}),
        **const('body_mul_const', self.mul_const),
        **const('body_sub_const', self.sub_const),
        **regular_op(self.loop_start_node_name, {'op': 'Identity'}),
        **regular_op(self.loop_end_node_name, {'op': 'Identity'}),
        **regular_op('resize', {'type': 'Interpolate'}),
        **result('result'),
    }
    if pre_processing == 'no':
        edges = [
            *connect_front('input', self.loop_start_node_name),
            *connect_front(self.loop_start_node_name, 'resize'),
            *connect_front('resize', self.loop_end_node_name),
            *connect_front(self.loop_end_node_name, 'result'),
        ]
    elif pre_processing == 'trailing':
        edges = [
            *connect_front('input', self.loop_start_node_name),
            *connect_front(self.loop_start_node_name, 'resize'),
            *connect_front('resize', self.loop_end_node_name),
            *connect_front(self.loop_end_node_name, '0:mul'),
            *connect_front('body_mul_const', '1:mul'),
            *connect_front('body_sub_const', '0:sub'),
            *connect_front('mul', '1:sub'),
            *connect_front('sub', 'result'),
        ]
    else:
        edges = [
            *connect_front('input', '0:mul'),
            *connect_front('body_mul_const', '1:mul'),
            *connect_front('body_sub_const', '0:sub'),
            *connect_front('mul', '1:sub'),
            *connect_front('sub', self.loop_start_node_name),
            *connect_front(self.loop_start_node_name, 'resize'),
            *connect_front('resize', self.loop_end_node_name),
            *connect_front(self.loop_end_node_name, 'result'),
        ]
    graph = build_graph(nodes, edges, nodes_with_edges_only=True)
    graph.stage = 'front'
    return graph
def test_multi(self):
    """Each of several FakeOutputs becomes an Add-with-zero carrying the FakeOutput's name."""
    nodes = {
        **regular_op('input', {'type': 'Parameter'}),
        **regular_op('some_op', {'type': 'SomeOp', 'name': 'some_op_name'}),
        **regular_op('fake_output1',
                     {'type': None, 'kind': 'op', 'op': 'FakeOutput', 'name': 'my_output_name1'}),
        **regular_op('fake_output2',
                     {'type': None, 'kind': 'op', 'op': 'FakeOutput', 'name': 'my_output_name2'}),
        **const('const1', int64_array(0)),
        **const('const2', int64_array(0)),
        **regular_op('add1', {'type': None, 'kind': 'op', 'op': 'Add', 'name': 'my_output_name1'}),
        **regular_op('add2', {'type': None, 'kind': 'op', 'op': 'Add', 'name': 'my_output_name2'}),
        **result('result1'),
        **result('result2'),
    }
    edges = [
        ('input', 'some_op'),
        ('some_op', 'fake_output1'),
        ('some_op', 'fake_output2'),
        ('fake_output1', 'result1'),
        ('fake_output2', 'result2'),
    ]

    graph = build_graph(nodes, edges)
    graph.graph['layout'] = 'NCHW'
    graph.stage = 'front'

    edges_ref = [
        ('input', 'some_op'),
        ('some_op', 'add1'),
        ('const1', 'add1'),
        ('some_op', 'add2'),
        ('const2', 'add2'),
        ('add1', 'result1'),
        ('add2', 'result2'),
    ]
    graph_ref = build_graph(nodes, edges_ref)

    FakeOutputResolver().find_and_replace_pattern(graph)

    flag, resp = compare_graphs(graph, graph_ref, 'result1')
    self.assertTrue(flag, resp)
class MishFusionTest(unittest.TestCase):
    """Tests for MishFusion: x * tanh(softplus(x)) -> Mish(x)."""

    nodes = {
        **regular_op('input', {'type': 'Parameter'}),
        **regular_op('softplus', {'op': 'SoftPlus'}),
        **regular_op('tanh', {'op': 'Tanh'}),
        **regular_op('mul', {'op': 'Mul', 'name': 'final_mul'}),
        **result('result'),
    }

    edges = [('input', 'softplus', {'in': 0, 'out': 0}),
             ('input', 'mul', {'in': 0, 'out': 0}),
             ('softplus', 'tanh', {'in': 0, 'out': 0}),
             ('tanh', 'mul', {'in': 1, 'out': 0}),
             ('mul', 'result', {'in': 0, 'out': 0})]

    def test_mish_fusion(self):
        """The pattern collapses into a single Mish keeping the Mul node's name."""
        graph = build_graph_with_edge_attrs(self.nodes, self.edges, {})
        graph_ref = build_graph(ref_nodes, ref_edges)
        graph.stage = 'front'

        MishFusion().find_and_replace_pattern(graph)

        flag, resp = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)
        self.assertTrue(len(graph.get_op_nodes(name='final_mul')) == 1 and
                        graph.get_op_nodes(name='final_mul')[0].op == 'Mish')

    def test_mish_fusion_different_source(self):
        """No fusion when Mul and SoftPlus consume different source tensors."""
        graph = build_graph_with_edge_attrs(
            {
                **regular_op('input', {'type': 'Parameter'}),
                **regular_op('input_2', {'type': 'Parameter'}),
                **regular_op('softplus', {'op': 'SoftPlus'}),
                **regular_op('tanh', {'op': 'Tanh'}),
                **regular_op('mul', {'op': 'Mul', 'name': 'final_mul'}),
                **result('result'),
            },
            [('input', 'softplus', {'in': 0, 'out': 0}),
             ('input_2', 'mul', {'in': 0, 'out': 0}),
             ('softplus', 'tanh', {'in': 0, 'out': 0}),
             ('tanh', 'mul', {'in': 1, 'out': 0}),
             ('mul', 'result', {'in': 0, 'out': 0})],
            {})

        graph_ref = graph.copy()
        graph.stage = 'front'

        MishFusion().find_and_replace_pattern(graph)

        flag, resp = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import unittest from extensions.front.Mish_fusion import MishFusion from mo.utils.ir_engine.compare_graphs import compare_graphs from mo.utils.unittest.graph import build_graph, regular_op, result, build_graph_with_edge_attrs ref_nodes = { **regular_op('input', {'type': 'Parameter'}), **regular_op('mish', { 'type': 'Mish', 'name': 'final_mul' }), **result('result') } ref_edges = [('input', 'mish'), ('mish', 'result')] class MishFusionTest(unittest.TestCase): nodes = { **regular_op('input', {'type': 'Parameter'}), **regular_op('softplus', {'op': 'SoftPlus'}), **regular_op('tanh', {'op': 'Tanh'}), **regular_op('mul', {
# Copyright (C) 2018-2021 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

import unittest

import numpy as np

from extensions.front.reduce_axis_normalizer import ReduceAxisNormalizer
from mo.front.common.partial_infer.utils import int64_array
from mo.utils.ir_engine.compare_graphs import compare_graphs
from mo.utils.unittest.graph import build_graph, result, connect_front, regular_op

# Shared test graph: a ReduceSum whose 'axis' attribute is unset; the 'axis'
# Const node is available for tests that wire an explicit axis input.
nodes = {
    **regular_op('parameter', {'type': 'Parameter'}),
    **regular_op('reduce', {'op': 'ReduceSum', 'axis': None}),
    **regular_op('axis', {'op': 'Const', 'type': 'Const', 'value': int64_array([1])}),
    **result(),
}

edges = [
    *connect_front('parameter:0', '0:reduce'),
    *connect_front('reduce', 'output'),
]
def test_per_sample_weights(self):
    """With per-sample weights the ATen op is expanded: the weights matrix gets a zero
    default row (Broadcast/Unsqueeze/Concat) and EmbeddingBagOffsetsSum receives
    the default index (gather2) plus the per-sample weights input."""
    nodes = {
        **const('weights_inp', np.random.randn(100, 2)),
        **regular_op('indices_inp', {'type': 'Parameter'}),
        **regular_op('offsets_inp', {'type': 'Parameter'}),
        **regular_op('per_sample_weights', {'type': 'Parameter'}),
        **regular_op('aten', {'type': None, 'kind': 'op', 'op': 'ATen',
                              'operator': 'embedding_bag', 'mode': 0, 'name': 'my_aten'}),
        **regular_op('emb_bag', {'type': 'EmbeddingBagOffsetsSum', 'kind': 'op',
                                 'op': 'EmbeddingBagOffsetsSum'}),
        **regular_op('WeightsRank', {'type': None, 'kind': 'op', 'op': 'Rank'}),
        **regular_op('WeightsRank/axis', {'type': 'Add', 'kind': 'op', 'op': 'Add'}),
        **regular_op('gather1', {'type': 'Gather', 'kind': 'op', 'op': 'Gather'}),
        **regular_op('gather2', {'type': 'Gather', 'kind': 'op', 'op': 'Gather'}),
        **regular_op('WeightsShape', {'type': 'ShapeOf', 'kind': 'op', 'op': 'ShapeOf'}),
        **regular_op('Broadcast', {'type': 'Broadcast', 'kind': 'op', 'op': 'Broadcast'}),
        **regular_op('Unsqueeze', {'type': 'Unsqueeze', 'kind': 'op', 'op': 'Unsqueeze'}),
        **const('WeightsShape/Axis', int64_array(0)),
        **const('zero1', int64_array(0)),
        **const('zero2', int64_array(0)),
        **const('Unsqueeze/value', int64_array(0)),
        **const('Broadcast/value', int64_array(0)),
        **const('neg', int64_array(-1)),
        **regular_op('Concat', {'type': 'Concat', 'kind': 'op', 'op': 'Concat'}),
        **result('result'),
    }
    edges = [
        ('weights_inp', 'aten'),
        ('indices_inp', 'aten'),
        ('offsets_inp', 'aten'),
        ('per_sample_weights', 'aten'),
        ('aten', 'result'),
    ]

    graph = build_graph(nodes, edges, nodes_with_edges_only=True)
    graph.graph['layout'] = 'NCHW'
    graph.stage = 'front'

    edges_ref = [
        ('weights_inp', 'Concat', {'in': 0, 'out': 0}),
        ('weights_inp', 'WeightsShape', {'in': 0, 'out': 0}),
        ('weights_inp', 'WeightsRank', {'in': 0, 'out': 0}),
        ('WeightsRank', 'WeightsRank/axis'),
        ('neg', 'WeightsRank/axis'),
        ('WeightsShape', 'gather1', {'in': 0, 'out': 0}),
        ('WeightsRank/axis', 'gather1'),
        ('WeightsShape/Axis', 'gather1'),
        ('WeightsShape', 'gather2', {'in': 0, 'out': 0}),
        ('zero1', 'gather2'),
        ('zero2', 'gather2'),
        ('Broadcast/value', 'Broadcast'),
        ('gather1', 'Broadcast'),
        ('Broadcast', 'Unsqueeze'),
        ('Unsqueeze/value', 'Unsqueeze'),
        ('Unsqueeze', 'Concat'),
        ('Concat', 'emb_bag'),
        ('indices_inp', 'emb_bag'),
        ('offsets_inp', 'emb_bag'),
        ('gather2', 'emb_bag'),
        ('per_sample_weights', 'emb_bag'),
        ('emb_bag', 'result'),
    ]
    graph_ref = build_graph(nodes, edges_ref, nodes_with_edges_only=True)

    AtenToEmbeddingBag().find_and_replace_pattern(graph)

    flag, resp = compare_graphs(graph, graph_ref, 'result')
    self.assertTrue(flag, resp)
def test_tdnnreplacer(self, weights, biases, time_offsets):
    """TdnnComponentReplacer must expand 'tdnncomponent' into MemoryOffsets + Concat + FullyConnected."""

    def generate_offsets():
        # One MemoryOffset per non-zero time offset; a zero offset wires the
        # placeholder straight into the matching Concat input port.
        offset_edges = []
        offset_nodes = {}
        for i, t in enumerate(time_offsets):
            node_name = 'memoryoffset_' + str(i)
            offset_nodes.update(**regular_op(node_name, {'type': None}))
            if t != 0:
                offset_edges.append(('placeholder', node_name, {'out': 0, 'in': 0}))
                offset_edges.append((node_name, 'concat', {'out': 0, 'in': i}))
            else:
                offset_edges.append(('placeholder', 'concat', {'out': 0, 'in': i}))
        return offset_nodes, offset_edges

    offset_nodes, ref_offset_edges = generate_offsets()

    nodes = {
        **offset_nodes,
        **regular_op('placeholder', {'type': 'Parameter'}),
        **regular_op('tdnncomponent', {'op': 'tdnncomponent',
                                       'weights': np.array(weights),
                                       'biases': np.array(biases),
                                       'time_offsets': np.array(time_offsets)}),
        **const('weights', np.array(weights)),
        **const('biases', np.array(biases)),
        **regular_op('concat', {'type': 'Concat', 'axis': 1}),
        **regular_op('memoryoffset_0', {'type': None}),
        **regular_op('memoryoffset_1', {'type': None}),
        **regular_op('memoryoffset_2', {'type': None}),
        **regular_op('fully_connected', {'type': 'FullyConnected'}),
        **result('result'),
    }

    graph = build_graph(nodes, [
        *connect_front('placeholder', 'tdnncomponent'),
        *connect_front('tdnncomponent', 'result'),
    ], nodes_with_edges_only=True)
    graph.stage = 'front'

    ref_graph = build_graph(nodes, [
        *ref_offset_edges,
        *connect_front('concat', '0:fully_connected'),
        *connect_front('weights', '1:fully_connected'),
        *connect_front('biases', '2:fully_connected'),
        *connect_front('fully_connected', 'result'),
    ], nodes_with_edges_only=True)

    TdnnComponentReplacer().find_and_replace_pattern(graph)

    flag, resp = compare_graphs(graph, ref_graph, 'result', check_op_attrs=True)
    self.assertTrue(flag, resp)
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import unittest from extensions.front.Softplus_fusion import SoftplusFusion from mo.front.common.partial_infer.utils import float_array from mo.utils.ir_engine.compare_graphs import compare_graphs from mo.utils.unittest.graph import build_graph, const, regular_op, result, build_graph_with_edge_attrs ref_nodes = {**regular_op('input', {'type': 'Parameter'}), **regular_op('softplus', {'type': 'SoftPlus', 'name': 'final_log'}), **result('result') } ref_edges = [('input', 'softplus'), ('softplus', 'result')] class SoftplusFusionTest(unittest.TestCase): nodes = { **regular_op('input', {'type': 'Parameter'}), **regular_op('exp', {'op': 'Exp'}), **const('const_1', float_array([1.0])), **regular_op('add', {'op': 'Add'}), **regular_op('ln', {'op': 'Log', 'name': 'final_log'}), **result('result'), }
class GeLUMergerErfTest(unittest.TestCase):
    """Tests for GeLUMergerErf: the erf-based GeLU expression is merged into a single Gelu op."""

    nodes = {
        **regular_op('input', {'op': 'Parameter', 'type': 'Parameter'}),
        **regular_op('mul', {'op': 'Mul'}),
        **regular_op('mul0', {'op': 'Mul', 'name': 'final_mul'}),
        **regular_op('div', {'op': 'Div'}),
        **regular_op('erf', {'op': 'Erf'}),
        **regular_op('add', {'op': 'Add'}),
        **const('mul_param', float_array([0.5])),
        **const('div_param', float_array([sqrt(2.)])),
        **const('add_param', int64_array([1])),
        **result('result'),
    }

    def _run_and_check(self, edges):
        # Shared body: apply the merger and verify a single erf-approximation
        # Gelu remains, carrying the 'final_mul' name.
        graph = build_graph(self.nodes, edges)
        graph_ref = build_graph(ref_nodes, ref_edges)
        graph.stage = 'front'

        GeLUMergerErf().find_and_replace_pattern(graph)
        graph.clean_up()

        flag, resp = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)
        self.assertTrue(graph.get_op_nodes(op='Gelu')[0].approximation == 'erf')
        self.assertTrue(len(graph.get_op_nodes(name='final_mul')) == 1 and
                        graph.get_op_nodes(name='final_mul')[0].op == 'Gelu')

    def test_gelu_p1(self):
        """Variant 1: 0.5 * x multiplied by (erf(x / sqrt(2)) + 1)."""
        self._run_and_check([('input', 'mul'), ('mul', 'mul0'), ('input', 'div'),
                             ('div', 'erf'), ('erf', 'add'), ('add', 'mul0'),
                             ('mul_param', 'mul'), ('div_param', 'div'),
                             ('add_param', 'add'), ('mul0', 'result')])

    def test_gelu_p2(self):
        """Variant 2: the 0.5 factor applied on the outer Mul."""
        self._run_and_check([('input', 'mul'), ('div', 'erf'), ('erf', 'add'),
                             ('add', 'mul'), ('mul', 'mul0'), ('mul_param', 'mul0'),
                             ('div_param', 'div'), ('add_param', 'add'),
                             ('mul0', 'result')])

    def test_gelu_p3(self):
        """Variant 3: the 0.5 factor applied on the inner Mul."""
        self._run_and_check([('input', 'mul'), ('div', 'erf'), ('erf', 'add'),
                             ('add', 'mul'), ('mul', 'mul0'), ('mul_param', 'mul'),
                             ('div_param', 'div'), ('add_param', 'add'),
                             ('mul0', 'result')])
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import unittest from math import sqrt from extensions.front.GeLUMerger_Erf import GeLUMergerErf from mo.front.common.partial_infer.utils import float_array, int64_array from mo.utils.ir_engine.compare_graphs import compare_graphs from mo.utils.unittest.graph import build_graph, const, regular_op, result, build_graph ref_nodes = { **regular_op('input', {'type': 'Parameter'}), **regular_op('gelu', { 'type': 'Gelu', 'approximation': 'erf', 'name': 'final_mul' }), **result('result') } ref_edges = [('input', 'gelu'), ('gelu', 'result')] class GeLUMergerErfTest(unittest.TestCase): nodes = { **regular_op('input', { 'op': 'Parameter', 'type': 'Parameter'
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import unittest from extensions.front.HSigmoid_fusion import HSigmoidWithClamp, HSigmoidWithMinMax, HSigmoidWithReluDiv, \ HSigmoidWithReluMul from mo.front.common.partial_infer.utils import float_array from mo.utils.ir_engine.compare_graphs import compare_graphs from mo.utils.unittest.graph import build_graph, const, regular_op, result, build_graph_with_edge_attrs ref_nodes = { **regular_op('input', {'type': 'Parameter'}), **regular_op('hsigmoid', { 'type': 'HSigmoid', 'name': 'final_mul' }), **result('result') } ref_edges = [('input', 'hsigmoid'), ('hsigmoid', 'result')] class HSigmoidWithClampTest(unittest.TestCase): nodes = { **regular_op('input', {'type': 'Parameter'}), **regular_op('add', {'op': 'Add'}), **regular_op('relu6', {'op': 'Clamp'}), **regular_op('mul_2', {
class HSigmoidWithReluMulTest(unittest.TestCase):
    """Tests for the HSigmoidWithReluMul front transformation.

    The matched pattern is the ReLU6-style expression
    mul(min(relu(add(x, 3)), 6), 1/6), which must collapse into a single
    HSigmoid node named after the final Mul.
    """

    # Pattern nodes: x -> Add(+3) -> ReLU -> Min(6) -> Mul(1/6) -> result
    nodes = {
        **regular_op('input', {'type': 'Parameter'}),
        **regular_op('add', {'op': 'Add'}),
        **regular_op('relu', {'op': 'ReLU'}),
        **regular_op('min', {'op': 'Minimum'}),
        **regular_op('mul', {'op': 'Mul', 'name': 'final_mul'}),
        **const('add_const', float_array([3.0])),
        **const('min_const', float_array([6.0])),
        **const('mul_const', float_array([1.0 / 6.0])),
        **result('result'),
    }

    edges = [
        ('input', 'add', {'in': 0, 'out': 0}),
        ('add_const', 'add', {'in': 1, 'out': 0}),
        ('add', 'relu', {'in': 0, 'out': 0}),
        ('relu', 'min', {'in': 0, 'out': 0}),
        ('min_const', 'min', {'in': 1, 'out': 0}),
        ('min', 'mul', {'in': 0, 'out': 0}),
        ('mul_const', 'mul', {'in': 1, 'out': 0}),
        ('mul', 'result', {'in': 0, 'out': 0}),
    ]

    def test_hsigmoid_with_relu_mul(self):
        """The full pattern is replaced by one HSigmoid feeding the result."""
        g = build_graph_with_edge_attrs(self.nodes, self.edges, {})
        g.stage = 'front'
        reference = build_graph(ref_nodes, ref_edges)

        HSigmoidWithReluMul().find_and_replace_pattern(g)

        status, message = compare_graphs(g, reference, 'result')
        self.assertTrue(status, message)
        # The fused node must keep the final Mul's name and drive the result.
        fused = g.get_op_nodes(name='final_mul')
        self.assertTrue(len(fused) == 1 and fused[0].op == 'HSigmoid')
        self.assertTrue(fused[0].out_nodes()[0].node == 'result')

    def test_hsigmoid_with_relu_mul_wrong_constant(self):
        """A non-matching Add constant must leave the graph untouched."""
        g = build_graph_with_edge_attrs(
            self.nodes, self.edges,
            {'add_const': {'value': float_array([0.00001])}})
        reference = g.copy()
        g.stage = 'front'

        HSigmoidWithReluMul().find_and_replace_pattern(g)

        status, message = compare_graphs(g, reference, 'result')
        self.assertTrue(status, message)

    def test_hsigmoid_with_relu_mul_different_tensors(self):
        """No fusion when the second Mul input is an unrelated tensor."""
        node_attrs = {
            **regular_op('input', {'type': 'Parameter'}),
            **regular_op('input_2', {'type': 'Parameter'}),
            **regular_op('add', {'op': 'Add'}),
            **regular_op('max', {'op': 'Maximum'}),
            **regular_op('min', {'op': 'Minimum'}),
            **regular_op('mul', {'op': 'Mul'}),
            **regular_op('mul_2', {'op': 'Mul', 'name': 'final_mul'}),
            **const('const_0', float_array([0.0])),
            **const('const_3', float_array([3.0])),
            **const('const_6', float_array([6.0])),
            **const('const_1_6', float_array([1.0 / 6.0])),
            **result('result'),
        }
        edge_attrs = [
            ('input_2', 'mul', {'in': 1, 'out': 0}),
            ('input', 'add', {'in': 0, 'out': 0}),
            ('const_3', 'add', {'in': 1, 'out': 0}),
            ('add', 'max', {'in': 0, 'out': 0}),
            ('const_0', 'max', {'in': 1, 'out': 0}),
            ('max', 'min', {'in': 0, 'out': 0}),
            ('const_6', 'min', {'in': 1, 'out': 0}),
            ('min', 'mul', {'in': 0, 'out': 0}),
            ('mul', 'mul_2', {'in': 0, 'out': 0}),
            ('const_1_6', 'mul_2', {'in': 1, 'out': 0}),
            ('mul_2', 'result', {'in': 0, 'out': 0}),
        ]
        g = build_graph_with_edge_attrs(node_attrs, edge_attrs)
        reference = g.copy()
        g.stage = 'front'

        HSigmoidWithReluMul().find_and_replace_pattern(g)

        status, message = compare_graphs(g, reference, 'result')
        self.assertTrue(status, message)
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import unittest from extensions.front.output_cut import OutputCut from mo.graph.graph import Node from mo.utils.unittest.graph import build_graph, regular_op nodes = { **regular_op('Parameter1', { 'type': 'Parameter', 'kind': 'op', 'op': 'Parameter' }), **regular_op('Op1', { 'type': 'Op1', 'kind': 'op', 'op': 'Op1' }), **regular_op('Op2', { 'type': 'Op2', 'kind': 'op', 'op': 'Op2' }), **regular_op( 'FakeOutput1', { 'type': 'Identity',
http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import unittest from mo.graph.graph import Node from mo.utils.unittest.graph import build_graph, regular_op nodes = { **regular_op('input', {'type': 'Parameter'}), **regular_op('Op1', { 'type': 'Op1', 'kind': 'op', 'op': 'Op1' }), **regular_op('Op2', { 'type': 'Op2', 'kind': 'op', 'op': 'Op2' }), **regular_op('Op3', { 'type': 'Op3', 'kind': 'op', 'op': 'Op3' }),
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import unittest from extensions.back.ResultRename import ResultRename from mo.graph.graph import Node from mo.utils.ir_engine.compare_graphs import compare_graphs from mo.utils.unittest.graph import build_graph, regular_op, result nodes = { **regular_op('Op1', { 'type': 'Op1', 'kind': 'op', 'op': 'Op1' }), **regular_op('Op2', { 'type': 'Op2', 'kind': 'op', 'op': 'Op2' }), **result('result1'), **result('result2'), 'Op1_data': { 'kind': 'data', 'fw_tensor_debug_info': [('Op1', 0, 'Op1_tensor')] }, 'Op2_data': { 'kind': 'data',
http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import unittest from extensions.front.Swish_fusion import SwishWithSigmoidWithoutBeta, SwishWithSigmoidWithBeta from mo.utils.ir_engine.compare_graphs import compare_graphs from mo.utils.unittest.graph import build_graph, regular_op, result, build_graph_with_edge_attrs ref_nodes = {**regular_op('input', {'type': 'Parameter'}), **regular_op('swish', {'type': 'Swish', 'name': 'final_mul'}), **result('result') } ref_edges = [('input', 'swish'), ('swish', 'result')] class SwishWithSigmoidWithoutBetaTest(unittest.TestCase): nodes = { **regular_op('input', {'type': 'Parameter'}), **regular_op('sigmoid', {'op': 'Sigmoid'}), **regular_op('mul', {'op': 'Mul', 'name': 'final_mul'}), **result('result'), } edges = [('input', 'mul', {'in': 0, 'out': 0}),
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import unittest from math import sqrt from extensions.front.GeLUMerger_Erf import GeLUMergerErf from mo.front.common.partial_infer.utils import float_array, int64_array from mo.utils.ir_engine.compare_graphs import compare_graphs from mo.utils.unittest.graph import build_graph, const, regular_op, result, build_graph ref_nodes = { **regular_op('input', {'type': 'Parameter'}), **regular_op('gelu', { 'type': 'Gelu', 'name': 'final_mul' }), **result('result') } ref_edges = [('input', 'gelu'), ('gelu', 'result')] class GeLUMergerErfTest(unittest.TestCase): nodes = { **regular_op('input', { 'op': 'Parameter', 'type': 'Parameter' }),