def test_leaky_relu_mul_multiple_consumers(self):
    # multiple consumers of Mul operation
    graph = build_graph_with_edge_attrs(nodes, edges, {})
    additional_result = Result(graph, {'name': 'result_2'}).create_node()
    Node(graph, 'mul').out_port(0).connect(additional_result.in_port(0))

    ref_nodes = {
        **regular_op_with_shaped_data('input', shape, {'type': 'Parameter', 'op': 'Parameter'}),
        **regular_op_with_shaped_data('mul', shape, {'type': 'Multiply', 'name': 'mul'}),
        **regular_op_with_shaped_data('max', shape, {'type': 'Maximum', 'name': 'final_max'}),
        **valued_const_with_data('const', float_array([0.5])),
        **regular_op_with_shaped_data('leaky_relu', shape,
                                      {'type': 'LeakyReLU', 'name': 'max_final', 'negative_slope': None}),
        **result('result'),
        **result('result_2')
    }
    ref_edges = [
        *connect('input:0', '0:mul'),
        *connect('const', '1:mul'),
        *connect('max:0', 'result'),
        *connect('mul:0', 'result_2'),
        *connect_data('input', 'leaky_relu'),
        *connect('leaky_relu', 'result')
    ]
    graph_ref = build_graph_with_edge_attrs(ref_nodes, ref_edges)

    LeakyReLUFusion().find_and_replace_pattern(graph)
    graph.clean_up()

    (flag, resp) = compare_graphs(graph, graph_ref, 'result')
    self.assertTrue(flag, resp)

    (flag, resp) = compare_graphs(graph, graph_ref, 'result_2')
    self.assertTrue(flag, resp)
def test_axpy(self):
    nodes = {
        'node_1': {'kind': 'op', 'type': 'Identity', 'op': 'Parameter'},
        'node_2': {'kind': 'op', 'type': 'Identity', 'op': 'Parameter'},
        'node_3': {'kind': 'op', 'type': 'Identity', 'op': 'Parameter'},
        'axpy': {'type': 'Axpy', 'kind': 'op', 'op': 'Axpy'},
        'node_4': {'kind': 'op', 'type': 'Identity', 'op': 'Parameter'},
    }
    edges = [
        ('node_1', 'axpy', {'in': 0, 'out': 0}),
        ('node_2', 'axpy', {'in': 1, 'out': 0}),
        ('node_3', 'axpy', {'in': 2, 'out': 0}),
        ('axpy', 'node_4', {'in': 0, 'out': 0}),
    ]
    graph = build_graph_with_edge_attrs(nodes, edges)
    node = Node(graph, 'axpy')
    replacer = AxpyToSSandAdd()
    replacer.replace_op(graph, node)

    scale_node = [node for node, attrs in list(graph.nodes(data=True)) if attrs['type'] == 'ScaleShift']
    self.assertEqual(len(scale_node), 1)
    add_node = [node for node, attrs in list(graph.nodes(data=True)) if attrs['type'] == 'Add']
    self.assertEqual(len(add_node), 1)
def test_hsigmoid_with_clamp_different_tensors(self):
    # Add and Mul start from different tensors, so no fusion is expected (reference graph equals the original)
    graph = build_graph_with_edge_attrs(
        {
            **regular_op('input', {'type': 'Parameter'}),
            **regular_op('input_2', {'type': 'Parameter'}),
            **regular_op('add', {'op': 'Add'}),
            **regular_op('relu6', {'op': 'Clamp'}),
            **regular_op('mul', {'op': 'Mul'}),
            **regular_op('mul_2', {'op': 'Mul', 'name': 'final_mul'}),
            **const('const_0', float_array([0.0])),
            **const('const_3', float_array([3.0])),
            **const('const_6', float_array([6.0])),
            **const('const_1_6', float_array([1.0 / 6.0])),
            **result('result'),
        },
        [('input', 'mul', {'in': 0, 'out': 0}),
         ('input_2', 'add', {'in': 0, 'out': 0}),
         ('const_3', 'add', {'in': 1, 'out': 0}),
         ('add', 'relu6', {'in': 0, 'out': 0}),
         ('const_0', 'relu6', {'in': 1, 'out': 0}),
         ('const_6', 'relu6', {'in': 2, 'out': 0}),
         ('relu6', 'mul', {'in': 1, 'out': 0}),
         ('mul', 'mul_2', {'in': 0, 'out': 0}),
         ('const_1_6', 'mul_2', {'in': 1, 'out': 0}),
         ('mul_2', 'result', {'in': 0, 'out': 0})])

    graph_ref = graph.copy()
    graph.stage = 'front'

    HSigmoidWithClamp().find_and_replace_pattern(graph)

    (flag, resp) = compare_graphs(graph, graph_ref, 'result')
    self.assertTrue(flag, resp)
def test_hswish_with_clamp_wrong_constant(self):
    # the constant value does not match the pattern, so the transformation should not be applied
    graph = build_graph_with_edge_attrs(self.nodes, self.edges, {'const_0': {'value': float_array([0.00001])}})
    graph_ref = graph.copy()
    graph.stage = 'front'

    HSwishWithClamp().find_and_replace_pattern(graph)

    (flag, resp) = compare_graphs(graph, graph_ref, 'result')
    self.assertTrue(flag, resp)
def test_hsigmoid_with_relu_mul_wrong_constant(self):
    # the constant value does not match the pattern, so no fusion is expected
    graph = build_graph_with_edge_attrs(self.nodes, self.edges, {'add_const': {'value': float_array([0.00001])}})
    graph_ref = graph.copy()
    graph.stage = 'front'

    HSigmoidWithReluMul().find_and_replace_pattern(graph)

    (flag, resp) = compare_graphs(graph, graph_ref, 'result')
    self.assertTrue(flag, resp)
def test_softplus_fusion_test_wrong_const(self):
    # the constant value deviates from the expected one, so no fusion is expected
    graph = build_graph_with_edge_attrs(self.nodes, self.edges, {'const_1': {'value': float_array([0.9999])}})
    graph_ref = graph.copy()
    graph.stage = 'front'

    SoftplusFusion().find_and_replace_pattern(graph)

    (flag, resp) = compare_graphs(graph, graph_ref, 'result')
    self.assertTrue(flag, resp)
def test_softplus_fusion_test(self):
    graph = build_graph_with_edge_attrs(self.nodes, self.edges, {})

    graph_ref = build_graph(ref_nodes, ref_edges)
    graph.stage = 'front'

    SoftplusFusion().find_and_replace_pattern(graph)

    (flag, resp) = compare_graphs(graph, graph_ref, 'result')
    self.assertTrue(flag, resp)
    self.assertTrue(len(graph.get_op_nodes(name='final_log')) == 1 and
                    graph.get_op_nodes(name='final_log')[0].op == 'SoftPlus')
def test_swish_with_sigmoid_without_beta_test(self):
    graph = build_graph_with_edge_attrs(self.nodes, self.edges, {})

    graph_ref = build_graph(ref_nodes, ref_edges)
    graph.stage = 'front'

    SwishWithSigmoidWithoutBeta().find_and_replace_pattern(graph)

    (flag, resp) = compare_graphs(graph, graph_ref, 'result')
    self.assertTrue(flag, resp)
    self.assertTrue(len(graph.get_op_nodes(name='final_mul')) == 1 and
                    graph.get_op_nodes(name='final_mul')[0].op == 'Swish')
def test_leaky_relu_not_applicable_non_scalar_const(self):
    # const value is not a scalar or a 1D tensor with 1 element, so the transformation is not applicable
    graph = build_graph_with_edge_attrs(nodes, edges, {})
    Node(graph, 'const')['value'] = float_array([0.5, 0.7])
    Node(graph, 'const_d')['value'] = float_array([0.5, 0.7])
    graph_ref = graph.copy()

    LeakyReLUFusion().find_and_replace_pattern(graph)
    graph.clean_up()

    (flag, resp) = compare_graphs(graph, graph_ref, 'result')
    self.assertTrue(flag, resp)
def test_hswish_with_min_max(self):
    graph = build_graph_with_edge_attrs(self.nodes, self.edges, {})

    graph_ref = build_graph(ref_nodes, ref_edges)
    graph.stage = 'front'

    HSwishWithMinMax().find_and_replace_pattern(graph)

    (flag, resp) = compare_graphs(graph, graph_ref, 'result')
    self.assertTrue(flag, resp)
    self.assertTrue(len(graph.get_op_nodes(name='final_mul')) == 1 and
                    graph.get_op_nodes(name='final_mul')[0].op == 'HSwish')
def test_fifo_with_out_label_batch(self):
    nodes_no_label = {
        'placeholder': {'op': 'Parameter', 'data_type': np.int32, 'kind': 'op', 'shape': np.array(0)},
        'batch_join/fifo_queue': {'op': 'FIFOQueueV2', 'name': 'batch_join/fifo_queue',
                                  'shapes': np.array([[1, 2, 3]]), 'types': np.array([np.float32]),
                                  'kind': 'op'},
        'batch_join': {'op': 'QueueDequeueUpToV2', 'kind': 'op'},
        'image_batch': {'op': 'Identity', 'data_type': np.float32, 'kind': 'op'},
    }
    edges_no_label = [
        ('placeholder', 'batch_join', {'out': 0, 'in': 0}),
        ('batch_join/fifo_queue', 'batch_join', {'out': 0, 'in': 1}),
        ('batch_join', 'image_batch', {'out': 0, 'in': 0}),
    ]
    graph = build_graph_with_edge_attrs(nodes_no_label, edges_no_label)
    tested_class = FIFOQueue()
    tested_class.find_and_replace_pattern(graph=graph)

    after_pattern = graph.nodes()
    self.assertEqual(2, len(after_pattern))
    try:
        new_ph_dict = graph.node[[u for u, v in graph.in_edges('image_batch')][0]]
    except Exception as e:
        self.fail("Can't get new placeholder. Broken edge. Additional information: {}".format(e))
    self.assertEqual(new_ph_dict['name'], 'batch_join/fifo_queue')
    self.assertTrue(np.array_equal(new_ph_dict['shape'], np.array([1, 2, 3])))
def test_hsigmoid_with_relu_mul(self):
    graph = build_graph_with_edge_attrs(self.nodes, self.edges, {})

    graph_ref = build_graph(ref_nodes, ref_edges)
    graph.stage = 'front'

    HSigmoidWithReluMul().find_and_replace_pattern(graph)

    (flag, resp) = compare_graphs(graph, graph_ref, 'result')
    self.assertTrue(flag, resp)
    self.assertTrue(len(graph.get_op_nodes(name='final_mul')) == 1 and
                    graph.get_op_nodes(name='final_mul')[0].op == 'HSigmoid')
    self.assertTrue(graph.get_op_nodes(name='final_mul')[0].out_nodes()[0].node == 'result')
def test_leaky_relu_data_port_0(self):
    graph = build_graph_with_edge_attrs(nodes, edges, {})
    graph_ref = build_graph(ref_nodes, ref_edges)
    Node(graph_ref, 'leaky_relu')['negative_slope'] = 0.5

    LeakyReLUFusion().find_and_replace_pattern(graph)
    graph.clean_up()

    (flag, resp) = compare_graphs(graph, graph_ref, 'result')
    self.assertTrue(flag, resp)
    self.assertTrue(len(graph.get_op_nodes(name='final_max')) == 1 and
                    graph.get_op_nodes(name='final_max')[0].op == 'LeakyReLU')
def test_bn(self):
    bn_pb = FakeBNProtoLayer(FakeParam('eps', 0.0001))
    mean = [1, 2.5, 3]
    var = [0.5, 0.1, 1.2]
    scale = [2.3, 3.4, 4.5]
    shift = [0.8, 0.6, 0.4]
    bn_bin = FakeBNBinLayer([FakeParam('data', mean),
                             FakeParam('data', var),
                             FakeParam('data', scale),
                             FakeParam('data', shift)])
    nodes = {
        'node_1': {'kind': 'op', 'type': 'Identity', 'op': 'Placeholder'},
        'bn': {'type': 'BN', 'kind': 'op', 'op': 'BN', 'pb': bn_pb, 'model_pb': bn_bin},
        'node_2': {'kind': 'op', 'type': 'Identity', 'op': 'Placeholder'},
    }
    edges = [
        ('node_1', 'bn', {'in': 0}),
        ('bn', 'node_2', {'in': 0}),
    ]
    graph = build_graph_with_edge_attrs(nodes, edges)
    node = Node(graph, 'bn')
    replacer = BNToScaleShift()
    replacer.replace_op(graph, node)

    scale_node = [node for node, attrs in list(graph.nodes(data=True)) if attrs['type'] == 'ScaleShift']
    self.assertEqual(len(scale_node), 1)

    scale_ref = np.array([1.11796412, 3.2272172, 4.74282367])
    shift_ref = np.array([-2.07131747, -10.87253847, -20.14270653])
    for i in range(len(mean)):
        self.assertAlmostEqual(graph.node[scale_node[0]]['scale'][i], scale_ref[i])
        self.assertAlmostEqual(graph.node[scale_node[0]]['bias'][i], shift_ref[i])
def test_assert_cf_false(self):
    me_mock = Mock()
    nodes = {
        'input_data': {'name': 'input', 'kind': 'data', 'executable': True},
        'assert': {'name': 'assert', 'type': 'Assert', 'value': None, 'kind': 'op', 'op': 'Assert'},
        'assert_data': {'name': 'output', 'value': False, 'kind': 'data', 'executable': True},
    }
    edges = [
        ('input_data', 'assert', {'in': 0}),
        ('assert', 'assert_data', {'out': 0, 'control_flow_edge': False}),
    ]
    graph = build_graph_with_edge_attrs(nodes, edges)
    tested_class = Assert(graph=graph, attrs={})

    node = Node(graph, 'assert')
    tested_class.assert_control_flow_infer(node=node, is_executable=True, mark_executability=me_mock)
    me_mock.assert_called_once_with('assert_data', False)
def test_swish_with_sigmoid_with_beta_test(self):
    graph = build_graph_with_edge_attrs(self.nodes, self.edges, {})

    new_ref_nodes = ref_nodes.copy()
    new_ref_nodes.update(**regular_op('beta', {'type': 'Parameter'}))

    graph_ref = build_graph(new_ref_nodes, ref_edges + [('beta', 'swish')])
    graph.stage = 'front'

    SwishWithSigmoidWithBeta().find_and_replace_pattern(graph)

    (flag, resp) = compare_graphs(graph, graph_ref, 'result')
    self.assertTrue(flag, resp)
    self.assertTrue(len(graph.get_op_nodes(name='final_mul')) == 1 and
                    graph.get_op_nodes(name='final_mul')[0].op == 'Swish')
def test_input_user_data_repack_names_ports_in_out(self):
    graph = build_graph_with_edge_attrs(self.nodes, self.edges)
    input, freeze_placeholder = input_user_data_repack(graph, ['Aa:1', '0:Bb'], None)
    self.assertDictEqual(input, {'A': [{'shape': None, 'out': 1}],
                                 'B': [{'shape': None, 'in': 0}]})
    self.assertEqual(freeze_placeholder, None)
def test_switch_cf_infer_no_condition(self):
    me_mock = Mock()
    nodes = {
        'tensor': {'value': True, 'kind': 'data', 'executable': True},
        'pred_id': {'value': None, 'kind': 'data', 'executable': True},
        'switch': {'type': 'Switch', 'kind': 'op', 'op': 'Switch'},
        'switch_data_0': {'value': None, 'kind': 'data', 'executable': True},
        'switch_data_1': {'value': None, 'kind': 'data', 'executable': True},
    }
    edges = [
        ('tensor', 'switch', {'in': 0}),
        ('pred_id', 'switch', {'in': 1}),
        ('switch', 'switch_data_0', {'out': 0}),
        ('switch', 'switch_data_1', {'out': 1}),
    ]
    graph = build_graph_with_edge_attrs(nodes, edges)
    tested_class = Switch(graph=graph, attrs={})
    node = Node(graph, 'switch')
    tested_class.control_flow_infer(node, True, me_mock)

    # In this case we should mark all ports as executable
    me_mock.assert_has_calls([call('switch_data_0', True), call('switch_data_1', True)], any_order=True)
def test_input_user_data_repack_names_to_ids_list(self):
    graph = build_graph_with_edge_attrs(self.nodes, self.edges)
    input, freeze_placeholder = input_user_data_repack(graph, ['Aa', 'Bb'], None)
    self.assertDictEqual(input, {'A': [{'shape': None, 'port': None}],
                                 'B': [{'shape': None, 'port': None}]})
    self.assertEqual(freeze_placeholder, None)
def test_switch_cf_false_both_ports(self):
    me_mock = Mock()
    nodes = {
        'tensor': {'value': True, 'kind': 'data', 'executable': True},
        'pred_id': {'value': np.array(False), 'kind': 'data', 'executable': True},
        'switch': {'type': 'Switch', 'kind': 'op', 'op': 'Switch'},
        'switch_data_0': {'value': None, 'kind': 'data', 'executable': True},
        'switch_data_1': {'value': None, 'kind': 'data', 'executable': True},
    }
    edges = [
        ('tensor', 'switch', {'in': 0}),
        ('pred_id', 'switch', {'in': 1}),
        ('switch', 'switch_data_0', {'out': 0}),
        ('switch', 'switch_data_1', {'out': 1}),
    ]
    graph = build_graph_with_edge_attrs(nodes, edges)
    tested_class = Switch(graph=graph, attrs={})
    node = Node(graph, 'switch')
    tested_class.control_flow_infer(node, True, me_mock)
    me_mock.assert_has_calls([call('switch_data_0', True), call('switch_data_1', False)], any_order=True)
def test_input_and_freeze(self):
    graph = build_graph_with_edge_attrs(self.nodes, self.edges)
    shape_1 = np.array([1, 160, 160, 3])
    input, freeze_placeholder = input_user_data_repack(graph, shape_1, {'Bb': True})
    self.assertDictEqual(input, {'A': [{'shape': shape_1, 'port': None}],
                                 'B': [{'shape': None, 'port': None}]})
    self.assertDictEqual(freeze_placeholder, {'B': True})
def test_output_port_cut(self, output):
    nodes = {
        'A': {'op': 'Parameter', 'kind': 'op'},
        'B': {'op': 'Parameter', 'kind': 'op'},
        'C': {'type': 'Identity', 'kind': 'op', 'op': 'Identity'},
        'D': {'type': 'Identity', 'kind': 'op', 'op': 'Identity'},
        'E': {'type': 'Identity', 'kind': 'op', 'op': 'Identity'},
    }
    edges = [
        ('A', 'C', {'in': 0, 'out': 0}),
        ('B', 'C', {'in': 1, 'out': 0}),
        ('C', 'D', {'in': 0, 'out': 0}),
        ('C', 'E', {'in': 0, 'out': 1}),
    ]
    graph = build_graph_with_edge_attrs(nodes, edges)
    sinks = add_output_ops(graph, output)
    graph.clean_up()
    self.assertEqual(len(graph.nodes()), 2)
def test_output_port_cut(self, output):
    nodes = {
        'A': {'type': 'Identity', 'kind': 'op'},
        'B': {'type': 'Identity', 'kind': 'op'},
        'C': {'type': 'Identity', 'kind': 'op'},
        'D': {'type': 'Identity', 'kind': 'op'},
        'E': {'type': 'Identity', 'kind': 'op'},
    }
    edges = [
        ('A', 'C', {'in': 0, 'out': 0}),
        ('B', 'C', {'in': 1, 'out': 0}),
        ('C', 'D', {'in': 0, 'out': 0}),
        ('C', 'E', {'in': 0, 'out': 1}),
    ]
    graph = build_graph_with_edge_attrs(nodes, edges)
    sinks = add_output_ops(graph, output)
    eliminate.graph_clean_up(graph)
    self.assertEqual(len(Node(graph, 'C').out_nodes()), 1)
    self.assertEqual(len(Node(graph, 'C').in_nodes()), 2)
def test_get_fw_tensor_debug_info(self):
    nodes = {
        'A': {'type': 'Identity', 'kind': 'op'},
        'B': {'type': 'Identity', 'kind': 'op'},
        'C': {'type': 'Identity', 'kind': 'op'},
        'Ad': {'value': None, 'kind': 'data', 'fw_tensor_debug_info': [('A', 0)]},
        'Bd': {'value': None, 'kind': 'data', 'fw_tensor_debug_info': [('B', 0)]},
        'Cd': {'value': None, 'kind': 'data'},
    }
    edges = [
        ('A', 'Ad', {'out': 0}),
        ('Ad', 'B', {'in': 0}),
        ('B', 'Bd', {'out': 0}),
        ('Bd', 'C', {'in': 0}),
        ('C', 'Cd', {'out': 0}),
    ]
    graph = build_graph_with_edge_attrs(nodes, edges)
    fw_debug_info = get_fw_tensor_debug_info(Node(graph, 'Cd'))
    self.assertEqual(len(fw_debug_info), 1)
    self.assertEqual(fw_debug_info[0], ('B', 0))
def test_bn(self):
    bn_pb = FakeBNProtoLayer(FakeParam('eps', 0.0001))
    mean = [1, 2.5, 3]
    var = [0.5, 0.1, 1.2]
    scale = [2.3, 3.4, 4.5]
    shift = [0.8, 0.6, 0.4]
    bn_bin = FakeBNBinLayer([FakeParam('data', mean),
                             FakeParam('data', var),
                             FakeParam('data', scale),
                             FakeParam('data', shift)])
    nodes = [
        ('input', {'kind': 'op', 'type': 'Identity', 'op': 'Identity'}),
        ('bn', {'type': 'BN', 'kind': 'op', 'op': 'BN', 'pb': bn_pb, 'model_pb': bn_bin}),
        ('output', {'kind': 'op', 'type': 'Identity', 'op': 'Identity'}),
    ]
    edges = [
        ('input', 'bn', {'in': 0, 'out': 0}),
        ('bn', 'output', {'in': 0, 'out': 0}),
    ]
    graph = build_graph_with_attrs(nodes, edges)
    node = Node(graph, 'bn')
    graph.stage = 'front'

    BNToScaleShift().find_and_replace_pattern(graph)

    ref_nodes = {
        'input': {'kind': 'op', 'type': 'Identity', 'op': 'Identity'},
        'scale': {'kind': 'op', 'type': 'Const', 'op': 'Const',
                  'value': np.array([1.11796412, 3.2272172, 4.74282367])},
        'shift': {'kind': 'op', 'type': 'Const', 'op': 'Const',
                  'value': np.array([-2.07131747, -10.87253847, -20.14270653])},
        'ss': {'type': 'ScaleShift', 'kind': 'op', 'op': 'ScaleShift'},
        'output': {'kind': 'op', 'type': 'Identity', 'op': 'Identity'},
    }
    ref_edges = [
        ('input', 'ss', {'in': 0, 'out': 0}),
        ('scale', 'ss', {'in': 1, 'out': 0}),
        ('shift', 'ss', {'in': 2, 'out': 0}),
        ('ss', 'output', {'in': 0, 'out': 0}),
    ]
    ref_graph = build_graph_with_edge_attrs(ref_nodes, ref_edges)

    (flag, resp) = compare_graphs(graph, ref_graph, 'input', check_op_attrs=True)
    self.assertTrue(flag, resp)
def test_swish_with_sigmoid_without_beta_different_tensors(self):
    # Mul and Sigmoid take different tensors, so no fusion is expected (reference graph equals the original)
    graph = build_graph_with_edge_attrs({
        **regular_op('input', {'type': 'Parameter'}),
        **regular_op('input_2', {'type': 'Parameter'}),
        **regular_op('sigmoid', {'op': 'Sigmoid'}),
        **regular_op('mul', {'op': 'Mul', 'name': 'final_mul'}),
        **result('result'),
    }, [('input_2', 'mul', {'in': 0, 'out': 0}),
        ('input', 'sigmoid', {'in': 0, 'out': 0}),
        ('sigmoid', 'mul', {'in': 1, 'out': 0}),
        ('mul', 'result', {'in': 0, 'out': 0})], {})

    graph_ref = graph.copy()
    graph.stage = 'front'

    SwishWithSigmoidWithoutBeta().find_and_replace_pattern(graph)

    (flag, resp) = compare_graphs(graph, graph_ref, 'result')
    self.assertTrue(flag, resp)
def setUp(self):
    self.graph = build_graph_with_edge_attrs(
        {
            'data_to_split': {'value': None, 'shape': int64_array([2, 12, 25, 44]), 'kind': 'data'},
            'split_node': {'kind': 'op', 'op': 'Split', 'axis': None},
            'out_data_2': {'value': None, 'shape': None, 'kind': 'data'},
            'out_data_5': {'value': None, 'shape': None, 'kind': 'data'},
            'out_data_7': {'value': None, 'shape': None, 'kind': 'data'},
        },
        [
            ('data_to_split', 'split_node', {'in': 0}),
            ('split_node', 'out_data_2', {'out': 2}),
            ('split_node', 'out_data_5', {'out': 5}),
            ('split_node', 'out_data_7', {'out': 7}),
        ])
def test_input_user_data_repack_dict_with_shapes(self):
    graph = build_graph_with_edge_attrs(self.nodes, self.edges)
    shape_1 = np.array([1, 160, 160, 3])
    shape_2 = np.array([1, 127, 127, 3])
    input, freeze_placeholder = input_user_data_repack(graph, {'Aa': shape_1, 'Bb': shape_2}, None)
    self.assertDictEqual(input, {'A': [{'shape': shape_1, 'port': None}],
                                 'B': [{'shape': shape_2, 'port': None}]})
    self.assertEqual(freeze_placeholder, None)
def test_get_sorted_outputs_fine_situation(self):
    nodes = {
        'A': {'type': 'Identity', 'kind': 'op'},
        'B': {'type': 'Identity', 'kind': 'op'},
        'C': {'type': 'Identity', 'kind': 'op'},
        'D': {'type': 'Identity', 'kind': 'op'},
        'E': {'type': 'Identity', 'kind': 'op'},
        'F': {'type': 'Identity', 'kind': 'op'},
        'G': {'type': 'Identity', 'kind': 'op'},
        'H': {'type': 'Identity', 'kind': 'op'},
        'Ad': {'value': None, 'kind': 'data'},
        'Bd': {'value': None, 'kind': 'data'},
        'Cd': {'value': None, 'kind': 'data', 'fw_tensor_debug_info': [('C', 0)]},
        'Dd': {'value': None, 'kind': 'data'},
        'Ed': {'value': None, 'kind': 'data', 'fw_tensor_debug_info': [('E', 0)]},
        'Fd': {'value': None, 'kind': 'data', 'fw_tensor_debug_info': [('F', 0)]},
        'Gd': {'value': None, 'kind': 'data', 'fw_tensor_debug_info': [('G', 0)]},
        'Hd': {'value': None, 'kind': 'data', 'fw_tensor_debug_info': [('H', 0)]},
    }
    edges = [
        ('A', 'Ad', {'out': 0}),
        ('Ad', 'B', {'in': 0}),
        ('B', 'Bd', {'out': 0}),
        ('Bd', 'C', {'in': 0}),
        ('C', 'Cd', {'out': 0}),
        ('Cd', 'D', {'in': 0}),
        ('D', 'Dd', {'out': 0}),
        ('Dd', 'E', {'in': 0}),
        ('E', 'Ed', {'out': 0}),
        ('Cd', 'F', {'in': 0}),
        ('F', 'Fd', {'out': 0}),
        ('Fd', 'G', {'in': 0}),
        ('G', 'Gd', {'out': 0}),
        ('Cd', 'H', {'in': 0}),
        ('H', 'Hd', {'out': 0}),
    ]
    graph = build_graph_with_edge_attrs(nodes, edges)
    self.assertListEqual([node.id for node in get_sorted_outputs(graph)], ['Ed', 'Gd', 'Hd'])
def test_mish_fusion_different_source(self):
    # check the case when different tensors go to Mul and SoftPlus, so no fusion is expected
    graph = build_graph_with_edge_attrs(
        {
            **regular_op('input', {'type': 'Parameter'}),
            **regular_op('input_2', {'type': 'Parameter'}),
            **regular_op('softplus', {'op': 'SoftPlus'}),
            **regular_op('tanh', {'op': 'Tanh'}),
            **regular_op('mul', {'op': 'Mul', 'name': 'final_mul'}),
            **result('result'),
        },
        [('input', 'softplus', {'in': 0, 'out': 0}),
         ('input_2', 'mul', {'in': 0, 'out': 0}),
         ('softplus', 'tanh', {'in': 0, 'out': 0}),
         ('tanh', 'mul', {'in': 1, 'out': 0}),
         ('mul', 'result', {'in': 0, 'out': 0})], {})

    graph_ref = graph.copy()
    graph.stage = 'front'

    MishFusion().find_and_replace_pattern(graph)

    (flag, resp) = compare_graphs(graph, graph_ref, 'result')
    self.assertTrue(flag, resp)