def test_packed(self):
    """AtenToEmbeddingBag should replace an ATen 'embedding_bag' (mode=0) with EmbeddingBagPackedSum."""
    node_attrs = {
        **const('weights_inp', np.random.randn(100, 4)),
        **regular_op('indices_inp', {'type': 'Parameter'}),
        **regular_op('aten', {'type': None, 'kind': 'op', 'op': 'ATen',
                              'operator': 'embedding_bag', 'mode': 0, 'name': 'my_aten'}),
        **regular_op('emb_bag', {'type': 'EmbeddingBagPackedSum', 'kind': 'op',
                                 'op': 'EmbeddingBagPackedSum'}),
        **result('result'),
    }

    graph = build_graph(node_attrs, [
        ('weights_inp', 'aten'),
        ('indices_inp', 'aten'),
        ('aten', 'result'),
    ])
    graph.graph['layout'] = 'NCHW'
    graph.stage = 'front'

    # Reference: same inputs wired to EmbeddingBagPackedSum instead of ATen.
    graph_ref = build_graph(node_attrs, [
        ('weights_inp', 'emb_bag'),
        ('indices_inp', 'emb_bag'),
        ('emb_bag', 'result'),
    ])

    AtenToEmbeddingBag().find_and_replace_pattern(graph)

    flag, resp = compare_graphs(graph, graph_ref, 'result')
    self.assertTrue(flag, resp)
def test_run_with_const_input(self):
    """MarkNodesWithShapeValues should set 'returns_shape_value' on the Const feeding Interpolate."""
    inp_shape = (1, 3, 1000, 1000)

    node_attrs = {
        **shaped_const_with_data('input', int64_array(inp_shape)),
        **regular_op('sizes_const', {'op': 'Const'}),
        **{'sizes_const_d': {'kind': 'data', 'value': float32_array([1., 1., 1., 100.])}},
        **regular_op_with_empty_data('interpolate', {'type': 'Interpolate',
                                                     'shape_calculation_model': 'scales'}),
        **result('res'),
    }
    # Reference differs only in the 'returns_shape_value' flag on the Const.
    node_attrs_ref = {
        **shaped_const_with_data('input', int64_array(inp_shape)),
        **regular_op('sizes_const', {'op': 'Const', 'returns_shape_value': True}),
        **{'sizes_const_d': {'kind': 'data', 'value': float32_array([1., 1., 1., 100.])}},
        **regular_op_with_empty_data('interpolate', {'type': 'Interpolate',
                                                     'shape_calculation_model': 'scales'}),
        **result('res'),
    }
    edge_list = [
        *connect('input', '0:interpolate'),
        *connect('sizes_const', '1:interpolate'),
        *connect('interpolate', 'res'),
    ]

    graph = build_graph(node_attrs, edge_list)
    interp_node = Node(graph, 'interpolate')
    interp_node.add_input_port(2)

    MarkNodesWithShapeValues().find_and_replace_pattern(graph)

    graph_ref = build_graph(node_attrs_ref, edge_list)
    flag, resp = compare_graphs(graph, graph_ref, 'res', check_op_attrs=True)
    self.assertTrue(flag, resp)
def test_set_ports_split2(self):
    """SetPortsPattern should renumber sparse Split out_ports (0, 4, 6) into consecutive ports (0, 1, 2)."""
    node_attrs = {
        **regular_op('op1', {}),
        **regular_op('split', {'op': 'Split'}),
        **regular_op('op2', {}),
        **regular_op('op3', {}),
        **regular_op('op4', {}),
    }

    graph = build_graph(node_attrs, [
        ('op1', 'split', {'fw_tensor_debug_info': {}}),
        ('split', 'op2', {'fw_tensor_debug_info': {}, 'out_port': 0}),
        ('split', 'op4', {'fw_tensor_debug_info': {}, 'out_port': 4}),
        ('split', 'op3', {'fw_tensor_debug_info': {}, 'out_port': 6}),
    ], nodes_with_edges_only=True)
    graph.stage = 'front'
    graph.nodes()['split']['out_ports_count'] = 3

    # Reference: consumers attached to consecutive output ports 0..2.
    ref_graph = build_graph(node_attrs, [
        *connect_front('op1:0', '0:split'),
        *connect_front('split:0', '0:op2'),
        *connect_front('split:1', '0:op4'),
        *connect_front('split:2', '0:op3'),
    ], nodes_with_edges_only=True)

    SetPortsPattern().find_and_replace_pattern(graph)

    flag, resp = compare_graphs(graph, ref_graph, 'op4', check_op_attrs=True)
    self.assertTrue(flag, resp)
def test_set_ports_split3(self):
    """SetPortsPattern should raise Error on an inconsistent Split port configuration.

    Split has three consumers (plus an extra destination added on port 0)
    while 'out_ports_count' is only 2.
    """
    node_attrs = {
        **regular_op('op1', {}),
        **regular_op('split', {'op': 'Split'}),
        **regular_op('op2', {}),
        **regular_op('op3', {}),
        **regular_op('op4', {}),
    }

    graph = build_graph(node_attrs, [
        ('op1', 'split', {'fw_tensor_debug_info': {}}),
        ('split', 'op2', {'fw_tensor_debug_info': {}, 'out_port': 0}),
        ('split', 'op3', {'fw_tensor_debug_info': {}, 'out_port': 1}),
        ('split', 'op4', {'fw_tensor_debug_info': {}, 'out_port': 2}),
    ], nodes_with_edges_only=True)

    # Attach a second destination to output port 0.
    Node(graph, 'split').out_port(0).get_connection().add_destination(
        Node(graph, 'op4').in_port(0))

    graph.nodes()['split']['out_ports_count'] = 2
    graph.stage = 'front'

    self.assertRaises(Error, SetPortsPattern().find_and_replace_pattern, graph)
def test_send_dynamic_shapes_case2(self):
    """send_shapes_info must report every Parameter shape and flag partially defined shapes."""
    graph = build_graph({
        **regular_op('placeholder1', {'shape': int64_array([2, 3, 20, 20]), 'type': 'Parameter'}),
        **regular_op('placeholder2', {'shape': int64_array([7, 4, 10]), 'type': 'Parameter'}),
        **regular_op('placeholder3', {'shape': shape_array([5, 4, dynamic_dimension_value]),
                                      'type': 'Parameter'}),
        # Non-Parameter node: its shape must not appear in the reported string.
        **regular_op('mul', {'shape': int64_array([7, 8]), 'type': 'Multiply'}),
    }, [])

    self.init_telemetry_mocks()
    send_shapes_info('framework', graph)

    tm.Telemetry.send_event.assert_any_call(
        'mo', 'input_shapes',
        '{fw:framework,shape:"[ 2 3 20 20],[ 7 4 10],[ 5 4 -1]"}')
    tm.Telemetry.send_event.assert_any_call(
        'mo', 'partially_defined_shape',
        '{partially_defined_shape:1,fw:framework}')
def test_serialize_old_api_map_result(self):
    """serialize_runtime_info should emit an 'old_api_map_order' rt_info attribute for a Result node."""
    graph = build_graph({
        **regular_op('placeholder', {'type': 'Parameter', 'rt_info': RTInfo()}),
        **regular_op('result', {'type': 'Result', 'rt_info': RTInfo()}),
    }, [('placeholder', 'result')], {}, nodes_with_edges_only=True)

    result_node = Node(graph, 'result')
    result_node.rt_info.info[('old_api_map_order', 0)] = OldAPIMapOrder()
    result_node.rt_info.info[('old_api_map_order', 0)].old_api_transpose_result([0, 3, 1, 2])

    net = Element('net')
    serialize_runtime_info(result_node, net)
    serialize_res = str(tostring(net))

    self.assertTrue('name="old_api_map_order"' in serialize_res)
    self.assertTrue('version="0"' in serialize_res)
    self.assertTrue('value="0,3,1,2"' in serialize_res)
    self.assertTrue(serialize_res.startswith("b'<net><rt_info>"))
    self.assertTrue(serialize_res.endswith("</rt_info></net>'"))
def test_send_shapes_info(self):
    """send_shapes_info reports all Parameter shapes and partially_defined_shape:0 for fully static shapes."""
    graph = build_graph({
        **regular_op('placeholder1', {'shape': int64_array([1, 3, 20, 20]), 'type': 'Parameter'}),
        **regular_op('placeholder2', {'shape': int64_array([2, 4, 10]), 'type': 'Parameter'}),
        # Non-Parameter node: its shape must not appear in the reported string.
        **regular_op('mul', {'shape': int64_array([7, 8]), 'type': 'Multiply'}),
    }, [])

    self.init_telemetry_mocks()
    send_shapes_info('framework', graph)

    tm.Telemetry.send_event.assert_any_call(
        'mo', 'input_shapes',
        '{fw:framework,shape:"[ 1 3 20 20],[ 2 4 10]"}')
    tm.Telemetry.send_event.assert_any_call(
        'mo', 'partially_defined_shape',
        '{partially_defined_shape:0,fw:framework}')
def test_backward_bfs_for_op_closest_op_detected(self):
    """
    input -> hsigmoid_1 -> hsigmoid_2 -> result

    backward_bfs_for_operation must return only the closest matching op, i.e. hsigmoid_2.
    """
    node_attrs = {
        **regular_op('input', {'op': 'Parameter'}),
        **regular_op('hsigmoid_1', {'op': 'HSigmoid'}),
        **regular_op('hsigmoid_2', {'op': 'HSigmoid'}),
        **result('result'),
    }
    edge_list = [
        ('input', 'hsigmoid_1', {'out': 0, 'in': 0}),
        ('hsigmoid_1', 'hsigmoid_2', {'out': 0, 'in': 0}),
        ('hsigmoid_2', 'result', {'out': 0, 'in': 0}),
    ]
    graph = build_graph_with_edge_attrs(node_attrs, edge_list)
    graph.stage = 'front'

    found_nodes = backward_bfs_for_operation(Node(graph, 'result'), ['HSigmoid'])
    self.assertEqual(len(found_nodes), 1)
    self.assertEqual(found_nodes[0].id, 'hsigmoid_2')
def test_set_ports_chain(self):
    """SetPortsPattern should assign explicit ports on a simple op1 -> op2 -> op3 chain."""
    node_attrs = {
        **regular_op('op1', {}),
        **regular_op('op2', {}),
        **regular_op('op3', {}),
    }

    graph = build_graph(node_attrs, [
        ('op1', 'op2', {'fw_tensor_debug_info': {}}),
        ('op2', 'op3', {'fw_tensor_debug_info': {}}),
    ], nodes_with_edges_only=True)
    graph.stage = 'front'

    ref_graph = build_graph(node_attrs, [
        *connect_front('op1:0', '0:op2'),
        *connect_front('op2:0', '0:op3'),
    ], nodes_with_edges_only=True)

    SetPortsPattern().find_and_replace_pattern(graph)

    flag, resp = compare_graphs(graph, ref_graph, 'op3', check_op_attrs=True)
    self.assertTrue(flag, resp)
def test_swish_with_sigmoid_without_beta_different_tensors(self):
    """No Swish fusion when Sigmoid and Mul consume different tensors — the graph must stay unchanged."""
    graph = build_graph_with_edge_attrs({
        **regular_op('input', {'type': 'Parameter'}),
        **regular_op('input_2', {'type': 'Parameter'}),
        **regular_op('sigmoid', {'op': 'Sigmoid'}),
        **regular_op('mul', {'op': 'Mul', 'name': 'final_mul'}),
        **result('result'),
    }, [
        ('input_2', 'mul', {'in': 0, 'out': 0}),
        ('input', 'sigmoid', {'in': 0, 'out': 0}),
        ('sigmoid', 'mul', {'in': 1, 'out': 0}),
        ('mul', 'result', {'in': 0, 'out': 0}),
    ], {})
    # The untouched copy serves as the reference.
    graph_ref = graph.copy()
    graph.stage = 'front'

    SwishWithSigmoidWithoutBeta().find_and_replace_pattern(graph)

    flag, resp = compare_graphs(graph, graph_ref, 'result')
    self.assertTrue(flag, resp)
def build_loop_graph(body_graph):
    """Build a front-phase graph containing a fake Loop op whose 'body' attribute is body_graph."""
    node_attrs = {
        **regular_op('input', {'op': 'Parameter'}),
        **regular_op('loop', {'op': 'Loop', 'body': body_graph, 'sub_graphs': ['body']}),
        **result('result'),
    }
    edge_list = [
        *connect_front('input', '0:loop'),
        *connect_front('loop:0', 'result'),
    ]
    graph = build_graph(node_attrs, edge_list)
    graph.stage = 'front'
    return graph
def setUp(self):
    """Common fixture: node names, constants and node attributes shared by the tests."""
    self.start_node_name = 'StatefulPartitionedCall/Preprocessor/unstack'
    self.end_node_name = 'StatefulPartitionedCall/Preprocessor/stack'
    self.end_node_name2 = 'StatefulPartitionedCall/Preprocessor/stack2'
    self.loop_start_node_name = 'prefix/map/while/Preprocessor/unstack'
    self.loop_end_node_name = 'prefix/map/while/Preprocessor/stack'
    self.mul_const = float32_array([0.025, 0.374, -0.45])
    self.sub_const = float32_array([2.0, 3.0, 4.0])

    self.nodes = {
        **regular_op('input', {'type': 'Parameter'}),
        **regular_op('mul', {'op': 'Mul', 'type': 'Multiply', 'name': 'my_mul'}),
        **regular_op('sub', {'op': 'Sub', 'type': 'Subtract', 'name': 'my_sub'}),
        **const('mul_const', self.mul_const),
        **const('sub_const', self.sub_const),
        **regular_op(self.start_node_name, {'op': 'Identity'}),
        **regular_op(self.end_node_name, {'op': 'Identity'}),
        **regular_op(self.end_node_name2, {'op': 'Identity'}),
        **regular_op('loop', {'op': 'Loop', 'body': None}),
        **regular_op('resize', {'type': 'Interpolate'}),
        **result('result'),
    }
    self.replacement_desc = {
        'start_nodes': [self.start_node_name],
        'end_nodes': [self.end_node_name, self.end_node_name2],
    }
class MishFusionTest(unittest.TestCase):
    """Tests for the MishFusion front transformation."""

    nodes = {
        **regular_op('input', {'type': 'Parameter'}),
        **regular_op('softplus', {'op': 'SoftPlus'}),
        **regular_op('tanh', {'op': 'Tanh'}),
        **regular_op('mul', {'op': 'Mul', 'name': 'final_mul'}),
        **result('result'),
    }

    edges = [
        ('input', 'softplus', {'in': 0, 'out': 0}),
        ('input', 'mul', {'in': 0, 'out': 0}),
        ('softplus', 'tanh', {'in': 0, 'out': 0}),
        ('tanh', 'mul', {'in': 1, 'out': 0}),
        ('mul', 'result', {'in': 0, 'out': 0}),
    ]

    def test_mish_fusion(self):
        """The x * tanh(softplus(x)) subgraph is replaced by a single Mish op."""
        graph = build_graph_with_edge_attrs(self.nodes, self.edges, {})
        graph_ref = build_graph(ref_nodes, ref_edges)
        graph.stage = 'front'

        MishFusion().find_and_replace_pattern(graph)

        flag, resp = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)
        self.assertTrue(len(graph.get_op_nodes(name='final_mul')) == 1 and
                        graph.get_op_nodes(name='final_mul')[0].op == 'Mish')

    def test_mish_fusion_different_source(self):
        """No fusion when Mul and SoftPlus consume different tensors."""
        graph = build_graph_with_edge_attrs({
            **regular_op('input', {'type': 'Parameter'}),
            **regular_op('input_2', {'type': 'Parameter'}),
            **regular_op('softplus', {'op': 'SoftPlus'}),
            **regular_op('tanh', {'op': 'Tanh'}),
            **regular_op('mul', {'op': 'Mul', 'name': 'final_mul'}),
            **result('result'),
        }, [
            ('input', 'softplus', {'in': 0, 'out': 0}),
            ('input_2', 'mul', {'in': 0, 'out': 0}),
            ('softplus', 'tanh', {'in': 0, 'out': 0}),
            ('tanh', 'mul', {'in': 1, 'out': 0}),
            ('mul', 'result', {'in': 0, 'out': 0}),
        ], {})
        graph_ref = graph.copy()
        graph.stage = 'front'

        MishFusion().find_and_replace_pattern(graph)

        flag, resp = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)
def test_backward_bfs_for_op_no_ops_detected(self):
    """backward_bfs_for_operation returns an empty list when no node matches the requested op type."""
    node_attrs = {
        **regular_op('input', {'op': 'Parameter'}),
        **regular_op('hsigmoid', {'op': 'HSigmoid'}),
        **result('result'),
    }
    edge_list = [
        ('input', 'hsigmoid', {'out': 0, 'in': 0}),
        ('hsigmoid', 'result', {'out': 0, 'in': 0}),
    ]
    graph = build_graph_with_edge_attrs(node_attrs, edge_list)
    graph.stage = 'front'

    found_nodes = backward_bfs_for_operation(Node(graph, 'result'), ['NonExistingOp'])
    self.assertEqual(len(found_nodes), 0)
def build_conv_graph():
    """Build a front-phase graph: Parameter -> Conv2D (NHWC layout, 1x1x1x1 random weights) -> Result."""
    node_attrs = {
        **const('weights', np.random.randn(1, 1, 1, 1)),
        **regular_op('input', {'op': 'Parameter'}),
        **regular_op('conv', {'op': 'Conv2D', 'layout': 'NHWC'}),
        **result('result'),
    }
    edge_list = [
        *connect_front('input', '0:conv'),
        *connect_front('weights', '1:conv'),
        *connect_front('conv:0', 'result'),
    ]
    graph = build_graph(node_attrs, edge_list)
    graph.stage = 'front'
    return graph
def test_serialize_old_api_map_parameter(self):
    """serialize_runtime_info should emit 'old_api_map' (order + element_type) for a Parameter node."""
    graph = build_graph({
        **regular_op('placeholder', {'type': 'Parameter', 'rt_info': RTInfo()}),
        **result('result'),
    }, [('placeholder', 'result')], {}, nodes_with_edges_only=True)

    param_node = Node(graph, 'placeholder')
    param_node.rt_info.info[('old_api_map', 0)] = OldAPIMap()
    param_node.rt_info.info[('old_api_map', 0)].old_api_transpose_parameter([0, 2, 3, 1])
    param_node.rt_info.info[('old_api_map', 0)].old_api_convert(np.float32)

    net = Element('net')
    serialize_runtime_info(param_node, net)
    serialize_res = str(tostring(net))

    self.assertTrue('name="old_api_map"' in serialize_res)
    self.assertTrue('version="0"' in serialize_res)
    self.assertTrue('order="0,2,3,1"' in serialize_res)
    self.assertTrue('element_type="f32"' in serialize_res)
    self.assertTrue(serialize_res.startswith("b'<net><rt_info>"))
    self.assertTrue(serialize_res.endswith("</rt_info></net>'"))

    # Conversion only, no transpose: the serialized order must be empty.
    param_node.rt_info.info[('old_api_map', 0)] = OldAPIMap()
    param_node.rt_info.info[('old_api_map', 0)].old_api_convert(np.float16)

    net = Element('net')
    serialize_runtime_info(param_node, net)
    serialize_res = str(tostring(net))

    self.assertTrue('name="old_api_map"' in serialize_res)
    self.assertTrue('version="0"' in serialize_res)
    self.assertTrue('order=""' in serialize_res)
    self.assertTrue('element_type="f16"' in serialize_res)
    self.assertTrue(serialize_res.startswith("b'<net><rt_info>"))
    self.assertTrue(serialize_res.endswith("</rt_info></net>'"))
def generate_offsets():
    """Create one memoryoffset node per time offset; a zero offset wires placeholder straight to concat."""
    offset_nodes = {}
    offset_edges = []
    for idx, offset in enumerate(time_offsets):
        name = 'memoryoffset_' + str(idx)
        # The node is created even for offset 0; only the edges differ.
        offset_nodes.update(**regular_op(name, {'type': None}))
        if offset != 0:
            offset_edges.append(('placeholder', name, {'out': 0, 'in': 0}))
            offset_edges.append((name, 'concat', {'out': 0, 'in': idx}))
        else:
            offset_edges.append(('placeholder', 'concat', {'out': 0, 'in': idx}))
    return offset_nodes, offset_edges
def test_backward_bfs_for_op_parallel_branch_op_detected(self):
    r"""
    input_1 -> hsigmoid_1 -> hsigmoid_2 -> \
                                            - Concat -> result
    input_2 -> hsigmoid_3 -> hsigmoid_4 -> /

    The search must return the first HSigmoid met on each branch: hsigmoid_2 and hsigmoid_4.
    """
    node_attrs = {
        **regular_op('input_1', {'op': 'Parameter'}),
        **regular_op('hsigmoid_1', {'op': 'HSigmoid'}),
        **regular_op('hsigmoid_2', {'op': 'HSigmoid'}),
        **regular_op('input_2', {'op': 'Parameter'}),
        **regular_op('hsigmoid_3', {'op': 'HSigmoid'}),
        **regular_op('hsigmoid_4', {'op': 'HSigmoid'}),
        **regular_op('concat', {'op': 'Concat'}),
        **result('result'),
    }
    edge_list = [
        ('input_1', 'hsigmoid_1', {'out': 0, 'in': 0}),
        ('hsigmoid_1', 'hsigmoid_2', {'out': 0, 'in': 0}),
        ('hsigmoid_2', 'concat', {'out': 0, 'in': 0}),
        ('input_2', 'hsigmoid_3', {'out': 0, 'in': 0}),
        ('hsigmoid_3', 'hsigmoid_4', {'out': 0, 'in': 0}),
        ('hsigmoid_4', 'concat', {'out': 0, 'in': 1}),
        ('concat', 'result', {'out': 0, 'in': 0}),
    ]
    graph = build_graph_with_edge_attrs(node_attrs, edge_list)
    graph.stage = 'front'

    found_nodes = backward_bfs_for_operation(Node(graph, 'result'), ['HSigmoid'])
    self.assertEqual(len(found_nodes), 2)
    self.assertSetEqual({found_nodes[0].id, found_nodes[1].id}, {'hsigmoid_2', 'hsigmoid_4'})
def test_hsigmoid_with_relu_mul_different_tensors(self):
    """No HSigmoid fusion when the pattern's Mul consumes a tensor from a different Parameter."""
    graph = build_graph_with_edge_attrs({
        **regular_op('input', {'type': 'Parameter'}),
        **regular_op('input_2', {'type': 'Parameter'}),
        **regular_op('add', {'op': 'Add'}),
        **regular_op('max', {'op': 'Maximum'}),
        **regular_op('min', {'op': 'Minimum'}),
        **regular_op('mul', {'op': 'Mul'}),
        **regular_op('mul_2', {'op': 'Mul', 'name': 'final_mul'}),
        **const('const_0', float_array([0.0])),
        **const('const_3', float_array([3.0])),
        **const('const_6', float_array([6.0])),
        **const('const_1_6', float_array([1.0 / 6.0])),
        **result('result'),
    }, [
        ('input_2', 'mul', {'in': 1, 'out': 0}),
        ('input', 'add', {'in': 0, 'out': 0}),
        ('const_3', 'add', {'in': 1, 'out': 0}),
        ('add', 'max', {'in': 0, 'out': 0}),
        ('const_0', 'max', {'in': 1, 'out': 0}),
        ('max', 'min', {'in': 0, 'out': 0}),
        ('const_6', 'min', {'in': 1, 'out': 0}),
        ('min', 'mul', {'in': 0, 'out': 0}),
        ('mul', 'mul_2', {'in': 0, 'out': 0}),
        ('const_1_6', 'mul_2', {'in': 1, 'out': 0}),
        ('mul_2', 'result', {'in': 0, 'out': 0}),
    ])
    # The untouched copy serves as the reference.
    graph_ref = graph.copy()
    graph.stage = 'front'

    HSigmoidWithReluMul().find_and_replace_pattern(graph)

    flag, resp = compare_graphs(graph, graph_ref, 'result')
    self.assertTrue(flag, resp)
def test_backward_bfs_for_op_parallel_branch_stop_op(self):
    r"""
    input_1 -> hsigmoid_1 -> hsigmoid_2 -> \
                                            - Concat -> result
    input_2 -> hsigmoid_3 -> ShapeOf    -> /

    Only hsigmoid_2 must be returned: hsigmoid_3 sits behind the banned 'ShapeOf' op,
    so that branch's traversal stops before reaching it.
    """
    node_attrs = {
        **regular_op('input_1', {'op': 'Parameter'}),
        **regular_op('hsigmoid_1', {'op': 'HSigmoid'}),
        **regular_op('hsigmoid_2', {'op': 'HSigmoid'}),
        **regular_op('input_2', {'op': 'Parameter'}),
        **regular_op('hsigmoid_3', {'op': 'HSigmoid'}),
        **regular_op('shapeof', {'op': 'ShapeOf'}),
        **regular_op('concat', {'op': 'Concat'}),
        **result('result'),
    }
    edge_list = [
        ('input_1', 'hsigmoid_1', {'out': 0, 'in': 0}),
        ('hsigmoid_1', 'hsigmoid_2', {'out': 0, 'in': 0}),
        ('hsigmoid_2', 'concat', {'out': 0, 'in': 0}),
        ('input_2', 'hsigmoid_3', {'out': 0, 'in': 0}),
        ('hsigmoid_3', 'shapeof', {'out': 0, 'in': 0}),
        ('shapeof', 'concat', {'out': 0, 'in': 1}),
        ('concat', 'result', {'out': 0, 'in': 0}),
    ]
    graph = build_graph_with_edge_attrs(node_attrs, edge_list)
    graph.stage = 'front'

    found_nodes = backward_bfs_for_operation(Node(graph, 'result'), ['HSigmoid'], ['ShapeOf'])
    self.assertEqual(len(found_nodes), 1)
    self.assertEqual(found_nodes[0].id, 'hsigmoid_2')
def build_parameter_result_graph():
    """Build the minimal front-phase graph: Parameter -> Result."""
    node_attrs = {
        **regular_op('input', {'op': 'Parameter'}),
        **result('result'),
    }
    graph = build_graph(node_attrs, [*connect_front('input', '0:result')])
    graph.stage = 'front'
    return graph
def test(self):
    """OneHotDepthNormalizer should insert a Reshape between the depth Const and OneHot."""
    node_attrs = {
        **regular_op('input', {'type': 'Parameter'}),
        **const('depth', int64_array([2])),
        **regular_op('onehot', {'type': 'OneHot', 'kind': 'op', 'op': 'OneHot'}),
        **regular_op('reshape', {'type': 'Reshape', 'kind': 'op', 'op': 'Reshape'}),
        **const('reshape_dims', int64_array([])),
        **result('result'),
    }

    graph = build_graph(node_attrs, [
        ('input', 'onehot'),
        ('depth', 'onehot'),
        ('onehot', 'result'),
    ])
    graph.graph['layout'] = 'NCHW'
    graph.stage = 'front'

    # Reference: depth goes through Reshape (with empty dims) before OneHot.
    graph_ref = build_graph(node_attrs, [
        ('input', 'onehot'),
        ('depth', 'reshape'),
        ('reshape_dims', 'reshape'),
        ('reshape', 'onehot'),
        ('onehot', 'result'),
    ])

    OneHotDepthNormalizer().find_and_replace_pattern(graph)

    flag, resp = compare_graphs(graph, graph_ref, 'result')
    self.assertTrue(flag, resp)
def test_reduce_axis_is_None(self):
    """ReduceAxisNormalizer should build a Rank -> Range chain feeding the Reduce's axis input."""
    graph = build_graph(nodes, edges, nodes_with_edges_only=True)
    graph.stage = 'front'

    ReduceAxisNormalizer().find_and_replace_pattern(graph)

    # Reference adds Rank/Range/begin/step nodes on top of the module-level fixture.
    ref_nodes = nodes.copy()
    ref_nodes.update({
        **regular_op('rank', {'op': 'Rank', 'type': None}),
        **regular_op('range', {'op': 'Range', 'type': 'Range'}),
        **regular_op('begin', {'type': 'Const', 'value': int64_array([0])}),
        **regular_op('step', {'type': 'Const', 'value': int64_array([1])}),
    })
    graph_ref = build_graph(ref_nodes, [
        *edges,
        *connect_front('parameter:0', 'rank'),
        *connect_front('begin:0', '0:range'),
        *connect_front('rank:0', '1:range'),
        *connect_front('step:0', '2:range'),
        *connect_front('range:0', '1:reduce'),
    ], nodes_with_edges_only=True)

    flag, resp = compare_graphs(graph, graph_ref, 'output', check_op_attrs=True)
    self.assertTrue(flag, resp)
class SoftplusFusionTest(unittest.TestCase):
    """Tests for the SoftplusFusion front transformation."""

    nodes = {
        **regular_op('input', {'type': 'Parameter'}),
        **regular_op('exp', {'op': 'Exp'}),
        **const('const_1', float_array([1.0])),
        **regular_op('add', {'op': 'Add'}),
        **regular_op('ln', {'op': 'Log', 'name': 'final_log'}),
        **result('result'),
    }

    edges = [
        ('input', 'exp', {'in': 0, 'out': 0}),
        ('const_1', 'add', {'in': 0, 'out': 0}),
        ('exp', 'add', {'in': 1, 'out': 0}),
        ('add', 'ln', {'in': 0, 'out': 0}),
        ('ln', 'result', {'in': 0, 'out': 0}),
    ]

    def test_softplus_fusion_test(self):
        """The log(1 + exp(x)) subgraph is replaced by a single SoftPlus op."""
        graph = build_graph_with_edge_attrs(self.nodes, self.edges, {})
        graph_ref = build_graph(ref_nodes, ref_edges)
        graph.stage = 'front'

        SoftplusFusion().find_and_replace_pattern(graph)

        flag, resp = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)
        self.assertTrue(len(graph.get_op_nodes(name='final_log')) == 1 and
                        graph.get_op_nodes(name='final_log')[0].op == 'SoftPlus')

    def test_softplus_fusion_test_wrong_const(self):
        """No fusion when the additive constant is not exactly 1.0."""
        graph = build_graph_with_edge_attrs(self.nodes, self.edges,
                                            {'const_1': {'value': float_array([0.9999])}})
        graph_ref = graph.copy()
        graph.stage = 'front'

        SoftplusFusion().find_and_replace_pattern(graph)

        flag, resp = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)
def test_mish_fusion_different_source(self):
    """No Mish fusion when Mul and SoftPlus consume different tensors — the graph must stay unchanged."""
    graph = build_graph_with_edge_attrs({
        **regular_op('input', {'type': 'Parameter'}),
        **regular_op('input_2', {'type': 'Parameter'}),
        **regular_op('softplus', {'op': 'SoftPlus'}),
        **regular_op('tanh', {'op': 'Tanh'}),
        **regular_op('mul', {'op': 'Mul', 'name': 'final_mul'}),
        **result('result'),
    }, [
        ('input', 'softplus', {'in': 0, 'out': 0}),
        ('input_2', 'mul', {'in': 0, 'out': 0}),
        ('softplus', 'tanh', {'in': 0, 'out': 0}),
        ('tanh', 'mul', {'in': 1, 'out': 0}),
        ('mul', 'result', {'in': 0, 'out': 0}),
    ], {})
    # The untouched copy serves as the reference.
    graph_ref = graph.copy()
    graph.stage = 'front'

    MishFusion().find_and_replace_pattern(graph)

    flag, resp = compare_graphs(graph, graph_ref, 'result')
    self.assertTrue(flag, resp)
def create_fake_quantize_net(self, il, ih, num_bits, narrow_range, nudged_il, nudged_ih,
                             expected_step, ir_version, use_new_frontend):
    """Build the TF fake-quantize model and, for the old frontend on IR v10+, its reference graph."""
    # Original TF model.
    import tensorflow as tf

    tf.compat.v1.reset_default_graph()
    with tf.compat.v1.Session() as sess:
        data = tf.compat.v1.placeholder(tf.float32, [11], 'parameter')
        input_min = tf.constant(il, name='input_min')
        input_max = tf.constant(ih, name='input_max')
        tf.quantization.fake_quant_with_min_max_vars(data, input_min, input_max, num_bits,
                                                     narrow_range, 'fq')
        tf.compat.v1.global_variables_initializer()
        tf_net = sess.graph_def

    # Reference graph to compare with IR.
    ref_net = None
    if check_ir_version(10, None, ir_version) and not use_new_frontend:
        levels = 2 ** num_bits - int(narrow_range)

        # data (shape, value) -> const (shape, value) -> data (shape, no value)
        def const_for_layer_tests(name, value):
            return {
                **{name + '_dd': {'kind': 'data', 'value': value, 'shape': value.shape}},
                **{name: {'kind': 'op', 'type': 'Const'}},
                **shaped_data(name + '_d', int64_array(value.shape)),
            }

        def connect_const_for_layer_tests(first_tensor_name, second_tensor_name):
            return [
                *connect_front(first_tensor_name + '_dd', first_tensor_name),
                *connect(first_tensor_name, second_tensor_name),
            ]

        ref_node_attrs = {
            **regular_op_with_shaped_data('parameter', [11], {'type': 'Parameter'}),
            **const_for_layer_tests('il', np.array([nudged_il], dtype=np.float32)),
            **const_for_layer_tests('ih', np.array([nudged_ih], dtype=np.float32)),
            **const_for_layer_tests('ol', np.array([nudged_il], dtype=np.float32)),
            **const_for_layer_tests('oh', np.array([nudged_ih], dtype=np.float32)),
            **regular_op_with_shaped_data('fq', [11], {'type': 'FakeQuantize', 'levels': levels}),
            **regular_op('result', {'type': 'Result'}),
        }
        ref_edge_list = [
            *connect('parameter', '0:fq'),
            *connect_const_for_layer_tests('il', '1:fq'),
            *connect_const_for_layer_tests('ih', '2:fq'),
            *connect_const_for_layer_tests('ol', '3:fq'),
            *connect_const_for_layer_tests('oh', '4:fq'),
            *connect('fq', 'result'),
        ]
        ref_net = build_graph(ref_node_attrs, ref_edge_list)

    return tf_net, ref_net
def build_body_graph(pre_processing: str):
    """Build a Loop body graph; 'pre_processing' places the Mul/Sub chain ('no', 'trailing', else leading)."""
    node_attrs = {
        **regular_op('input', {'type': 'Parameter', 'op': 'Parameter'}),
        **regular_op('mul', {'op': 'Mul', 'type': 'Multiply', 'name': 'my_body_mul'}),
        **regular_op('sub', {'op': 'Sub', 'type': 'Subtract', 'name': 'my_body_sub'}),
        **const('body_mul_const', self.mul_const),
        **const('body_sub_const', self.sub_const),
        **regular_op(self.loop_start_node_name, {'op': 'Identity'}),
        **regular_op(self.loop_end_node_name, {'op': 'Identity'}),
        **regular_op('resize', {'type': 'Interpolate'}),
        **result('result'),
    }

    if pre_processing == 'no':
        # No Mul/Sub chain at all: start -> resize -> end.
        edge_list = [
            *connect_front('input', self.loop_start_node_name),
            *connect_front(self.loop_start_node_name, 'resize'),
            *connect_front('resize', self.loop_end_node_name),
            *connect_front(self.loop_end_node_name, 'result'),
        ]
    elif pre_processing == 'trailing':
        # Mul/Sub applied after the resize section.
        edge_list = [
            *connect_front('input', self.loop_start_node_name),
            *connect_front(self.loop_start_node_name, 'resize'),
            *connect_front('resize', self.loop_end_node_name),
            *connect_front(self.loop_end_node_name, '0:mul'),
            *connect_front('body_mul_const', '1:mul'),
            *connect_front('body_sub_const', '0:sub'),
            *connect_front('mul', '1:sub'),
            *connect_front('sub', 'result'),
        ]
    else:
        # Mul/Sub applied before the resize section.
        edge_list = [
            *connect_front('input', '0:mul'),
            *connect_front('body_mul_const', '1:mul'),
            *connect_front('body_sub_const', '0:sub'),
            *connect_front('mul', '1:sub'),
            *connect_front('sub', self.loop_start_node_name),
            *connect_front(self.loop_start_node_name, 'resize'),
            *connect_front('resize', self.loop_end_node_name),
            *connect_front(self.loop_end_node_name, 'result'),
        ]

    graph = build_graph(node_attrs, edge_list, nodes_with_edges_only=True)
    graph.stage = 'front'
    return graph
def test_swish_with_sigmoid_with_beta_test(self):
    """SwishWithSigmoidWithBeta fuses the pattern into a Swish op that keeps an extra 'beta' input."""
    graph = build_graph_with_edge_attrs(self.nodes, self.edges, {})

    # Reference: the shared ref graph plus a 'beta' Parameter wired into Swish.
    new_ref_nodes = ref_nodes.copy()
    new_ref_nodes.update(**regular_op('beta', {'type': 'Parameter'}))
    graph_ref = build_graph(new_ref_nodes, ref_edges + [('beta', 'swish')])
    graph.stage = 'front'

    SwishWithSigmoidWithBeta().find_and_replace_pattern(graph)

    flag, resp = compare_graphs(graph, graph_ref, 'result')
    self.assertTrue(flag, resp)
    self.assertTrue(len(graph.get_op_nodes(name='final_mul')) == 1 and
                    graph.get_op_nodes(name='final_mul')[0].op == 'Swish')
def setUp(self):
    """Build NonMaxSuppression graphs with one, two and three connected outputs for the tests."""
    node_attrs = {
        **regular_op_with_shaped_data('boxes', [10, 100, 4], {'type': 'Parameter'}),
        **regular_op_with_shaped_data('scores', [10, 5, 100], {'type': 'Parameter'}),
        **valued_const_with_data('max_output_per_class', int64_array(7)),
        **regular_op('nms', {'op': 'NonMaxSuppression', 'type': 'NonMaxSuppression', 'name': 'nms'}),
        **empty_data('nms_data_0'),
        **empty_data('nms_data_1'),
        **empty_data('nms_data_2'),
        **result('output_0'),
        **result('output_1'),
        **result('output_2'),
    }

    # Use this WA for correct creating operation with multiple outputs
    # (nms output goes through an explicit data node in front phase).
    base_edges = [
        *connect('boxes', '0:nms'),
        *connect('scores', '1:nms'),
        *connect('max_output_per_class', '2:nms'),
        *connect('nms:0', 'nms_data_0', front_phase=True),
        *connect('nms_data_0', 'output_0', front_phase=True),
    ]
    second_out_edges = [
        *connect('nms:1', 'nms_data_1', front_phase=True),
        *connect('nms_data_1', 'output_1', front_phase=True),
    ]
    third_out_edges = [
        *connect('nms:2', 'nms_data_2', front_phase=True),
        *connect('nms_data_2', 'output_2', front_phase=True),
    ]

    self.graph = build_graph(node_attrs, base_edges, nodes_with_edges_only=True)
    self.graph_nms_5_2_outs = build_graph(node_attrs, base_edges + second_out_edges,
                                          nodes_with_edges_only=True)
    self.graph_nms_5_3_outs = build_graph(node_attrs,
                                          base_edges + second_out_edges + third_out_edges,
                                          nodes_with_edges_only=True)
class HSigmoidWithReluMulTest(unittest.TestCase):
    """Tests for the HSigmoidWithReluMul front transformation."""

    nodes = {
        **regular_op('input', {'type': 'Parameter'}),
        **regular_op('add', {'op': 'Add'}),
        **regular_op('relu', {'op': 'ReLU'}),
        **regular_op('min', {'op': 'Minimum'}),
        **regular_op('mul', {'op': 'Mul', 'name': 'final_mul'}),
        **const('add_const', float_array([3.0])),
        **const('min_const', float_array([6.0])),
        **const('mul_const', float_array([1.0 / 6.0])),
        **result('result'),
    }

    edges = [
        ('input', 'add', {'in': 0, 'out': 0}),
        ('add_const', 'add', {'in': 1, 'out': 0}),
        ('add', 'relu', {'in': 0, 'out': 0}),
        ('relu', 'min', {'in': 0, 'out': 0}),
        ('min_const', 'min', {'in': 1, 'out': 0}),
        ('min', 'mul', {'in': 0, 'out': 0}),
        ('mul_const', 'mul', {'in': 1, 'out': 0}),
        ('mul', 'result', {'in': 0, 'out': 0}),
    ]

    def test_hsigmoid_with_relu_mul(self):
        """The min(relu(x + 3), 6) * 1/6 subgraph is replaced by a single HSigmoid op."""
        graph = build_graph_with_edge_attrs(self.nodes, self.edges, {})
        graph_ref = build_graph(ref_nodes, ref_edges)
        graph.stage = 'front'

        HSigmoidWithReluMul().find_and_replace_pattern(graph)

        flag, resp = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)
        self.assertTrue(len(graph.get_op_nodes(name='final_mul')) == 1 and
                        graph.get_op_nodes(name='final_mul')[0].op == 'HSigmoid')
        self.assertTrue(graph.get_op_nodes(name='final_mul')[0].out_nodes()[0].node == 'result')

    def test_hsigmoid_with_relu_mul_wrong_constant(self):
        """No fusion when the Add constant does not match the pattern."""
        graph = build_graph_with_edge_attrs(self.nodes, self.edges,
                                            {'add_const': {'value': float_array([0.00001])}})
        graph_ref = graph.copy()
        graph.stage = 'front'

        HSigmoidWithReluMul().find_and_replace_pattern(graph)

        flag, resp = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)

    def test_hsigmoid_with_relu_mul_different_tensors(self):
        """No fusion when the pattern's Mul consumes a tensor from a different Parameter."""
        graph = build_graph_with_edge_attrs({
            **regular_op('input', {'type': 'Parameter'}),
            **regular_op('input_2', {'type': 'Parameter'}),
            **regular_op('add', {'op': 'Add'}),
            **regular_op('max', {'op': 'Maximum'}),
            **regular_op('min', {'op': 'Minimum'}),
            **regular_op('mul', {'op': 'Mul'}),
            **regular_op('mul_2', {'op': 'Mul', 'name': 'final_mul'}),
            **const('const_0', float_array([0.0])),
            **const('const_3', float_array([3.0])),
            **const('const_6', float_array([6.0])),
            **const('const_1_6', float_array([1.0 / 6.0])),
            **result('result'),
        }, [
            ('input_2', 'mul', {'in': 1, 'out': 0}),
            ('input', 'add', {'in': 0, 'out': 0}),
            ('const_3', 'add', {'in': 1, 'out': 0}),
            ('add', 'max', {'in': 0, 'out': 0}),
            ('const_0', 'max', {'in': 1, 'out': 0}),
            ('max', 'min', {'in': 0, 'out': 0}),
            ('const_6', 'min', {'in': 1, 'out': 0}),
            ('min', 'mul', {'in': 0, 'out': 0}),
            ('mul', 'mul_2', {'in': 0, 'out': 0}),
            ('const_1_6', 'mul_2', {'in': 1, 'out': 0}),
            ('mul_2', 'result', {'in': 0, 'out': 0}),
        ])
        graph_ref = graph.copy()
        graph.stage = 'front'

        HSigmoidWithReluMul().find_and_replace_pattern(graph)

        flag, resp = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)