def test_useless_pad_constant_input(self):
    """RemoveUselessPad must drop a Pad whose pads_begin/pads_end are all zeros."""
    nodes = {
        **regular_op_with_shaped_data('placeholder', [1, 10, 20, 3], {'type': 'Parameter'}),
        **regular_op_with_shaped_data('pad', [1, 10, 20, 3], {'type': 'Pad', 'op': 'Pad'}),
        **valued_const_with_data('pads_begin', int64_array([0, 0, 0, 0])),
        **valued_const_with_data('pads_end', int64_array([0, 0, 0, 0])),
        **valued_const_with_data('fill_value', np.array(1)),
        **result('result'),
    }
    edges = [
        *connect('placeholder', '0:pad'),
        *connect('pads_begin', '1:pad'),
        *connect('pads_end', '2:pad'),
        *connect('fill_value', '3:pad'),
        *connect('pad', 'result'),
    ]
    graph = build_graph(nodes, edges)
    RemoveUselessPad().find_and_replace_pattern(graph)
    # after the transformation the Pad is gone: Parameter feeds Result directly
    ref_graph = build_graph(nodes, [*connect('placeholder', 'result')])

    (flag, resp) = compare_graphs(graph, ref_graph, 'result')
    self.assertTrue(flag, resp)
def nodes(self, input_shape, transpose_shape, fq_shape, is_input_const):
    """Build the common node set for FakeQuantize/Transpose tests.

    The 'input' node is a shaped constant when `is_input_const` is True,
    otherwise a Parameter with the same shape.
    """
    nodes = {
        **valued_const_with_data('il', np.array([[[[0]]]])),
        **valued_const_with_data('ih', np.array([[[[255]]]])),
        **valued_const_with_data('ol', np.array([[[[0]]]])),
        **valued_const_with_data('oh', np.array([[[[255]]]])),
        **regular_op_with_shaped_data(
            'FQ', fq_shape, dict(type='FakeQuantize', op='FakeQuantize',
                                 infer=FakeQuantize.infer)),
        **valued_const_with_data('order', int64_array([0, 2, 3, 1])),
        **regular_op_with_shaped_data(
            'transpose', transpose_shape,
            dict(type='Transpose', op='Transpose', infer=Transpose.infer)),
        **regular_op_with_shaped_data('relu', fq_shape, dict(type='Relu', op='Relu')),
        **result(),
    }
    if is_input_const:
        input_node = shaped_const_with_data('input', input_shape)
    else:
        input_node = regular_op_with_shaped_data(
            'input', input_shape, dict(type='Parameter', op='Parameter'))
    nodes.update(input_node)
    return nodes
def test_no_max_input(self):
    """A Clamp with only a min input must be normalized into a single Maximum."""
    nodes = {
        **regular_op_with_shaped_data('placeholder', [1, 3, 20, 20], {'type': 'Parameter'}),
        **regular_op_with_shaped_data('a_clamp', [1, 3, 20, 20], {'type': None, 'op': 'Clamp'}),
        **regular_op_with_shaped_data('maximum', [1, 3, 20, 20],
                                      {'type': 'Maximum', 'op': 'Maximum'}),
        **valued_const_with_data('min', np.array(-3.5)),
        **result('result'),
    }
    edges = [
        *connect('placeholder', '0:a_clamp'),
        *connect('min', '1:a_clamp'),
        *connect('a_clamp', 'result'),
    ]
    graph = build_graph(nodes, edges)
    ClampNormalizer().find_and_replace_pattern(graph)
    # reference: Clamp replaced by Maximum(placeholder, min)
    ref_graph = build_graph(nodes, [
        *connect('placeholder', '0:maximum'),
        *connect('min', '1:maximum'),
        *connect('maximum', 'result')
    ])

    (flag, resp) = compare_graphs(graph, ref_graph, 'result')
    self.assertTrue(flag, resp)
def test_auto_disable_nhwc_to_nchw(self):
    """With cmd_params.auto_disable_nhwc_to_nchw set, PreserveRuntimeInfo must leave
    the graph unchanged and still attach old_api_map_order rt_info entries
    to the Parameter and the Result."""
    shape_len = 4
    shape = np.array(range(shape_len))
    add_shape = shape
    graph_nodes = {
        **regular_op_with_shaped_data('placeholder1', shape,
                                      {'type': 'Parameter', 'rt_info': RTInfo(), 'shape': shape}),
        **regular_op_with_shaped_data('placeholder2', shape,
                                      {'type': 'Parameter', 'rt_info': RTInfo(), 'shape': shape}),
        **regular_op_with_shaped_data('result', shape,
                                      {'type': 'Result', 'rt_info': RTInfo(), 'shape': shape}),
        **regular_op_with_shaped_data('add', add_shape,
                                      {'type': 'Add', 'op': 'Add', 'infer': copy_shape_infer}),
    }

    # NOTE(review): 'edges' is not defined in this test — presumably a
    # module-level edge list shared by the test class; confirm.
    graph = build_graph(graph_nodes, edges)
    graph.graph['cmd_params'].auto_disable_nhwc_to_nchw = True
    graph_ref = build_graph(graph_nodes, edges)

    param_node = Node(graph, 'placeholder1')
    result_node = Node(graph, 'result')

    PreserveRuntimeInfo().find_and_replace_pattern(graph)

    (flag, resp) = compare_graphs(graph, graph_ref, 'result')
    self.assertTrue(flag, resp)

    # the Parameter carries the NCHW->NHWC inverse order...
    rt_info = param_node.rt_info.info
    old_api_map = rt_info[('old_api_map_order', 0)].info
    self.assertTrue(np.array_equal(old_api_map['inverse_order'], [0, 2, 3, 1]))

    # ...and the Result carries the NHWC->NCHW order
    rt_info = result_node.rt_info.info
    old_api_map = rt_info[('old_api_map_order', 0)].info
    self.assertTrue(np.array_equal(old_api_map['order'], [0, 3, 1, 2]))
def test_gather_tree_normalizer(self):
    """GatherTreeNormalizer must insert a Squeeze so the 1D input on port 3
    (end_token) becomes a scalar."""
    nodes = {
        **regular_op_with_shaped_data('data_0', [100, 1, 10], {'type': 'Parameter'}),
        **regular_op_with_shaped_data('data_1', [100, 1, 10], {'type': 'Parameter'}),
        **regular_op_with_shaped_data('data_2', [1], {'type': 'Parameter'}),
        **regular_op_with_shaped_data('gather_tree', [1], {'type': 'GatherTree'}),
        **valued_const_with_data('const', np.array([2])),
        **result('result'),
    }
    edges = [*connect('data_0', '0:gather_tree'),
             *connect('data_1', '1:gather_tree'),
             *connect('data_2', '2:gather_tree'),
             *connect('const', '3:gather_tree'),
             *connect('gather_tree', 'result'),
             ]
    # reference: 'const' goes through Squeeze(axis=0) before port 3
    ref_edges = [*connect('data_0', '0:gather_tree'),
                 *connect('data_1', '1:gather_tree'),
                 *connect('data_2', '2:gather_tree'),
                 *connect('const', '0:squeeze'),
                 *connect('squeeze_axis', '1:squeeze'),
                 *connect('squeeze', '3:gather_tree'),
                 *connect('gather_tree', 'result'), ]
    ref_nodes = nodes.copy()
    ref_nodes.update({**valued_const_with_data('squeeze_axis', int64_array([0])),
                      **regular_op_with_shaped_data('squeeze', [], {'type': 'Squeeze'})})
    graph = build_graph(nodes, edges)
    GatherTreeNormalizer().find_and_replace_pattern(graph)
    # run shape inference to make sure that shape overriding happened
    shape_inference(graph)

    ref_graph = build_graph(ref_nodes, ref_edges)

    (flag, resp) = compare_graphs(graph, ref_graph, 'result')
    self.assertTrue(flag, resp)
def test_div_with_integer(self):
    """Div must stay untouched when the divisor constant is of an integer dtype."""
    # Test where transformation should not be applied because the divisor is integer
    graph = build_graph(
        {
            **regular_op_with_shaped_data('parameter', [1, 227, 227, 3],
                                          {'type': 'Parameter', 'data_type': np.int32}),
            **valued_const_with_data('const', np.array([-1.], dtype=np.int32)),
            **regular_op_with_shaped_data('div', None,
                                          {'op': 'Div', 'type': 'Divide', 'name': 'my_div'}),
            **result()
        },
        [
            *connect('parameter:0', '0:div'),
            *connect_data('const:0', '1:div'),
            *connect('div', 'output'),
        ])
    graph_ref = graph.copy()
    Div().find_and_replace_pattern(graph)

    (flag, resp) = compare_graphs(graph, graph_ref, 'output', check_op_attrs=True)
    self.assertTrue(flag, resp)
def test_v7_group_convolution_resolver_weight_are_in_the_right_layout(
        self):
    """No changes expected: the weights already have the v7 grouped layout."""
    nodes = {
        **regular_op_with_shaped_data('input', [1, 3, 224, 224], {'type': 'Parameter'}),
        **valued_const_with_data('weights', np.ones([24, 1, 7, 7])),
        **regular_op_with_shaped_data('convolution', None,
                                      {'type': 'Convolution', 'group': 3, 'output': 24}),
        **result(),
    }
    edges = [
        *connect('input', '0:convolution'),
        *connect('weights', '1:convolution'),
        *connect('convolution', 'output'),
    ]
    graph = build_graph(nodes, edges)
    V7ConvolutionWithGroupsResolver().find_and_replace_pattern(graph)
    # the reference graph is identical: the transformation must be a no-op here
    graph_ref = build_graph(nodes, edges)

    (flag, resp) = compare_graphs(graph, graph_ref, last_node='output', check_op_attrs=True)
    self.assertTrue(flag, resp)
def test_reshape_on_the_B_input(self, in1_shape, in2_shape, reshape_pattern, transpose_a,
                                transpose_b, updated_pattern):
    """SmartReshape must rewrite the H/C Reshape pattern feeding MatMul's B input
    (parameterized with the expected updated pattern)."""
    nodes = {
        **regular_op_with_shaped_data('in_1', in1_shape, dict(type='Parameter', op='Parameter')),
        **regular_op_with_shaped_data('in_2', in2_shape, dict(type='Parameter', op='Parameter')),
        **valued_const_with_data('dim', int64_array(reshape_pattern)),
        **op_with_empty_data('reshape',
                             dict(type='Reshape', op='Reshape', infer=Reshape.infer,
                                  need_shape_inference=True)),
        **op_with_empty_data('matmul',
                             dict(type='MatMul', op='MatMul', infer=MatMul.infer,
                                  need_shape_inference=True, transpose_a=transpose_a,
                                  transpose_b=transpose_b, dim_attrs={})),
        **result(),
    }
    edges = [
        *connect('in_1:0', '0:matmul'),
        *connect('in_2:0', '0:reshape'),
        *connect('dim:0', '1:reshape'),
        *connect('reshape:0', '1:matmul'),
        *connect('matmul:0', 'output'),
    ]
    graph = build_graph(nodes_attrs=nodes, edges=edges, cli=Namespace(static_shape=True))
    graph.clean_up()
    SmartReshape_HC_Reshape_MatMul().find_and_replace_pattern(graph)
    graph.clean_up()

    # the reference differs only in the Reshape pattern value (const and its data node)
    graph_ref = build_graph(nodes_attrs=nodes, edges=edges, update_attributes={
        'dim': {'value': int64_array(updated_pattern)},
        'dim_d': {'value': int64_array(updated_pattern)}})
    graph_ref.clean_up()

    (flag, resp) = compare_graphs(graph, graph_ref, 'output', check_op_attrs=True)
    self.assertTrue(flag, resp)
def test_backward_bfs_multi_consumer_data_nodes(self):
    """common_bfs must follow a multi-consumer data node only when
    follow_multi_consumer_data_nodes=True."""
    # Placeholder-> Mul -> Result
    # Const -/ \- Result2
    graph = build_graph(
        {
            **regular_op_with_shaped_data('parameter', [1], {'op': 'Parameter'}),
            **valued_const_with_data('const', int64_array([5])),
            **regular_op_with_shaped_data('mul', [1], {'op': 'Mul'}),
            **result('result'),
            **result('result2'),
        },
        [
            *connect('parameter', '0:mul'),
            *connect('const', '1:mul'),
            *connect('mul:0', 'result'),
            *connect_data('mul', 'result2'),
        ])

    # with the flag set, the traversal passes through the two-consumer data node
    res = common_bfs(Node(graph, 'result'), ['Mul'], ['Parameter'],
                     is_backward=True, attr_to_check='op',
                     follow_multi_consumer_data_nodes=True)
    self.assertTrue(
        len(res) == 1, 'The multi-consumer data node "mul_d" was not followed')

    # by default the traversal stops at multi-consumer data nodes
    res = common_bfs(Node(graph, 'result'), ['Mul'], ['Parameter'],
                     is_backward=True, attr_to_check='op')
    self.assertTrue(
        len(res) == 0, 'The multi-consumer data node "mul_d" was followed')
def nodes_dict(original, transformed=None, levels=255, data=None,
               il=(-127,), ih=(127,), ol=(-127,), oh=(127,)):
    """Build the common node set for FakeQuantize weight-compression tests.

    :param original: dtype of the original (float) weights
    :param transformed: dtype the Cast converts to; defaults to `original`
    :param levels: number of FakeQuantize quantization levels
    :param data: explicit weight values; defaults to ones of shape [1, 2, 3, 4]
    :param il: FQ input-low limit values
    :param ih: FQ input-high limit values
    :param ol: FQ output-low limit values
    :param oh: FQ output-high limit values
    :return: dict of node attributes suitable for build_graph
    """
    # NOTE: the limit defaults are tuples rather than lists to avoid the
    # shared mutable-default-argument pitfall; np.array() accepts either,
    # so callers passing lists are unaffected.
    shape = [1, 2, 3, 4] if data is None else np.array(data).shape
    data = np.ones(shape, dtype=original) if data is None else np.array(data, dtype=original)
    int_data = data.astype(dtype=np.int8)
    transformed = transformed if transformed is not None else original

    return {
        **valued_const_with_data('weights', data),
        **valued_const_with_data('int_weights', int_data),

        **regular_op_with_shaped_data(
            'cast', shape, {'type': 'Convert', 'op': 'Cast', 'infer': Cast.infer,
                            'dst_type': transformed}),

        **valued_const_with_data('il', np.array(il)),
        **valued_const_with_data('ih', np.array(ih)),
        **valued_const_with_data('ol', np.array(ol)),
        **valued_const_with_data('oh', np.array(oh)),

        **regular_op_with_shaped_data(
            'FQ', shape, {'type': 'FakeQuantize', 'infer': FakeQuantize.infer,
                          'stop_value_propagation': True, 'levels': levels,
                          'op': 'FakeQuantize'}),

        **valued_const_with_data('zp', np.array([0])),
        **valued_const_with_data('scale', np.array([1])),

        **regular_op_with_shaped_data(
            'sub', shape, {'type': 'Subtract', 'op': 'Sub',
                           'infer': lambda node: eltwise_infer(node, Sub.operation)}),

        **regular_op_with_shaped_data(
            'mul', shape, {'type': 'Multiply', 'op': 'Mul',
                           'infer': lambda node: eltwise_infer(node, Mul.operation)}),

        **result()
    }
def create_einsum_graph(input_shapes: list, equation: str) -> Graph:
    """Construct a graph with one Einsum node fed by Parameter inputs.

    :param input_shapes: one shape per Einsum input
    :param equation: the Einsum equation string stored on the node
    :return: the built graph with a single 'output' Result
    """
    assert len(input_shapes) > 0, "Einsum node must have at least one input"

    nodes = {}
    edges = []
    # one Parameter per input shape, wired to the matching Einsum port
    for idx, shape in enumerate(input_shapes):
        name = 'input' + str(idx)
        nodes.update(regular_op_with_shaped_data(
            name, shape, {'op': 'Parameter', 'type': 'Parameter'}))
        edges += connect(name, str(idx) + ":einsum_node")

    nodes.update(regular_op_with_shaped_data(
        'einsum_node', None, {'op': 'Einsum', 'type': 'Einsum', 'equation': equation}))
    nodes.update(result('output'))
    edges += connect('einsum_node', 'output')

    return build_graph(nodes, edges, nodes_with_edges_only=True)
def get_nodes(shape, axis=1):
    """Return node attributes for ReverseChannels propagation tests.

    :param shape: shape of the main data path
    :param axis: channel axis stored on the ReverseChannels node
    """
    return {
        **regular_op_with_shaped_data('placeholder1', shape,
                                      {'type': 'Parameter', 'shape': shape, 'rt_info': RTInfo()}),
        **regular_op_with_shaped_data('placeholder2', [1, 1, 1, 1],
                                      {'type': 'Parameter', 'shape': [1, 1, 1, 1]}),
        **regular_op_with_shaped_data('mul', shape, {'type': 'Multiply'}),
        **regular_op_with_shaped_data('reverse_channels', shape,
                                      {'op': 'ReverseChannels', 'type': None,
                                       'axis': int64_array(axis)}),
        **regular_op_with_shaped_data('pad', shape, {'type': 'Pad'}),
        **result('result'),
    }
def create_fake_quantize_net(self, il, ih, num_bits, narrow_range, nudged_il, nudged_ih,
                             expected_step, ir_version, use_new_frontend):
    """Build the TF fake-quantize model and its expected IR reference graph.

    Returns (tf_net, ref_net); ref_net is None unless IR v10 applies and the
    legacy frontend is used.
    """
    # original tf model
    import tensorflow as tf

    tf.compat.v1.reset_default_graph()
    with tf.compat.v1.Session() as sess:
        data = tf.compat.v1.placeholder(tf.float32, [11], 'parameter')
        input_min = tf.constant(il, name='input_min')
        input_max = tf.constant(ih, name='input_max')
        tf.quantization.fake_quant_with_min_max_vars(data, input_min, input_max, num_bits,
                                                     narrow_range, 'fq')
        tf.compat.v1.global_variables_initializer()
        tf_net = sess.graph_def

    # reference graph to compare with IR
    ref_net = None
    if check_ir_version(10, None, ir_version) and not use_new_frontend:
        levels = 2 ** num_bits - int(narrow_range)

        # data (shape, value) -> const (shape, value) -> data (shape, no value)
        const_for_layer_tests = lambda name, value: {
            **{name + '_dd': {'kind': 'data', 'value': value, 'shape': value.shape}},
            **{name: {'kind': 'op', 'type': 'Const'}},
            **shaped_data(name + '_d', int64_array(value.shape))}

        connect_const_for_layer_tests = lambda first_tensor_name, second_tensor_name: [
            *connect_front(first_tensor_name + '_dd', first_tensor_name),
            *connect(first_tensor_name, second_tensor_name)]

        nodes = {
            **regular_op_with_shaped_data('parameter', [11], {'type': 'Parameter'}),
            **const_for_layer_tests('il', np.array([nudged_il], dtype=np.float32)),
            **const_for_layer_tests('ih', np.array([nudged_ih], dtype=np.float32)),
            **const_for_layer_tests('ol', np.array([nudged_il], dtype=np.float32)),
            **const_for_layer_tests('oh', np.array([nudged_ih], dtype=np.float32)),
            **regular_op_with_shaped_data('fq', [11], {'type': 'FakeQuantize',
                                                       'levels': levels}),
            **regular_op('result', {'type': 'Result'}),
        }
        edges = [
            *connect('parameter', '0:fq'),
            *connect_const_for_layer_tests('il', '1:fq'),
            *connect_const_for_layer_tests('ih', '2:fq'),
            *connect_const_for_layer_tests('ol', '3:fq'),
            *connect_const_for_layer_tests('oh', '4:fq'),
            *connect('fq', 'result'),
        ]
        ref_net = build_graph(nodes, edges)

    return tf_net, ref_net
def test_broadcast_with_range_positive_test(self):
    """ExpandRangeConstant must replace the arange-valued constant feeding
    Broadcast with a Range subgraph driven by the target shape."""
    graph = build_graph({
        **regular_op_with_shaped_data('shape', [2], {'type': 'Parameter'}),
        **valued_const_with_data('value', np.arange(0, 384).reshape((1, 384))),
        **regular_op_with_empty_data('bc', {'type': 'Broadcast'}),
        **result(),
    }, [
        *connect('value', '0:bc'),
        *connect('shape', '1:bc'),
        *connect('bc', 'output'),
    ], nodes_with_edges_only=True)
    ExpandRangeConstant().find_and_replace_pattern(graph)

    graph_ref = build_graph({
        **regular_op_with_shaped_data('shape', [2], {'type': 'Parameter'}),
        # start
        **valued_const_with_data('start', np.array(0)),
        # limit: last element of the target shape, fetched via Gather
        **valued_const_with_data('minus_one', np.array(-1)),
        **valued_const_with_data('zero', np.array(0)),
        **regular_op_with_empty_data('range_dim', {'type': 'Gather'}),
        # delta
        **valued_const_with_data('delta', np.array(1)),
        **regular_op_with_empty_data('range', {'type': 'Range'}),
        # keep dims
        **valued_const_with_data('axes', np.array([0])),
        **regular_op_with_empty_data('keep_shape', {'type': 'Unsqueeze'}),
        **regular_op_with_empty_data('bc', {'type': 'Broadcast'}),
        **result(),
    }, [
        *connect('start', '0:range'),
        *connect('shape', '0:range_dim'),
        *connect('minus_one', '1:range_dim'),
        *connect('zero', '2:range_dim'),
        *connect('range_dim', '1:range'),
        *connect('delta', '2:range'),
        *connect('range', '0:keep_shape'),
        *connect('axes', '1:keep_shape'),
        *connect('keep_shape', '0:bc'),
        *connect_data('shape', '1:bc'),
        *connect('bc', 'output'),
    ], nodes_with_edges_only=True)

    (flag, resp) = compare_graphs(graph, graph_ref, 'output', check_op_attrs=True)
    self.assertTrue(flag, resp)
def test_negative(self):
    """The transformation must not touch a Concat whose inputs are all non-empty."""
    node_attrs = {
        **shaped_const_with_data('input_0', [1]),
        **shaped_const_with_data('input_1', [1]),
        **shaped_const_with_data('input_2', [1]),
        **shaped_const_with_data('input_3', [1]),
        **regular_op_with_shaped_data('concat', [4], {'type': 'Concat'}),
        **result(),
    }
    edge_list = [
        *connect('input_0', '0:concat'),
        *connect('input_1', '1:concat'),
        *connect('input_2', '2:concat'),
        *connect('input_3', '3:concat'),
        *connect('concat', 'output'),
    ]

    graph = build_graph(node_attrs, edge_list, nodes_with_edges_only=True)
    ConcatOdInputEraserAndPortsReconnect().find_and_replace_pattern(graph)
    # reference is built from the very same attrs/edges: a strict no-op check
    graph_ref = build_graph(node_attrs, edge_list, nodes_with_edges_only=True)

    flag, resp = compare_graphs(graph, graph_ref, 'output', check_op_attrs=True)
    self.assertTrue(flag, resp)
def test_deletion_3(self):
    """A zero-sized Concat input ([5, 0]) must be erased; remaining ports stay wired."""
    nodes = {
        **shaped_const_with_data('input_0', [5, 3]),
        **shaped_const_with_data('input_1', [5, 1]),
        **shaped_const_with_data('input_2', [5, 5]),
        **shaped_const_with_data('input_3', [5, 0]),
        **regular_op_with_shaped_data('concat', [5, 9], {'type': 'Concat', 'axis': 1}),
        **result(),
    }
    edges_before = [
        *connect('input_0', '0:concat'),
        *connect('input_1', '1:concat'),
        *connect('input_2', '2:concat'),
        *connect('input_3', '3:concat'),
        *connect('concat', 'output'),
    ]
    # input_3 (the empty one) disappears from the reference edges
    edges_after = [
        *connect('input_0', '0:concat'),
        *connect('input_1', '1:concat'),
        *connect('input_2', '2:concat'),
        *connect('concat', 'output'),
    ]
    graph = build_graph(nodes, edges_before, nodes_with_edges_only=True)
    ConcatOdInputEraserAndPortsReconnect().find_and_replace_pattern(graph)
    graph_ref = build_graph(nodes, edges_after, nodes_with_edges_only=True)

    (flag, resp) = compare_graphs(graph, graph_ref, 'output', check_op_attrs=True)
    self.assertTrue(flag, resp)
def test_deletion_trailing_unconnected_ports(self):
    """An unconnected trailing input port added to Concat must be removed."""
    node_attrs = {
        **shaped_const_with_data('input_0', [5, 3]),
        **regular_op_with_shaped_data('concat', [5, 3], {'type': 'Concat', 'axis': 1}),
        **result(),
    }
    # the edge set is identical before and after: only the dangling port goes away
    edge_list = [
        *connect('input_0', '0:concat'),
        *connect('concat', 'output'),
    ]

    graph = build_graph(node_attrs, edge_list, nodes_with_edges_only=True)
    Node(graph, 'concat').add_input_port(1)
    ConcatOdInputEraserAndPortsReconnect().find_and_replace_pattern(graph)
    graph_ref = build_graph(node_attrs, edge_list, nodes_with_edges_only=True)

    flag, resp = compare_graphs(graph, graph_ref, 'output', check_op_attrs=True)
    self.assertTrue(flag, resp)
    self.assertTrue(1 not in Node(graph, 'concat').in_ports())
def get_graphs(input_shape, reshape_0_pattern, order, reshape_1_pattern, block_size):
    """Build a Reshape->Transpose->Reshape graph and its DepthToSpace reference.

    :return: (graph, graph_ref) pair, both with NCHW layout set
    """
    nodes = {
        **regular_op_with_shaped_data('input', input_shape,
                                      {'type': 'Parameter', 'shape': int64_array(input_shape),
                                       'infer': Parameter.infer}),
        **valued_const_with_data('reshape_0_pattern', int64_array(reshape_0_pattern)),
        **regular_op_with_empty_data('reshape_0', {'type': 'Reshape', 'infer': Reshape.infer}),
        **valued_const_with_data('order', int64_array(order)),
        **regular_op_with_empty_data('transpose', {'type': 'Transpose',
                                                   'infer': Transpose.infer}),
        **valued_const_with_data('reshape_1_pattern', int64_array(reshape_1_pattern)),
        **regular_op_with_empty_data('reshape_1', {'type': 'Reshape', 'infer': Reshape.infer,
                                                   'name': 'final_reshape'}),
        **result(),
    }
    edges = [
        *connect('input', '0:reshape_0'),
        *connect('reshape_0_pattern', '1:reshape_0'),
        *connect('reshape_0', '0:transpose'),
        *connect('order', '1:transpose'),
        *connect('transpose', '0:reshape_1'),
        *connect('reshape_1_pattern', '1:reshape_1'),
        *connect('reshape_1', 'output'),
    ]
    graph = build_graph(nodes, edges, nodes_with_edges_only=True, cli=Namespace())
    for node in graph.get_op_nodes():
        node['op'] = node['type']
    graph.clean_up()

    ref_nodes = {
        **regular_op_with_shaped_data('input', input_shape,
                                      {'type': 'Parameter', 'shape': int64_array(input_shape),
                                       'infer': Parameter.infer}),
        **regular_op_with_empty_data('depth_to_space',
                                     {'type': 'DepthToSpace', 'infer': DepthToSpaceOp.infer,
                                      'name': 'final_reshape', 'block_size': block_size}),
        **result()
    }
    ref_edges = [*connect('input', 'depth_to_space'), *connect('depth_to_space', 'output')]
    graph_ref = build_graph(ref_nodes, ref_edges, nodes_with_edges_only=True)
    for node in graph_ref.get_op_nodes():
        node['op'] = node['type']
    graph_ref.clean_up()

    graph.graph['layout'] = 'NCHW'
    graph_ref.graph['layout'] = 'NCHW'

    return graph, graph_ref
def test_transpose_insert(self, nhwc_to_nchw_order, nchw_to_nhwc_order, add_permutation_attrs):
    """PreserveRuntimeInfo must consume Parameter/Result permutation attributes
    and record them as old_api_map_order rt_info entries (parameterized)."""
    graph_nodes = {
        **valued_const_with_data('transpose_parameter_order', np.array(nhwc_to_nchw_order)),
        **valued_const_with_data('transpose_result_order', np.array(nchw_to_nhwc_order))
    }
    # NOTE(review): 'nodes', 'edges' and 'edges_with_transpose' come from
    # module level, not from this test.
    graph_nodes.update(nodes)
    shape_len = len(nhwc_to_nchw_order) if add_permutation_attrs else 3
    shape = np.array(range(shape_len))
    add_shape = shape if nhwc_to_nchw_order is None else shape[nhwc_to_nchw_order]
    graph_nodes.update(
        {
            **regular_op_with_shaped_data('placeholder1', shape,
                                          {'type': 'Parameter', 'rt_info': RTInfo(),
                                           'shape': shape}),
            **regular_op_with_shaped_data('result', shape,
                                          {'type': 'Result', 'rt_info': RTInfo(),
                                           'shape': shape}),
            **regular_op_with_shaped_data('add', add_shape,
                                          {'type': 'Add', 'op': 'Add',
                                           'infer': copy_shape_infer}),
        }
    )
    graph = build_graph(graph_nodes, edges)
    graph_ref = build_graph(graph_nodes, edges_with_transpose if add_permutation_attrs else edges)

    param_node = Node(graph, 'placeholder1')
    result_node = Node(graph, 'result')

    if add_permutation_attrs:
        shape_len = len(nhwc_to_nchw_order)
        param_node['permute_attrs'] = PermuteAttrs().update_attrs(attrs=[('shape', 'output:0')])
        param_node.out_node(0)['permutation'] = PermuteAttrs().get_nhwc_to_nchw_permutation(shape_len)
        result_node.in_node(0)['permutation'] = PermuteAttrs().get_nhwc_to_nchw_permutation(shape_len)

    PreserveRuntimeInfo().find_and_replace_pattern(graph)

    (flag, resp) = compare_graphs(graph, graph_ref, 'result')
    self.assertTrue(flag, resp)

    # the permutation attributes must be consumed by the transformation
    self.assertFalse(param_node.has_valid('permute_attrs'))
    self.assertFalse(param_node.out_node(0).has_valid('permutation'))

    if add_permutation_attrs:
        rt_info = param_node.rt_info.info
        old_api_map = rt_info[('old_api_map_order', 0)].info
        self.assertTrue(np.array_equal(old_api_map['inverse_order'], nchw_to_nhwc_order))

        rt_info = result_node.rt_info.info
        old_api_map = rt_info[('old_api_map_order', 0)].info
        self.assertTrue(np.array_equal(old_api_map['order'], nhwc_to_nchw_order))
def get_graphs(input_shape, reshape_0_pattern, order, reshape_1_pattern, group):
    """Build a Reshape->Transpose->Reshape graph and its ShuffleChannels reference.

    :return: (graph, graph_ref) pair for comparison in the tests
    """
    nodes = {
        **regular_op_with_shaped_data('input', input_shape,
                                      {'type': 'Parameter', 'shape': int64_array(input_shape),
                                       'infer': Parameter.infer}),
        **valued_const_with_data('reshape_0_pattern', int64_array(reshape_0_pattern)),
        **regular_op_with_empty_data('reshape_0', {'type': 'Reshape', 'infer': Reshape.infer}),
        **valued_const_with_data('order', int64_array(order)),
        **regular_op_with_empty_data('transpose', {'type': 'Transpose',
                                                   'infer': Transpose.infer}),
        **valued_const_with_data('reshape_1_pattern', int64_array(reshape_1_pattern)),
        **regular_op_with_empty_data('reshape_1', {'type': 'Reshape', 'infer': Reshape.infer,
                                                   'name': 'final_reshape'}),
        **result(),
    }
    edges = [
        *connect('input', '0:reshape_0'),
        *connect('reshape_0_pattern', '1:reshape_0'),
        *connect('reshape_0', '0:transpose'),
        *connect('order', '1:transpose'),
        *connect('transpose', '0:reshape_1'),
        *connect('reshape_1_pattern', '1:reshape_1'),
        *connect('reshape_1', 'output'),
    ]
    graph = build_graph(nodes, edges, nodes_with_edges_only=True)
    for node in graph.get_op_nodes():
        node['op'] = node['type']
    graph.clean_up()

    ref_nodes = {
        **regular_op_with_shaped_data('input', input_shape,
                                      {'type': 'Parameter', 'shape': int64_array(input_shape),
                                       'infer': Parameter.infer}),
        **regular_op_with_empty_data('shuffle_channel',
                                     {'type': 'ShuffleChannels', 'infer': ShuffleChannels.infer,
                                      'name': 'final_reshape', 'group': group}),
        **result()
    }
    ref_edges = [*connect('input', 'shuffle_channel'), *connect('shuffle_channel', 'output')]
    graph_ref = build_graph(ref_nodes, ref_edges, nodes_with_edges_only=True)
    for node in graph_ref.get_op_nodes():
        node['op'] = node['type']
    graph_ref.clean_up()

    return graph, graph_ref
def generate_nodes(data, axis=-1, depth=4, on_value=1., off_value=0.):
    """Return node attributes for a OneHot graph fed by `data` indices."""
    # NOTE(review): the capitalized 'Op' key differs from the usual 'op'
    # attribute key used elsewhere in these tests — presumably intentional
    # here; confirm before relying on it.
    return {
        'indices': {'Op': 'Parameter', 'value': data, 'shape': int64_array(data.shape)},
        'indices_d': {'kind': 'data', 'value': data, 'shape': int64_array(data.shape)},
        **valued_const_with_data('depth', int64_array(depth)),
        **valued_const_with_data('on_value', float_array(on_value)),
        **valued_const_with_data('off_value', float_array(off_value)),
        **regular_op_with_shaped_data('one_hot', None,
                                      {'type': 'OneHot', 'axis': axis, 'Op': 'OneHot'})
    }
def test_zero_point_optimization(self, weights, zero_point, adj_weights, adj_zero_point):
    """ZeroPointOptimizer must fold the zero point into the int8 weights and
    remove the Subtract (parameterized with the adjusted expected values)."""
    # shared builder so the actual and reference graphs differ only in values
    nodes = lambda w, zp: {
        **valued_const_with_data('weights', np.array(w, dtype=np.int8)),
        **regular_op_with_shaped_data(
            'cast', len(w), {'type': 'Convert', 'op': 'Cast', 'infer': Cast.infer,
                             'dst_type': np.float32}),
        **valued_const_with_data('zp', np.array(zp, dtype=np.float32)),
        **regular_op_with_shaped_data(
            'sub', len(w), {'type': 'Subtract', 'op': 'Sub',
                            'infer': lambda node: eltwise_infer(node, Sub.operation)}),
        **result()
    }
    edges = [
        *connect("weights:0", "0:cast"),
        *connect("cast:0", "0:sub"),
        *connect("zp:0", "1:sub"),
        *connect("sub:0", "0:output"),
    ]
    graph = build_graph(nodes(weights, zero_point), edges, nodes_with_edges_only=True)
    ZeroPointOptimizer().find_and_replace_pattern(graph)
    graph.clean_up()

    # reference: Sub is gone, Cast feeds the output directly
    graph_ref = build_graph(nodes(adj_weights, adj_zero_point), [
        *connect("weights:0", "0:cast"),
        *connect("cast:0", "0:output"),
    ], nodes_with_edges_only=True)
    graph_ref.clean_up()

    (flag, resp) = compare_graphs(graph, graph_ref, 'output', check_op_attrs=True)
    self.assertTrue(flag, resp)
def test_leaky_relu_mul_multiple_consumers(self):
    """LeakyReLUFusion must still fire when the Mul output has an extra consumer;
    the Mul is kept alive for that consumer."""
    # multiple consumers of Mul operation
    # NOTE(review): 'nodes', 'edges' and 'shape' come from module level.
    graph = build_graph_with_edge_attrs(nodes, edges, {})
    additional_result = Result(graph, {'name': 'result_2'}).create_node()
    Node(graph, 'mul').out_port(0).connect(additional_result.in_port(0))

    ref_nodes = {
        **regular_op_with_shaped_data('input', shape, {'type': 'Parameter',
                                                       'op': 'Parameter'}),
        **regular_op_with_shaped_data('mul', shape, {'type': 'Multiply', 'name': 'mul'}),
        **regular_op_with_shaped_data('max', shape, {'type': 'Maximum',
                                                     'name': 'final_max'}),
        **valued_const_with_data('const', float_array([0.5])),
        **regular_op_with_shaped_data('leaky_relu', shape,
                                      {'type': 'LeakyReLU', 'name': 'max_final',
                                       'negative_slope': None}),
        **result('result'),
        **result('result_2')
    }
    ref_edges = [
        *connect('input:0', '0:mul'),
        *connect('const', '1:mul'),
        *connect('max:0', 'result'),
        *connect('mul:0', 'result_2'),
        *connect_data('input', 'leaky_relu'),
        *connect('leaky_relu', 'result')
    ]
    graph_ref = build_graph_with_edge_attrs(ref_nodes, ref_edges)

    LeakyReLUFusion().find_and_replace_pattern(graph)
    graph.clean_up()

    # both terminal Results must match the reference
    (flag, resp) = compare_graphs(graph, graph_ref, 'result')
    self.assertTrue(flag, resp)

    (flag, resp) = compare_graphs(graph, graph_ref, 'result_2')
    self.assertTrue(flag, resp)
def setUp(self):
    """Build NonMaxSuppression graphs with 1, 2 and 3 consumed outputs."""
    nodes = {
        **regular_op_with_shaped_data('boxes', [10, 100, 4], {'type': 'Parameter'}),
        **regular_op_with_shaped_data('scores', [10, 5, 100], {'type': 'Parameter'}),
        **valued_const_with_data('max_output_per_class', int64_array(7)),
        **regular_op('nms', {'op': 'NonMaxSuppression', 'type': 'NonMaxSuppression',
                             'name': 'nms'}),
        **empty_data('nms_data_0'),
        **empty_data('nms_data_1'),
        **empty_data('nms_data_2'),
        **result('output_0'),
        **result('output_1'),
        **result('output_2'),
    }

    # NMS with only output 0 consumed
    self.graph = build_graph(nodes, [
        *connect('boxes', '0:nms'),
        *connect('scores', '1:nms'),
        *connect('max_output_per_class', '2:nms'),
        *connect('nms:0', 'nms_data_0', front_phase=True),  # Use this WA for correct creating operation
        *connect('nms_data_0', 'output_0', front_phase=True),  # with multiple outputs
    ], nodes_with_edges_only=True)

    # NMS-5 with outputs 0 and 1 consumed
    self.graph_nms_5_2_outs = build_graph(nodes, [
        *connect('boxes', '0:nms'),
        *connect('scores', '1:nms'),
        *connect('max_output_per_class', '2:nms'),
        *connect('nms:0', 'nms_data_0', front_phase=True),  # Use this WA for correct creating operation
        *connect('nms_data_0', 'output_0', front_phase=True),  # with multiple outputs
        *connect('nms:1', 'nms_data_1', front_phase=True),
        *connect('nms_data_1', 'output_1', front_phase=True),
    ], nodes_with_edges_only=True)

    # NMS-5 with all three outputs consumed
    self.graph_nms_5_3_outs = build_graph(nodes, [
        *connect('boxes', '0:nms'),
        *connect('scores', '1:nms'),
        *connect('max_output_per_class', '2:nms'),
        *connect('nms:0', 'nms_data_0', front_phase=True),  # Use this WA for correct creating operation
        *connect('nms_data_0', 'output_0', front_phase=True),  # with multiple outputs
        *connect('nms:1', 'nms_data_1', front_phase=True),
        *connect('nms_data_1', 'output_1', front_phase=True),
        *connect('nms:2', 'nms_data_2', front_phase=True),
        *connect('nms_data_2', 'output_2', front_phase=True),
    ], nodes_with_edges_only=True)
def setUp(self):
    """Build a TopK graph with both outputs (values and indices) consumed."""
    nodes = {
        **regular_op_with_shaped_data('data', [20, 100, 4],
                                      {'type': 'Parameter', 'value': None,
                                       '_out_port_data_type': {0: np.float32}}),
        **valued_const_with_data('k', int64_array(10)),
        **regular_op_with_shaped_data('topk', None,
                                      {'op': 'TopK', 'type': 'TopK', 'name': 'topk',
                                       'axis': 1}),
        # second output data node of TopK, added by hand since the helper
        # creates only one data node per op
        'topk_d2': {'kind': 'data', 'shape': None, 'value': None},
        **result('output_1'),
        **result('output_2'),
    }
    self.graph = build_graph(nodes, [
        *connect('data', '0:topk'),
        *connect('k', '1:topk'),
        ('topk', 'topk_d', {'out': 0}),
        ('topk', 'topk_d2', {'out': 1}),
        ('topk_d', 'output_1'),
        ('topk_d2', 'output_2'),
    ], nodes_with_edges_only=True)
def test_v10_group_convolution_resolver_depthwise_conv2d(self):
    """DepthwiseConv2dNative must become GroupConvolution with 5D-reshaped weights."""
    nodes = {
        **regular_op_with_shaped_data('input', [1, 1, 224, 224], {'type': 'Parameter'}),
        **valued_const_with_data('weights', np.ones([1, 8, 7, 7])),
        **valued_const_with_data('dim', int64_array([1, 8, 1, 7, 7])),
        **regular_op_with_empty_data('reshape', {'type': 'Reshape'}),
        **regular_op_with_shaped_data(
            'convolution', None, {'type': 'Convolution', 'group': 1, 'output': 8,
                                  'op': 'DepthwiseConv2dNative'}),
        **result(),
    }
    graph = build_graph(nodes, [
        *connect('input', '0:convolution'),
        *connect('weights', '1:convolution'),
        *connect('convolution', 'output'),
    ], nodes_with_edges_only=True)

    V10ConvolutionWithGroupsResolver().find_and_replace_pattern(graph)

    # mutate the shared attrs to describe the expected resulting operation
    nodes['convolution']['type'] = 'GroupConvolution'
    del nodes['convolution']['group']

    graph_ref = build_graph(nodes, [
        *connect('input', '0:convolution'),
        *connect('weights', '0:reshape'),
        *connect('dim', '1:reshape'),
        *connect('reshape', '1:convolution'),
        *connect('convolution', 'output'),
    ], nodes_with_edges_only=True)

    (flag, resp) = compare_graphs(graph, graph_ref, last_node='output', check_op_attrs=True)
    self.assertTrue(flag, resp)
def test_v7_group_convolution_resolver(self):
    """V7 resolver must insert a Reshape([24, -1, 0, 0]) on the weights path."""
    nodes = {
        **regular_op_with_shaped_data('input', [1, 3, 224, 224], {'type': 'Parameter'}),
        **valued_const_with_data('weights', np.ones([3, 8, 7, 7])),
        **valued_const_with_data('dim', int64_array([24, -1, 0, 0])),
        **regular_op_with_empty_data('reshape', {'type': 'Reshape'}),
        **regular_op_with_shaped_data('convolution', None,
                                      {'type': 'Convolution', 'group': 3, 'output': 24}),
        **result(),
    }
    graph = build_graph(nodes, [
        *connect('input', '0:convolution'),
        *connect('weights', '1:convolution'),
        *connect('convolution', 'output'),
    ], nodes_with_edges_only=True)

    V7ConvolutionWithGroupsResolver().find_and_replace_pattern(graph)

    graph_ref = build_graph(nodes, [
        *connect('input', '0:convolution'),
        *connect('weights', '0:reshape'),
        *connect('dim', '1:reshape'),
        *connect('reshape', '1:convolution'),
        *connect('convolution', 'output'),
    ], nodes_with_edges_only=True)

    (flag, resp) = compare_graphs(graph, graph_ref, last_node='output', check_op_attrs=True)
    self.assertTrue(flag, resp)
def test_not_useless_pad_non_constant_input(self):
    """A Pad whose pads come from a data-dependent subgraph must NOT be removed."""
    nodes = {
        **regular_op_with_shaped_data('placeholder', [10, 20, 3], {'type': 'Parameter'}),
        **regular_op_with_shaped_data('shape_of_1', [3], {'type': 'ShapeOf'}),
        **regular_op_with_shaped_data('sub', [3], {'type': 'Subtract', 'op': 'Sub'}),
        **valued_const_with_data('desired_output_size', int64_array([10, 20, 3])),
        **regular_op_with_shaped_data('pad', [10, 20, 3], {'type': 'Pad', 'op': 'Pad'}),
        **valued_const_with_data('fill_value', np.array(1)),
        **result('result'),
    }
    edges = [
        *connect('placeholder', '0:pad'),
        *connect('placeholder', 'shape_of_1'),
        *connect('shape_of_1', '0:sub'),
        *connect('desired_output_size', '1:sub'),
        # 'sub' feeds both pads_begin (port 1) and pads_end (port 2)
        *connect('sub', '1:pad'),
        *connect_data('sub', '2:pad'),
        *connect('fill_value', '3:pad'),
        *connect('pad', 'result'),
    ]
    graph = build_graph(nodes, edges)
    RemoveUselessPad().find_and_replace_pattern(graph)
    # reference is identical: the transformation must be a no-op here
    ref_graph = build_graph(nodes, edges)

    (flag, resp) = compare_graphs(graph, ref_graph, 'result')
    self.assertTrue(flag, resp)
def setUpClass(cls):
    """Prepare shared node attribute sets for the reverse-order Transpose tests."""
    # Transpose marked with 'reverse_order' and no explicit order input
    cls.nodes_attributes = {
        **regular_op_with_shaped_data('placeholder', [1, 10, 20, 3], {'type': 'Parameter'}),
        **regular_op_with_shaped_data('transpose', [3, 20, 10, 1],
                                      {'type': 'Transpose', 'op': 'Transpose',
                                       'reverse_order': True}),
        **result('result'),
    }

    # reference: plain Transpose with an explicit [3, 2, 1, 0] order constant
    cls.ref_nodes_attributes = {
        **regular_op_with_shaped_data('placeholder', [1, 10, 20, 3], {'type': 'Parameter'}),
        **regular_op_with_shaped_data('transpose', [3, 20, 10, 1],
                                      {'type': 'Transpose', 'op': 'Transpose'}),
        **valued_const_with_data('transpose_order', np.array([3, 2, 1, 0])),
        **result('result'),
    }
def test_run_with_solitary_shapeof_in_shape_value_subgraph(self):
    # in this case MarkNodesWithShapeValues must leave graph unchanged
    # so reference nodes are exactly the same
    inp_shape_1 = int64_array((1, 3, 100, 100))
    inp_shape_2 = int64_array((1, 3, 100, 50))
    # inp_2 and const will be concatenated to (1, 3, 200, 50)
    const_shape = int64_array((1, 3, 100, 50))
    nodes = {
        **regular_op_with_shaped_data('input_1', inp_shape_1,
                                      {'op': 'Parameter', 'type': 'Parameter'}),
        **regular_op_with_shaped_data('input_2', inp_shape_2,
                                      {'op': 'Parameter', 'type': 'Parameter',
                                       'returns_shape_value': False}),
        **shaped_const_with_data('const', const_shape),
        **regular_op_with_empty_data('concat',
                                     {'op': 'Concat', 'type': 'Concat', 'axis': 2,
                                      'returns_shape_value': False}),
        **regular_op_with_empty_data('shapeof', {'op': 'ShapeOf', 'type': 'ShapeOf'}),
        **regular_op_with_empty_data('reshape', {'op': 'Reshape', 'type': 'Reshape'}),
        **result('res'),
    }

    edges = [
        *connect('input_1', '0:reshape'),
        *connect('input_2', '0:concat'),
        *connect('const', '1:concat'),
        *connect('concat', 'shapeof'),
        *connect('shapeof', '1:reshape'),
        *connect('reshape', 'res'),
    ]
    graph = build_graph(nodes, edges)

    MarkNodesWithShapeValues().find_and_replace_pattern(graph)

    graph_ref = build_graph(nodes, edges)
    (flag, resp) = compare_graphs(graph, graph_ref, 'res', check_op_attrs=True)
    self.assertTrue(flag,
                    "'returns_shape_value' should be False or unset for ShapeOf input nodes" + ': ' + str(resp))