def test_deletion(self):
    """The zero-sized concat input (port 2, shape [0]) must be erased by the transformation."""
    nodes = {
        **shaped_const_with_data('input_0', [1]),
        **shaped_const_with_data('input_1', [1]),
        **shaped_const_with_data('input_2', [0]),
        **shaped_const_with_data('input_3', [1]),
        **regular_op_with_shaped_data('concat', [3], {'type': 'Concat'}),
        **result(),
    }

    # Before: all four constants feed concat ports 0..3.
    edges_before = []
    for port in range(4):
        edges_before.extend(connect('input_%d' % port, '%d:concat' % port))
    edges_before.extend(connect('concat', 'output'))

    # After: the zero-sized input_2 is gone, the remaining ports are untouched.
    edges_after = []
    for port in (0, 1, 3):
        edges_after.extend(connect('input_%d' % port, '%d:concat' % port))
    edges_after.extend(connect('concat', 'output'))

    graph = build_graph(nodes, edges_before, nodes_with_edges_only=True)
    ConcatOdInputEraser().find_and_replace_pattern(graph)
    graph_ref = build_graph(nodes, edges_after, nodes_with_edges_only=True)

    flag, resp = compare_graphs(graph, graph_ref, 'output', check_op_attrs=True)
    self.assertTrue(flag, resp)
def test_deletion_unconnected_ports(self):
    """Sparse concat input ports (0, 4, 7) must be compacted into consecutive ports (0, 1, 2)."""
    nodes = {
        **shaped_const_with_data('input_0', [5, 3]),
        **shaped_const_with_data('input_4', [5, 1]),
        **shaped_const_with_data('input_7', [5, 2]),
        **regular_op_with_shaped_data('concat', [5, 6], {'type': 'Concat', 'axis': 1}),
        **result(),
    }

    sources = ('input_0', 'input_4', 'input_7')

    # Before: the inputs occupy scattered ports 0, 4 and 7.
    edges_before = []
    for src, port in zip(sources, (0, 4, 7)):
        edges_before.extend(connect(src, '%d:concat' % port))
    edges_before.extend(connect('concat', 'output'))

    # After: the same inputs in the same order, reconnected to ports 0, 1, 2.
    edges_after = []
    for src, port in zip(sources, (0, 1, 2)):
        edges_after.extend(connect(src, '%d:concat' % port))
    edges_after.extend(connect('concat', 'output'))

    graph = build_graph(nodes, edges_before, nodes_with_edges_only=True)
    ConcatOdInputEraserAndPortsReconnect().find_and_replace_pattern(graph)
    graph_ref = build_graph(nodes, edges_after, nodes_with_edges_only=True)

    flag, resp = compare_graphs(graph, graph_ref, 'output', check_op_attrs=True)
    self.assertTrue(flag, resp)
def test_accuracy(self, data, in_low, in_high, out_low, out_high, levels):
    """Parameterized check that CompressQuantizeWeights keeps the folded FakeQuantize values intact.

    Builds a weights -> FakeQuantize -> Result graph, runs the transformation on it
    (keeping an untouched copy as reference), constant-folds both graphs and verifies
    that each collapses to a single constant and that both constants are equal.
    """
    nodes = nodes_dict(np.float32, None, levels, data, in_low, in_high, out_low, out_high)

    graph = build_graph(nodes, [
        *connect('weights:0', '0:FQ'),
        *connect('il:0', '1:FQ'),
        *connect('ih:0', '2:FQ'),
        *connect('ol:0', '3:FQ'),
        *connect('oh:0', '4:FQ'),
        *connect('FQ:0', 'output'),
    ], nodes_with_edges_only=True)
    # Reference copy is taken BEFORE the transformation so it folds the original FQ.
    graph_ref = graph.copy()

    CompressQuantizeWeights().find_and_replace_pattern(graph)

    # Enable constant folding on every node of both graphs before clean_up().
    for node in graph.get_op_nodes() + graph_ref.get_op_nodes():
        node['stop_value_propagation'] = False
        node['need_shape_inference'] = node.soft_get('need_shape_inference', True)

    graph.clean_up()
    graph_ref.clean_up()

    # After folding, each graph must have collapsed to a single Const feeding the Result.
    const_result_graph = build_graph({**shaped_const_with_data('weights', np.array(data).shape), **result()},
                                     [*connect('weights', 'output')], nodes_with_edges_only=True)
    (flag, resp) = compare_graphs(graph, const_result_graph, 'output', check_op_attrs=True)
    self.assertTrue(flag, resp)
    (flag, resp) = compare_graphs(graph_ref, const_result_graph, 'output', check_op_attrs=True)
    self.assertTrue(flag, resp)

    # These two graphs compute the same data through different constant-folding functions, so they may end
    # up with constants of different data types: FakeQuantize always has an f32 output dtype, while the
    # eltwise path folds through numpy, which has no such restriction. Align the dtype before comparing.
    const_node = graph.get_op_nodes(type='Const')
    self.assertEqual(len(const_node), 1)
    if const_node[0].data_type == np.float64:
        const_node[0].data_type = np.float32

    (flag, resp) = compare_graphs(graph, graph_ref, 'output', check_op_attrs=True)
    self.assertTrue(flag, resp)
def test_assertion_error(self):
    """When every concat input is zero-sized the eraser must fail with AssertionError."""
    nodes = {
        **shaped_const_with_data('input_0', [0]),
        **shaped_const_with_data('input_1', [0]),
        **shaped_const_with_data('input_2', [0]),
        **shaped_const_with_data('input_3', [0]),
        **regular_op_with_shaped_data('concat', [0], {'type': 'Concat'}),
        **result(),
    }

    edges = []
    for port in range(4):
        edges.extend(connect('input_%d' % port, '%d:concat' % port))
    edges.extend(connect('concat', 'output'))

    graph = build_graph(nodes, edges, nodes_with_edges_only=True)
    with self.assertRaises(AssertionError):
        ConcatOdInputEraser().find_and_replace_pattern(graph)
def test_pool_v2_to_attributed_pool(self):
    """PoolingV2 (windows/strides as inputs) must be replaced by an attributed Pooling node."""
    # Attributes shared by the dynamic (pool_v2) and attributed (pool_v1) variants.
    common_attrs = {
        'pad': [2, 2],
        'spatial_dims': [1, 2],
        'auto_pad': 'same_upper',
        'output_spatial_shape': [2, 3],
        'pad_spatial_shape': [1, 2],
        'pool_method': 'max',
    }

    nodes = {
        **shaped_const_with_data('input', int64_array([200, 200])),
        **valued_const_with_data('windows', int64_array([4, 4])),
        **valued_const_with_data('strides', int64_array([4, 4])),
        **regular_op_with_empty_data('pool_v2', dict(common_attrs, op='PoolingV2', permute_attrs=None)),
        **regular_op_with_empty_data('pool_v1', dict(common_attrs, type='Pooling')),
        **result('output'),
    }

    graph = build_graph(nodes,
                        [
                            *connect('input', 'pool_v2:0'),
                            *connect('windows', 'pool_v2:1'),
                            *connect('strides', 'pool_v2:2'),
                            *connect('pool_v2', 'output'),
                        ],
                        nodes_with_edges_only=True)

    PoolV2ToAttributedPool().find_and_replace_pattern(graph)

    # Reference: windows/strides inputs are gone, the attributed node feeds the result directly.
    ref_graph = build_graph(nodes,
                            [*connect('input', 'pool_v1'), *connect('pool_v1', 'output')],
                            nodes_with_edges_only=True)

    flag, resp = compare_graphs(graph, ref_graph, 'output')
    self.assertTrue(flag, resp)
def convert_args(val, name=''):
    """Wrap *val* as a valued int64 constant, or as a dummy zero-shaped constant when absent."""
    if val is None:
        # Fake shape placeholder standing in for an argument that was not provided.
        return shaped_const_with_data(name, [0])
    return valued_const_with_data(name, int64_array(val))