def test_not_useless_pad_constant_input(self):
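        # RemoveUselessPad must keep this Pad: pads_end is non-zero, so the reference graph is identical to the input graph.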
        nodes = {
            **regular_op_with_shaped_data('placeholder', [1, 10, 20, 3], {
                                              'type': 'Parameter'
                                          }),
            **regular_op_with_shaped_data('pad', [1, 10, 20, 3], {
                                              'type': 'Pad',
                                              'op': 'Pad'
                                          }),
            **valued_const_with_data('pads_begin', int64_array([0, 0, 0, 0])),
            **valued_const_with_data('pads_end', int64_array([0, 1, 0, 0])),
            **valued_const_with_data('fill_value', np.array(1)),
            **result('result'),
        }
        edges = [
            *connect('placeholder', '0:pad'),
            *connect('pads_begin', '1:pad'),
            *connect('pads_end', '2:pad'),
            *connect('fill_value', '3:pad'),
            *connect('pad', 'result'),
        ]
        graph = build_graph(nodes, edges)
        RemoveUselessPad().find_and_replace_pattern(graph)
        ref_graph = build_graph(nodes, edges)

        (flag, resp) = compare_graphs(graph, ref_graph, 'result')
        self.assertTrue(flag, resp)
    def test_2_inputs(self):
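        # ClampNormalizer is expected to fuse the Clamp with separate min/max inputs into an AttributedClamp carrying them as attributes.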
        nodes = {
            **regular_op_with_shaped_data('placeholder', [1, 3, 20, 20], {
                                              'type': 'Parameter'
                                          }),
            **regular_op_with_shaped_data('a_clamp', [1, 3, 20, 20], {
                                              'type': None,
                                              'op': 'Clamp'
                                          }),
            **regular_op_with_shaped_data('clamp', [1, 3, 20, 20], {
                                              'type': 'Clamp',
                                              'op': 'AttributedClamp',
                                              'min': -3.5,
                                              'max': 3.5
                                          }),
            **valued_const_with_data('min', np.array(-3.5)),
            **valued_const_with_data('max', np.array(3.5)),
            **result('result'),
        }
        edges = [
            *connect('placeholder', '0:a_clamp'),
            *connect('min', '1:a_clamp'),
            *connect('max', '2:a_clamp'),
            *connect('a_clamp', 'result'),
        ]
        graph = build_graph(nodes, edges)
        ClampNormalizer().find_and_replace_pattern(graph)
        ref_graph = build_graph(
            nodes,
            [*connect('placeholder', '0:clamp'), *connect('clamp', 'result')])

        (flag, resp) = compare_graphs(graph, ref_graph, 'result')
        self.assertTrue(flag, resp)
    def test_gather_tree_normalizer(self):
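        # GatherTreeNormalizer is expected to insert a Squeeze that turns the 1-D end_token constant into a scalar (see ref_edges).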
        nodes = {
            **regular_op_with_shaped_data('data_0', [100, 1, 10], {'type': 'Parameter'}),
            **regular_op_with_shaped_data('data_1', [100, 1, 10], {'type': 'Parameter'}),
            **regular_op_with_shaped_data('data_2', [1], {'type': 'Parameter'}),
            **regular_op_with_shaped_data('gather_tree', [1], {'type': 'GatherTree'}),
            **valued_const_with_data('const', np.array([2])),
            **result('result'),
        }
        edges = [*connect('data_0', '0:gather_tree'),
                 *connect('data_1', '1:gather_tree'),
                 *connect('data_2', '2:gather_tree'),
                 *connect('const', '3:gather_tree'),
                 *connect('gather_tree', 'result'),
                 ]
        ref_edges = [*connect('data_0', '0:gather_tree'),
                     *connect('data_1', '1:gather_tree'),
                     *connect('data_2', '2:gather_tree'),
                     *connect('const', '0:squeeze'),
                     *connect('squeeze_axis', '1:squeeze'),
                     *connect('squeeze', '3:gather_tree'),
                     *connect('gather_tree', 'result'),]
        ref_nodes = nodes.copy()
        ref_nodes.update({**valued_const_with_data('squeeze_axis', int64_array([0])),
                          **regular_op_with_shaped_data('squeeze', [], {'type': 'Squeeze'})})
        graph = build_graph(nodes, edges)
        GatherTreeNormalizer().find_and_replace_pattern(graph)
        # run shape inference to make sure that shape overriding happened
        shape_inference(graph)

        ref_graph = build_graph(ref_nodes, ref_edges)

        (flag, resp) = compare_graphs(graph, ref_graph, 'result')
        self.assertTrue(flag, resp)
    def test_scatterelements_value_infer(self, data, indices, updates, axis, ref_res):
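        # Feeds constant inputs into ScatterElementsUpdate and checks the inferred output shape and value against ref_res.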
        nodes = {
            **valued_const_with_data('data', np.array(data)),
            **valued_const_with_data('indices', int64_array(indices)),
            **valued_const_with_data('updates', np.array(updates)),
            **valued_const_with_data('axis', int64_array(axis)),
            **regular_op_with_empty_data('scatter_elements', {'op': 'ScatterElementsUpdate', 'axis': axis}),
            **result()
        }

        graph = build_graph(nodes_attrs=nodes, edges=[
            *connect('data', '0:scatter_elements'),
            *connect('indices', '1:scatter_elements'),
            *connect('updates', '2:scatter_elements'),
            *connect('axis', '3:scatter_elements'),
            *connect('scatter_elements', 'output')
        ], nodes_with_edges_only=True)
        graph.stage = 'middle'

        scatter_el_node = Node(graph, 'scatter_elements')
        ScatterElementsUpdate.infer(scatter_el_node)

        res_output_shape = scatter_el_node.out_node().shape
        self.assertTrue(np.array_equal(int64_array(ref_res).shape, res_output_shape))

        res_output_value = scatter_el_node.out_node().value
        self.assertTrue(np.array_equal(ref_res, res_output_value))
    def test_gatherelements_value_infer(self, data, indices, axis, ref_res):
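        # Feeds constant data/indices into GatherElements and checks the inferred output shape and, when computed, the value.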
        nodes = {
            **valued_const_with_data('data', int64_array(data)),
            **valued_const_with_data('indices', int64_array(indices)),
            **regular_op_with_empty_data('gather_elements', {
                'op': 'GatherElements',
                'axis': axis
            }),
            **result()
        }

        graph = build_graph(nodes_attrs=nodes,
                            edges=[
                                *connect('data', '0:gather_elements'),
                                *connect('indices', '1:gather_elements'),
                                *connect('gather_elements', 'output')
                            ],
                            nodes_with_edges_only=True)
        graph.stage = 'middle'

        gather_el_node = Node(graph, 'gather_elements')
        GatherElements.infer(gather_el_node)

        res_output_shape = gather_el_node.out_node().shape
        self.assertTrue(
            np.array_equal(int64_array(ref_res).shape, res_output_shape))

        res_output_value = gather_el_node.out_node().value
        if res_output_value is not None:
            self.assertTrue(
                np.array_equal(int64_array(ref_res), res_output_value))
def build_range_test_graphs(start=0, limit=10, delta=1, dst_type_str='FP16',
                            src_type_str='FP32', returns_shape_value=None):
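    # Builds a Range graph whose output_type is src_type_str and a reference graph with dst_type_str,
    # then runs convert_blobs on the first one so the caller can compare the two.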
    nodes = {
        **valued_const_with_data('start', float32_array(start)),
        **valued_const_with_data('limit', float32_array(limit)),
        **valued_const_with_data('delta', float32_array(delta)),
        **regular_op_with_empty_data('range', {'type': 'Range', 'op': 'Range',
                                               'returns_shape_value': returns_shape_value,
                                               'output_type': data_type_str_to_np(src_type_str),
                                               'infer': Range.infer}),
        **result('res'),
    }

    nodes_ref = deepcopy(nodes)
    nodes_ref.update({
        **regular_op_with_empty_data('range', {'type': 'Range', 'op': 'Range',
                                               'returns_shape_value': returns_shape_value,
                                               'output_type': data_type_str_to_np(dst_type_str),
                                               'infer': Range.infer}),
    })

    edges = [
        *connect('start', '0:range'),
        *connect('limit', '1:range'),
        *connect('delta', '2:range'),
        *connect('range', 'res'),
    ]
    graph = build_graph(nodes, edges)
    graph_ref = build_graph(nodes_ref, edges)

    graph = partial_infer(graph)

    graph.graph['cmd_params'].data_type = dst_type_str
    convert_blobs(graph, dst_type_str)
    return graph, graph_ref
    def build_and_test_value_inference(data,
                                       indices,
                                       axis,
                                       batch_dims,
                                       ref_value,
                                       negative_test_string=None):
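        # Builds a Gather over constant data/indices/axis, runs partial inference and compares the output value with ref_value.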
        nodes = {
            **valued_const_with_data('data', int64_array(data)),
            **valued_const_with_data('indices', int64_array(indices)),
            **valued_const_with_data('axis', int64_array(axis)),
            **regular_op_with_empty_data('gather', {
                'op': 'Gather',
                'batch_dims': batch_dims,
                'infer': Gather.infer
            }),
            **result('res'),
        }

        edges = [
            *connect('data', '0:gather'), *connect('indices', '1:gather'),
            *connect('axis', '2:gather'), *connect('gather', 'res')
        ]

        graph = build_graph(nodes, edges)
        graph.stage = 'middle'
        partial_infer(graph)

        node = Node(graph, 'gather')
        res = node.out_port(0).data.get_value()
        npt.assert_array_equal(res, ref_value)
    def test_broadcast(self, data, target_shape, axes_mapping=None, mode='numpy', ref_out=None, test_raising=False):
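        # With ref_out the input is a constant and the inferred value is checked; otherwise only the output shape is checked
        # (or an AssertionError is expected when test_raising is set).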
        if ref_out is not None:
            input = valued_const_with_data('data', int64_array(data))
        else:
            input = shaped_data('data', int64_array(data))

        nodes = {
            **input,
            **valued_const_with_data('target_shape', int64_array(target_shape)),
            **regular_op_with_empty_data('broadcast', {'op': 'Broadcast', 'mode': mode}),
        }

        edges = [('data', 'broadcast'),
                 ('target_shape', 'broadcast'),
                 ('broadcast', 'broadcast_d')]

        if axes_mapping is not None:
            nodes.update(**valued_const_with_data('axes_mapping', int64_array(axes_mapping)))
            edges.append(('axes_mapping', 'broadcast'))
        graph = build_graph(nodes, edges)

        broadcast_node = Node(graph, 'broadcast')
        if test_raising:
            self.assertRaises(AssertionError, Broadcast.infer, broadcast_node)
            return

        Broadcast.infer(broadcast_node)
        if ref_out is not None:
            self.assertTrue(np.array_equal(broadcast_node.out_node().value, np.array(ref_out)))
        else:
            self.assertTrue(np.array_equal(broadcast_node.out_node().shape, np.array(target_shape)))
    def nodes(self, input_shape, transpose_shape, fq_shape, is_input_const):
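        # Helper building FakeQuantize (with its il/ih/ol/oh constants), Transpose and ReLU nodes; the input is a constant
        # or a Parameter depending on is_input_const.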
        nodes = {
            **valued_const_with_data('il', np.array([[[[0]]]])),
            **valued_const_with_data('ih', np.array([[[[255]]]])),
            **valued_const_with_data('ol', np.array([[[[0]]]])),
            **valued_const_with_data('oh', np.array([[[[255]]]])),
            **regular_op_with_shaped_data(
                'FQ', fq_shape,
                dict(type='FakeQuantize',
                     op='FakeQuantize',
                     infer=FakeQuantize.infer)),
            **valued_const_with_data('order', int64_array([0, 2, 3, 1])),
            **regular_op_with_shaped_data(
                'transpose', transpose_shape,
                dict(type='Transpose', op='Transpose', infer=Transpose.infer)),
            **regular_op_with_shaped_data('relu', fq_shape,
                                          dict(type='Relu', op='Relu')),
            **result(),
        }

        if is_input_const:
            input_node = shaped_const_with_data('input', input_shape)
        else:
            input_node = regular_op_with_shaped_data(
                'input', input_shape, dict(type='Parameter', op='Parameter'))

        nodes.update(input_node)
        return nodes
    def test_fake_results(self):
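        # Runs partial inference over an If with a ShapeOf then-branch and a pass-through else-branch and checks the shape
        # seen at the external Result ([2, 3]).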
        then_graph_nodes = {**valued_const_with_data('fake_const', int64_array(0)),
                            **regular_op_with_empty_data('shapeof',
                                         {'kind': 'op', 'type': 'ShapeOf', 'op': 'ShapeOf', 'infer': Shape.infer,
                                          'output_type': np.int64}),
                            **regular_op_with_empty_data('res_1', {'kind': 'op', 'type': 'Result', 'op': 'Result',
                                                                   'infer': lambda x: 0, 'output_id': 0})}
        then_graph_edges = [*connect('fake_const', 'shapeof'),
                            *connect('shapeof', 'res_1'),
                            ]

        else_graph_nodes = {**regular_op_with_empty_data('param_1', {'type': 'Parameter', 'kind': 'op', 'input_id': 1,
                                                                     'shape': None, 'infer': Parameter.infer}),
                            **regular_op_with_empty_data('res_1', {'kind': 'op', 'type': 'Result', 'op': 'Result',
                                                                   'infer': lambda x: 0, 'output_id': 0})}
        else_graph_edges = [*connect('param_1', 'res_1')]
        then_graph = build_graph_with_edge_attrs(then_graph_nodes, then_graph_edges)
        else_graph = build_graph_with_edge_attrs(else_graph_nodes, else_graph_edges)
        external_graph_nodes = {
            **valued_const_with_data('cond', np.array([True], dtype=bool)),
            **valued_const_with_data('input_1', int64_array([[1, 2, 3], [3, 2, 3]])),
            **regular_op_with_empty_data('if', {'kind': 'op', 'op': 'If', 'then_graph': then_graph,
                                                'else_graph': else_graph, 'infer': If.infer}),
            **result('res_1')}
        external_graph_edges = [*connect('cond', '0:if'),
                                *connect('input_1', '1:if'),
                                *connect('if', 'res_1')]

        graph = build_graph(external_graph_nodes, external_graph_edges)
        graph.stage = 'middle'
        partial_infer(graph)
        res_1 = Node(graph, 'res_1')
        npt.assert_array_equal(res_1.in_port(0).data.get_shape(), int64_array([2, 3]))
def generate_nodes(data, axis=-1, depth=4, on_value=1., off_value=0.):
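    # Node set for OneHot tests: 'indices' parameter data plus depth/on_value/off_value constants and the OneHot op itself.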
    return {
        'indices': {'Op': 'Parameter', 'value': data, 'shape': int64_array(data.shape)},
        'indices_d': {'kind': 'data', 'value': data, 'shape': int64_array(data.shape)},
        **valued_const_with_data('depth', int64_array(depth)),
        **valued_const_with_data('on_value', float_array(on_value)),
        **valued_const_with_data('off_value', float_array(off_value)),
        **regular_op_with_shaped_data('one_hot', None, {'type': 'OneHot', 'axis': axis, 'Op': 'OneHot'})
    }
    def build_and_test_shape_inference(self,
                                       input_indices_sparse_shape,
                                       input_actual_shape,
                                       new_shape,
                                       ref_out_shape,
                                       input_indices=None,
                                       ref_out_indices=None):
        # sparse tensor is stored in COO format
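        # (input_indices holds one row per non-zero element with that element's coordinates)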
        nodes = {
            **shaped_parameter('input_indices',
                               shape_array(input_indices_sparse_shape), {
                                   'value': input_indices
                               }),
            **valued_const_with_data('input_shape',
                                     shape_array(input_actual_shape)),
            **valued_const_with_data('new_shape', shape_array(new_shape)),
            **regular_op_with_empty_data(
                'sparse_reshape_node', {
                    'op': 'SparseReshape',
                    'special_zero': True,
                    'infer': SparseReshape.infer
                }),
            **empty_data('sparse_reshape_node_d:out_port_1'),
            **result('output_indices'),
            **result('output_shape'),
        }

        edges = [
            *connect('input_indices', '0:sparse_reshape_node'),
            *connect('input_shape', '1:sparse_reshape_node'),
            *connect('new_shape', '2:sparse_reshape_node'),
            *connect('sparse_reshape_node:0', 'output_indices'),
            ('sparse_reshape_node', 'sparse_reshape_node_d:out_port_1', {
                'out': 1
            }),
            ('sparse_reshape_node_d:out_port_1', 'output_shape', {
                'in': 0
            }),
        ]

        graph = build_graph(
            nodes,
            edges,
            update_attributes={'input_indices_d': {
                'value': input_indices
            }})
        graph.stage = 'middle'
        partial_infer(graph)

        node = Node(graph, 'sparse_reshape_node')
        output_indices = node.out_port(0).data.get_value()
        actual_output_shape = node.out_port(1).data.get_value()
        self.assertTrue(
            strict_compare_tensors(actual_output_shape, ref_out_shape))
        self.assertTrue(strict_compare_tensors(output_indices,
                                               ref_out_indices))
    def build_select_graph_and_infer(condition_value,
                                     then_value,
                                     else_value,
                                     out_value,
                                     condition_shape=None,
                                     then_shape=None,
                                     else_shape=None,
                                     out_shape=None,
                                     auto_broadcast='numpy',
                                     fw_format=None):
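        # 'then'/'else' shapes are derived from their values when given; returns (flag, msg) for the caller to assert on.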
        if then_value is not None:
            then_shape = int64_array(then_value.shape)
        if else_value is not None:
            else_shape = int64_array(else_value.shape)

        nodes = {
            **valued_const_with_data('then', then_value, then_shape),
            **valued_const_with_data('else', else_value, else_shape),
            **valued_const_with_data('condition', condition_value, condition_shape),
            **regular_op_with_empty_data(
                'select', {
                    'op': 'Select',
                    'auto_broadcast': auto_broadcast,
                    'format': fw_format
                }),
            **result('out'),
        }
        edges = [
            *connect('condition', '0:select'),
            *connect('then', '1:select'),
            *connect('else', '2:select'),
            *connect('select', 'out'),
        ]
        graph = build_graph(nodes, edges)

        select_node = Node(graph, 'select')
        Select.infer(select_node)

        select_out_node = Node(graph, 'select_d')

        value_desc = 'values'
        ref_val = out_value
        actual_val = select_out_node['value']
        if out_shape is not None:
            value_desc = 'shapes'
            ref_val = out_shape
            actual_val = select_out_node['shape']
            assert select_out_node['value'] is None, \
                "if 'out_shape' is defined manually 'value' must be None"

        flag = strict_compare_tensors(actual_val, ref_val)
        msg = '' if flag else 'reference {} and actual {} {} do not match\n'.format(
            ref_val, actual_val, value_desc)
        return flag, msg
def nodes_dict(original, transformed=None, levels=255, data=None, il=[-127], ih=[127], ol=[-127], oh=[127]):
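    # Node set with the original and int8 weights, a Cast, FakeQuantize with its il/ih/ol/oh bounds, and zero-point/scale Sub/Mul nodes.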
    shape = [1, 2, 3, 4] if data is None else np.array(data).shape
    data = np.ones(shape, dtype=original) if data is None else np.array(data, dtype=original)
    int_data = data.astype(dtype=np.int8)
    transformed = transformed if transformed is not None else original

    return {
        **valued_const_with_data('weights', data),
        **valued_const_with_data('int_weights', int_data),

        **regular_op_with_shaped_data(
            'cast', shape, {'type': 'Convert', 'op': 'Cast', 'infer': Cast.infer, 'dst_type': transformed}),

        **valued_const_with_data('il', np.array(il)),
        **valued_const_with_data('ih', np.array(ih)),
        **valued_const_with_data('ol', np.array(ol)),
        **valued_const_with_data('oh', np.array(oh)),

        **regular_op_with_shaped_data(
            'FQ', shape, {'type': 'FakeQuantize', 'infer': FakeQuantize.infer, 'stop_value_propagation': True,
                               'levels': levels, 'op': 'FakeQuantize'}),

        **valued_const_with_data('zp', np.array([0])),
        **valued_const_with_data('scale', np.array([1])),

        **regular_op_with_shaped_data(
            'sub', shape, {'type': 'Subtract', 'op': 'Sub', 'infer': lambda node: eltwise_infer(node, Sub.operation)}),

        **regular_op_with_shaped_data(
            'mul', shape, {'type': 'Multiply', 'op': 'Mul', 'infer': lambda node: eltwise_infer(node, Mul.operation)}),

        **result()
    }
    def build_and_test_reverse_inference(order, out_shape, ref_shape):
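        # Manually sets the Transpose output shape and checks that reverse inference propagates the expected shape back to the Parameter.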
        nodes = {
            **shaped_parameter('data', None, {
                'reverse_infer': Parameter.reverse_infer
            }),
            **valued_const_with_data('order', int64_array(order)),
            **regular_op_with_empty_data(
                'transpose', {
                    'op': 'Transpose',
                    'infer': Transpose.infer,
                    'reverse_infer': Transpose.reverse_infer
                }),
            **result('res'),
        }

        edges = [
            *connect('data', '0:transpose'), *connect('order', '1:transpose'),
            *connect('transpose', 'res')
        ]

        graph = build_graph(nodes, edges)
        graph.stage = 'middle'
        Node(graph, 'transpose').out_port(0).data.set_shape(shape_array(out_shape))

        partial_infer(graph)
        actual_shape = Node(graph, 'data').out_port(0).data.get_shape()
        assert strict_compare_tensors(actual_shape, shape_array(ref_shape))
    def build_graph_to_test_type_alignment(edges,
                                           input_1_type=np.float32,
                                           input_2_type=np.float32,
                                           const_type=np.float32):
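        # Builds a back-phase graph (two Parameters, a constant and an Add with Elementwise.type_infer) so the caller can test type alignment.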
        input_shape = int64_array([1, 3, 255, 255])
        const_value = np.array([1], dtype=const_type)

        nodes = {
            **shaped_parameter('input_1', input_shape, {
                'data_type': input_1_type
            }),
            **shaped_parameter('input_2', input_shape, {
                'data_type': input_2_type
            }),
            **regular_op_with_empty_data('add', {
                'op': 'Add',
                'type': 'Add',
                'type_infer': Elementwise.type_infer
            }),
            **valued_const_with_data('const',
                                     const_value,
                                     kwargs={'data_type': const_type}),
            **result('result'),
        }
        graph = build_graph(nodes, edges, nodes_with_edges_only=True)
        graph.stage = 'back'
        return graph
    def setUp(self):
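        # Shared NonMaxSuppression graph (boxes, scores, max_output_per_class -> nms -> output) used by the test cases of this class.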
        nodes = {
            **regular_op_with_shaped_data('boxes', [10, 100, 4], {
                                              'type': 'Parameter'
                                          }),
            **regular_op_with_shaped_data('scores', [10, 5, 100], {
                                              'type': 'Parameter'
                                          }),
            **valued_const_with_data('max_output_per_class', int64_array(7)),
            **regular_op_with_shaped_data(
                'nms', None, {
                    'op': 'NonMaxSuppression',
                    'type': 'NonMaxSuppression',
                    'name': 'nms'
                }),
            **result('output'),
        }

        self.graph = build_graph(nodes, [
            *connect('boxes', '0:nms'),
            *connect('scores', '1:nms'),
            *connect('max_output_per_class', '2:nms'),
            *connect('nms', 'output'),
        ],
                                 nodes_with_edges_only=True)
    def test_backward_bfs_multi_consumer_data_nodes(self):
        # Placeholder-> Mul -> Result
        # Const      -/    \- Result2

        graph = build_graph(
            {
                **regular_op_with_shaped_data('parameter', [1], {
                                                  'op': 'Parameter'
                                              }),
                **valued_const_with_data('const', int64_array([5])),
                **regular_op_with_shaped_data('mul', [1], {'op': 'Mul'}),
                **result('result'),
                **result('result2'),
            }, [
                *connect('parameter', '0:mul'),
                *connect('const', '1:mul'),
                *connect('mul:0', 'result'),
                *connect_data('mul', 'result2'),
            ])

        res = common_bfs(Node(graph, 'result'), ['Mul'], ['Parameter'],
                         is_backward=True,
                         attr_to_check='op',
                         follow_multi_consumer_data_nodes=True)
        self.assertTrue(
            len(res) == 1,
            'The multi-consumer data node "mul_d" was not followed')

        res = common_bfs(Node(graph, 'result'), ['Mul'], ['Parameter'],
                         is_backward=True,
                         attr_to_check='op')
        self.assertTrue(
            len(res) == 0, 'The multi-consumer data node "mul_d" was followed')
    def test_reshape_on_the_B_input(self,
                                    in1_shape, in2_shape, reshape_pattern, transpose_a, transpose_b, updated_pattern):
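        # SmartReshape_HC_Reshape_MatMul is expected to update the Reshape pattern feeding MatMul's second input to updated_pattern.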
        nodes = {
            **regular_op_with_shaped_data('in_1', in1_shape, dict(type='Parameter', op='Parameter')),
            **regular_op_with_shaped_data('in_2', in2_shape, dict(type='Parameter', op='Parameter')),
            **valued_const_with_data('dim', int64_array(reshape_pattern)),
            **op_with_empty_data('reshape',
                                 dict(type='Reshape', op='Reshape', infer=Reshape.infer, need_shape_inference=True)),
            **op_with_empty_data('matmul',
                                 dict(type='MatMul', op='MatMul', infer=MatMul.infer, need_shape_inference=True,
                                      transpose_a=transpose_a, transpose_b=transpose_b, dim_attrs={})),
            **result(),
        }
        edges = [
            *connect('in_1:0', '0:matmul'),
            *connect('in_2:0', '0:reshape'),
            *connect('dim:0', '1:reshape'),
            *connect('reshape:0', '1:matmul'),
            *connect('matmul:0', 'output'),
        ]
        graph = build_graph(nodes_attrs=nodes, edges=edges, cli=Namespace(static_shape=True))
        graph.clean_up()
        SmartReshape_HC_Reshape_MatMul().find_and_replace_pattern(graph)
        graph.clean_up()

        graph_ref = build_graph(nodes_attrs=nodes, edges=edges, update_attributes={
            'dim': {'value': int64_array(updated_pattern)}, 'dim_d': {'value': int64_array(updated_pattern)}})
        graph_ref.clean_up()

        (flag, resp) = compare_graphs(graph, graph_ref, 'output', check_op_attrs=True)
        self.assertTrue(flag, resp)
def build_cast_test_graphs(input_data, dst_type_str='FP16'):
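    # The test graph casts to FP32 while the reference casts to dst_type_str; convert_blobs is run so the caller can compare them.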
    nodes = {
        **valued_const_with_data('input', float32_array(input_data)),
        **regular_op_with_empty_data('cast', {'type': 'Convert', 'op': 'Cast',
                                              'dst_type': np.float32,
                                              'infer': Cast.infer}),
        **result('res'),
    }

    nodes_ref = deepcopy(nodes)
    nodes_ref.update({
        **regular_op_with_empty_data('cast', {'type': 'Convert', 'op': 'Cast',
                                              'dst_type': data_type_str_to_np(dst_type_str),
                                              'infer': Cast.infer}),
    })

    edges = [
        *connect('input', 'cast'),
        *connect('cast', 'res'),
    ]
    graph = build_graph(nodes, edges)
    graph_ref = build_graph(nodes_ref, edges)

    graph = partial_infer(graph)

    graph.graph['cmd_params'].data_type = dst_type_str
    convert_blobs(graph, dst_type_str)
    return graph, graph_ref
    def test_div_with_integer(self):
        # Test where transformation should not be applied because the divisor is integer
        graph = build_graph(
            {
                **regular_op_with_shaped_data('parameter', [1, 227, 227, 3], {
                                                  'type': 'Parameter',
                                                  'data_type': np.int32
                                              }),
                **valued_const_with_data('const',
                                         np.array([-1.], dtype=np.int32)),
                **regular_op_with_shaped_data('div', None, {
                    'op': 'Div',
                    'type': 'Divide',
                    'name': 'my_div'
                }),
                **result()
            }, [
                *connect('parameter:0', '0:div'),
                *connect_data('const:0', '1:div'),
                *connect('div', 'output'),
            ])
        graph_ref = graph.copy()
        Div().find_and_replace_pattern(graph)

        (flag, resp) = compare_graphs(graph,
                                      graph_ref,
                                      'output',
                                      check_op_attrs=True)
        self.assertTrue(flag, resp)
    def test_v7_group_convolution_resolver_weight_are_in_the_right_layout(self):
        nodes = {
            **regular_op_with_shaped_data('input', [1, 3, 224, 224], {
                                              'type': 'Parameter'
                                          }),
            **valued_const_with_data('weights', np.ones([24, 1, 7, 7])),
            **regular_op_with_shaped_data('convolution', None, {
                'type': 'Convolution',
                'group': 3,
                'output': 24
            }),
            **result(),
        }
        edges = [
            *connect('input', '0:convolution'),
            *connect('weights', '1:convolution'),
            *connect('convolution', 'output'),
        ]
        graph = build_graph(nodes, edges)
        V7ConvolutionWithGroupsResolver().find_and_replace_pattern(graph)
        graph_ref = build_graph(nodes, edges)
        (flag, resp) = compare_graphs(graph,
                                      graph_ref,
                                      last_node='output',
                                      check_op_attrs=True)
        self.assertTrue(flag, resp)
    def get_graphs(input_shape, reshape_0_pattern, order, reshape_1_pattern, block_size):
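        # Builds a Reshape->Transpose->Reshape graph and a reference in which the pattern is represented by a single DepthToSpace node.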
        nodes = {
            **regular_op_with_shaped_data('input', input_shape, {'type': 'Parameter', 'shape': int64_array(input_shape),
                                                                 'infer': Parameter.infer}),

            **valued_const_with_data('reshape_0_pattern', int64_array(reshape_0_pattern)),
            **regular_op_with_empty_data('reshape_0', {'type': 'Reshape', 'infer': Reshape.infer}),

            **valued_const_with_data('order', int64_array(order)),
            **regular_op_with_empty_data('transpose', {'type': 'Transpose', 'infer': Transpose.infer}),

            **valued_const_with_data('reshape_1_pattern', int64_array(reshape_1_pattern)),
            **regular_op_with_empty_data('reshape_1', {'type': 'Reshape', 'infer': Reshape.infer,
                                                       'name': 'final_reshape'}),

            **result(),
        }
        edges = [
            *connect('input', '0:reshape_0'),
            *connect('reshape_0_pattern', '1:reshape_0'),
            *connect('reshape_0', '0:transpose'),
            *connect('order', '1:transpose'),
            *connect('transpose', '0:reshape_1'),
            *connect('reshape_1_pattern', '1:reshape_1'),
            *connect('reshape_1', 'output'),
        ]
        graph = build_graph(nodes, edges, nodes_with_edges_only=True, cli=Namespace())
        for node in graph.get_op_nodes():
            node['op'] = node['type']
        graph.clean_up()

        ref_nodes = {
            **regular_op_with_shaped_data('input', input_shape, {'type': 'Parameter', 'shape': int64_array(input_shape),
                                                                 'infer': Parameter.infer}),
            **regular_op_with_empty_data('depth_to_space', {'type': 'DepthToSpace', 'infer': DepthToSpaceOp.infer,
                                                            'name': 'final_reshape', 'block_size': block_size}),
            **result()
        }
        ref_edges = [*connect('input', 'depth_to_space'), *connect('depth_to_space', 'output')]
        graph_ref = build_graph(ref_nodes, ref_edges, nodes_with_edges_only=True)
        for node in graph_ref.get_op_nodes():
            node['op'] = node['type']
        graph_ref.clean_up()
        graph.graph['layout'] = 'NCHW'
        graph_ref.graph['layout'] = 'NCHW'

        return graph, graph_ref
    def test_transpose_insert(self, nhwc_to_nchw_order, nchw_to_nhwc_order, add_permutation_attrs):
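        # PreserveRuntimeInfo is expected to record the layout permutation in rt_info ('old_api_map_order') and drop permute_attrs/permutation.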
        graph_nodes = {
            **valued_const_with_data('transpose_parameter_order', np.array(nhwc_to_nchw_order)),
            **valued_const_with_data('transpose_result_order', np.array(nchw_to_nhwc_order))
        }
        graph_nodes.update(nodes)
        shape_len = len(nhwc_to_nchw_order) if add_permutation_attrs else 3
        shape = np.array(range(shape_len))
        add_shape = shape if nhwc_to_nchw_order is None else shape[nhwc_to_nchw_order]
        graph_nodes.update(
            {
                **regular_op_with_shaped_data('placeholder1', shape,
                                              {'type': 'Parameter', 'rt_info': RTInfo(), 'shape': shape}),
                **regular_op_with_shaped_data('result', shape, {'type': 'Result', 'rt_info': RTInfo(), 'shape': shape}),
                **regular_op_with_shaped_data('add', add_shape,
                                              {'type': 'Add', 'op': 'Add', 'infer': copy_shape_infer}),
            }
        )

        graph = build_graph(graph_nodes, edges)
        graph_ref = build_graph(graph_nodes, edges_with_transpose if add_permutation_attrs else edges)

        param_node = Node(graph, 'placeholder1')
        result_node = Node(graph, 'result')

        if add_permutation_attrs:
            shape_len = len(nhwc_to_nchw_order)
            param_node['permute_attrs'] = PermuteAttrs().update_attrs(attrs=[('shape', 'output:0')])
            param_node.out_node(0)['permutation'] = PermuteAttrs().get_nhwc_to_nchw_permutation(shape_len)
            result_node.in_node(0)['permutation'] = PermuteAttrs().get_nhwc_to_nchw_permutation(shape_len)

        PreserveRuntimeInfo().find_and_replace_pattern(graph)

        (flag, resp) = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)

        self.assertFalse(param_node.has_valid('permute_attrs'))
        self.assertFalse(param_node.out_node(0).has_valid('permutation'))

        if add_permutation_attrs:
            rt_info = param_node.rt_info.info
            old_api_map = rt_info[('old_api_map_order', 0)].info
            self.assertTrue(np.array_equal(old_api_map['inverse_order'], nchw_to_nhwc_order))

            rt_info = result_node.rt_info.info
            old_api_map = rt_info[('old_api_map_order', 0)].info
            self.assertTrue(np.array_equal(old_api_map['order'], nhwc_to_nchw_order))
    def test_pool_v2_to_attributed_pool(self):
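        # PoolV2ToAttributedPool is expected to replace PoolingV2 with its window/stride inputs by a single attributed Pooling node.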
        nodes = {
            **shaped_const_with_data('input', int64_array([200, 200])),
            **valued_const_with_data('windows', int64_array([4, 4])),
            **valued_const_with_data('strides', int64_array([4, 4])),
            **regular_op_with_empty_data(
                'pool_v2', {
                    'op': 'PoolingV2',
                    'pad': [2, 2],
                    'spatial_dims': [1, 2],
                    'auto_pad': 'same_upper',
                    'output_spatial_shape': [2, 3],
                    'pad_spatial_shape': [1, 2],
                    'pool_method': 'max',
                    'permute_attrs': None
                }),
            **regular_op_with_empty_data(
                'pool_v1', {
                    'type': 'Pooling',
                    'pad': [2, 2],
                    'spatial_dims': [1, 2],
                    'auto_pad': 'same_upper',
                    'output_spatial_shape': [2, 3],
                    'pad_spatial_shape': [1, 2],
                    'pool_method': 'max'
                }),
            **result('output')
        }

        edges = [
            *connect('input', 'pool_v2:0'),
            *connect('windows', 'pool_v2:1'),
            *connect('strides', 'pool_v2:2'),
            *connect('pool_v2', 'output'),
        ]

        graph = build_graph(nodes, edges, nodes_with_edges_only=True)
        PoolV2ToAttributedPool().find_and_replace_pattern(graph)

        ref_graph = build_graph(
            nodes,
            [*connect('input', 'pool_v1'), *connect('pool_v1', 'output')],
            nodes_with_edges_only=True)
        (flag, resp) = compare_graphs(graph, ref_graph, 'output')
        self.assertTrue(flag, resp)
    def get_graphs(input_shape, reshape_0_pattern, order, reshape_1_pattern, group):
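        # Builds a Reshape->Transpose->Reshape graph and a reference in which the pattern is represented by a single ShuffleChannels node.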
        nodes = {
            **regular_op_with_shaped_data('input', input_shape, {'type': 'Parameter', 'shape': int64_array(input_shape),
                                                                 'infer': Parameter.infer}),

            **valued_const_with_data('reshape_0_pattern', int64_array(reshape_0_pattern)),
            **regular_op_with_empty_data('reshape_0', {'type': 'Reshape', 'infer': Reshape.infer}),

            **valued_const_with_data('order', int64_array(order)),
            **regular_op_with_empty_data('transpose', {'type': 'Transpose', 'infer': Transpose.infer}),

            **valued_const_with_data('reshape_1_pattern', int64_array(reshape_1_pattern)),
            **regular_op_with_empty_data('reshape_1', {'type': 'Reshape', 'infer': Reshape.infer,
                                                       'name': 'final_reshape'}),

            **result(),
        }
        edges = [
            *connect('input', '0:reshape_0'),
            *connect('reshape_0_pattern', '1:reshape_0'),
            *connect('reshape_0', '0:transpose'),
            *connect('order', '1:transpose'),
            *connect('transpose', '0:reshape_1'),
            *connect('reshape_1_pattern', '1:reshape_1'),
            *connect('reshape_1', 'output'),
        ]
        graph = build_graph(nodes, edges, nodes_with_edges_only=True)
        for node in graph.get_op_nodes():
            node['op'] = node['type']
        graph.clean_up()

        ref_nodes = {
            **regular_op_with_shaped_data('input', input_shape, {'type': 'Parameter', 'shape': int64_array(input_shape),
                                                                 'infer': Parameter.infer}),
            **regular_op_with_empty_data('shuffle_channel', {'type': 'ShuffleChannels', 'infer': ShuffleChannels.infer,
                                                             'name': 'final_reshape', 'group': group}),
            **result()
        }
        ref_edges = [*connect('input', 'shuffle_channel'), *connect('shuffle_channel', 'output')]
        graph_ref = build_graph(ref_nodes, ref_edges, nodes_with_edges_only=True)
        for node in graph_ref.get_op_nodes():
            node['op'] = node['type']
        graph_ref.clean_up()

        return graph, graph_ref
    def test_zero_point_optimization(self, weights, zero_point, adj_weights,
                                     adj_zero_point):
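        # ZeroPointOptimizer is expected to fold the zero point into the int8 weights and drop the Subtract, leaving weights -> Cast -> output.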
        nodes = lambda w, zp: {
            **valued_const_with_data('weights', np.array(w, dtype=np.int8)),
            **regular_op_with_shaped_data(
                'cast', len(w), {
                    'type': 'Convert',
                    'op': 'Cast',
                    'infer': Cast.infer,
                    'dst_type': np.float32
                }),
            **valued_const_with_data('zp', np.array(zp, dtype=np.float32)),
            **regular_op_with_shaped_data(
                'sub', len(w), {
                    'type': 'Subtract',
                    'op': 'Sub',
                    'infer': lambda node: eltwise_infer(node, Sub.operation)
                }),
            **result()
        }
        edges = [
            *connect("weights:0", "0:cast"),
            *connect("cast:0", "0:sub"),
            *connect("zp:0", "1:sub"),
            *connect("sub:0", "0:output"),
        ]
        graph = build_graph(nodes(weights, zero_point),
                            edges,
                            nodes_with_edges_only=True)
        ZeroPointOptimizer().find_and_replace_pattern(graph)
        graph.clean_up()

        graph_ref = build_graph(nodes(adj_weights, adj_zero_point), [
            *connect("weights:0", "0:cast"),
            *connect("cast:0", "0:output"),
        ],
                                nodes_with_edges_only=True)
        graph_ref.clean_up()

        (flag, resp) = compare_graphs(graph,
                                      graph_ref,
                                      'output',
                                      check_op_attrs=True)
        self.assertTrue(flag, resp)
    def test_v10_group_convolution_resolver_depthwise_conv2d(self):
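        # V10ConvolutionWithGroupsResolver is expected to turn the depthwise Convolution into a GroupConvolution and reshape its weights to the 5-D group layout.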
        nodes = {
            **regular_op_with_shaped_data('input', [1, 1, 224, 224], {
                                              'type': 'Parameter'
                                          }),
            **valued_const_with_data('weights', np.ones([1, 8, 7, 7])),
            **valued_const_with_data('dim', int64_array([1, 8, 1, 7, 7])),
            **regular_op_with_empty_data('reshape', {'type': 'Reshape'}),
            **regular_op_with_shaped_data(
                'convolution', None, {
                    'type': 'Convolution',
                    'group': 1,
                    'output': 8,
                    'op': 'DepthwiseConv2dNative'
                }),
            **result(),
        }
        graph = build_graph(nodes, [
            *connect('input', '0:convolution'),
            *connect('weights', '1:convolution'),
            *connect('convolution', 'output'),
        ],
                            nodes_with_edges_only=True)

        V10ConvolutionWithGroupsResolver().find_and_replace_pattern(graph)

        nodes['convolution']['type'] = 'GroupConvolution'
        del nodes['convolution']['group']

        graph_ref = build_graph(nodes, [
            *connect('input', '0:convolution'),
            *connect('weights', '0:reshape'),
            *connect('dim', '1:reshape'),
            *connect('reshape', '1:convolution'),
            *connect('convolution', 'output'),
        ],
                                nodes_with_edges_only=True)

        (flag, resp) = compare_graphs(graph,
                                      graph_ref,
                                      last_node='output',
                                      check_op_attrs=True)
        self.assertTrue(flag, resp)
    def test_set_value_and_shape_with_force_shape_attribute_in_op(self):
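        # Setting a value on the output port must not override the shape dictated by the node's 'force_shape' attribute.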
        import numpy as np
        graph = build_graph({**valued_const_with_data('const', np.array([1, 2, 3])), **result()},
                            [*connect('const', 'output')])

        node = Node(graph, 'const')
        node['force_shape'] = np.array([2, 5, 7], dtype=np.int64)
        node.out_port(0).data.set_value(np.zeros(35))
        self.assertTrue(np.array_equal(node.out_port(0).data.get_shape(), np.array([2, 5, 7], dtype=np.int64)),
                        "node.out_port(0).data.get_shape()={} != [2, 5, 7]".format(node.out_port(0).data.get_shape()))
    def test_multi(self):
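        # FakeOutputResolver is expected to replace each FakeOutput with an Add of a zero constant that keeps the original output name.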
        nodes = {
            **regular_op_with_empty_data('input', {'type': 'Parameter'}),
            **regular_op_with_empty_data('some_op', {'type': 'SomeOp', 'name': 'some_op_name'}),
            **empty_data('some_op_d2'),
            **regular_op_with_empty_data('fake_output1',
                                         {'type': None, 'kind': 'op', 'op': 'FakeOutput', 'name': 'my_output_name1'}),
            **regular_op_with_empty_data('fake_output2',
                                         {'type': None, 'kind': 'op', 'op': 'FakeOutput', 'name': 'my_output_name2'}),

            **valued_const_with_data('const1', int64_array(0)),
            **valued_const_with_data('const2', int64_array(0)),
            **regular_op_with_empty_data('add1', {'type': None, 'kind': 'op', 'op': 'Add', 'name': 'my_output_name1'}),
            **regular_op_with_empty_data('add2', {'type': None, 'kind': 'op', 'op': 'Add', 'name': 'my_output_name2'}),
            **result('result1'),
            **result('result2'),
        }
        edges = [*connect('input', 'some_op'),
                 *connect('some_op', 'fake_output1'),
                 ('some_op', 'some_op_d2'),
                 ('some_op_d2', 'fake_output2'),
                 *connect('fake_output1', 'result1'),
                 *connect('fake_output2', 'result2'),
                 ]
        graph = build_graph(nodes, edges)

        edges_ref = [*connect('input', 'some_op'),
                     *connect('some_op', '0:add1'),
                     *connect('const1', '1:add1'),
                     ('some_op', 'some_op_d2'),
                     ('some_op_d2', 'add2', {'in': 0}),
                     *connect('const2', '1:add2'),
                     *connect('add1', 'result1'),
                     *connect('add2', 'result2'),
                     ]

        graph_ref = build_graph(nodes, edges_ref)

        FakeOutputResolver().find_and_replace_pattern(graph)

        (flag, resp) = compare_graphs(graph, graph_ref, 'result1')
        self.assertTrue(flag, resp)