    def test_slice_infer(self, inp_value, starts, ends, axes, steps, expected, inp_shape=None):
        if inp_value is None:
            input_node = shaped_data('data_1', int64_array(inp_shape))
        else:
            input_node = valued_data('data_1', int64_array(inp_value))

        nodes = {
            **input_node,
            **regular_op_with_empty_data('slice', {'op': 'Slice'}),
            **valued_const_with_data('starts', int64_array(starts)),
            **valued_const_with_data('ends', int64_array(ends)),
            **valued_const_with_data('axes', int64_array(axes)),
            **valued_const_with_data('steps', int64_array(steps)),
        }

        graph = build_graph(nodes,
                            [('data_1', 'slice'),
                             *connect('starts', '1:slice'),
                             *connect('ends', '2:slice'),
                             *connect('axes', '3:slice'),
                             *connect('steps', '4:slice'),
                             *connect('slice', 'slice_d')])

        graph.stage = 'middle'
        slice_node = Node(graph, 'slice')

        Slice.infer(slice_node)
        if inp_value is not None:
            self.assertTrue(np.array_equal(slice_node.out_node().value, expected))
        else:
            self.assertTrue(np.array_equal(slice_node.out_node().shape, expected))
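
A note on the helpers used throughout these examples: valued_const_with_data, shaped_data, regular_op_with_empty_data and friends each return a small dictionary of node attributes, and build_graph merges those dictionaries into a single graph description in which an op node 'x' is paired with a data node 'x_d'. The sketch below only illustrates that convention; the attribute names are assumptions for illustration, not the exact fields set by the real helpers in mo.utils.unittest.graph.

import numpy as np

# Hypothetical sketch, not the library implementation: an op node plus
# its paired '<name>_d' data node, the pairing the tests above rely on.
def valued_const_with_data_sketch(name, value):
    value = np.array(value)
    return {
        name: {'kind': 'op', 'op': 'Const', 'type': 'Const',
               'value': value, 'shape': np.array(value.shape)},
        name + '_d': {'kind': 'data', 'value': value,
                      'shape': np.array(value.shape)},
    }

nodes = {**valued_const_with_data_sketch('starts', [0, 1])}
assert 'starts' in nodes and 'starts_d' in nodes
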
Example #2
    def test_scatterelements_value_infer(self, data, indices, updates, axis,
                                         ref_res):
        nodes = {
            **valued_const_with_data('data', np.array(data)),
            **valued_const_with_data('indices', int64_array(indices)),
            **valued_const_with_data('updates', np.array(updates)),
            **valued_const_with_data('axis', int64_array(axis)),
            **regular_op_with_empty_data('scatter_elements', {
                'op': 'ScatterElementsUpdate',
                'axis': axis
            }),
            **result()
        }

        graph = build_graph(nodes_attrs=nodes,
                            edges=[
                                *connect('data', '0:scatter_elements'),
                                *connect('indices', '1:scatter_elements'),
                                *connect('updates', '2:scatter_elements'),
                                *connect('axis', '3:scatter_elements'),
                                *connect('scatter_elements', 'output')
                            ],
                            nodes_with_edges_only=True)
        graph.stage = 'middle'

        scatter_el_node = Node(graph, 'scatter_elements')
        ScatterElementsUpdate.infer(scatter_el_node)

        res_output_shape = scatter_el_node.out_node().shape
        self.assertTrue(
            np.array_equal(int64_array(ref_res).shape, res_output_shape))

        res_output_value = scatter_el_node.out_node().value
        self.assertTrue(np.array_equal(ref_res, res_output_value))
Example #3
    def test_not_useless_pad_non_constant_input(self):
        nodes = {
            **regular_op_with_shaped_data('placeholder', [10, 20, 3], {'type': 'Parameter'}),
            **regular_op_with_shaped_data('shape_of_1', [3], {'type': 'ShapeOf'}),
            **regular_op_with_shaped_data('sub', [3], {'type': 'Subtract', 'op': 'Sub'}),
            **valued_const_with_data('desired_output_size', int64_array([10, 20, 3])),
            **regular_op_with_shaped_data('pad', [10, 20, 3], {'type': 'Pad', 'op': 'Pad'}),
            **valued_const_with_data('fill_value', np.array(1)),
            **result('result'),
        }
        edges = [*connect('placeholder', '0:pad'),
                 *connect('placeholder', 'shape_of_1'),
                 *connect('shape_of_1', '0:sub'),
                 *connect('desired_output_size', '1:sub'),
                 *connect('sub', '1:pad'),
                 *connect_data('sub', '2:pad'),
                 *connect('fill_value', '3:pad'),
                 *connect('pad', 'result'),
                 ]
        graph = build_graph(nodes, edges)
        RemoveUselessPad().find_and_replace_pattern(graph)
        ref_graph = build_graph(nodes, edges)

        (flag, resp) = compare_graphs(graph, ref_graph, 'result')
        self.assertTrue(flag, resp)
Example #4
    def test_broadcast(self, data, target_shape, axes_mapping=None, mode='numpy', ref_out=None, test_raising=False):
        if ref_out is not None:
            input = valued_const_with_data('data', int64_array(data))
        else:
            input = shaped_data('data', int64_array(data))

        nodes = {
            **input,
            **valued_const_with_data('target_shape', int64_array(target_shape)),
            **regular_op_with_empty_data('broadcast', {'op': 'Broadcast', 'mode': mode}),
        }

        edges = [('data', 'broadcast'),
                 ('target_shape', 'broadcast'),
                 ('broadcast', 'broadcast_d')]

        if axes_mapping is not None:
            nodes.update(**valued_const_with_data('axes_mapping', int64_array(axes_mapping)))
            edges.append(('axes_mapping', 'broadcast'))
        graph = build_graph(nodes, edges)

        broadcast_node = Node(graph, 'broadcast')
        if test_raising:
            self.assertRaises(AssertionError, Broadcast.infer, broadcast_node)
            return

        Broadcast.infer(broadcast_node)
        if ref_out is not None:
            self.assertTrue(np.array_equal(broadcast_node.out_node().value, np.array(ref_out)))
        else:
            self.assertTrue(np.array_equal(broadcast_node.out_node().shape, np.array(target_shape)))
Example #5
    def test_2_inputs(self):
        nodes = {
            **regular_op_with_shaped_data('placeholder', [1, 3, 20, 20], {
                                              'type': 'Parameter'
                                          }),
            **regular_op_with_shaped_data('a_clamp', [1, 3, 20, 20], {
                                              'type': None,
                                              'op': 'Clamp'
                                          }),
            **regular_op_with_shaped_data('clamp', [1, 3, 20, 20], {
                                              'type': 'Clamp',
                                              'op': 'AttributedClamp',
                                              'min': -3.5,
                                              'max': 3.5
                                          }),
            **valued_const_with_data('min', np.array(-3.5)),
            **valued_const_with_data('max', np.array(3.5)),
            **result('result'),
        }
        edges = [
            *connect('placeholder', '0:a_clamp'),
            *connect('min', '1:a_clamp'),
            *connect('max', '2:a_clamp'),
            *connect('a_clamp', 'result'),
        ]
        graph = build_graph(nodes, edges)
        ClampNormalizer().find_and_replace_pattern(graph)
        ref_graph = build_graph(
            nodes,
            [*connect('placeholder', '0:clamp'), *connect('clamp', 'result')])

        (flag, resp) = compare_graphs(graph, ref_graph, 'result')
        self.assertTrue(flag, resp)
Example #6
    def test_gatherelements_value_infer(self, data, indices, axis, ref_res):
        nodes = {
            **valued_const_with_data('data', int64_array(data)),
            **valued_const_with_data('indices', int64_array(indices)),
            **regular_op_with_empty_data('gather_elements', {
                'op': 'GatherElements',
                'axis': axis
            }),
            **result()
        }

        graph = build_graph(nodes_attrs=nodes,
                            edges=[
                                *connect('data', '0:gather_elements'),
                                *connect('indices', '1:gather_elements'),
                                *connect('gather_elements', 'output')
                            ],
                            nodes_with_edges_only=True)
        graph.stage = 'middle'

        gather_el_node = Node(graph, 'gather_elements')
        GatherElements.infer(gather_el_node)

        res_output_shape = gather_el_node.out_node().shape
        self.assertTrue(
            np.array_equal(int64_array(ref_res).shape, res_output_shape))

        res_output_value = gather_el_node.out_node().value
        if res_output_value is not None:
            self.assertTrue(
                np.array_equal(int64_array(ref_res), res_output_value))
Example #7
    def test_not_useless_pad_constant_input(self):
        nodes = {
            **regular_op_with_shaped_data('placeholder', [1, 10, 20, 3], {
                                              'type': 'Parameter'
                                          }),
            **regular_op_with_shaped_data('pad', [1, 10, 20, 3], {
                                              'type': 'Pad',
                                              'op': 'Pad'
                                          }),
            **valued_const_with_data('pads_begin', int64_array([0, 0, 0, 0])),
            **valued_const_with_data('pads_end', int64_array([0, 1, 0, 0])),
            **valued_const_with_data('fill_value', np.array(1)),
            **result('result'),
        }
        edges = [
            *connect('placeholder', '0:pad'),
            *connect('pads_begin', '1:pad'),
            *connect('pads_end', '2:pad'),
            *connect('fill_value', '3:pad'),
            *connect('pad', 'result'),
        ]
        graph = build_graph(nodes, edges)
        RemoveUselessPad().find_and_replace_pattern(graph)
        ref_graph = build_graph(nodes, edges)

        (flag, resp) = compare_graphs(graph, ref_graph, 'result')
        self.assertTrue(flag, resp)
Example #8
def generate_nodes(data, axis=-1, depth=4, on_value=1., off_value=0.):
    return {
        'indices': {'Op': 'Parameter', 'value': data, 'shape': int64_array(data.shape)},
        'indices_d': {'kind': 'data', 'value': data, 'shape': int64_array(data.shape)},
        **valued_const_with_data('depth', int64_array(depth)),
        **valued_const_with_data('on_value', float_array(on_value)),
        **valued_const_with_data('off_value', float_array(off_value)),
        **regular_op_with_shaped_data('one_hot', None, {'type': 'OneHot', 'axis': axis, 'Op': 'OneHot'})
    }
Example #9
    def setUp(self):
        nodes = {
            **regular_op_with_shaped_data('boxes', [10, 100, 4], {
                                              'type': 'Parameter'
                                          }),
            **regular_op_with_shaped_data('scores', [10, 5, 100], {
                                              'type': 'Parameter'
                                          }),
            **valued_const_with_data('max_output_per_class', int64_array(7)),
            **regular_op_with_shaped_data(
                'nms', None, {
                    'op': 'NonMaxSuppression',
                    'type': 'NonMaxSuppression',
                    'name': 'nms'
                }),
            **result('output'),
        }

        self.graph = build_graph(nodes, [
            *connect('boxes', '0:nms'),
            *connect('scores', '1:nms'),
            *connect('max_output_per_class', '2:nms'),
            *connect('nms', 'output'),
        ],
                                 nodes_with_edges_only=True)
Example #10
    def test_div_with_integer(self):
        # Test where transformation should not be applied because the divisor is integer
        graph = build_graph(
            {
                **regular_op_with_shaped_data('parameter', [1, 227, 227, 3], {
                                                  'type': 'Parameter',
                                                  'data_type': np.int32
                                              }),
                **valued_const_with_data('const',
                                         np.array([-1.], dtype=np.int32)),
                **regular_op_with_shaped_data('div', None, {
                    'op': 'Div',
                    'type': 'Divide',
                    'name': 'my_div'
                }),
                **result()
            }, [
                *connect('parameter:0', '0:div'),
                *connect_data('const:0', '1:div'),
                *connect('div', 'output'),
            ])
        graph_ref = graph.copy()
        Div().find_and_replace_pattern(graph)

        (flag, resp) = compare_graphs(graph,
                                      graph_ref,
                                      'output',
                                      check_op_attrs=True)
        self.assertTrue(flag, resp)
Example #11
    def test_backward_bfs_multi_consumer_data_nodes(self):
        # Placeholder-> Mul -> Result
        # Const      -/    \- Result2

        graph = build_graph(
            {
                **regular_op_with_shaped_data('parameter', [1], {
                                                  'op': 'Parameter'
                                              }),
                **valued_const_with_data('const', int64_array([5])),
                **regular_op_with_shaped_data('mul', [1], {'op': 'Mul'}),
                **result('result'),
                **result('result2'),
            }, [
                *connect('parameter', '0:mul'),
                *connect('const', '1:mul'),
                *connect('mul:0', 'result'),
                *connect_data('mul', 'result2'),
            ])

        res = common_bfs(Node(graph, 'result'), ['Mul'], ['Parameter'],
                         is_backward=True,
                         attr_to_check='op',
                         follow_multi_consumer_data_nodes=True)
        self.assertTrue(
            len(res) == 1,
            'The multi-consumer data node "mul_d" was not followed')

        res = common_bfs(Node(graph, 'result'), ['Mul'], ['Parameter'],
                         is_backward=True,
                         attr_to_check='op')
        self.assertTrue(
            len(res) == 0, 'The multi-consumer data node "mul_d" was followed')
Example #12
    def test_v7_group_convolution_resolver_weight_are_in_the_right_layout(
            self):
        nodes = {
            **regular_op_with_shaped_data('input', None, {
                'type': 'Parameter'
            }),
            **valued_const_with_data('weights', np.ones([24, 1, 7, 7])),
            **regular_op_with_shaped_data('convolution', None, {
                'type': 'Convolution',
                'group': 3,
                'output': 24
            }),
            **result(),
        }
        edges = [
            *connect('input', '0:convolution'),
            *connect('weights', '1:convolution'),
            *connect('convolution', 'output'),
        ]
        graph = build_graph(nodes, edges)
        V7ConvolutionWithGroupsResolver().find_and_replace_pattern(graph)
        graph_ref = build_graph(nodes, edges)
        (flag, resp) = compare_graphs(graph,
                                      graph_ref,
                                      last_node='output',
                                      check_op_attrs=True)
        self.assertTrue(flag, resp)
Example #13
    def test_pool_v2_to_attributed_pool(self):
        nodes = {
            **shaped_const_with_data('input', int64_array([200, 200])),
            **valued_const_with_data('windows', int64_array([4, 4])),
            **valued_const_with_data('strides', int64_array([4, 4])),
            **regular_op_with_empty_data(
                'pool_v2', {
                    'op': 'PoolingV2',
                    'pad': [2, 2],
                    'spatial_dims': [1, 2],
                    'auto_pad': 'same_upper',
                    'output_spatial_shape': [2, 3],
                    'pad_spatial_shape': [1, 2],
                    'pool_method': 'max',
                    'permute_attrs': None
                }),
            **regular_op_with_empty_data(
                'pool_v1', {
                    'type': 'Pooling',
                    'pad': [2, 2],
                    'spatial_dims': [1, 2],
                    'auto_pad': 'same_upper',
                    'output_spatial_shape': [2, 3],
                    'pad_spatial_shape': [1, 2],
                    'pool_method': 'max'
                }),
            **result('output')
        }

        edges = [
            *connect('input', 'pool_v2:0'),
            *connect('windows', 'pool_v2:1'),
            *connect('strides', 'pool_v2:2'),
            *connect('pool_v2', 'output'),
        ]

        graph = build_graph(nodes, edges, nodes_with_edges_only=True)
        PoolV2ToAttributedPool().find_and_replace_pattern(graph)

        ref_graph = build_graph(
            nodes,
            [*connect('input', 'pool_v1'), *connect('pool_v1', 'output')],
            nodes_with_edges_only=True)
        (flag, resp) = compare_graphs(graph, ref_graph, 'output')
        self.assertTrue(flag, resp)
Example #14
    def test_reshape_on_the_A_input(self, in1_shape, in2_shape,
                                    reshape_pattern, transpose_a, transpose_b,
                                    updated_pattern):
        nodes = {
            **regular_op_with_shaped_data(
                'in_1', in1_shape, dict(type='Parameter', op='Parameter')),
            **regular_op_with_shaped_data(
                'in_2', in2_shape, dict(type='Parameter', op='Parameter')),
            **valued_const_with_data('dim', int64_array(reshape_pattern)),
            **op_with_empty_data(
                'reshape',
                dict(type='Reshape',
                     op='Reshape',
                     infer=Reshape.infer,
                     need_shape_inference=True)),
            **op_with_empty_data(
                'matmul',
                dict(type='MatMul',
                     op='MatMul',
                     infer=MatMul.infer,
                     need_shape_inference=True,
                     transpose_a=transpose_a,
                     transpose_b=transpose_b,
                     dim_attrs={})),
            **result(),
        }
        edges = [
            *connect('in_1:0', '0:reshape'),
            *connect('dim:0', '1:reshape'),
            *connect('reshape:0', '0:matmul'),
            *connect('in_2:0', '1:matmul'),
            *connect('matmul:0', 'output'),
        ]
        graph = build_graph(nodes_attrs=nodes,
                            edges=edges,
                            cli=Namespace(static_shape=True))
        graph.clean_up()
        SmartReshape_HC_Reshape_MatMul().find_and_replace_pattern(graph)
        graph.clean_up()

        graph_ref = build_graph(nodes_attrs=nodes,
                                edges=edges,
                                update_attributes={
                                    'dim': {
                                        'value': int64_array(updated_pattern)
                                    },
                                    'dim_d': {
                                        'value': int64_array(updated_pattern)
                                    }
                                })
        graph_ref.clean_up()

        (flag, resp) = compare_graphs(graph,
                                      graph_ref,
                                      'output',
                                      check_op_attrs=True)
        self.assertTrue(flag, resp)
Example #15
    def test_set_value_and_shape_with_force_shape_attribute_in_op(self):
        import numpy as np
        graph = build_graph({**valued_const_with_data('const', np.array([1, 2, 3])), **result()},
                            [*connect('const', 'output')])

        node = Node(graph, 'const')
        node['force_shape'] = np.array([2, 5, 7], dtype=np.int64)
        node.out_port(0).data.set_value(np.zeros(35))
        self.assertTrue(np.array_equal(node.out_port(0).data.get_shape(), np.array([2, 5, 7], dtype=np.int64)),
                        "node.out_port(0).data.get_shape()={} != [2, 5, 7]".format(node.out_port(0).data.get_shape()))
Example #16
    def test_multi(self):
        nodes = {
            **regular_op_with_empty_data('input', {'type': 'Parameter'}),
            **regular_op_with_empty_data('some_op', {'type': 'SomeOp', 'name': 'some_op_name'}),
            **empty_data('some_op_d2'),
            **regular_op_with_empty_data('fake_output1',
                                         {'type': None, 'kind': 'op', 'op': 'FakeOutput', 'name': 'my_output_name1'}),
            **regular_op_with_empty_data('fake_output2',
                                         {'type': None, 'kind': 'op', 'op': 'FakeOutput', 'name': 'my_output_name2'}),

            **valued_const_with_data('const1', int64_array(0)),
            **valued_const_with_data('const2', int64_array(0)),
            **regular_op_with_empty_data('add1', {'type': None, 'kind': 'op', 'op': 'Add', 'name': 'my_output_name1'}),
            **regular_op_with_empty_data('add2', {'type': None, 'kind': 'op', 'op': 'Add', 'name': 'my_output_name2'}),
            **result('result1'),
            **result('result2'),
        }
        edges = [*connect('input', 'some_op'),
                 *connect('some_op', 'fake_output1'),
                 ('some_op', 'some_op_d2'),
                 ('some_op_d2', 'fake_output2'),
                 *connect('fake_output1', 'result1'),
                 *connect('fake_output2', 'result2'),
                 ]
        graph = build_graph(nodes, edges)

        edges_ref = [*connect('input', 'some_op'),
                     *connect('some_op', '0:add1'),
                     *connect('const1', '1:add1'),
                     ('some_op', 'some_op_d2'),
                     ('some_op_d2', 'add2', {'in': 0}),
                     *connect('const2', '1:add2'),
                     *connect('add1', 'result1'),
                     *connect('add2', 'result2'),
                     ]

        graph_ref = build_graph(nodes, edges_ref)

        FakeOutputResolver().find_and_replace_pattern(graph)

        (flag, resp) = compare_graphs(graph, graph_ref, 'result1')
        self.assertTrue(flag, resp)
Example #17
def nodes_dict(original,
               transformed=None,
               levels=255,
               data=None,
               il=[-127],
               ih=[127],
               ol=[-127],
               oh=[127]):
    shape = [1, 2, 3, 4] if data is None else np.array(data).shape
    data = np.ones(shape, dtype=original) if data is None else np.array(
        data, dtype=original)
    int_data = data.astype(dtype=np.int8)
    transformed = transformed if transformed is not None else original

    return {
        **valued_const_with_data('weights', data),
        **valued_const_with_data('int_weights', int_data),
        **regular_op_with_shaped_data(
            'cast', shape, {
                'type': 'Convert',
                'op': 'Cast',
                'infer': Cast.infer,
                'dst_type': transformed
            }),
        **valued_const_with_data('il', np.array(il)),
        **valued_const_with_data('ih', np.array(ih)),
        **valued_const_with_data('ol', np.array(ol)),
        **valued_const_with_data('oh', np.array(oh)),
        **regular_op_with_shaped_data(
            'FQ', shape, {
                'type': 'FakeQuantize',
                'infer': FakeQuantize.infer,
                'stop_value_propagation': True,
                'levels': levels,
                'op': 'FakeQuantize'
            }),
        **valued_const_with_data('zp', np.array([0])),
        **valued_const_with_data('scale', np.array([1])),
        **regular_op_with_shaped_data(
            'sub', shape, {
                'type': 'Subtract',
                'op': 'Sub',
                'infer': lambda node: eltwise_infer(node, Sub.operation)
            }),
        **regular_op_with_shaped_data(
            'mul', shape, {
                'type': 'Multiply',
                'op': 'Mul',
                'infer': lambda node: eltwise_infer(node, Mul.operation)
            }),
        **result()
    }
Example #18
    def test_v10_group_convolution_resolver(self):
        nodes = {
            **regular_op_with_shaped_data('input', [1, 3, 224, 224], {
                                              'type': 'Parameter'
                                          }),
            **valued_const_with_data('weights', np.ones([3, 8, 7, 7])),
            **valued_const_with_data('dim', int64_array([3, 8, 1, 7, 7])),
            **regular_op_with_empty_data('reshape', {'type': 'Reshape'}),
            **regular_op_with_shaped_data('convolution', None, {
                'type': 'Convolution',
                'group': 3,
                'output': 24
            }),
            **result(),
        }
        graph = build_graph(nodes, [
            *connect('input', '0:convolution'),
            *connect('weights', '1:convolution'),
            *connect('convolution', 'output'),
        ],
                            nodes_with_edges_only=True)

        V10ConvolutionWithGroupsResolver().find_and_replace_pattern(graph)

        nodes['convolution']['type'] = 'GroupConvolution'
        del nodes['convolution']['group']

        graph_ref = build_graph(nodes, [
            *connect('input', '0:convolution'),
            *connect('weights', '0:reshape'),
            *connect('dim', '1:reshape'),
            *connect('reshape', '1:convolution'),
            *connect('convolution', 'output'),
        ],
                                nodes_with_edges_only=True)

        (flag, resp) = compare_graphs(graph,
                                      graph_ref,
                                      last_node='output',
                                      check_op_attrs=True)
        self.assertTrue(flag, resp)
Example #19
    def test_leaky_relu_mul_multiple_consumers(self):
        # multiple consumers of Mul operation
        graph = build_graph_with_edge_attrs(nodes, edges, {})
        additional_result = Result(graph, {'name': 'result_2'}).create_node()
        Node(graph, 'mul').out_port(0).connect(additional_result.in_port(0))

        ref_nodes = {
            **regular_op_with_shaped_data('input', shape, {
                'type': 'Parameter',
                'op': 'Parameter'
            }),
            **regular_op_with_shaped_data('mul', shape, {
                'type': 'Multiply',
                'name': 'mul'
            }),
            **regular_op_with_shaped_data('max', shape, {
                'type': 'Maximum',
                'name': 'final_max'
            }),
            **valued_const_with_data('const', float_array([0.5])),
            **regular_op_with_shaped_data('leaky_relu', shape, {
                'type': 'LeakyReLU',
                'name': 'max_final',
                'negative_slope': None
            }),
            **result('result'),
            **result('result_2')
        }
        ref_edges = [
            *connect('input:0', '0:mul'), *connect('const', '1:mul'),
            *connect('max:0', 'result'), *connect('mul:0', 'result_2'),
            *connect_data('input', 'leaky_relu'),
            *connect('leaky_relu', 'result')
        ]
        graph_ref = build_graph_with_edge_attrs(ref_nodes, ref_edges)

        LeakyReLUFusion().find_and_replace_pattern(graph)
        graph.clean_up()

        (flag, resp) = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)

        (flag, resp) = compare_graphs(graph, graph_ref, 'result_2')
        self.assertTrue(flag, resp)
Example #20
    def setUp(self):
        nodes = {
            **regular_op_with_shaped_data('data', [20, 100, 4], {'type': 'Parameter', 'value': None,
                                                                 '_out_port_data_type': {0: np.float32}}),
            **valued_const_with_data('k', int64_array(10)),
            **regular_op_with_shaped_data('topk', None, {'op': 'TopK', 'type': 'TopK', 'name': 'topk', 'axis': 1}),
            'topk_d2': {'kind': 'data', 'shape': None, 'value': None},
            **result('output_1'),
            **result('output_2'),
        }

        self.graph = build_graph(nodes, [
            *connect('data', '0:topk'),
            *connect('k', '1:topk'),
            ('topk', 'topk_d', {'out': 0}),
            ('topk', 'topk_d2', {'out': 1}),
            ('topk_d', 'output_1'),
            ('topk_d2', 'output_2'),
        ], nodes_with_edges_only=True)
Example #21
    def test_broadcast_with_range_positive_test(self):
        graph = build_graph({
            **regular_op_with_shaped_data('shape', [2], {'type': 'Parameter'}),
            **valued_const_with_data('value', np.arange(0, 384).reshape((1, 384))),
            **regular_op_with_empty_data('bc', {'type': 'Broadcast'}),
            **result(),
        }, [
            *connect('value', '0:bc'),
            *connect('shape', '1:bc'),
            *connect('bc', 'output'),
        ], nodes_with_edges_only=True)
        ExpandRangeConstant().find_and_replace_pattern(graph)

        graph_ref = build_graph({
            **regular_op_with_shaped_data('shape', [2], {'type': 'Parameter'}),

            # start
            **valued_const_with_data('start', np.array(0)),
            # limit
            **valued_const_with_data('minus_one', np.array(-1)),
            **valued_const_with_data('zero', np.array(0)),
            **regular_op_with_empty_data('range_dim', {'type': 'Gather'}),
            # delta
            **valued_const_with_data('delta', np.array(1)),
            **regular_op_with_empty_data('range', {'type': 'Range'}),

            # keep dims
            **valued_const_with_data('axes', np.array([0])),
            **regular_op_with_empty_data('keep_shape', {'type': 'Unsqueeze'}),

            **regular_op_with_empty_data('bc', {'type': 'Broadcast'}),
            **result(),
        }, [
            *connect('start', '0:range'),
            *connect('shape', '0:range_dim'),
            *connect('minus_one', '1:range_dim'),
            *connect('zero', '2:range_dim'),
            *connect('range_dim', '1:range'),
            *connect('delta', '2:range'),
            *connect('range', '0:keep_shape'),
            *connect('axes', '1:keep_shape'),
            *connect('keep_shape', '0:bc'),
            *connect_data('shape', '1:bc'),
            *connect('bc', 'output'),
        ], nodes_with_edges_only=True)

        (flag, resp) = compare_graphs(graph, graph_ref, 'output', check_op_attrs=True)
        self.assertTrue(flag, resp)
Example #22
nodes = {
    **regular_op_with_shaped_data('placeholder_1', [1, 227, 227, 3], {
                                      'type': 'Parameter'
                                  }),
    **regular_op_with_shaped_data('placeholder_2', [1, 227, 227, 3], {
                                      'type': 'Parameter'
                                  }),
    **regular_op_with_shaped_data('div', None, {
        'op': 'Div',
        'type': 'Divide',
        'name': 'my_div'
    }),
    **regular_op_with_shaped_data('reciprocal', [1, 227, 227, 3], {
                                      'type': 'Power'
                                  }),
    **valued_const_with_data('minus_one', np.array(-1.)),
    **regular_op_with_shaped_data('mul', None, {'type': 'Multiply'}),
    **result(),
}


class TestDiv(unittest.TestCase):
    def test_div_test_1(self):
        # Test with two different inputs from two placeholders
        graph = build_graph(nodes, [
            *connect('placeholder_1', '0:div'),
            *connect('placeholder_2', '1:div'),
            *connect('div', 'output'),
        ],
                            nodes_with_edges_only=True)
        Div().find_and_replace_pattern(graph)
Example #23
# Copyright (C) 2018-2021 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

import unittest
from argparse import Namespace

import numpy as np

from extensions.back.InterpolateReshape import InterpolateReshapeWA, InterpolateConcat
from mo.utils.ir_engine.compare_graphs import compare_graphs
from mo.utils.unittest.graph import build_graph, result, regular_op_with_shaped_data, valued_const_with_data, connect, \
    connect_data

nodes = {
    **regular_op_with_shaped_data('placeholder', [1, 3, 30, 40], {'type': 'Parameter', 'op': 'Parameter'}),
    **valued_const_with_data('out_shape', np.array([60, 160])),

    **regular_op_with_shaped_data('interpolate', [1, 3, 60, 160], {'type': 'Interpolate', 'axes': [2, 3],
                                                                   'op': 'Interpolate', 'version': 'opset1'}),

    **regular_op_with_shaped_data('shape', [4], {'type': 'ShapeOf', 'op': 'ShapeOf'}),
    **valued_const_with_data('indices', np.array([2, 3])),
    **valued_const_with_data('axis', np.array(0)),
    **regular_op_with_shaped_data('gather', [2], {'type': 'Gather', 'op': 'Gather'}),

    **valued_const_with_data('multiplier', np.array([2, 4])),
    **regular_op_with_shaped_data('mul', [2], {'type': 'Multiply', 'op': 'Mul'}),

    **regular_op_with_shaped_data('placeholder_1', [1, 3, 60, 160], {'type': 'Parameter', 'op': 'Parameter'}),
    **regular_op_with_shaped_data('concat', [1, 7, 60, 160], {'type': 'Concat', 'axis': 1, 'op': 'Concat'}),
Example #24
 }),
 **regular_op_with_empty_data('ss_begin_clamp_min', {
     'value': np.iinfo(np.int32).min,
     'op': 'Const',
     'type': 'Const'
 }),
 **regular_op_with_empty_data('ss_begin_clamp_max', {
     'value': np.iinfo(np.int32).max,
     'op': 'Const',
     'type': 'Const'
 }),
 **regular_op_with_empty_data('ss_begin_gather_0', {
     'op': 'Gather',
     'type': 'Gather'
 }),
 **valued_const_with_data('ss_begin_gather_0_idx', int64_array([0])),
 **regular_op_with_shaped_data('ss_begin_gather_0_axis', [], {
                                   'op': 'Const',
                                   'type': 'Const',
                                   'value': [0]
                               }),
 **regular_op_with_empty_data('ss_begin_gather_1', {
     'op': 'Gather',
     'type': 'Gather'
 }),
 **valued_const_with_data('ss_begin_gather_1_idx', int64_array([1])),
 **regular_op_with_shaped_data('ss_begin_gather_1_axis', [], {
                                   'op': 'Const',
                                   'type': 'Const',
                                   'value': [0]
                               }),
Example #25
 limitations under the License.
"""

import unittest

import numpy as np

from extensions.ops.cumsum import CumSum
from mo.front.common.partial_infer.utils import int64_array
from mo.graph.graph import Node
from mo.utils.unittest.graph import build_graph, valued_const_with_data, regular_op_with_shaped_data, result, connect

nodes_attributes = {
    **regular_op_with_shaped_data('data', [1, 3, 224, 224], {'type': 'Parameter', 'value': None,
                                                             '_out_port_data_type': {0: np.float32}}),
    **valued_const_with_data('axis', int64_array(0)),
    **regular_op_with_shaped_data('cumsum', None, {'op': 'CumSum', 'type': 'CumSum', 'name': 'cumsum'}),
    **regular_op_with_shaped_data('identity', None, {'op': 'Identity', 'name': 'identity'}),
    **result('output'),
}


class TestCumSum(unittest.TestCase):
    def test_cumsum_axis(self):
        graph = build_graph(nodes_attributes,
                            [*connect('data', '0:cumsum'),
                             *connect('axis', '1:cumsum'),
                             *connect('cumsum', '0:identity'),
                             ('identity', 'identity_d', {'out': 0}),
                             ('identity_d', 'output'),
                             ],
Example #26
shape = int64_array([1, 3, 5, 2])
nodes = {
    **regular_op_with_shaped_data('input', shape, {
        'type': 'Parameter',
        'op': 'Parameter'
    }),
    **regular_op_with_shaped_data('mul', shape, {
        'type': 'Multiply',
        'name': 'mul'
    }),
    **regular_op_with_shaped_data('max', shape, {
        'type': 'Maximum',
        'name': 'final_max'
    }),
    **valued_const_with_data('const', float_array([0.5])),
    **result('result')
}

edges = [
    *connect('input:0', '0:mul'),
    *connect('const', '1:mul'),
    *connect_data('input', '0:max'),
    *connect('mul:0', '1:max'),
    *connect('max:0', 'result'),
]

ref_nodes = {
    **regular_op_with_shaped_data('input', shape, {
        'type': 'Parameter',
        'op': 'Parameter'
Example #27
def graph_template(weights_initial_shape,
                   new_reshape_shape,
                   limits_initial_shape,
                   limits_new_shape=None):
    limits_new_shape = limits_initial_shape if limits_new_shape is None else limits_new_shape

    core_connections = [
        *connect('input:0', '0:convolution'),
        *connect('convolution:0', '0:output'),
    ]

    core_nodes = lambda weights_shape, limit_shape, reshape_shape: {
        **regular_op_with_shaped_data('input', None, {
            'type': 'Parameter',
            'op': 'Parameter'
        }),
        **valued_const_with_data('weights', np.ones(weights_shape)),
        **const_with_data('dim', int64_array(reshape_shape)),
        **regular_op_with_shaped_data('reshape', reshape_shape, {
            'type': 'Reshape',
            'infer': Reshape.infer,
            'op': 'Reshape'
        }),
        **valued_const_with_data('il', np.ones(limit_shape)),
        **valued_const_with_data('ih', np.ones(limit_shape)),
        **valued_const_with_data('ol', np.ones(limit_shape)),
        **valued_const_with_data('oh', np.ones(limit_shape)),
        **regular_op_with_shaped_data(
            'FQ', weights_shape, {
                'type': 'FakeQuantize',
                'infer': FakeQuantize.infer,
                'stop_value_propagation': True,
                'levels': 2,
                'op': 'FakeQuantize'
            }),
        **regular_op_with_shaped_data('convolution', None, {
            'type': 'Convolution',
            'op': 'Convolution'
        }),
        **result(),
    }

    nodes_before = core_nodes(weights_initial_shape, limits_initial_shape,
                              new_reshape_shape)
    edges_before = [
        *connect('weights:0', '0:FQ'),
        *connect('il:0', '1:FQ'),
        *connect('ih:0', '2:FQ'),
        *connect('ol:0', '3:FQ'),
        *connect('oh:0', '4:FQ'),
        *connect('FQ:0', '0:reshape'),
        *connect('dim:0', '1:reshape'),
        *connect('reshape:0', '1:convolution'),
        *core_connections,
    ]
    graph = build_graph(nodes_attrs=nodes_before,
                        edges=edges_before,
                        nodes_with_edges_only=True)

    nodes_after = core_nodes(new_reshape_shape, limits_new_shape, [])
    edges_after = [
        *connect('weights:0', '0:FQ'),
        *connect('il:0', '1:FQ'),
        *connect('ih:0', '2:FQ'),
        *connect('ol:0', '3:FQ'),
        *connect('oh:0', '4:FQ'),
        *connect('FQ:0', '1:convolution'),
        *core_connections,
    ]
    graph_ref = build_graph(nodes_attrs=nodes_after,
                            edges=edges_after,
                            nodes_with_edges_only=True)
    return graph, graph_ref
Example #28
    **regular_op_with_empty_data('rank', {
        'op': 'Rank',
        'type': None,
        'output_type': output_type,
        'name': 'my_rank'
    }),
    **result(),
    **regular_op_with_empty_data('shape', {
        'type': 'ShapeOf',
        'output_type': output_type
    }),
    **regular_op_with_empty_data('rank_1D', {
        'type': 'ShapeOf',
        'output_type': output_type
    }),
    **valued_const_with_data('zero', int64_array(0)),
    **regular_op_with_empty_data('rank_0D', {'type': 'Squeeze'}),
}


@generator
class RankDecomposerTest(unittest.TestCase):
    @generate(np.int32, np.int64)
    def test_rank_decomposer(self, output_type):
        graph = build_graph(nodes_attrs=nodes(output_type),
                            edges=[
                                *connect('input', 'rank'),
                                *connect('rank', 'output'),
                            ],
                            nodes_with_edges_only=True)
        RankDecomposer().find_and_replace_pattern(graph)
Example #29
                                  }),
    **regular_op_with_shaped_data('parameter_2', [1, 3, 227, 227], {
                                      'type': 'Parameter',
                                      'op': 'Parameter',
                                      'shape': [1, 3, 227, 227]
                                  }),
    **regular_op_with_shaped_data('mul_scale', [1, 3, 227, 227], {
                                      'type': 'Multiply',
                                      'op': 'Mul'
                                  }),
    **regular_op_with_shaped_data('add_mean', [1, 3, 227, 227], {
                                      'type': 'Add',
                                      'op': 'Add'
                                  }),
    **valued_const_with_data(
        'scale',
        np.array([1. / 1., 1. / 2., 1. / 3.]).reshape((1, 3, 1, 1))),
    **valued_const_with_data('mean',
                             np.array([-1., -2., -3.]).reshape((1, 3, 1, 1))),
    **regular_op_with_shaped_data('shape_of', [4], {
                                      'type': 'ShapeOf',
                                      'op': 'ShapeOf'
                                  }),
    **regular_op_with_shaped_data('op', [1, 3, 227, 227], {}),
    **result('result'),
    **result('result_2'),
}


class AddMeanScaleValuesTest(unittest.TestCase):
    def check_graph_attrs(self, graph: Graph, graph_ref: Graph,
Example #30
# Copyright (C) 2018-2021 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

import numpy as np
import unittest
from generator import generator, generate

from extensions.ops.Cast import Cast
from mo.middle.passes.convert_data_type import packed_U4, packed_I4
from mo.middle.passes.infer import partial_infer
from mo.utils.ir_engine.compare_graphs import compare_graphs
from mo.utils.unittest.graph import valued_const_with_data, regular_op_with_empty_data, \
    result, build_graph, connect

nodes = lambda value, dst_type: {
    **valued_const_with_data('value', np.array(value)),
    **regular_op_with_empty_data('convert', {'dst_type': dst_type, 'infer': Cast.infer}),
    **result(),
}


@generator
class CastTest(unittest.TestCase):
    """
    Example of checking:
        7 == 0111,           padded to 0111 0000, results in 112
        7 == 0111, 8 == 1000 packed to 0111 1000, results in 120

        -8 == 1000,          padded to 1000 0000, results in 128
    """