Example #1
 def test_squeeze_squeeze_dims(self, input_value, input_shape, squeeze_dims,
                               ref_value, ref_shape):
     graph = build_graph(
         nodes_attributes, [('data', 'squeeze'),
                            ('squeeze_dims', 'squeeze_dims_data'),
                            ('squeeze_dims_data', 'squeeze'),
                            ('squeeze', 'data_out')], {
                                'data': {
                                    'shape': input_shape,
                                    'value': input_value
                                },
                                'squeeze_dims': {
                                    'value': squeeze_dims,
                                    'shape': squeeze_dims.shape
                                },
                                'squeeze_dims_data': {
                                    'value': squeeze_dims,
                                    'shape': squeeze_dims.shape
                                },
                            })
     node = Node(graph, 'squeeze')
     if ref_shape is None:  # the test should fail
         with self.assertRaises(Error):
             Squeeze.infer(node)
     else:
         Squeeze.infer(node)
         if ref_value is not None:
             self.assertTrue(
                 strict_compare_tensors(
                     node.out_port(0).data.get_value(), ref_value))
         self.assertTrue(
             strict_compare_tensors(
                 node.out_port(0).data.get_shape(), ref_shape))
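The helpers used throughout these examples (`shape_array`, `dynamic_dimension_value`, `strict_compare_tensors`) come from Model Optimizer's partial-infer utilities. A minimal standalone sketch of the convention they rely on, assuming (not verbatim from the source) that dynamic dimensions are masked entries of a numpy masked array and that strict comparison requires both values and masks to match exactly:

import numpy as np

DYNAMIC = np.int64(-1000000007)  # assumed sentinel for a dynamic dimension

def shape_array(values):
    # mask the sentinel entries so they behave as "unknown" dimensions
    return np.ma.masked_equal(np.array(values, dtype=np.int64), DYNAMIC)

def strict_compare_tensors(a, b):
    # strict equality: same rank, same mask (dynamic positions), same values
    a, b = np.ma.masked_array(a), np.ma.masked_array(b)
    if a.shape != b.shape:
        return False
    return bool(np.array_equal(np.ma.getmaskarray(a), np.ma.getmaskarray(b))
                and np.array_equal(a.filled(DYNAMIC), b.filled(DYNAMIC)))

print(strict_compare_tensors(shape_array([1, DYNAMIC, 3]),
                             shape_array([1, DYNAMIC, 3])))  # True
print(strict_compare_tensors(shape_array([1, DYNAMIC, 3]),
                             shape_array([1, 2, 3])))        # False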
Example #2
 def test_reshape_infer(self, input_value, input_shape, output_shape,
                        ref_value, ref_shape):
     graph = build_graph(
         nodes_attributes, [('input', 'data'), ('data', 'reshape'),
                            ('output_shape', 'output_shape_data'),
                            ('output_shape_data', 'reshape'),
                            ('reshape', 'reshape_out')], {
                                'data': {
                                    'shape': input_shape,
                                    'value': input_value
                                },
                                'output_shape': {
                                    'value': output_shape,
                                    'shape': output_shape.shape
                                },
                                'output_shape_data': {
                                    'value': output_shape,
                                    'shape': output_shape.shape
                                },
                            })
     node = Node(graph, 'reshape')
     Reshape.infer(node)
     if ref_value is not None:
         self.assertTrue(
             strict_compare_tensors(
                 node.out_port(0).data.get_value(), shape_array(ref_value)))
     self.assertTrue(
         strict_compare_tensors(
             node.out_port(0).data.get_shape(), shape_array(ref_shape)))
Example #3
    def infer(node: Node):
        node_name = node.soft_get('name', node.id)

        input_shape = node.in_port(0).data.get_shape()
        indices_shape = node.in_port(1).data.get_shape()
        updates_shape = node.in_port(2).data.get_shape()
        assert input_shape is not None and updates_shape is not None and indices_shape is not None, \
            'The node "{}" input shape is None'.format(node_name)

        # check that shapes are correct
        # 1. ranks of both input and indices must be at least 1
        assert len(input_shape) >= 1 and len(indices_shape) >= 1, \
            'The node "{}" input and indices ranks must be at least 1'.format(node_name)

        # 2. the last dimension of indices shape must be at most a rank of input
        assert not is_fully_defined(indices_shape[-1]) or indices_shape[-1] <= len(input_shape), \
            'The last dimension of indices shape must be at most a rank of input for the node "{}"'.format(node_name)

        # 3. updates is a tensor of shape indices_shape[:-1] + input_shape[indices_shape[-1]:]
        # if the expected updates shape is a scalar, updates may be a tensor with a single element (for example, of
        # shape [1], [[1]], etc.)
        expected_updates_shape = np.ma.concatenate((indices_shape[:-1], input_shape[indices_shape[-1]:]), axis=0)
        assert compatible_shapes(updates_shape, expected_updates_shape) or \
               (strict_compare_tensors(expected_updates_shape, []) and
                strict_compare_tensors(updates_shape, np.ones(len(updates_shape), dtype=np.int64))), \
            'The updates shape must be equal to indices_shape[:-1] + input_shape[indices_shape[-1]:] for the node ' \
            '"{}"'.format(node_name)

        node.out_port(0).data.set_shape(input_shape)
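A worked instance of rule 3 above, as a standalone sketch with made-up shapes:

import numpy as np

# input of rank 4, indices with K = indices_shape[-1] = 2
input_shape = np.array([10, 20, 30, 40])
indices_shape = np.array([5, 7, 2])

# expected updates shape = indices_shape[:-1] + input_shape[indices_shape[-1]:]
expected = np.concatenate((indices_shape[:-1], input_shape[indices_shape[-1]:]))
print(expected)  # [ 5  7 30 40]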
Example #4
 def test_two_inputs_two_shapes_positive_1(self):
     shape_1 = [1, 2, 3, 4]
     shape_2 = [4, 3, 2, 1]
     inputs = {'node_1': [{'shape': shape_1}], 'node_4': [{'shape': shape_2}]}
     nodes = {
         'input_1': {'type': 'Identity', 'kind': 'op', 'op': 'Parameter'},
         'input_2': {'type': 'Identity', 'kind': 'op', 'op': 'Parameter'},
         'node_1': {'type': 'Identity', 'kind': 'op', 'op': 'NotPlaceholder'},
         'node_2': {'type': 'Identity', 'kind': 'op', 'op': 'NotPlaceholder'},
         'node_3': {'type': 'Identity', 'kind': 'op', 'op': 'NotPlaceholder'},
         'node_4': {'type': 'Identity', 'kind': 'op', 'op': 'NotPlaceholder'},
         'output': {'kind': 'op', 'op': 'Result'}
     }
     edges = [
         ('input_1', 'node_1'),
         ('node_1', 'node_2'),
         ('node_3', 'output'),
         ('input_2', 'node_4'),
         ('node_4', 'output')
     ]
     graph = build_graph(nodes, edges)
     add_input_ops(graph=graph, user_defined_inputs=inputs, before_infer=True)
     new_input_1 = list(graph.in_edges('node_1'))[0][0]
     new_input_2 = list(graph.in_edges('node_4'))[0][0]
     self.assertFalse(graph.node['input_1']['is_input'])
     self.assertTrue(graph.node[new_input_1]['is_input'])
     self.assertTrue(graph.node[new_input_2]['is_input'])
     self.assertTrue((new_input_1, 'node_1') in graph.edges())
     self.assertTrue((new_input_2, 'node_4') in graph.edges())
     self.assertTrue(strict_compare_tensors(shape_1, graph.node[new_input_1]['shape']))
     self.assertTrue(strict_compare_tensors(shape_2, graph.node[new_input_2]['shape']))
Example #5
    def test_unsqueeze_infer(self, input_shape, unsq_dims, output_shape,
                             ref_uns_dims, input_value, output_value):
        graph = build_graph(
            self.nodes_attributes, [('data_1', 'unsq'),
                                    ('unsq_dims_const', 'unsq_dims'),
                                    ('unsq_dims', 'unsq'), ('unsq', 'data_2')],
            {
                'data_1': {
                    'shape': input_shape,
                    'value': input_value
                },
                'unsq_dims': {
                    'value': unsq_dims,
                    'shape': unsq_dims.shape
                },
                'unsq_dims_const': {
                    'value': unsq_dims,
                    'shape': unsq_dims.shape
                },
            })

        graph_ref = build_graph(
            self.nodes_attributes, [('data_1', 'unsq'),
                                    ('unsq_dims_const', 'unsq_dims'),
                                    ('unsq_dims', 'unsq'), ('unsq', 'data_2')],
            {
                'data_1': {
                    'shape': input_shape,
                    'value': input_value
                },
                'unsq_dims': {
                    'value': ref_uns_dims,
                    'shape': ref_uns_dims.shape
                },
                'unsq_dims_const': {
                    'value': ref_uns_dims,
                    'shape': ref_uns_dims.shape
                },
                'data_2': {
                    'shape': output_shape,
                    'value': output_value
                },
            })

        unsqueeze_node = Node(graph, 'unsq')
        Unsqueeze.infer(unsqueeze_node)

        (flag, resp) = compare_graphs(graph, graph_ref, 'data_2')
        self.assertTrue(flag, resp)
        self.assertTrue(
            strict_compare_tensors(
                Node(graph, 'data_2').shape,
                Node(graph_ref, 'data_2').shape))
        if Node(graph_ref, 'data_2').value is not None:
            self.assertTrue(
                strict_compare_tensors(
                    Node(graph, 'data_2').value,
                    Node(graph_ref, 'data_2').value))
Example #6
    def build_and_test_shape_inference(self,
                                       input_indices_sparse_shape,
                                       input_actual_shape,
                                       new_shape,
                                       ref_out_shape,
                                       input_indices=None,
                                       ref_out_indices=None):
        # sparse tensor is stored in COO format
        nodes = {
            **shaped_parameter('input_indices',
                               shape_array(input_indices_sparse_shape), {
                                   'value': input_indices
                               }),
            **valued_const_with_data('input_shape',
                                     shape_array(input_actual_shape)),
            **valued_const_with_data('new_shape', shape_array(new_shape)),
            **regular_op_with_empty_data(
                'sparse_reshape_node', {
                    'op': 'SparseReshape',
                    'special_zero': True,
                    'infer': SparseReshape.infer
                }),
            **empty_data('sparse_reshape_node_d:out_port_1'),
            **result('output_indices'),
            **result('output_shape'),
        }

        edges = [
            *connect('input_indices', '0:sparse_reshape_node'),
            *connect('input_shape', '1:sparse_reshape_node'),
            *connect('new_shape', '2:sparse_reshape_node'),
            *connect('sparse_reshape_node:0', 'output_indices'),
            ('sparse_reshape_node', 'sparse_reshape_node_d:out_port_1', {
                'out': 1
            }),
            ('sparse_reshape_node_d:out_port_1', 'output_shape', {
                'in': 0
            }),
        ]

        graph = build_graph(
            nodes,
            edges,
            update_attributes={'input_indices_d': {
                'value': input_indices
            }})
        graph.stage = 'middle'
        partial_infer(graph)

        node = Node(graph, 'sparse_reshape_node')
        output_indices = node.out_port(0).data.get_value()
        actual_output_shape = node.out_port(1).data.get_value()
        self.assertTrue(
            strict_compare_tensors(actual_output_shape, ref_out_shape))
        self.assertTrue(strict_compare_tensors(output_indices,
                                               ref_out_indices))
Example #7
    def test_slice_infer(self, inp_value, inp_shape, starts, ends, axes, steps,
                         expected_value, expected_shape):
        if inp_value is None:
            input_node = shaped_data('data_1', int64_array(inp_shape))
        else:
            input_node = valued_data('data_1', int64_array(inp_value))
        if inp_value is not None and inp_shape is not None:
            assert np.array_equal(np.array(inp_value).shape, inp_shape)

        def convert_args(val, name=''):
            if val is not None:
                return valued_const_with_data(name, int64_array(val))
            else:
                return shaped_const_with_data(name, [0])  # fake shape

        starts = convert_args(starts, 'starts')
        ends = convert_args(ends, 'ends')
        axes = convert_args(axes, 'axes')
        steps = convert_args(steps, 'steps')
        if expected_shape is not None:
            expected_shape = shape_array(expected_shape)

        nodes = {
            **input_node,
            **regular_op_with_empty_data('slice', {'op': 'Slice'}),
            **starts,
            **ends,
            **axes,
            **steps,
        }

        graph = build_graph(
            nodes,
            [('data_1', 'slice'), *connect('starts', '1:slice'),
             *connect('ends', '2:slice'), *connect('axes', '3:slice'),
             *connect('steps', '4:slice'), *connect('slice', 'slice_d')])

        graph.stage = 'middle'
        slice_node = Node(graph, 'slice')

        Slice.infer(slice_node)
        if expected_value is not None:
            self.assertTrue(
                strict_compare_tensors(slice_node.out_node().value,
                                       expected_value))
        self.assertTrue(
            strict_compare_tensors(slice_node.out_node().shape,
                                   expected_shape))
Example #8
 def test_region_infer_dynamic_flatten(self):
     graph = build_graph(
         nodes_attributes, [('node_1', 'region'), ('region', 'node_3'),
                            ('node_3', 'op_output')],
         {
             'node_3': {
                 'shape': None,
                 'value': None
             },
             'node_1': {
                 'shape': shape_array(
                     [1, dynamic_dimension_value, 227, 227])
             },
             'region': {
                 'end_axis': 1,
                 'axis': 0,
                 'do_softmax': 1,
                 **layout_attrs()
             }
         })
     graph.graph['layout'] = 'NCHW'
     reorg_node = Node(graph, 'region')
     RegionYoloOp.regionyolo_infer(reorg_node)
     exp_shape = shape_array([dynamic_dimension_value, 227, 227])
     res_shape = graph.node['node_3']['shape']
     self.assertTrue(strict_compare_tensors(exp_shape, res_shape))
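Why the leading dimension of exp_shape stays dynamic (a standalone sketch): with do_softmax=1 the dims from axis (0) through end_axis (1) are flattened into one, and under numpy.ma a product with a masked operand is itself masked, so [1, ?, 227, 227] becomes [?, 227, 227].

import numpy as np

known = np.ma.masked_array([1], mask=[False])
dynamic = np.ma.masked_array([0], mask=[True])
# an elementwise product with a masked operand is masked, i.e. still dynamic
print((known * dynamic)[0] is np.ma.masked)  # True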
Example #9
    def test_caffe_conv2d_dynamic_input_infer(self):
        graph = build_graph(nodes_attributes,
                            [('conv_input', 'conv_node'),
                             ('conv_weights', 'conv_node'),
                             ('conv_node', 'conv_output'),
                             ('conv_output', 'op_output')
                             ],
                            {'conv_output': {'shape': None},
                             'conv_input': {'shape': shape_array([1, 3, dynamic_dimension_value, 227])},
                             'conv_weights': {'shape': np.array([64, 3, 3, 3]),
                                              'dim_attrs': ['spatial_dims', 'channel_dims', 'batch_dims', 'axis']},
                             'conv_node': {'pad_spatial_shape': np.array([[0, 0], [0, 0]]),
                                           'conv_pad': np.array([[0, 0], [0, 0], [0, 0], [0, 0]]),
                                           'dilation': np.array([1, 1, 1, 1]), 'bias_addable': True, 'bias_term': False,
                                           'output_spatial_shape': None, 'output_shape': None,
                                           'stride': np.array([1, 1, 1, 1]), 'group': 1,
                                           'kernel_spatial_idx': np.array([2, 3]),
                                           'input_feature_channel': 1,
                                           'output_feature_channel': 0,
                                           'output': 64, 'kernel_spatial': np.array([3, 3]),
                                           'spatial_dims': np.array([2, 3]), 'channel_dims': np.array([1]),
                                           'batch_dims': np.array([0])}
                             })

        conv_node = Node(graph, 'conv_node')
        Convolution.infer(conv_node)
        exp_shape = shape_array([1, 64, dynamic_dimension_value, 225])
        res_shape = graph.node['conv_output']['shape']
        self.assertTrue(strict_compare_tensors(exp_shape, res_shape))
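The arithmetic behind exp_shape above (the standard convolution size formula, shown as a sketch): out = (in - kernel + pads) // stride + 1 per spatial dim, so the last dim is (227 - 3 + 0) // 1 + 1 = 225, the dynamic H stays dynamic, and C_out comes from 'output': 64.

# last spatial dim: kernel 3, stride 1, no padding, dilation 1
print((227 - 3 + 0) // 1 + 1)  # 225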
Example #10
    def check_shape_infer(self, data_shape, indices_shape, axis, ref):
        nodes = {
            **shaped_parameter('data', data_shape),
            **shaped_parameter('indices', indices_shape),
            **regular_op_with_empty_data('gather_elements', {
                'op': 'GatherElements',
                'axis': axis
            }),
            **result()
        }

        graph = build_graph(nodes_attrs=nodes,
                            edges=[
                                *connect('data', '0:gather_elements'),
                                *connect('indices', '1:gather_elements'),
                                *connect('gather_elements', 'output')
                            ],
                            nodes_with_edges_only=True)
        graph.stage = 'middle'

        gather_el_node = Node(graph, 'gather_elements')
        GatherElements.infer(gather_el_node)

        res_output_shape = gather_el_node.out_node().shape
        self.assertTrue(strict_compare_tensors(res_output_shape, ref))
Example #11
 def test_uni_directional_shape_broadcasting(self, input_shape,
                                             target_shape, expected_shape):
     result = uni_directional_shape_broadcasting(input_shape, target_shape)
     if expected_shape is None:
         self.assertIsNone(result)
     else:
         self.assertTrue(strict_compare_tensors(result, expected_shape))
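A minimal sketch of what uni-directional broadcasting means here, assuming the usual one-way rule (this is not the Model Optimizer implementation, which additionally handles dynamic dims): the input shape is left-padded with ones, each of its dims must equal the target dim or be 1, and failure yields None.

import numpy as np

def uni_directional_sketch(input_shape, target_shape):
    inp, tgt = list(input_shape), list(target_shape)
    if len(inp) > len(tgt):
        return None
    inp = [1] * (len(tgt) - len(inp)) + inp  # left-pad with ones
    for i, t in zip(inp, tgt):
        if i != 1 and i != t:
            return None
    return np.array(tgt, dtype=np.int64)

print(uni_directional_sketch([1, 3], [5, 4, 3]))  # [5 4 3]
print(uni_directional_sketch([2, 3], [5, 4, 3]))  # None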
Example #12
 def test_clarify_1(self):
     actual_result = clarify_partial_shape([
         shape_array([dynamic_dimension, 10, dynamic_dimension]),
         shape_array([4, dynamic_dimension, dynamic_dimension])
     ])
     ref_result = shape_array([4, 10, dynamic_dimension])
     assert strict_compare_tensors(actual_result, ref_result)
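A sketch of the merge rule this test exercises, assuming masked entries mark unknown dims: a dimension becomes defined as soon as any input shape defines it, and stays dynamic only if every input leaves it dynamic.

import numpy as np

def clarify_partial_shape_sketch(shapes):
    result = np.ma.copy(shapes[0])
    for s in shapes[1:]:
        known = ~np.ma.getmaskarray(s)
        result[known] = s[known]  # adopt dims that this shape defines
    return result

a = np.ma.masked_array([0, 10, 0], mask=[True, False, True])
b = np.ma.masked_array([4, 0, 0], mask=[False, True, True])
print(clarify_partial_shape_sketch([a, b]))  # [4 10 --]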
Example #13
    def test_split_reverse_infer(self):
        ref_input_shape = [7, 4, 6]
        axis = 2
        num_splits = 2
        output_shape_1 = [dynamic_dimension, 4, 3]
        output_shape_2 = [7, dynamic_dimension, 3]

        graph = build_graph(
            TestSplitOp.nodes, TestSplitOp.edges, {
                'split_input_data': {
                    'shape': None,
                    'value': None
                },
                'split_op': {
                    'axis': np.array(axis),
                    'num_splits': np.array(num_splits)
                },
                'split_output_0_data': {
                    'shape': shape_array(output_shape_1),
                    'value': None
                },
                'split_output_1_data': {
                    'shape': shape_array(output_shape_2),
                    'value': None
                },
            })

        split_node = Node(graph, 'split_op')
        AttributedSplit.reverse_infer(split_node)
        actual_input_shape = split_node.in_port(0).data.get_shape()
        self.assertTrue(
            strict_compare_tensors(ref_input_shape, actual_input_shape))
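How ref_input_shape falls out of the two partial output shapes (a sketch of the reasoning, not the MO code): merging [?, 4, 3] and [7, ?, 3] elementwise gives [7, 4, 3], and the split axis (2) is multiplied back by num_splits: 3 * 2 = 6, hence [7, 4, 6].

merged = [7, 4, 3]   # [?, 4, 3] and [7, ?, 3] clarified elementwise
axis, num_splits = 2, 2
merged[axis] *= num_splits
print(merged)  # [7, 4, 6]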
Example #14
    def test_concat_value_infer(self, value1, value2, output_value, axis):
        graph = build_graph(
            nodes_attributes, [('node_1', 'concat'), ('node_2', 'concat'),
                               ('concat', 'node_3'), ('node_3', 'op_output')],
            {
                'node_3': {
                    'shape': output_value.shape,
                    'value': output_value
                },
                'node_1': {
                    'shape': value1.shape,
                    'value': value1
                },
                'node_2': {
                    'shape': value2.shape,
                    'value': value2
                },
                'concat': {
                    'axis': axis
                }
            })

        concat_node = Node(graph, 'concat')
        concat_infer(concat_node)
        res_value = graph.node['node_3']['value']
        self.assertTrue(strict_compare_tensors(output_value, res_value))
Example #15
    def test_two_inputs_dynamic_value_infer(self):
        in_value = shape_array([dynamic_dimension_value, 3]).reshape(
            (1, 1, 1, 2))
        graph = build_graph(
            self.node_attrs,
            self.edge_attrs + [('pads_begin', 'pad'), ('pads_end', 'pad')],
            {'data_in': {
                'value': in_value,
                'shape': in_value.shape
            }},
            nodes_with_edges_only=True,
        )
        out_shape = (1, 1, 5, 8)
        mask = np.zeros(out_shape, dtype=bool)
        mask[0][0][1][2] = True
        ref_value = np.ma.masked_array(np.zeros(out_shape, dtype=np.int64),
                                       mask=mask,
                                       dtype=np.int64)
        ref_value[0][0][1][3] = 3

        pad_node = Node(graph, 'pad')
        Pad.infer(pad_node)
        output_value = Node(graph, 'data_out').value
        self.assertTrue(
            np.array_equal(Node(graph, 'data_out').shape, ref_value.shape))
        self.assertTrue(strict_compare_tensors(output_value, ref_value))
        self.assertTrue(isinstance(output_value, np.ma.masked_array))
        self.assertTrue(output_value[0][0][1][2] is dynamic_dimension)
Example #16
    def merge_infer(node: Node):
        # we infer only through executable input nodes
        inferred_nodes = [
            n for n in node.in_nodes().values() if n['is_partial_inferred']
        ]
        assert len(inferred_nodes) != 0
        tensor = inferred_nodes[0]

        if len(inferred_nodes) < len(node.in_nodes()):
            node['is_not_fully_inferred'] = True
        else:
            node['is_not_fully_inferred'] = False
            # plain all() over the generator; np.all() on a generator object
            # is always truthy and would never fail
            assert all(
                compatible_shapes(n.shape, inferred_nodes[0].shape)
                for n in inferred_nodes)

            inferred_and_executable = [
                n for n in node.in_nodes().values() if n['is_partial_inferred']
                and 'executable' in n and n['executable']
            ]
            if len(inferred_and_executable) > 0:
                tensor = inferred_and_executable[0]

                if all([
                        tensor.has_valid('value') and n.has_valid('value')
                        and strict_compare_tensors(tensor.value, n.value)
                        for n in inferred_and_executable
                ]):
                    node.out_node().value = tensor.value.copy()
                else:
                    node.out_node().value = None

        # do not use set_shape(tensor.shape) here because input port shape may be different from the calculated output
        # shape and `set_shape` will raise an error that shape has changed
        node.out_node(0).shape = shape_array(tensor.shape)
Example #17
    def test_splitv_dynamic_input(self):
        ref_input_shape = [7, 4, 11]
        axis = 2
        num_splits = 2
        output_shape_1 = [dynamic_dimension, 4, 3]
        output_shape_2 = [7, dynamic_dimension, 3]
        output_shape_3 = [7, dynamic_dimension, 5]

        graph = build_graph(TestAttributedVariadicSplitOp.nodes, TestAttributedVariadicSplitOp.edges,
                            {
                                'split_input_data': {'shape': None},
                                'split_op': {'axis': np.array(2), 'split_lengths': np.array([3, 3, 5]),
                                             'out_ports_count': 2},
                                'split_output_0_data': {'shape': shape_array(output_shape_1),
                                                        'value': None},
                                'split_output_1_data': {'shape': shape_array(output_shape_2),
                                                        'value': None},
                                'split_output_2_data': {'shape': shape_array(output_shape_3),
                                                        'value': None},
                            }
                            )
        node = Node(graph, 'split_op')
        for p in range(len(node.out_edges()), node.out_ports_count):
            node.add_output_port(p)

        AttributedVariadicSplit.reverse_infer(node)

        actual_input_shape = node.in_port(0).data.get_shape()
        self.assertTrue(strict_compare_tensors(ref_input_shape, actual_input_shape))
Example #18
    def build_and_test_reverse_inference(order, out_shape, ref_shape):
        nodes = {
            **shaped_parameter('data', None, {
                'reverse_infer': Parameter.reverse_infer
            }),
            **valued_const_with_data('order', int64_array(order)),
            **regular_op_with_empty_data(
                'transpose', {
                    'op': 'Transpose',
                    'infer': Transpose.infer,
                    'reverse_infer': Transpose.reverse_infer
                }),
            **result('res'),
        }

        edges = [
            *connect('data', '0:transpose'), *connect('order', '1:transpose'),
            *connect('transpose', 'res')
        ]

        graph = build_graph(nodes, edges)
        graph.stage = 'middle'
        Node(graph,
             'transpose').out_port(0).data.set_shape(shape_array(out_shape))

        partial_infer(graph)
        actual_shape = Node(graph, 'data').out_port(0).data.get_shape()
        assert strict_compare_tensors(actual_shape, shape_array(ref_shape))
Example #19
 def test_tf_space_to_depth_infer_nchw_dynamic(self):
     graph = build_graph(nodes, edges)
     graph.graph['layout'] = 'NCHW'
     graph.node['in_data_node']['shape'] = shape_array(
         [1, 64, dynamic_dimension_value, 1152])
     std_node = Node(graph, 'StD')
     SpaceToDepth.infer(std_node)
     exp_shape = shape_array([1, 256, dynamic_dimension_value, 576])
     res_shape = graph.node['out_data_node']['shape']
     self.assertTrue(strict_compare_tensors(exp_shape, res_shape))
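The arithmetic behind exp_shape above, assuming block_size = 2 (the value lives in the shared nodes dict, which is not shown here): in NCHW, SpaceToDepth maps C to C * block_size**2 and divides each spatial dim by block_size, so 64 becomes 256 and 1152 becomes 576, while the dynamic H stays dynamic.

block_size = 2  # assumed; taken from the test's shared node attributes
C, W = 64, 1152
print(C * block_size ** 2, W // block_size)  # 256 576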
Example #20
 def test_get_inputs(self):
     # Reference data for test:
     ref_input_dict = {'data': shape_array([1, 10, 16])}
     # Check function:
     inputs_dict = self.IR.get_inputs()
     self.assertTrue(
         strict_compare_tensors(ref_input_dict['data'],
                                inputs_dict['data']),
         'Test on function get_inputs failed')
     log.info('Test for function get_inputs passed')
Example #21
    def build_select_graph_and_infer(condition_value,
                                     then_value,
                                     else_value,
                                     out_value,
                                     condition_shape=None,
                                     then_shape=None,
                                     else_shape=None,
                                     out_shape=None,
                                     auto_broadcast='numpy',
                                     fw_format=None):
        if then_value is not None:
            then_shape = int64_array(then_value.shape)
        if else_value is not None:
            else_shape = int64_array(else_value.shape)

        nodes = {
            **valued_const_with_data('then', then_value, then_shape),
            **valued_const_with_data('else', else_value, else_shape),
            **valued_const_with_data('condition', condition_value, condition_shape),
            **regular_op_with_empty_data(
                'select', {
                    'op': 'Select',
                    'auto_broadcast': auto_broadcast,
                    'format': fw_format
                }),
            **result('out'),
        }
        edges = [
            *connect('condition', '0:select'),
            *connect('then', '1:select'),
            *connect('else', '2:select'),
            *connect('select', 'out'),
        ]
        graph = build_graph(nodes, edges)

        select_node = Node(graph, 'select')
        Select.infer(select_node)

        select_out_node = Node(graph, 'select_d')

        value_desc = 'values'
        ref_val = out_value
        actual_val = select_out_node['value']
        if out_shape is not None:
            value_desc = 'shapes'
            ref_val = out_shape
            actual_val = select_out_node['shape']
            assert select_out_node['value'] is None, \
                "if 'out_shape' is defined manually, 'value' must be None"

        flag = strict_compare_tensors(actual_val, ref_val)
        msg = '' if flag else 'reference {} and actual {} {} do not match\n'.format(
            ref_val, actual_val, value_desc)
        return flag, msg
Example #22
 def _set_shape(self, shape):
     if self.node.graph.stage == 'front':
         raise NotImplementedError("set_shape not implemented for front phase")
     else:
         if self.type == 'in':
             assert self.node.in_node(self.idx, control_flow=self.control_flow).value is None
             self.node.in_node(self.idx, control_flow=self.control_flow).shape = shape_array(shape)
         else:
             data_node = self.node.out_node(self.idx, control_flow=self.control_flow)
             assert data_node.value is None or self.node.has_and_set('override_output_shape') or \
                    strict_compare_tensors(data_node.soft_get('force_shape', data_node.shape), shape_array(shape))
             self.node.out_node(self.idx, control_flow=self.control_flow).shape = shape_array(shape)
Example #23
    def test_split_dynamic_shape_infer(self):
        # test configuration
        input_shape = [2, dynamic_dimension_value]
        input_value = None
        axis = 1
        num_splits = 2
        output_shape = [2, dynamic_dimension_value]
        output_value = [None, None]

        # action
        graph = build_graph(
            self.nodes, self.edges, {
                'split_input_data': {
                    'shape': shape_array(input_shape),
                    'value': input_value
                },
                'split_op': {
                    'axis': np.array(axis),
                    'num_splits': np.array(num_splits)
                },
            })

        split_op = Node(graph, 'split_op')
        AttributedSplit.infer(split_op)

        # reference
        graph_ref = build_graph(
            self.nodes, self.edges, {
                'split_input_data': {
                    'shape': shape_array(input_shape),
                    'value': input_value
                },
                'split_op': {
                    'axis': np.array(axis),
                    'num_splits': np.array(num_splits)
                },
                'split_output_0_data': {
                    'shape': shape_array(output_shape),
                    'value': output_value[0]
                },
                'split_output_1_data': {
                    'shape': shape_array(output_shape),
                    'value': output_value[1]
                },
            })

        # check
        (flag, resp) = compare_graphs(graph, graph_ref, 'split_input_data')
        self.assertTrue(flag, resp)
        self.assertTrue(
            strict_compare_tensors(
                Node(graph, 'split_output_0_data').shape,
                shape_array(output_shape)))
Example #24
    def build_and_test_reverse_inference(inp_shape_1,
                                         inp_shape_2,
                                         out_shape,
                                         ref_shape,
                                         auto_broadcast='numpy'):
        in_port_with_defined_shape = 0 if inp_shape_1 is not None else 1
        defined_shape = shape_array(
            inp_shape_1 if inp_shape_1 is not None else inp_shape_2)

        nodes = {
            **shaped_parameter('undefined_shape_data', None, {
                'reverse_infer': Parameter.reverse_infer
            }),
            **shaped_parameter('data', shape_array(defined_shape), {
                                   'reverse_infer': Parameter.reverse_infer
                               }),
            **regular_op_with_empty_data(
                'elementwise', {
                    'op': 'Add',
                    'type': 'Add',
                    'infer': eltwise_infer,
                    'reverse_infer': eltwise_reverse_infer,
                    'auto_broadcast': auto_broadcast
                }),
            **result('res'),
        }

        edges = [
            *connect(
                'undefined_shape_data', '{}:elementwise'.format(
                    int(not in_port_with_defined_shape))),
            *connect('data',
                     '{}:elementwise'.format(in_port_with_defined_shape)),
            *connect('elementwise', 'res')
        ]

        graph = build_graph(nodes, edges)
        graph.stage = 'middle'
        Node(graph,
             'elementwise').out_port(0).data.set_shape(shape_array(out_shape))
        Node(graph, 'elementwise').in_port(
            in_port_with_defined_shape).data.set_shape(defined_shape)

        partial_infer(graph)
        actual_shape = Node(
            graph, 'undefined_shape_data').out_port(0).data.get_shape()
        if ref_shape is None:
            assert actual_shape is None
        else:
            assert strict_compare_tensors(actual_shape, shape_array(ref_shape))
Example #25
    def test_eltwise_infer(self, value1, shape1, value2, shape2, shape_infer,
                           exp_value, exp_shape):
        graph = build_graph(
            nodes_attributes, [('node_1', 'eltw_1'), ('node_2', 'eltw_1'),
                               ('eltw_1', 'node_3'), ('node_3', 'op_output')],
            {
                'node_3': {
                    'shape': None
                },
                'node_1': {
                    'shape':
                    shape_array(value1).shape
                    if value1 is not None else shape_array(shape1),
                    'value':
                    value1
                },
                'node_2': {
                    'shape':
                    shape_array(value2).shape
                    if value2 is not None else shape_array(shape2),
                    'value':
                    value2
                }
            })

        graph.graph['layout'] = 'NCHW'

        eltwise_node = Node(graph, 'eltw_1')

        eltwise_infer(eltwise_node, shape_infer)
        res_shape = graph.node['node_3']['shape']
        res_value = eltwise_node.out_node().value
        if exp_value is not None:
            self.assertTrue(
                strict_compare_tensors(res_value, shape_array(exp_value)))
        self.assertTrue(
            strict_compare_tensors(res_shape, shape_array(exp_shape)))
Example #26
    def test_deconv_dynamic_infer_ideal(self):
        graph = build_graph(nodes_attributes,
                            [('conv_input', 'conv_node'),
                             ('conv_weights', 'conv_node'),
                             ('conv_node', 'conv_output'),
                             ('conv_output', 'op_output')
                             ],
                            {'conv_output': {'shape': None},
                             'conv_input': {'shape': shape_array([1, 21, dynamic_dimension_value, 16])},
                             'conv_weights': {'shape': np.array([1, 21, 4, 4]),
                                              'dim_attrs': ['spatial_dims', 'channel_dims', 'batch_dims', 'axis']},
                             'conv_node': {'channel_dims': np.array([1]), 'bias_addable': True, 'bias_term': False,
                                           'batch_dims': np.array([0]),
                                           'pad_spatial_shape': np.array([[0, 0], [0, 0]]),
                                           'kernel_spatial': np.array([4, 4]), 'output_spatial_shape': None,
                                           'kernel_spatial_idx': np.array([2, 3]),
                                           'input_feature_channel': 1,
                                           'output_feature_channel': 0,
                                           'output_padding': np.array([0, 0, 1, 1]),
                                           'type': 'Deconvolution', 'output': 21, 'dilation': np.array([1, 1, 1, 1]),
                                           'group': 1, 'stride': np.array([1, 1, 2, 2]), 'output_shape': None}
                             })

        deconv_node = Node(graph, 'conv_node')

        Convolution.infer(deconv_node)
        res_shape = deconv_node['output_shape']
        exp_shape = shape_array([1, 21, dynamic_dimension_value, 35])

        self.assertTrue(strict_compare_tensors(exp_shape, res_shape))

        # Check that shape and pad attrs do not change after a second infer run
        Convolution.infer(deconv_node)

        self.assertTrue(strict_compare_tensors(exp_shape, res_shape))
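The arithmetic behind exp_shape for this Deconvolution (a sketch of the standard transposed-convolution size formula): out = stride * (in - 1) + kernel - pads + output_padding per spatial dim, so the last dim is 2 * (16 - 1) + 4 - 0 + 1 = 35, and the dynamic H stays dynamic.

stride, kernel, out_pad = 2, 4, 1  # last spatial dim, no padding
print(stride * (16 - 1) + kernel - 0 + out_pad)  # 35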
Example #27
    def test_expand_dims_infer(self, axis, ref_out_shape):
        graph = build_graph(nodes_attributes, [('data_1', 'expand_dims'),
                                               ('expand_dims', 'data_2')],
                            {'expand_dims': {
                                'expand_axis': axis
                            }})
        Node(graph, 'data_1').shape = shape_array(
            [2, 3, dynamic_dimension_value, 224])
        expand_dims_node = Node(graph, 'expand_dims')

        ExpandDims.infer(expand_dims_node)

        self.assertTrue(
            strict_compare_tensors(expand_dims_node.out_node().shape,
                                   shape_array(ref_out_shape)))
Example #28
    def test_partial_infer_gather_slice_batch_dims2_dynamic3(self):
        nodes_attributes['gathernd_node']['batch_dims'] = 2
        graph = build_graph(nodes_attributes, edges, inputs11)
        gathernd_node = Node(graph, 'gathernd_node')
        GatherND.infer(gathernd_node)

        # prepare reference results
        ref_output_shape = shape_array([dynamic_dimension_value, 3, 5, 9])

        # get the result
        res_output_shape = graph.node['output']['shape']

        self.assertTrue(
            strict_compare_tensors(ref_output_shape, res_output_shape),
            'values do not match expected: {} and given: {}'.format(
                ref_output_shape, res_output_shape))
Example #29
    def test_upsample_with_scales_infer(self, scales, input_shape, expected_shape):
        graph = build_graph(nodes_attributes,
                            [('node_1', 'upsample'),
                             ('upsample', 'node_3'),
                             ('node_3', 'op_output')
                             ],
                            {'node_3': {'shape': None, 'value': None},
                             'node_1': {'shape': input_shape},
                             'upsample': {'mode': 'linear',
                                          'height_scale': scales[2],
                                          'width_scale': scales[3]}
                             })

        graph.graph['layout'] = 'NCHW'
        upsample_node = Node(graph, 'upsample')
        UpsampleOp.upsample_infer(upsample_node)
        res_shape = graph.node['node_3']['shape']
        self.assertTrue(strict_compare_tensors(expected_shape, res_shape))
Example #30
    def infer(node: Node):
        name = node.soft_get('name', node.id)

        input_indices_shape = node.in_port(0).data.get_shape()
        input_indices_value = node.in_port(0).data.get_value()
        input_shape = node.in_port(1).data.get_value()
        new_shape = node.in_port(2).data.get_value()
        new_shape_shape = node.in_port(2).data.get_shape()

        assert input_shape is not None and new_shape is not None, \
            "Values for input shape and new shape must be defined"
        assert len(np.argwhere(new_shape == -1)) <= 1, \
            "Value -1 occurs in new shape value more than once"
        assert len(np.argwhere(new_shape < -1)) == 0, \
            "Only non-negative or -1 values are allowed"

        output_shape = np.ma.masked_array(new_shape,
                                          mask=new_shape == -1,
                                          fill_value=dynamic_dimension_value)
        assert not is_fully_defined(input_shape) or not is_fully_defined(output_shape) or \
               np.prod(input_shape) == np.prod(output_shape), \
            "Number of elements in input {} and output {} of dynamic reshape node {} mismatch" \
            "".format(input_shape, output_shape, name)

        # we can deduce -1 only if input_shape is fully defined and
        # there is one dynamic dimension in output_shape
        if is_fully_defined(input_shape) and np.ma.count_masked(
                output_shape) == 1:
            undefined_dim_size = np.prod(input_shape) // np.prod(output_shape)

            undefined_idx = np.where(output_shape == dynamic_dimension)[0][0]
            output_shape[undefined_idx] = undefined_dim_size
            output_shape.mask[undefined_idx] = False

        node.out_port(1).data.set_value(shape_array(output_shape))
        output_indices_shape = np.concatenate(
            (input_indices_shape[0:1], new_shape_shape))
        node.out_port(0).data.set_shape(output_indices_shape)

        # TODO: implement constant value propagation for common case with scipy.sparse.coo_matrix.reshape
        # instead of compatible_shapes we intentionally use strict_compare_tensors
        if strict_compare_tensors(
                input_shape, output_shape) and input_indices_value is not None:
            node.out_port(0).data.set_value(input_indices_value)
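A worked instance of the -1 deduction above (standalone, with made-up shapes): with 20 input elements and a requested new shape [2, -1], the masked product of the defined output dims is 2, so the masked slot becomes 20 // 2 = 10.

import numpy as np

input_shape = np.array([4, 5], dtype=np.int64)
new_shape = np.array([2, -1], dtype=np.int64)
output_shape = np.ma.masked_array(new_shape, mask=new_shape == -1)

# MaskedArray.prod skips masked entries, so this divides 20 by 2
undefined_dim_size = np.prod(input_shape) // np.prod(output_shape)
idx = int(np.argwhere(np.ma.getmaskarray(output_shape))[0][0])
output_shape[idx] = undefined_dim_size  # assignment also clears the mask
print(output_shape)  # [2 10]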