Example #1
 def test_squeeze_squeeze_dims(self, input_value, input_shape, squeeze_dims,
                               ref_value, ref_shape):
     graph = build_graph(
         nodes_attributes, [('data', 'squeeze'),
                            ('squeeze_dims', 'squeeze_dims_data'),
                            ('squeeze_dims_data', 'squeeze'),
                            ('squeeze', 'data_out')], {
                                'data': {
                                    'shape': input_shape,
                                    'value': input_value
                                },
                                'squeeze_dims': {
                                    'value': squeeze_dims,
                                    'shape': squeeze_dims.shape
                                },
                                'squeeze_dims_data': {
                                    'value': squeeze_dims,
                                    'shape': squeeze_dims.shape
                                },
                            })
     node = Node(graph, 'squeeze')
     if ref_shape is None:  # the test should fail
         with self.assertRaises(Error):
             Squeeze.infer(node)
     else:
         Squeeze.infer(node)
         if ref_value is not None:
             self.assertTrue(
                 strict_compare_tensors(
                     node.out_port(0).data.get_value(), ref_value))
         self.assertTrue(
             strict_compare_tensors(
                 node.out_port(0).data.get_shape(), ref_shape))
Example #2
 def test_reshape_infer(self, input_value, input_shape, output_shape,
                        ref_value, ref_shape):
     graph = build_graph(
         nodes_attributes, [('input', 'data'), ('data', 'reshape'),
                            ('output_shape', 'output_shape_data'),
                            ('output_shape_data', 'reshape'),
                            ('reshape', 'reshape_out')], {
                                'data': {
                                    'shape': input_shape,
                                    'value': input_value
                                },
                                'output_shape': {
                                    'value': output_shape,
                                    'shape': output_shape.shape
                                },
                                'output_shape_data': {
                                    'value': output_shape,
                                    'shape': output_shape.shape
                                },
                            })
     node = Node(graph, 'reshape')
     Reshape.infer(node)
     if ref_value is not None:
         self.assertTrue(
             strict_compare_tensors(
                 node.out_port(0).data.get_value(), shape_array(ref_value)))
     self.assertTrue(
         strict_compare_tensors(
             node.out_port(0).data.get_shape(), shape_array(ref_shape)))
Example #3
    def test_unsqueeze_infer(self, input_shape, unsq_dims, output_shape,
                             ref_uns_dims, input_value, output_value):
        graph = build_graph(
            self.nodes_attributes, [('data_1', 'unsq'),
                                    ('unsq_dims_const', 'unsq_dims'),
                                    ('unsq_dims', 'unsq'), ('unsq', 'data_2')],
            {
                'data_1': {
                    'shape': input_shape,
                    'value': input_value
                },
                'unsq_dims': {
                    'value': unsq_dims,
                    'shape': unsq_dims.shape
                },
                'unsq_dims_const': {
                    'value': unsq_dims,
                    'shape': unsq_dims.shape
                },
            })

        graph_ref = build_graph(
            self.nodes_attributes, [('data_1', 'unsq'),
                                    ('unsq_dims_const', 'unsq_dims'),
                                    ('unsq_dims', 'unsq'), ('unsq', 'data_2')],
            {
                'data_1': {
                    'shape': input_shape,
                    'value': input_value
                },
                'unsq_dims': {
                    'value': ref_uns_dims,
                    'shape': ref_uns_dims.shape
                },
                'unsq_dims_const': {
                    'value': ref_uns_dims,
                    'shape': ref_uns_dims.shape
                },
                'data_2': {
                    'shape': output_shape,
                    'value': output_value
                },
            })

        unsqueeze_node = Node(graph, 'unsq')
        Unsqueeze.infer(unsqueeze_node)

        (flag, resp) = compare_graphs(graph, graph_ref, 'data_2')
        self.assertTrue(flag, resp)
        self.assertTrue(
            strict_compare_tensors(
                Node(graph, 'data_2').shape,
                Node(graph_ref, 'data_2').shape))
        if Node(graph_ref, 'data_2').value is not None:
            self.assertTrue(
                strict_compare_tensors(
                    Node(graph, 'data_2').value,
                    Node(graph_ref, 'data_2').value))
Example #4
    def test_deconv_dynamic_infer_ideal(self):
        graph = build_graph(
            nodes_attributes,
            [('conv_input', 'conv_node'), ('conv_weights', 'conv_node'),
             ('conv_node', 'conv_output'), ('conv_output', 'op_output')],
            {
                'conv_output': {
                    'shape': None
                },
                'conv_input': {
                    'shape': shape_array([1, 21, dynamic_dimension_value, 16])
                },
                'conv_weights': {
                    'shape':
                    np.array([1, 21, 4, 4]),
                    'dim_attrs':
                    ['spatial_dims', 'channel_dims', 'batch_dims', 'axis']
                },
                'conv_node':
                {  #'spatial_dims': np.array([2, 3]), 'batch_dims': np.array([0]),
                    'channel_dims': np.array([1]),
                    'bias_addable': True,
                    'bias_term': False,
                    'batch_dims': np.array([0]),
                    'pad_spatial_shape': np.array([[0, 0], [0, 0]]),
                    'kernel_spatial': np.array([4, 4]),
                    'output_spatial_shape': None,
                    'kernel_spatial_idx': np.array([2, 3]),
                    'input_feature_channel': 1,
                    'output_feature_channel': 0,
                    'output_padding': np.array([0, 0, 1, 1]),
                    'type': 'Deconvolution',
                    'output': 21,
                    'dilation': np.array([1, 1, 1, 1]),
                    'group': 1,
                    'stride': np.array([1, 1, 2, 2]),
                    'output_shape': None
                }
            })

        deconv_node = Node(graph, 'conv_node')

        Convolution.infer(deconv_node)
        res_shape = deconv_node['output_shape']
        exp_shape = shape_array([1, 21, dynamic_dimension_value, 35])

        self.assertTrue(strict_compare_tensors(exp_shape, res_shape))

        # Check that shape and pad attrs do not change after a second infer
        Convolution.infer(deconv_node)

        self.assertTrue(strict_compare_tensors(exp_shape, res_shape))
Example #5
    def test_slice_infer(self, inp_value, inp_shape, starts, ends, axes, steps,
                         expected_value, expected_shape):
        if inp_value is None:
            input_node = shaped_data('data_1', int64_array(inp_shape))
        else:
            input_node = valued_data('data_1', int64_array(inp_value))
        if inp_value is not None and inp_shape is not None:
            assert np.array_equal(np.array(inp_value).shape, inp_shape)

        def convert_args(val, name=''):
            if val is not None:
                return valued_const_with_data(name, int64_array(val))
            else:
                return shaped_const_with_data(name, [0])  #fake shape

        starts = convert_args(starts, 'starts')
        ends = convert_args(ends, 'ends')
        axes = convert_args(axes, 'axes')
        steps = convert_args(steps, 'steps')
        if expected_shape is not None:
            expected_shape = shape_array(expected_shape)

        nodes = {
            **input_node,
            **regular_op_with_empty_data('slice', {'op': 'Slice'}),
            **starts,
            **ends,
            **axes,
            **steps,
        }

        graph = build_graph(
            nodes,
            [('data_1', 'slice'), *connect('starts', '1:slice'),
             *connect('ends', '2:slice'), *connect('axes', '3:slice'),
             *connect('steps', '4:slice'), *connect('slice', 'slice_d')])

        graph.stage = 'middle'
        slice_node = Node(graph, 'slice')

        Slice.infer(slice_node)
        if expected_value is not None:
            self.assertTrue(
                strict_compare_tensors(slice_node.out_node().value,
                                       expected_value))
        self.assertTrue(
            strict_compare_tensors(slice_node.out_node().shape,
                                   expected_shape))
Example #6
    def test_two_inputs_dynamic_value_infer(self):
        in_value = shape_array([dynamic_dimension_value, 3]).reshape(
            (1, 1, 1, 2))
        graph = build_graph(
            self.node_attrs,
            self.edge_attrs + [('pads_begin', 'pad'), ('pads_end', 'pad')],
            {'data_in': {
                'value': in_value,
                'shape': in_value.shape
            }},
            nodes_with_edges_only=True,
        )
        out_shape = (1, 1, 5, 8)
        mask = np.zeros(out_shape, dtype=bool)  # np.bool was removed in modern NumPy; use the builtin bool
        mask[0][0][1][2] = True
        ref_value = np.ma.masked_array(np.zeros(out_shape, dtype=np.int64),
                                       mask=mask,
                                       dtype=np.int64)
        ref_value[0][0][1][3] = 3

        pad_node = Node(graph, 'pad')
        Pad.infer(pad_node)
        output_value = Node(graph, 'data_out').value
        self.assertTrue(
            np.array_equal(Node(graph, 'data_out').shape, ref_value.shape))
        self.assertTrue(strict_compare_tensors(output_value, ref_value))
        self.assertTrue(isinstance(output_value, np.ma.masked_array))
        self.assertTrue(output_value[0][0][1][2] is dynamic_dimension)
Example #7
    def test_upsample_with_second_input_infer(self, scales, input_shape,
                                              expected_shape):
        nodes_attributes['scales'] = {'kind': 'data', 'value': scales}
        graph = build_graph(
            nodes_attributes, [('node_1', 'upsample'), ('scales', 'upsample'),
                               ('upsample', 'node_3'),
                               ('node_3', 'op_output')], {
                                   'node_3': {
                                       'shape': None,
                                       'value': None
                                   },
                                   'node_1': {
                                       'shape': input_shape
                                   },
                                   'upsample': {
                                       'mode': 'linear',
                                       'height_scale': None,
                                       'width_scale': None
                                   }
                               })

        graph.graph['layout'] = 'NCHW'
        upsample_node = Node(graph, 'upsample')
        UpsampleOp.upsample_infer(upsample_node)
        res_shape = graph.node['node_3']['shape']
        self.assertTrue(strict_compare_tensors(expected_shape, res_shape))
Example #8
 def test_region_infer_dynamic_flatten(self):
     graph = build_graph(
         nodes_attributes, [('node_1', 'region'), ('region', 'node_3'),
                            ('node_3', 'op_output')],
         {
             'node_3': {
                 'shape': None,
                 'value': None
             },
             'node_1': {
                 'shape': shape_array(
                     [1, dynamic_dimension_value, 227, 227])
             },
             'region': {
                 'end_axis': 1,
                 'axis': 0,
                 'do_softmax': 1,
                 **layout_attrs()
             }
         })
     graph.graph['layout'] = 'NCHW'
     reorg_node = Node(graph, 'region')
     RegionYoloOp.regionyolo_infer(reorg_node)
     exp_shape = shape_array([dynamic_dimension_value, 227, 227])
     res_shape = graph.node['node_3']['shape']
     self.assertTrue(strict_compare_tensors(exp_shape, res_shape))
Example #9
 def test_uni_directional_shape_broadcasting(self, input_shape,
                                             target_shape, expected_shape):
     result = uni_directional_shape_broadcasting(input_shape, target_shape)
     if expected_shape is None:
         self.assertIsNone(result)
     else:
         self.assertTrue(strict_compare_tensors(result, expected_shape))
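For context, a minimal sketch of the semantics this test relies on: unidirectional broadcasting may only stretch the first shape toward the target, and the function returns None when that is impossible (the None-return behavior is confirmed by the test above; the import paths are assumptions based on the OpenVINO MO layout).

from openvino.tools.mo.front.common.broadcasting import uni_directional_shape_broadcasting  # assumed path
from openvino.tools.mo.front.common.partial_infer.utils import shape_array, strict_compare_tensors

# [1, 3] can be stretched to [5, 2, 3]...
res = uni_directional_shape_broadcasting(shape_array([1, 3]), shape_array([5, 2, 3]))
assert strict_compare_tensors(res, shape_array([5, 2, 3]))
# ...but [5, 2, 3] cannot be fitted into [1, 3], so the result is None
assert uni_directional_shape_broadcasting(shape_array([5, 2, 3]), shape_array([1, 3])) is None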
Example #10
    def merge_infer(node: Node):
        # we infer only through executable input nodes
        inferred_nodes = [
            n for n in node.in_nodes().values() if n['is_partial_inferred']
        ]
        assert len(inferred_nodes) != 0
        tensor = inferred_nodes[0]

        if len(inferred_nodes) < len(node.in_nodes()):
            node['is_not_fully_inferred'] = True
        else:
            node['is_not_fully_inferred'] = False
            # use the builtin all(): np.all() over a generator expression is
            # always truthy, so the original form never actually checked anything
            assert all(
                compatible_shapes(n.shape, inferred_nodes[0].shape)
                for n in inferred_nodes)

            inferred_and_executable = [
                n for n in node.in_nodes().values() if n['is_partial_inferred']
                and 'executable' in n and n['executable']
            ]
            tensor = inferred_and_executable[0]

            if all([
                    tensor.has_valid('value') and n.has_valid('value')
                    and strict_compare_tensors(tensor.value, n.value)
                    for n in inferred_and_executable
            ]):
                node.out_node().value = tensor.value.copy()
            else:
                node.out_node().value = None

        # do not use set_shape(tensor.shape) here because input port shape may be different from the calculated output
        # shape and `set_shape` will raise an error that shape has changed
        node.out_node(0).shape = shape_array(tensor.shape)
Example #11
    def test_reduce_dynamic(self, shape, axes, keepdims, p):
        false_mask = np.zeros(shape)
        false_mask[0][1][1] = True
        data = np.ma.masked_array(np.ones(shape), mask=false_mask)
        assert not is_fully_defined(data)
        reduced_tensor = np.sum(data, axis=tuple(axes), keepdims=keepdims)
        # create an array of all masked elements which is the expected result of the reduce of the tensor with dynamic
        # values
        fully_undefined = np.ma.masked_array(reduced_tensor, mask=np.ones(reduced_tensor.shape))
        axis = int64_array(axes)
        p = int64_array(p)
        graph = build_graph(nodes_attributes,
                            [*connect('data', '0:reduce_lp'),
                             *connect('axis', '1:reduce_lp'),
                             *connect('reduce_lp', '0:identity'),
                             ('identity', 'identity_d', {'out': 0}),
                             ('identity_d', 'output')
                             ],
                            {'data_d': {'value': data, 'shape': data.shape},
                             'axis_d': {'value': axis, 'shape': axis.shape},
                             'reduce_lp': {'keep_dims': keepdims}},
                            nodes_with_edges_only=True)

        reduce_node = Node(graph, 'reduce_lp')
        reduce_node.op = reduce_node.type = 'ReduceL' + str(p)
        reduce_infer(reduce_node)
        self.assertTrue(strict_compare_tensors(reduce_node.out_port(0).data.get_value(), fully_undefined))
Example #12
    def test_pooling_dynamic_infer(self):
        graph = build_graph(nodes_attributes,
                            [('node_1', 'pool'),
                             ('pool', 'node_2'),
                             ('node_2', 'op_output')
                             ],
                            {'node_2': {'shape': None},
                             'node_1': {'shape': shape_array([1, dynamic_dimension_value, dynamic_dimension_value,
                                                              256])},
                             'pool': {'window': np.array([1, 1, 1, 1]), 'stride': np.array([1, 1, 2, 2]),
                                      'pad': np.array([[0, 0], [0, 0], [3, 3], [3, 3]]),
                                      'pad_spatial_shape': np.array([[3, 3], [3, 3]]),
                                      'pool_method': 'avg', 'exclude_pad': False, 'global_pool': False,
                                      'output_spatial_shape': None, 'output_shape': None,
                                      'kernel_spatial': np.array([3, 3]), 'spatial_dims': np.array([2, 3]),
                                      'channel_dims': np.array([1]), 'batch_dims': np.array([0]),
                                      'pooling_convention': 'full'}
                             })

        pool_node = Node(graph, 'pool')

        Pooling.infer(pool_node)
        exp_shape = shape_array([1, dynamic_dimension_value, dynamic_dimension_value, 131])
        res_shape = graph.node['node_2']['shape']
        self.assertTrue(strict_compare_tensors(exp_shape, res_shape))
Example #13
    def test_concat_value_infer(self, value1, value2, output_value, axis):
        graph = build_graph(
            nodes_attributes, [('node_1', 'concat'), ('node_2', 'concat'),
                               ('concat', 'node_3'), ('node_3', 'op_output')],
            {
                'node_3': {
                    'shape': output_value.shape,
                    'value': output_value
                },
                'node_1': {
                    'shape': value1.shape,
                    'value': value1
                },
                'node_2': {
                    'shape': value2.shape,
                    'value': value2
                },
                'concat': {
                    'axis': axis
                }
            })

        concat_node = Node(graph, 'concat')
        concat_infer(concat_node)
        res_value = graph.node['node_3']['value']
        self.assertTrue(strict_compare_tensors(output_value, res_value))
Example #14
 def test_get_inputs(self):
     # Reference data for test:
     ref_input_dict = {'data': shape_array([1, 10, 16])}
     # Check function:
     inputs_dict = self.IR.get_inputs()
     self.assertTrue(
         strict_compare_tensors(ref_input_dict['data'],
                                inputs_dict['data']),
         'Test on function get_inputs failed')
     log.info('Test for function get_inputs passed')
Example #15
 def test_tf_space_to_depth_infer_nchw_dynamic(self):
     graph = build_graph(nodes, edges)
     graph.graph['layout'] = 'NCHW'
     graph.node['in_data_node']['shape'] = shape_array(
         [1, 64, dynamic_dimension_value, 1152])
     std_node = Node(graph, 'StD')
     SpaceToDepth.infer(std_node)
     exp_shape = shape_array([1, 256, dynamic_dimension_value, 576])
     res_shape = graph.node['out_data_node']['shape']
     self.assertTrue(strict_compare_tensors(exp_shape, res_shape))
Example #16
    def test_expand_dims_infer(self, axis, ref_out_shape):
        graph = build_graph(nodes_attributes,
                            [('data_1', 'expand_dims'),
                             ('expand_dims', 'data_2')],
                            {'expand_dims': {'expand_axis': axis}})
        Node(graph, 'data_1').shape = shape_array([2, 3, dynamic_dimension_value, 224])
        expand_dims_node = Node(graph, 'expand_dims')

        ExpandDims.infer(expand_dims_node)

        self.assertTrue(strict_compare_tensors(expand_dims_node.out_node().shape, shape_array(ref_out_shape)))
Example #17
    def build_select_graph_and_infer(condition_value,
                                     then_value,
                                     else_value,
                                     out_value,
                                     condition_shape=None,
                                     then_shape=None,
                                     else_shape=None,
                                     out_shape=None,
                                     auto_broadcast='numpy',
                                     fw_format=None):
        if then_value is not None:
            then_shape = int64_array(then_value.shape)
        if else_value is not None:
            else_shape = int64_array(else_value.shape)

        nodes = {
            **valued_const_with_data('then', then_value, then_shape),
            **valued_const_with_data('else', else_value, else_shape),
            **valued_const_with_data('condition', condition_value, condition_shape),
            **regular_op_with_empty_data(
                'select', {
                    'op': 'Select',
                    'auto_broadcast': auto_broadcast,
                    'format': fw_format
                }),
            **result('out'),
        }
        edges = [
            *connect('condition', '0:select'),
            *connect('then', '1:select'),
            *connect('else', '2:select'),
            *connect('select', 'out'),
        ]
        graph = build_graph(nodes, edges)

        select_node = Node(graph, 'select')
        Select.infer(select_node)

        select_out_node = Node(graph, 'select_d')

        value_desc = 'values'
        ref_val = out_value
        actual_val = select_out_node['value']
        if out_shape is not None:
            value_desc = 'shapes'
            ref_val = out_shape
            actual_val = select_out_node['shape']
            assert select_out_node['value'] is None, \
                "if 'out_shape' is defined manually, 'value' must be None"

        flag = strict_compare_tensors(actual_val, ref_val)
        msg = '' if flag else 'reference {} and actual {} {} do not match\n'.format(
            ref_val, actual_val, value_desc)
        return flag, msg
Example #18
    def test_split_dynamic_shape_infer(self):
        #  test configuration
        input_shape = [2, dynamic_dimension_value]
        input_value = None
        axis = 1
        num_splits = 2
        output_shape = [2, dynamic_dimension_value]
        output_value = [None, None]

        # action
        graph = build_graph(
            self.nodes, self.edges, {
                'split_input_data': {
                    'shape': shape_array(input_shape),
                    'value': input_value
                },
                'split_op': {
                    'axis': np.array(axis),
                    'num_splits': np.array(num_splits)
                },
            })

        split_op = Node(graph, 'split_op')
        AttributedSplit.infer(split_op)

        # reference
        graph_ref = build_graph(
            self.nodes, self.edges, {
                'split_input_data': {
                    'shape': shape_array(input_shape),
                    'value': input_value
                },
                'split_op': {
                    'axis': np.array(axis),
                    'num_splits': np.array(num_splits)
                },
                'split_output_0_data': {
                    'shape': shape_array(output_shape),
                    'value': output_value[0]
                },
                'split_output_1_data': {
                    'shape': shape_array(output_shape),
                    'value': output_value[1]
                },
            })

        # check
        (flag, resp) = compare_graphs(graph, graph_ref, 'split_input_data')
        self.assertTrue(flag, resp)
        self.assertTrue(
            strict_compare_tensors(
                Node(graph, 'split_output_0_data').shape,
                shape_array(output_shape)))
Example #19
    def test_do_infer_without_top_k_dynamic_shape(self):
        graph = build_graph(
            nodes_attributes, [('node_1', 'detection_output_1'),
                               ('node_2', 'detection_output_1'),
                               ('node_3', 'detection_output_1'),
                               ('detection_output_1', 'node_4')],
            {
                'node_1': {
                    'shape': np.array([1, 34928])
                },
                'node_2': {
                    'shape': shape_array([dynamic_dimension_value, 183372])
                },
                'node_3': {
                    'shape': np.array([1, 2, 34928])
                },
                'detection_output_1': {
                    "background_label_id": "0",
                    "clip": "1",
                    "code_type": "caffe.PriorBoxParameter.CENTER_SIZE",
                    "confidence_threshold": "0.01",
                    "keep_top_k": -1,
                    "nms_threshold": "0.5",
                    "num_classes": 21,
                    "share_location": "1",
                    "top_k": -1,
                    "variance_encoded_in_target": "0"
                },
                'node_4': {
                    'shape': np.array([1, 1, 69856, 7])
                },
            })

        multi_box_detection_node = Node(graph, 'detection_output_1')

        multi_box_detection_infer(multi_box_detection_node)
        exp_shape = shape_array([1, 1, dynamic_dimension_value, 7])
        res_shape = graph.node['node_4']['shape']
        self.assertTrue(strict_compare_tensors(exp_shape, res_shape))

        self.assertEqual(multi_box_detection_node.background_label_id, '0')
        self.assertEqual(multi_box_detection_node.clip, '1')
        self.assertEqual(multi_box_detection_node.code_type,
                         'caffe.PriorBoxParameter.CENTER_SIZE')
        self.assertEqual(multi_box_detection_node.confidence_threshold, '0.01')
        self.assertEqual(multi_box_detection_node.keep_top_k, 8732)
        self.assertEqual(multi_box_detection_node.nms_threshold, '0.5')
        self.assertEqual(multi_box_detection_node.num_classes, 21)
        self.assertEqual(multi_box_detection_node.share_location, '1')
        self.assertEqual(multi_box_detection_node.top_k, -1)
        self.assertEqual(multi_box_detection_node.variance_encoded_in_target,
                         '0')
Example #20
    def test_eltwise_infer(self, value1, shape1, value2, shape2, shape_infer,
                           exp_value, exp_shape):
        graph = build_graph(
            nodes_attributes, [('node_1', 'eltw_1'), ('node_2', 'eltw_1'),
                               ('eltw_1', 'node_3'), ('node_3', 'op_output')],
            {
                'node_3': {
                    'shape': None
                },
                'node_1': {
                    'shape':
                    shape_array(value1).shape
                    if value1 is not None else shape_array(shape1),
                    'value':
                    value1
                },
                'node_2': {
                    'shape':
                    shape_array(value2).shape
                    if value2 is not None else shape_array(shape2),
                    'value':
                    value2
                }
            })

        graph.graph['layout'] = 'NCHW'

        eltwise_node = Node(graph, 'eltw_1')

        eltwise_infer(eltwise_node, shape_infer)
        res_shape = graph.node['node_3']['shape']
        res_value = eltwise_node.out_node().value
        if exp_value is not None:
            self.assertTrue(
                strict_compare_tensors(res_value, shape_array(exp_value)))
        self.assertTrue(
            strict_compare_tensors(res_shape, shape_array(exp_shape)))
Example #21
    def test_partial_infer_gather_slice_batch_dims2_dynamic3(self):
        nodes_attributes['gathernd_node']['batch_dims'] = 2
        graph = build_graph(nodes_attributes, edges, inputs11)
        gathernd_node = Node(graph, 'gathernd_node')
        GatherND.infer(gathernd_node)

        # prepare reference results
        ref_output_shape = shape_array([dynamic_dimension_value, 3, 5, 9])

        # get the result
        res_output_shape = graph.node['output']['shape']

        self.assertTrue(
            strict_compare_tensors(ref_output_shape, res_output_shape),
            'values do not match expected: {} and given: {}'.format(
                ref_output_shape, res_output_shape))
Example #22
    def test_caffe_conv2d_dynamic_input_infer(self):
        graph = build_graph(
            nodes_attributes, [('conv_input', 'conv_node'),
                               ('conv_weights', 'conv_node'),
                               ('conv_node', 'conv_output'),
                               ('conv_output', 'op_output')],
            {
                'conv_output': {
                    'shape': None
                },
                'conv_input': {
                    'shape': shape_array([1, 3, dynamic_dimension_value, 227])
                },
                'conv_weights': {
                    'shape':
                    np.array([64, 3, 3, 3]),
                    'dim_attrs':
                    ['spatial_dims', 'channel_dims', 'batch_dims', 'axis']
                },
                'conv_node': {
                    'pad_spatial_shape': np.array([[0, 0], [0, 0]]),
                    'conv_pad': np.array([[0, 0], [0, 0], [0, 0], [0, 0]]),
                    'dilation': np.array([1, 1, 1, 1]),
                    'bias_addable': True,
                    'bias_term': False,
                    'output_spatial_shape': None,
                    'output_shape': None,
                    'stride': np.array([1, 1, 1, 1]),
                    'group': 1,
                    'kernel_spatial_idx': np.array([2, 3]),
                    'input_feature_channel': 1,
                    'output_feature_channel': 0,
                    'output': 64,
                    'kernel_spatial': np.array([3, 3]),
                    'spatial_dims': np.array([2, 3]),
                    'channel_dims': np.array([1]),
                    'batch_dims': np.array([0])
                }
            })

        conv_node = Node(graph, 'conv_node')
        Convolution.infer(conv_node)
        exp_shape = shape_array([1, 64, dynamic_dimension_value, 225])
        res_shape = graph.node['conv_output']['shape']
        self.assertTrue(strict_compare_tensors(exp_shape, res_shape))
Example #23
 def _set_shape(self, shape):
     if self.node.graph.stage == 'front':
         raise NotImplementedError(
             "set_shape not implemented for front phase")
     else:
         if self.type == 'in':
             assert self.node.in_node(
                 self.idx, control_flow=self.control_flow).value is None
             self.node.in_node(
                 self.idx,
                 control_flow=self.control_flow).shape = shape_array(shape)
         else:
             data_node = self.node.out_node(self.idx,
                                            control_flow=self.control_flow)
             assert data_node.value is None or self.node.has_and_set('override_output_shape') or \
                    strict_compare_tensors(data_node.soft_get('force_shape', data_node.shape), shape_array(shape))
             self.node.out_node(
                 self.idx,
                 control_flow=self.control_flow).shape = shape_array(shape)
Example #24
    def test_concat_infer(self, shape1, shape2, output_shape, axis):
        graph = build_graph(
            nodes_attributes, [('node_1', 'concat'), ('node_2', 'concat'),
                               ('concat', 'node_3'), ('node_3', 'op_output')],
            {
                'node_3': {
                    'shape': None,
                    'value': None
                },
                'node_1': {
                    'shape': shape_array(shape1)
                },
                'node_2': {
                    'shape': shape_array(shape2)
                },
                'concat': {
                    'axis': axis
                }
            })

        concat_node = Node(graph, 'concat')
        concat_infer(concat_node)
        res_shape = graph.node['node_3']['shape']
        self.assertTrue(strict_compare_tensors(output_shape, res_shape))
Example #25
    def test_simple_shape_inf(self, cond, output_port_0_shape,
                              output_port_1_shape):
        then_graph_nodes = {
            **regular_op_with_empty_data(
                'param_1', {
                    'type': 'Parameter',
                    'kind': 'op',
                    'input_id': 1,
                    'shape': None,
                    'infer': Parameter.infer
                }),
            **regular_op_with_empty_data(
                'param_2', {
                    'type': 'Parameter',
                    'kind': 'op',
                    'input_id': 2,
                    'shape': None,
                    'infer': Parameter.infer
                }),
            **regular_op_with_empty_data(
                'add', {
                    'type': 'Add',
                    'kind': 'op',
                    'op': 'Add',
                    'infer': lambda node: eltwise_infer(node, Add.operation)
                }),
            **regular_op_with_empty_data(
                'mul', {
                    'type': 'Mul',
                    'kind': 'op',
                    'op': 'Mul',
                    'infer': lambda node: eltwise_infer(node, Mul.operation)
                }),
            **regular_op_with_empty_data(
                'res1', {
                    'kind': 'op',
                    'type': 'Result',
                    'op': 'Result',
                    'infer': lambda x: 0,
                    'output_id': 0
                }),
            **regular_op_with_empty_data(
                'res2', {
                    'kind': 'op',
                    'type': 'Result',
                    'op': 'Result',
                    'infer': lambda x: 0,
                    'output_id': 1
                })
        }
        then_graph_edges = [
            *connect('param_1', '0:add'),
            *connect('param_2', '1:add'),
            *connect('param_1', '1:mul'),
            *connect('param_2', '0:mul'),
            *connect('add', 'res1'),
            *connect('mul', 'res2'),
        ]

        else_graph_nodes = {
            **regular_op_with_empty_data(
                'param_1', {
                    'type': 'Parameter',
                    'kind': 'op',
                    'input_id': 1,
                    'shape': None,
                    'infer': Parameter.infer
                }),
            **regular_op_with_empty_data(
                'param_2', {
                    'type': 'Parameter',
                    'kind': 'op',
                    'input_id': 3,
                    'shape': None,
                    'infer': Parameter.infer
                }),
            **regular_op_with_empty_data('identity', {
                'kind': 'op',
                'op': 'Identity',
                'infer': Identity.infer
            }),
            **regular_op_with_empty_data('identity_1', {
                'kind': 'op',
                'op': 'Identity',
                'infer': Identity.infer
            }),
            **regular_op_with_empty_data(
                'res1', {
                    'kind': 'op',
                    'type': 'Result',
                    'op': 'Result',
                    'infer': lambda x: 0,
                    'output_id': 0
                }),
            **regular_op_with_empty_data(
                'res2', {
                    'kind': 'op',
                    'type': 'Result',
                    'op': 'Result',
                    'infer': lambda x: 0,
                    'output_id': 1
                })
        }
        else_graph_edges = [
            *connect('param_1', 'identity'),
            *connect('param_2', 'identity_1'),
            *connect('identity_1', 'res2'),
            *connect('identity', 'res1'),
        ]
        then_graph = build_graph_with_edge_attrs(then_graph_nodes,
                                                 then_graph_edges)
        else_graph = build_graph_with_edge_attrs(else_graph_nodes,
                                                 else_graph_edges)
        external_graph_nodes = {
            **valued_const_with_data('cond', cond),
            **valued_const_with_data('input_2', int64_array([3, 2, 1])),
            **valued_const_with_data('input_1', int64_array([1, 2, 3])),
            **valued_const_with_data('input_3', int64_array([8, 4])),
            **regular_op(
                'if', {
                    'kind': 'op',
                    'op': 'If',
                    'then_graph': then_graph,
                    'else_graph': else_graph,
                    'infer': If.infer
                }),
            **empty_data('if_d_1'),
            **empty_data('if_d_2'),
            **result('res_1'),
            **result('res_2')
        }
        external_graph_edges = [
            *connect('cond', '0:if'), *connect('input_1', '1:if'),
            *connect('input_2', '2:if'), *connect('input_3', '3:if'),
            ('if', 'if_d_1', {
                'out': 0
            }), ('if', 'if_d_2', {
                'out': 1
            }), ('if_d_1', 'res_1'), ('if_d_2', 'res_2')
        ]

        graph = build_graph(external_graph_nodes, external_graph_edges)
        graph.stage = 'middle'
        partial_infer(graph)
        if_node = Node(graph, 'if')
        self.assertTrue(
            strict_compare_tensors(
                if_node.out_port(0).data.get_shape(), output_port_0_shape))
        # shape of the "then" branch is [3] and shape of the "else" branch is [2], so the output shape is "[dynamic]"
        self.assertTrue(
            strict_compare_tensors(
                if_node.out_port(1).data.get_shape(), output_port_1_shape))
Example #26
 def test_shape_insert(self, shape, pos, values, result):
     self.assertTrue(strict_compare_tensors(shape_insert(shape, pos, values), result))
Example #27
 def test_two_inputs_two_shapes_positive_1(self):
     shape_1 = [1, 2, 3, 4]
     shape_2 = [4, 3, 2, 1]
     inputs = {
         'node_1': [{
             'shape': shape_1
         }],
         'node_4': [{
             'shape': shape_2
         }]
     }
     nodes = {
         'input_1': {
             'type': 'Identity',
             'kind': 'op',
             'op': 'Parameter'
         },
         'input_2': {
             'type': 'Identity',
             'kind': 'op',
             'op': 'Parameter'
         },
         'node_1': {
             'type': 'Identity',
             'kind': 'op',
             'op': 'NotPlaceholder'
         },
         'node_2': {
             'type': 'Identity',
             'kind': 'op',
             'op': 'NotPlaceholder'
         },
         'node_3': {
             'type': 'Identity',
             'kind': 'op',
             'op': 'NotPlaceholder'
         },
         'node_4': {
             'type': 'Identity',
             'kind': 'op',
             'op': 'NotPlaceholder'
         },
         'output': {
             'kind': 'op',
             'op': 'Result'
         }
     }
     edges = [('input_1', 'node_1'), ('node_1', 'node_2'),
              ('node_3', 'output'), ('input_2', 'node_4'),
              ('node_4', 'output')]
     graph = build_graph(nodes, edges)
     add_input_ops(graph=graph,
                   user_defined_inputs=inputs,
                   before_infer=True)
     new_input_1 = list(graph.in_edges('node_1'))[0][0]
     new_input_2 = list(graph.in_edges('node_4'))[0][0]
     self.assertFalse(graph.node['input_1']['is_input'])
     self.assertTrue(graph.node[new_input_1]['is_input'])
     self.assertTrue(graph.node[new_input_2]['is_input'])
     self.assertTrue((new_input_1, 'node_1') in graph.edges())
     self.assertTrue((new_input_2, 'node_4') in graph.edges())
     self.assertTrue(
         strict_compare_tensors(shape_1, graph.node[new_input_1]['shape']))
     self.assertTrue(
         strict_compare_tensors(shape_2, graph.node[new_input_2]['shape']))
Example #28
 def test_shape_array(self, data, ref, result):
     self.assertEqual(strict_compare_tensors(shape_array(data), ref), result)
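This example shows why the tests here prefer `strict_compare_tensors` over `np.array_equal`: shapes with dynamic dimensions are NumPy masked arrays, and a masked (dynamic) element should compare equal only to another masked element. A short sketch under that assumption (import path assumed from the OpenVINO MO layout):

from openvino.tools.mo.front.common.partial_infer.utils import (
    dynamic_dimension_value, shape_array, strict_compare_tensors)

static = shape_array([1, 3, 224, 224])
dynamic = shape_array([1, 3, dynamic_dimension_value, 224])

assert strict_compare_tensors(static, static)       # identical static shapes match
assert not strict_compare_tensors(static, dynamic)  # concrete vs. dynamic dim: no match
assert strict_compare_tensors(dynamic, shape_array([1, 3, dynamic_dimension_value, 224]))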
Example #29
    def create_node_with_data(self,
                              inputs: list = None,
                              attrs: dict = None,
                              data_nodes: [Node, np.ndarray, list] = None,
                              edge_attrs: list = None):
        """
        Creates a new node with given inputs and attrs and also creates data node that
        holds the op output value. Inputs should be data nodes (not op nodes).
        Work for ops with a single output port only.
        Edge attributes in edge_attrs go in order of items in 'inputs'
        """
        if inputs is None:
            inputs = []
        if attrs is None:
            attrs = {}
        # No need to extract port, because input node should be a data node,
        # so there is no choice.
        new_op_node = self.add_node(attrs)

        # TODO Preserve debug information
        inputs_with_edge_attrs = []
        for i, inp in enumerate(inputs):
            if inp is None:
                continue
            edge_attr = {'in': i}
            if edge_attrs is not None and i < len(edge_attrs):
                edge_attr.update(edge_attrs[i])
            inputs_with_edge_attrs.append((inp.id, new_op_node.id, edge_attr))
            new_op_node.add_input_port(i, skip_if_exist=True)

        self.graph.add_edges_from(inputs_with_edge_attrs)

        # TODO: Extend to the case when multiple output ports
        old_data_value = [None]
        old_data_shape = [None]
        if data_nodes is None:
            data_node = self.graph.unique_id()
            self.graph.add_node(
                data_node,
                **add_attrs_props(
                    dict(kind='data',
                         name=data_node,
                         value=None,
                         shape=None,
                         data_type=None,
                         infer=None)))
            data_nodes = [Node(self.graph, data_node)]
        else:
            if type(data_nodes) not in [list, np.ndarray]:
                data_nodes = [data_nodes]
            old_data_value = [
                data_node.value.copy()
                if data_node.has_valid('value') else None
                for data_node in data_nodes
            ]
            old_data_shape = [
                data_node.shape.copy()
                if data_node.has_valid('shape') else None
                for data_node in data_nodes
            ]
        for id, data_node in enumerate(data_nodes):
            self.graph.add_edges_from([(new_op_node.id, data_node.id, {
                'out': id
            })])

        if new_op_node.has_valid('infer'):
            if log.getLogger().isEnabledFor(log.DEBUG):
                log.debug(
                    'Start running infer function for individual op node with attributes: {}'
                    ''.format(str(new_op_node)))
            new_op_node.infer(new_op_node)
            if new_op_node.has('nchw_layout'):
                for out_node in new_op_node.out_nodes().values():
                    out_node['nchw_layout'] = new_op_node.nchw_layout
            assert all(
                old_value is None for old_value in old_data_value) or all([
                    strict_compare_tensors(old_data_value[id], data_node.value)
                    for id, data_node in enumerate(data_nodes)
                ])
            assert all(old_shape is None for old_shape in old_data_shape) or all(
                [strict_compare_tensors(old_data_shape[id], data_node.shape)
                 for id, data_node in enumerate(data_nodes)]), \
                "After re-inference of {} node, old and new shapes do not match. Old shapes: {}, new shapes: {}." \
                "".format(new_op_node.soft_get('name'), [old_data_shape[id] for id in range(len(data_nodes))],
                          [data_node.shape for data_node in data_nodes])
            for data_node in data_nodes:
                if log.getLogger().isEnabledFor(log.DEBUG):
                    log.debug(
                        'Finished running infer function, data nodes attributes: {}'
                        .format(data_node))
        return data_nodes[0] if len(data_nodes) == 1 else data_nodes
Example #30
 def test_shape_delete(self, shape, indices, result):
     self.assertTrue(strict_compare_tensors(shape_delete(shape, indices), result))
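Examples #26 and #30 exercise the `shape_insert` and `shape_delete` helpers. A hedged sketch of their behavior as the call sites above suggest it; the import path and the exact handling of dynamic (masked) dimensions are assumptions:

from openvino.tools.mo.front.common.partial_infer.utils import (
    dynamic_dimension_value, shape_array, shape_delete, shape_insert,
    strict_compare_tensors)

sh = shape_array([1, dynamic_dimension_value, 224])
# insert the value 3 at position 1, keeping the masked element intact
assert strict_compare_tensors(shape_insert(sh, 1, 3),
                              shape_array([1, 3, dynamic_dimension_value, 224]))
# delete the dimension at index 0
assert strict_compare_tensors(shape_delete(sh, [0]),
                              shape_array([dynamic_dimension_value, 224]))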