Example #1
0
    def test_tf_resample_infer(self):
        """Resample shape inference with explicit 'height'/'width' attrs (NCHW layout)."""
        graph = build_graph(
            nodes_attributes,
            [('node_1', 'resample'), ('resample', 'node_3'),
             ('node_3', 'op_output')],
            {
                'node_3': {'shape': None},
                'node_1': {'shape': np.array([1, 3, 227, 227])},
                'resample': {'antialias': 1,
                             'height': 384,
                             'width': 512,
                             'resample_type': 'LINEAR',
                             'factor': 1.0},
            })

        graph.graph['layout'] = 'NCHW'
        ResampleOp.resample_infer(Node(graph, 'resample'))
        expected = np.array([1, 3, 384, 512])
        actual = graph.node['node_3']['shape']
        # Compare element-wise so a failure pinpoints the offending dimension.
        for idx, want in enumerate(expected):
            self.assertEqual(want, actual[idx])
Example #2
0
 def test_tf_infer(self):
     """Resample shape inference taking H/W from a 'new_shape' Const input (NHWC layout)."""
     new_width = 100
     new_height = 125
     attrs = nodes_attributes.copy()
     attrs['new_shape'] = {
         'value': np.array([new_height, new_width]),
         'type': 'Const',
         'kind': 'op',
     }
     graph = build_graph(
         attrs,
         [('node_1', 'resample'), ('new_shape', 'resample'),
          ('resample', 'node_3'), ('node_3', 'op_output')],
         {
             'node_3': {'shape': None},
             'node_1': {'shape': np.array([1, 224, 227, 3])},
             'resample': {'antialias': 1,
                          'resample_type': 'LINEAR',
                          'factor': 1.0,
                          'fw': 'tf'},
         })
     graph.graph['layout'] = 'NHWC'
     ResampleOp.resample_infer(Node(graph, 'resample'))
     expected = np.array([1, new_height, new_width, 3])
     actual = graph.node['node_3']['shape']
     # Compare element-wise so a failure pinpoints the offending dimension.
     for idx, want in enumerate(expected):
         self.assertEqual(want, actual[idx])
Example #3
0
    def replace_sub_graph(self, graph: nx.MultiDiGraph, match: dict):
        """Replace a matched NearestNeighborUpsampling sub-graph with one Resample op.

        The target spatial size is input size (from 'pack_1' inputs) multiplied
        by the scale factors read from the 'mul_const' shape. If any of these
        cannot be determined, the pattern is left untouched.

        :param graph: graph being transformed
        :param match: pattern-matcher dict of matched nodes
        """
        log.debug('Matched NearestNeighborUpsampling pattern: {}'.format(
            [node.id for node in match.values()]))
        try:
            input_height = match['pack_1'].in_node(1).value.item()
            input_width = match['pack_1'].in_node(3).value.item()

            # NOTE(review): scales are read from .shape, not .value — looks
            # intentional for this pattern, but verify against the matcher.
            height_scale = match['mul_const'].shape[-4]
            width_scale = match['mul_const'].shape[-2]
        except Exception as ex:
            # Fix: the caught exception was previously discarded, hiding the
            # reason the pattern was skipped. Log it (lazy % formatting).
            log.warning(
                'Failed to determine scaling parameters from the topology. '
                'Do not apply pattern. Reason: %s', ex)
            return

        resample_op = ResampleOp(
            graph, {
                'width': input_width * width_scale,
                'height': input_height * height_scale,
                'name': 'Resample_',
                'antialias': 0,
                'resample_type': 'caffe.ResampleParameter.NEAREST'
            })
        resample_node = resample_op.create_node([match['op']])

        # Re-wire consumers of the sub-graph output to the new Resample node,
        # then drop every matched node except the original input producer.
        replace_node(match['reshape_2'], resample_node)
        graph.remove_nodes_from(
            [node.id for node in match.values() if node.id != match['op'].id])
Example #4
0
 def extract(node):
     """Populate a TF resize node with nearest-neighbour Resample attributes."""
     ResampleOp.update_node_stat(node, {
         'resample_type': 'caffe.ResampleParameter.NEAREST',
         'fw': 'tf',
         'antialias': 0,
     })
     return __class__.enabled
    def replace_pattern(graph: Graph, match: dict):
        """Convert a 4D Interpolate node into a Resample or Interp node.

        'nearest'/'cubic'/'area' modes (or nodes flagged 'convert_to_resample')
        become Resample; 'linear' becomes Interp. The target size moves from
        the port-1 input into node attributes, and port 1 is disconnected.
        """
        node = match['interpolate']

        # common
        mode = node.mode
        assert mode in ['linear', 'nearest', 'cubic', 'area']
        in_shape = node.in_port(0).data.get_shape()
        assert in_shape is not None and len(in_shape) == 4
        out_shape = node.out_port(0).data.get_shape()
        assert out_shape is not None and len(out_shape) == 4
        # Spatial dims taken from indices 2 and 3 (NCHW-style layout assumed
        # at this point of the pipeline — TODO confirm).
        in_height, in_width = in_shape[2], in_shape[3]
        out_height, out_width = out_shape[2], out_shape[3]
        # Reconcile an explicit 'factor' attribute (if present) with the
        # scale ratios implied by the input/output shapes.
        factor = factor_update(
            None if not node.has_valid('factor') else node.factor,
            [float(out_height) / in_height,
             float(out_width) / in_width], [in_height, in_width],
            [out_height, out_width], node.soft_get('name'))
        update_attrs = {
            'width': out_width,
            'height': out_height,
            'factor': factor,
        }

        # Drop 'factor' when shrink/zoom factors fully describe the scaling,
        # or when no single consistent factor could be derived.
        if (node.has_valid('shrink_factor')
                and node.has_valid('zoom_factor')) or factor is None:
            del update_attrs['factor']
            if node.has('factor'):
                del node['factor']

        # When scaling is factor-driven and no explicit size was set on the
        # node, zero out width/height so the factor takes precedence.
        if ((node.has_valid('shrink_factor') and node.shrink_factor != 1) or
            (node.has_valid('zoom_factor') and node.zoom_factor != 1) or 'factor' in update_attrs) \
                and ((not node.has_valid('width') or node.width == 0) and
                     (not node.has_valid('height') or node.height == 0)):
            update_attrs['width'] = 0
            update_attrs['height'] = 0

        # specific
        if mode in ['nearest', 'cubic', 'area'
                    ] or node.has_and_set('convert_to_resample'):
            # Resample supports neither corner alignment nor padding.
            assert not node.align_corners
            assert node.pads_begin == 0 and node.pads_end == 0
            update_attrs[
                'resample_type'] = InterpolateToInterpOrResample.type_map[mode]
            ResampleOp.update_node_stat(node, update_attrs)
            node.in_port(1).disconnect()
        elif mode == 'linear':
            # Interp carries padding and corner-alignment explicitly.
            update_attrs.update({
                'pad_beg': node.pads_begin,
                'pad_end': node.pads_end,
                'align_corners': node.align_corners,
            })
            InterpOp.update_node_stat(node, update_attrs)
            node.in_port(1).disconnect()
        node['force_precision_in_ports'] = None
Example #6
0
    def extract(node):
        """Map MXNet UpSampling attrs onto a nearest-neighbour Resample node."""
        layer_attrs = get_mxnet_layer_attrs(node.symbol_dict)

        # 'scale' defaults to 1 (identity resize) when absent in the symbol.
        ResampleOp.update_node_stat(node, {
            'type': 'Resample',
            'factor': layer_attrs.int("scale", 1),
            'resample_type': 'caffe.ResampleParameter.NEAREST',
            'antialias': 0,
        })
        return __class__.enabled
Example #7
0
    def extract(node):
        """Extract ONNX Upsample attrs into a nearest-neighbour Resample node.

        Only mode == 'nearest' with equal, explicitly defined width_scale and
        height_scale is supported; any other configuration raises Error.

        :param node: ONNX node being extracted
        :return: the extractor's 'enabled' flag
        :raises Error: on unsupported mode, a 'scales' attribute, missing or
            mismatched width_scale/height_scale
        """
        mode = onnx_attr(node, 'mode', 's', default='nearest', dst_type=lambda x: x.decode())
        scales = onnx_attr(node, 'scales', 'floats', dst_type=lambda x: np.array(x, dtype=np.float32))
        width_scale = onnx_attr(node, 'width_scale', 'f')
        height_scale = onnx_attr(node, 'height_scale', 'f')

        supported_modes = ['nearest', 'linear']
        if mode not in supported_modes:
            raise Error(
                'Error decoding Upsample node {}, mode = {} is not in the list of supported modes {}.',
                node.name,
                mode,
                supported_modes
            )

        # TODO: this is a temporary limitation
        if mode != 'nearest':
            raise Error(
                'Upsample mode {} for node {} is not supported. Only nearest is supported.',
                mode,
                node.name
            )

        # TODO: this is a temporary limitation
        if scales is not None:
            raise Error(
                'Upsample scales attribute is defined for node {}. Only scale_width and scale_height are supported.',
                node.name
            )

        if width_scale is None or height_scale is None:
            # Fixed typos in the user-facing message:
            # 'widths_scale' -> 'width_scale', 'Upsampe' -> 'Upsample'.
            raise Error(
                'One/both of width_scale = {} and height_scale = {} is not defined for Upsample node {}.',
                width_scale,
                height_scale,
                node.name
            )

        if width_scale != height_scale:
            # Fixed typo/grammar in the user-facing message.
            raise Error(
                'Upsample node {} has different width_scale = {} and height_scale = {}. It is not supported; they should match.',
                node.name,
                width_scale,
                height_scale
            )

        mode_to_resample_type = {'nearest': 'caffe.ResampleParameter.NEAREST'}
        # The asserts below are guaranteed by the checks above; kept as
        # internal invariants documenting the contract at this point.
        assert mode in mode_to_resample_type
        assert width_scale == height_scale
        assert width_scale is not None
        ResampleOp.update_node_stat(node, {'resample_type': mode_to_resample_type[mode], 'factor': width_scale, 'antialias': 0})
        return __class__.enabled
Example #8
0
 def test_caffe_factor_infer(self):
     """Resample shape inference driven purely by 'factor' (NCHW layout)."""
     scale = 3.0
     graph = build_graph(
         nodes_attributes,
         [('node_1', 'resample'), ('resample', 'node_3')],
         {
             'node_3': {'is_output': True, 'shape': None},
             'node_1': {'shape': np.array([1, 3, 224, 227])},
             'resample': {'antialias': 1,
                          'resample_type': 'LINEAR',
                          'factor': scale},
         })
     graph.graph['layout'] = 'NCHW'
     ResampleOp.resample_infer(Node(graph, 'resample'))
     expected = np.array([1, 3, 224 * scale, 227 * scale])
     actual = graph.node['node_3']['shape']
     # Compare element-wise so a failure pinpoints the offending dimension.
     for idx, want in enumerate(expected):
         self.assertEqual(want, actual[idx])
    def replace_pattern(graph: Graph, match: dict):
        """Convert a 4D/5D Interpolate node into a Resample or Interp node.

        'nearest'/'cubic'/'area' modes (or nodes flagged 'convert_to_resample')
        become Resample; 'linear' becomes Interp (4D only). For TF graphs with
        --keep_shape_ops the port-1 size input is rebuilt from a dynamic Shape
        sub-graph so the result stays reshape-able; otherwise port 1 is dropped.
        """
        node = match['interpolate']

        # Fix: the '{}' placeholder in the assert message was never filled in;
        # format it with the node name so failures are actionable.
        assert 1 in node.in_ports() and not node.in_port(1).disconnected() and \
               node.in_port(1).data.get_value() is not None, \
            'Interpolate node {} is corrupted: no 1-port input found'.format(node.soft_get('name'))

        # common
        mode = node.mode
        assert mode in ['linear', 'nearest', 'cubic', 'area']
        in_shape = node.in_port(0).data.get_shape()
        assert in_shape is not None and len(in_shape) in [4, 5]
        out_shape = node.out_port(0).data.get_shape()
        assert out_shape is not None and len(out_shape) in [4, 5]
        # Spatial dims taken from indices 2 and 3 (channels-first layout
        # assumed at this point — TODO confirm for the 5D case).
        in_height, in_width = in_shape[2], in_shape[3]
        out_height, out_width = out_shape[2], out_shape[3]
        # Reconcile an explicit 'factor' attribute (if present) with the
        # scale ratios implied by the input/output shapes.
        factor = factor_update(
            None if not node.has_valid('factor') else node.factor,
            [float(out_height) / in_height,
             float(out_width) / in_width], [in_height, in_width],
            [out_height, out_width], node.soft_get('name'))
        update_attrs = {
            'width': out_width,
            'height': out_height,
            'factor': factor,
        }

        # Drop 'factor' when shrink/zoom factors fully describe the scaling,
        # or when no single consistent factor could be derived.
        if (node.has_valid('shrink_factor')
                and node.has_valid('zoom_factor')) or factor is None:
            del update_attrs['factor']
            if node.has('factor'):
                del node['factor']

        # When scaling is factor-driven and no explicit size was set on the
        # node, zero out width/height so the factor takes precedence.
        if ((node.has_valid('shrink_factor') and node.shrink_factor != 1) or
            (node.has_valid('zoom_factor') and node.zoom_factor != 1) or 'factor' in update_attrs) \
                and ((not node.has_valid('width') or node.width == 0) and
                     (not node.has_valid('height') or node.height == 0)):
            update_attrs['width'] = 0
            update_attrs['height'] = 0

        # specific
        if mode in ['nearest', 'cubic', 'area'
                    ] or node.has_and_set('convert_to_resample'):
            # Resample supports neither corner alignment nor padding.
            assert not node.align_corners
            assert node.pads_begin == 0 and node.pads_end == 0
            update_attrs[
                'resample_type'] = InterpolateToInterpOrResample.type_map[mode]
            ResampleOp.update_node_stat(node, update_attrs)

            if not graph.graph[
                    'cmd_params'].keep_shape_ops or graph.graph['fw'] != 'tf':
                node.in_port(1).disconnect()
            else:
                # we avoid making resample non-reshapable for tf version:
                # rebuild the size input as [batch, features, <original sizes>]
                # computed dynamically from the data input's shape.
                shape = Shape(graph, {}).create_node()
                node.in_port(0).get_source().connect(shape.in_port(0))

                batch = node_to_get_batch_value(shape)
                features = node_to_get_features_dimension_value(shape)
                full_shape = new_shape_node_from_shape_nodes(
                    [batch, features,
                     node.in_port(1).get_source().node])
                node.in_port(1).get_connection().set_source(
                    full_shape.out_port(0))
                full_shape['override_output_shape'] = True

        elif mode == 'linear':
            assert len(in_shape) == 4, 'Interp does not support 5D input'
            # Interp carries padding and corner-alignment explicitly.
            update_attrs.update({
                'pad_beg': node.pads_begin,
                'pad_end': node.pads_end,
                'align_corners': node.align_corners,
            })
            InterpOp.update_node_stat(node, update_attrs)
            node.in_port(1).disconnect()