Example #1
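 # ONNX Resize/Upsample extractor: for opset >= 11 it copies the Resize-11
 # attributes (mode, coordinate_transformation_mode, nearest_mode, cubic_coeff_a)
 # onto an ONNXResize11Op; older opsets fall back to UpsampleOp with only 'mode'.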
 def extract(cls, node: Node):
     onnx_opset_version = get_onnx_opset_version(node)
     if onnx_opset_version is not None and onnx_opset_version >= 11:
         mode = onnx_attr(node, 'mode', 's', default=b'nearest').decode()
         transformation_mode = onnx_attr(node,
                                         'coordinate_transformation_mode',
                                         's',
                                         default=b'half_pixel').decode()
         nearest_mode = onnx_attr(node,
                                  'nearest_mode',
                                  's',
                                  default=b'round_prefer_floor').decode()
         cubic_coeff_a = onnx_attr(node,
                                   'cubic_coeff_a',
                                   'f',
                                   default=-0.75)
         attrs = {
             'mode': mode,
             'coordinate_transformation_mode': transformation_mode,
             'nearest_mode': nearest_mode,
             'cube_coeff': cubic_coeff_a
         }
         ONNXResize11Op.update_node_stat(node, attrs)
     else:
         mode = onnx_attr(node, 'mode', 's', default=b'nearest').decode()
         UpsampleOp.update_node_stat(node, {'mode': mode})
     return cls.enabled
Example #2
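    # Test for UpsampleOp.upsample_infer when the scales come from a second
    # 'scales' data input: builds the graph, runs shape inference and compares
    # the inferred output shape with the expected one element-wise.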
    def test_upsample_with_second_input_infer(self, scales, input_shape,
                                              expected_shape):
        nodes_attributes['scales'] = {'kind': 'data', 'value': scales}
        graph = build_graph(
            nodes_attributes, [('node_1', 'upsample'), ('scales', 'upsample'),
                               ('upsample', 'node_3'),
                               ('node_3', 'op_output')], {
                                   'node_3': {
                                       'shape': None
                                   },
                                   'node_1': {
                                       'shape': input_shape
                                   },
                                   'upsample': {
                                       'mode': 'linear',
                                       'height_scale': None,
                                       'width_scale': None
                                   }
                               })

        graph.graph['layout'] = 'NCHW'
        upsample_node = Node(graph, 'upsample')
        UpsampleOp.upsample_infer(upsample_node)
        res_shape = graph.node['node_3']['shape']
        for i in range(0, len(expected_shape)):
            self.assertEqual(expected_shape[i], res_shape[i])
Example #3
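    # ONNX Upsample extractor: for opset >= 9 only 'mode' is read and the node is
    # mapped to ONNXResize10; the legacy path validates 'scales' / 'width_scale' /
    # 'height_scale' (only 4D scales, no batch/feature upsampling) before creating
    # an UpsampleOp.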
    def extract(cls, node):
        onnx_opset_version = get_onnx_opset_version(node)
        if onnx_opset_version is not None and onnx_opset_version >= 9:
            mode = onnx_attr(node,
                             'mode',
                             's',
                             default='nearest',
                             dst_type=lambda x: x.decode())
            ONNXResize10.update_node_stat(node, {'mode': mode})
        else:
            mode = onnx_attr(node,
                             'mode',
                             's',
                             default='nearest',
                             dst_type=lambda x: x.decode())
            scales = onnx_attr(
                node,
                'scales',
                'floats',
                dst_type=lambda x: np.array(x, dtype=np.float32))
            width_scale = onnx_attr(node, 'width_scale', 'f')
            height_scale = onnx_attr(node, 'height_scale', 'f')

            supported_modes = ['nearest', 'linear']
            if mode not in supported_modes:
                raise Error(
                    'Error decoding Upsample node {}, mode = {} is not in the list of supported modes {}.',
                    node.name, mode, supported_modes)

            if scales is not None:
                if scales.shape != (4, ):
                    raise Error(
                        'Upsample scales attribute is wrong for node {}. Only 4D scales are supported.',
                        node.name)
                if math.fabs(scales[0] - 1) > 1e-5 or math.fabs(scales[1] - 1) > 1e-5:
                    raise Error(
                        'Upsampling of batch and feature dimensions is not supported for node {}.',
                        node.name)
                height_scale = scales[2]
                width_scale = scales[3]

            if (width_scale is None
                    or height_scale is None) and len(node.in_nodes()) != 2:
                raise Error(
                    'One/both of width_scale = {} and height_scale = {} is not defined for Upsample node {}.',
                    width_scale, height_scale, node.name)

            UpsampleOp.update_node_stat(
                node, {
                    'mode': mode,
                    'height_scale': height_scale,
                    'width_scale': width_scale
                })
        return cls.enabled
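Example #4
    # Replaces a PyTorch-style Upsample module node: 'bilinear' is mapped to
    # 'linear'; the linear, non-align_corners case becomes an Interpolate (opset4)
    # fed by sizes/scales/axes constants, a fixed size becomes an opset1
    # Interpolate, and otherwise scale_factor drives a legacy Upsample node.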
    def replace_op(self, graph: Graph, node: Node):
        mode = node.module.mode
        if mode == 'bilinear':
            mode = 'linear'
        align_corners = node.module.align_corners

        if mode == 'linear' and not align_corners:
            height = node.module.size[0]
            width = node.module.size[1]
            attrs = {
                'name': node.name,
                'version': 'opset4',
                'height': height,
                'width': width,
                'mode': mode,
                'axes': [2, 3],
                'pads_begin': [0, 0],
                'pads_end': [0, 0],
                'align_corners': node.module.align_corners,
                'shape_calculation_mode': 'sizes',
            }

            sizes = Const(graph, {
                'value': np.array([height, width])
            }).create_node()
            axes = Const(graph, {'value': np.array([2, 3])}).create_node()
            scales = Const(graph, {
                'value': np.array([1, 1], dtype=np.float32)
            }).create_node()
            interp = Interpolate(graph, attrs).create_node(
                [node.in_node(0), sizes, scales, axes])
        else:
            if node.module.size:
                attrs = {
                    'name': node.name,
                    'version': 'opset1',
                    'height': node.module.size[0],
                    'width': node.module.size[1],
                    'mode': mode,
                    'axes': [2, 3],
                    'align_corners': node.module.align_corners,
                }
                interp = Interpolate(graph,
                                     attrs).create_node([node.in_node(0)])
            else:
                if not node.module.scale_factor:
                    raise Error('No scale_factor found')
                attrs = {
                    'name': node.name,
                    'height_scale': float(node.module.scale_factor),
                    'width_scale': float(node.module.scale_factor),
                    'mode': mode,
                    'align_corners': node.module.align_corners,
                }
                interp = UpsampleOp(graph,
                                    attrs).create_node([node.in_node(0)])

        return [interp.id]
Example #5
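    # Test for UpsampleOp.upsample_infer with fixed height_scale/width_scale = 2:
    # a 1x3x227x227 input is expected to produce a 1x3x454x454 output shape.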
    def test_upsample_with_scales_infer(self):
        graph = build_graph(
            nodes_attributes, [('node_1', 'upsample'), ('upsample', 'node_3'),
                               ('node_3', 'op_output')], {
                                   'node_3': {
                                       'shape': None
                                   },
                                   'node_1': {
                                       'shape': np.array([1, 3, 227, 227])
                                   },
                                   'upsample': {
                                       'mode': 'linear',
                                       'height_scale': 2.,
                                       'width_scale': 2.
                                   }
                               })

        graph.graph['layout'] = 'NCHW'
        upsample_node = Node(graph, 'upsample')
        UpsampleOp.upsample_infer(upsample_node)
        exp_shape = np.array([1, 3, 454, 454])
        res_shape = graph.node['node_3']['shape']
        for i in range(0, len(exp_shape)):
            self.assertEqual(exp_shape[i], res_shape[i])
Example #6
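    # Parameterized variant of the scales test: height_scale/width_scale are taken
    # from the 'scales' argument and the whole inferred shape is checked with
    # strict_compare_tensors.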
    def test_upsample_with_scales_infer(self, scales, input_shape,
                                        expected_shape):
        graph = build_graph(
            nodes_attributes, [('node_1', 'upsample'), ('upsample', 'node_3'),
                               ('node_3', 'op_output')], {
                                   'node_3': {
                                       'shape': None,
                                       'value': None
                                   },
                                   'node_1': {
                                       'shape': input_shape
                                   },
                                   'upsample': {
                                       'mode': 'linear',
                                       'height_scale': scales[2],
                                       'width_scale': scales[3]
                                   }
                               })

        graph.graph['layout'] = 'NCHW'
        upsample_node = Node(graph, 'upsample')
        UpsampleOp.upsample_infer(upsample_node)
        res_shape = graph.node['node_3']['shape']
        self.assertTrue(strict_compare_tensors(expected_shape, res_shape))
Example #7
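 # Minimal Upsample extractor: reads only the 'mode' attribute (default 'nearest')
 # and stores it on the node via UpsampleOp.update_node_stat.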
 def extract(node: Node):
     mode = onnx_attr(node, 'mode', 's', default=b'nearest').decode()
     UpsampleOp.update_node_stat(node, {'mode': mode})
     return __class__.enabled
Example #8
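    # BatchToSpaceNDToUpsample transformation: if the matched
    # Transpose -> ExpandDims -> Tile -> BatchToSpaceND -> Transpose sub-graph has
    # the expected transpose orders, expand axis and tile/block_shape values, it is
    # replaced with a single nearest-neighbour Upsample whose scales come from
    # block_shape.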
    def replace_sub_graph(graph: Graph, match: dict, **kwargs):
        def _input_node_value(node: Node, port_ind: int):
            input_node = node.in_port(port_ind).get_source().node
            return input_node.value if input_node.op == 'Const' else None

        transpose = match['transpose']
        transpose_order = _input_node_value(transpose, 1)
        if transpose_order is None or not np.all(
                np.equal(transpose_order, int64_array([1, 2, 3, 0]))):
            log.debug(
                'The transpose order {} for node {} is not equal to [1, 2, 3, 0]. Cannot apply '
                'BatchToSpaceNDToUpsample transformation.'.format(
                    transpose_order, transpose.name))
            return

        expand_axis = match['expand_dims']
        expand_axis_value = _input_node_value(expand_axis, 1)
        if expand_axis_value != 0:
            log.debug(
                'The expand axis {} for node {} is not equal to 0. Cannot apply BatchToSpaceNDToUpsample '
                'transformation.'.format(expand_axis_value, expand_axis.name))
            return

        tile = match['tile']
        tile_value = _input_node_value(tile, 1)
        if tile_value is None:
            log.debug(
                'The tile value is not defined for node {}. Cannot apply BatchToSpaceNDToUpsample '
                'transformation.'.format(tile.name))
            return

        if len(np.where(tile_value != 1)[0]) != 1:
            log.debug(
                'The number of tile values not equal to 1 is not equal to 1. Cannot apply '
                'BatchToSpaceNDToUpsample transformation.')
            return
        tile_batch = tile_value[0]

        batch_to_space_nd = match['batch_to_space_nd']
        block_shape = _input_node_value(batch_to_space_nd, 1)
        if block_shape is None or tile_batch != np.prod(block_shape):
            log.debug(
                'The block shape {} for node {} is not defined or inconsistent with the tile size. Cannot apply '
                'BatchToSpaceNDToUpsample transformation.'.format(
                    block_shape, batch_to_space_nd.name))
            return
        if len(block_shape) != 2:
            log.debug(
                'The block shape len is not equal to 2 for node {}. Cannot apply BatchToSpaceNDToUpsample '
                'transformation.'.format(batch_to_space_nd.name))
            return

        transpose_back = match['transpose_back']
        transpose_back_order = _input_node_value(transpose_back, 1)
        if transpose_back_order is None or not np.all(
                np.equal(transpose_back_order, int64_array([3, 0, 1, 2]))):
            log.debug(
                'The transpose order {} for node {} is not equal to [3, 0, 1, 2]. Cannot apply '
                'BatchToSpaceNDToUpsample transformation.'.format(
                    transpose_back_order, transpose_back.name))
            return

        upsample_node = UpsampleOp(
            graph, {
                'height_scale': block_shape[0],
                'width_scale': block_shape[1],
                'mode': 'nearest',
                'name': transpose.name + '/upsample'
            }).create_node()

        match['transpose'].in_port(0).get_connection().set_destination(
            upsample_node.in_port(0))
        match['transpose_back'].out_port(0).get_connection().set_source(
            upsample_node.out_port(0))
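Example #9
    # Variant of the Upsample replacement above: any mode ending in 'linear'
    # (bilinear, trilinear) is lowered to an Interpolate (opset4) driven by either
    # explicit sizes or scale factors, with coordinate_transformation_mode chosen
    # from align_corners; other modes fall back to an opset1 Interpolate or a
    # legacy Upsample node.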
    def replace_op(self, graph: Graph, node: Node):
        mode = node.module.mode
        if mode.endswith('linear'):  # like bilinear or trilinear
            mode = 'linear'
        align_corners = node.module.align_corners

        if mode == 'linear':
            height = node.module.size[0] if node.module.size is not None else -1
            width = node.module.size[1] if node.module.size is not None else -1
            dims = node.module.dims
            axes = np.arange(2, dims)
            pads = np.zeros(dims, dtype=np.int32)
            scales = np.repeat(node.module.scale_factor,
                               dims - 2).astype(np.float32)
            attrs = {
                'name': node.name,
                'version': 'opset4',
                'height': height,
                'width': width,
                'mode': mode,
                'axes': axes,
                'pads_begin': pads,
                'pads_end': pads,
                'coordinate_transformation_mode': 'align_corners' if align_corners else 'half_pixel',
                'shape_calculation_mode': 'sizes' if node.module.size is not None else 'scales',
            }

            sizes = Const(graph, {
                'value': np.array([height, width])
            }).create_node()
            axes = Const(graph, {'value': axes}).create_node()
            scales = Const(graph, {'value': scales}).create_node()
            interp = Interpolate(graph, attrs).create_node(
                [node.in_node(0), sizes, scales, axes])
        else:
            if node.module.size:
                attrs = {
                    'name': node.name,
                    'version': 'opset1',
                    'height': node.module.size[0],
                    'width': node.module.size[1],
                    'mode': mode,
                    'axes': [2, 3],
                    'align_corners': node.module.align_corners,
                }
                interp = Interpolate(graph,
                                     attrs).create_node([node.in_node(0)])
            else:
                if not node.module.scale_factor:
                    raise Error('No scale_factor found')
                attrs = {
                    'name': node.name,
                    'height_scale': float(node.module.scale_factor),
                    'width_scale': float(node.module.scale_factor),
                    'mode': mode,
                    'align_corners': node.module.align_corners,
                }
                interp = UpsampleOp(graph,
                                    attrs).create_node([node.in_node(0)])

        return [interp.id]