def extract(cls, node):
        mode = onnx_attr(node,
                         'mode',
                         's',
                         default='constant',
                         dst_type=lambda x: x.decode())
        pads = onnx_attr(node,
                         'pads',
                         'ints',
                         dst_type=lambda x: np.array(x, dtype=np.int64))
        value = onnx_attr(node, 'value', 'f', default=0.)

        assert pads is not None

        # MO Pad and ONNX Pad store the pads values in different layouts:
        # MO Pad expects a D x 2 array, where D is the total number of
        # dimensions, while ONNX Pad uses a flat [2 * D] layout (all begin
        # values first, then all end values), so reshape and transpose

        pads = np.transpose(pads.reshape([2, -1]))

        Pad.update_node_stat(node, {
            'mode': mode,
            'pads': pads,
            'fill_value': value
        })
        return cls.enabled
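To see concretely what the reshape/transpose in the extractor above does, here is a standalone NumPy sketch (the pad values are made up): ONNX stores pads flat as [x1_begin, x2_begin, ..., x1_end, x2_end, ...], so reshaping to [2, D] puts all begin values in row 0 and all end values in row 1, and transposing yields the D x 2 layout MO expects.

import numpy as np

# flat ONNX layout for a 3-D input: all begins first, then all ends
onnx_pads = np.array([1, 2, 3, 4, 5, 6], dtype=np.int64)

# reshape([2, -1]) -> [[1, 2, 3], [4, 5, 6]]; transpose -> D x 2
mo_pads = np.transpose(onnx_pads.reshape([2, -1]))
print(mo_pads)  # [[1 4], [2 5], [3 6]] -- row i holds (begin_i, end_i) for dim i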
Example #2
    def test_two_inputs_value_infer(self):
        in_value = np.random.rand(*self.node_attrs['data_in']['shape']).astype(
            np.float32)
        graph = build_graph(
            self.node_attrs,
            self.edge_attrs + [('pads_begin', 'pad'), ('pads_end', 'pad')],
            {'data_in': {
                'value': in_value
            }},
            nodes_with_edges_only=True,
        )

        pads = np.insert(
            self.node_attrs['pads_end']['value'],
            np.arange(len(self.node_attrs['pads_begin']['value'])),
            self.node_attrs['pads_begin']['value'])
        pads = np.reshape(pads,
                          (len(self.node_attrs['pads_begin']['value']), 2))
        ref_value = np.pad(in_value, pads, constant_values=0, mode='constant')

        pad_node = Node(graph, 'pad')
        Pad.infer(pad_node)

        self.assertTrue(
            np.array_equal(
                Node(graph, 'data_out').shape,
                np.array([1, 3, 100 + 1 + 3, 200 + 2 + 4])))
        self.assertTrue(
            np.array_equal(Node(graph, 'data_out').value, ref_value))
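The np.insert call above is what interleaves pads_begin and pads_end into the D x 2 layout np.pad expects; a minimal standalone sketch with made-up values:

import numpy as np

pads_begin = np.array([0, 0, 1, 2], dtype=np.int64)
pads_end = np.array([0, 0, 3, 4], dtype=np.int64)

# inserting each begin value before index i of pads_end produces the
# interleaved flat array [b0, e0, b1, e1, ...]
pads = np.insert(pads_end, np.arange(len(pads_begin)), pads_begin)
pads = pads.reshape((len(pads_begin), 2))
print(pads)  # [[0 0], [0 0], [1 3], [2 4]] -- the (begin, end) rows np.pad takes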
Example #3
    def find_and_replace_pattern(self, graph: Graph):
        for attr_pad in graph.get_op_nodes(op='AttributedPad'):
            # save the original node name to use it in the new Pad op instance
            original_name = attr_pad.soft_get('name', attr_pad.id)

            new_pad = Pad(graph, {
                'mode': attr_pad.soft_get('mode', None),
            }).create_node()
            rename_nodes([(attr_pad, original_name + '/to_be_removed'),
                          (new_pad, original_name)])

            attr_pad.in_port(0).get_connection().set_destination(
                new_pad.in_port(0))
            new_pad.in_port(1).connect(
                Const(graph, {
                    'value': attr_pad.pads[:, 0]
                }).create_node().out_port(0))
            new_pad.in_port(2).connect(
                Const(graph, {
                    'value': attr_pad.pads[:, 1]
                }).create_node().out_port(0))
            if attr_pad.soft_get('mode') == 'constant':
                new_pad.in_port(3).connect(
                    Const(graph, {
                        'value': attr_pad.fill_value
                    }).create_node().out_port(0))

            attr_pad.out_port(0).get_connection().set_source(
                new_pad.out_port(0))
            graph.remove_node(attr_pad.id)
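Because AttributedPad stores its pads attribute in the D x 2 layout, the two column slices above (pads[:, 0] and pads[:, 1]) are all it takes to produce the separate begin/end inputs for the new Pad node. A standalone sketch with made-up values:

import numpy as np

pads = np.array([[0, 0], [0, 0], [1, 3], [2, 4]], dtype=np.int64)  # D x 2
print(pads[:, 0])  # [0 0 1 2] -- pads_begin input
print(pads[:, 1])  # [0 0 3 4] -- pads_end input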
Example #4
    def test_two_inputs_dynamic_value_infer(self):
        in_value = shape_array([dynamic_dimension_value, 3]).reshape(
            (1, 1, 1, 2))
        graph = build_graph(
            self.node_attrs,
            self.edge_attrs + [('pads_begin', 'pad'), ('pads_end', 'pad')],
            {'data_in': {
                'value': in_value,
                'shape': in_value.shape
            }},
            nodes_with_edges_only=True,
        )
        out_shape = (1, 1, 5, 8)
        mask = np.zeros(out_shape, dtype=bool)
        mask[0][0][1][2] = True
        ref_value = np.ma.masked_array(np.zeros(out_shape, dtype=np.int64),
                                       mask=mask,
                                       dtype=np.int64)
        ref_value[0][0][1][3] = 3

        pad_node = Node(graph, 'pad')
        Pad.infer(pad_node)
        output_value = Node(graph, 'data_out').value
        self.assertTrue(
            np.array_equal(Node(graph, 'data_out').shape, ref_value.shape))
        self.assertTrue(strict_compare_tensors(output_value, ref_value))
        self.assertTrue(isinstance(output_value, np.ma.masked_array))
        self.assertTrue(output_value[0][0][1][2] is dynamic_dimension)
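MO represents dynamic dimensions and dynamic element values with np.ma.masked_array, which is what the test above checks: masked elements stand for values unknown at conversion time (dynamic_dimension is MO's name for the masked constant). A minimal sketch using plain NumPy in place of the MO helpers:

import numpy as np

# a 1x1x1x2 tensor whose first element is unknown (masked)
value = np.ma.masked_array([[[[0, 3]]]],
                           mask=[[[[True, False]]]],
                           dtype=np.int64)

print(value[0][0][0][0] is np.ma.masked)  # True -- the dynamic element
print(value[0][0][0][1])                  # 3    -- the static element is kept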
Example #5
    def find_and_replace_pattern(self, graph: Graph):
        for node in graph.get_op_nodes(op='SpaceToBatch') + graph.get_op_nodes(op='BatchToSpace'):
            node.add_input_port(3, skip_if_exist=True)

            # convert TF representation of the pads/crops as [N, 2] to IE representation: [N] and [N]
            transposed_pads = create_op_with_const_inputs(graph, Transpose, {1: int64_array([1, 0])})
            node.in_port(2).get_connection().set_destination(transposed_pads.in_port(0))
            split_pads = create_op_with_const_inputs(graph, Split, {1: int64_array(0)}, {'num_splits': 2})
            transposed_pads.out_port(0).connect(split_pads.in_port(0))
            for port_ind in range(2):
                node.in_port(port_ind + 2).connect(split_pads.out_port(port_ind))
                node.in_port(port_ind + 2).get_connection().insert_node(
                    create_op_with_const_inputs(graph, Squeeze, {1: int64_array([0])}))

            # prepend/append zeros/ones to the related inputs so their length matches the data input rank
            in0_rank = Rank(graph, {'name': node.name + '/rank_0'}).create_node()
            in1_rank = Shape(graph, {'name': node.name + '/rank_1'}).create_node()

            diff_size = Sub(graph, {'name': node.name + '/sub_0'}).create_node()
            diff = Sub(graph, {'name': node.name + '/sub_1'}).create_node()

            const_begin = Const(graph, {'value': int64_array([1])}).create_node()
            const_pad_val = Const(graph, {'value': int64_array([1])}).create_node()

            block_shape = Pad(graph, {'name': node.name + '/aligned_block_shape', 'mode': 'constant'}).create_node()

            # in case of SpaceToBatch begin = pads_begin, end = pads_end
            # in case of BatchToSpace begin = crops_begin, end = crops_end
            new_begin_name = '/aligned_pads_begin'
            new_end_name = '/aligned_pads_end'
            if node.type == 'BatchToSpace':
                new_begin_name = '/aligned_crops_begin'
                new_end_name = '/aligned_crops_end'

            begin = Pad(graph, {'name': node.name + new_begin_name, 'mode': 'constant'}).create_node()
            end = Pad(graph, {'name': node.name + new_end_name, 'mode': 'constant'}).create_node()

            in0_rank_1d = create_op_node_with_second_input(graph, Unsqueeze, int64_array([0]),
                                                           {'name': node.name + '/1d_rank_of_0'}, in0_rank)
            in1_rank_1d = create_op_node_with_second_input(graph, Unsqueeze, int64_array([0]),
                                                           {'name': node.name + '/1d_rank_of_1'}, in1_rank)

            node.in_port(0).get_source().connect(in0_rank.in_port(0))
            node.in_port(1).get_source().connect(in1_rank.in_port(0))
            in0_rank_1d.out_port(0).connect(diff_size.in_port(0))
            in1_rank_1d.out_port(0).connect(diff_size.in_port(1))
            diff_size.out_port(0).connect(diff.in_port(0))
            const_begin.out_port(0).connect(diff.in_port(1))
            const_pad_val.out_port(0).connect(block_shape.in_port(3))

            inputs_array = [block_shape, begin, end]
            for idx, input_to_node in enumerate(inputs_array):
                node.in_port(idx + 1).get_connection().set_destination(input_to_node.in_port(0))
                const_begin.out_port(0).connect(input_to_node.in_port(1))
                diff.out_port(0).connect(input_to_node.in_port(2))
                input_to_node.out_port(0).connect(node.in_port(idx + 1))
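The Transpose/Split/Squeeze chain built above computes, in graph form, the same thing as this standalone NumPy sketch on a made-up TF-style [N, 2] pads tensor:

import numpy as np

tf_pads = np.array([[0, 0], [1, 3], [2, 4]], dtype=np.int64)  # [N, 2]

transposed = tf_pads.transpose([1, 0])      # Transpose with order [1, 0] -> [2, N]
halves = np.split(transposed, 2, axis=0)    # Split with num_splits=2 along axis 0
pads_begin = np.squeeze(halves[0], axis=0)  # Squeeze axis 0 -> [N]
pads_end = np.squeeze(halves[1], axis=0)
print(pads_begin, pads_end)  # [0 1 2] [0 3 4]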
Example #6
    def test_one_input_and_no_pads(self):
        graph = build_graph(
            self.node_attrs,
            self.edge_attrs,
            nodes_with_edges_only=True,
        )
        pad_node = Node(graph, 'pad')
        with self.assertRaisesRegex(AssertionError,
                                    ".*pads attribute is missing.*"):
            Pad.infer(pad_node)
Example #7
    def test_not_enough_inputs(self):
        graph = build_graph(
            self.node_attrs,
            self.edge_attrs + [('pads_begin', 'pad')],
            nodes_with_edges_only=True,
        )
        pad_node = Node(graph, 'pad')
        with self.assertRaisesRegex(AssertionError,
                                    ".*must have 3 or 4 inputs.*"):
            Pad.infer(pad_node)
Example #8
    def find_and_replace_pattern(self, graph: Graph):
        for tfpad in graph.get_op_nodes(op='TFPad'):
            # save the original node name to use it in the new Pad op instance
            original_name = tfpad.soft_get('name', tfpad.id)
            tfpad['name'] = original_name + '/to_be_removed'

            new_pad = Pad(graph, {'mode': tfpad.soft_get('mode', None), }).create_node()
            rename_node(new_pad, original_name)

            tfpad.in_port(0).get_connection().set_destination(new_pad.in_port(0))

            if tfpad.soft_get('mode') == 'constant':
                # the input with fill value is an optional third input in TF
                if not tfpad.in_port(2).disconnected():
                    tfpad.in_port(2).get_connection().set_destination(new_pad.in_port(3))

            # convert TF representation of the pads as [N, 2] to MO representation: [N] and [N]
            transposed_pads = create_op_with_const_inputs(graph, Transpose, {1: int64_array([1, 0])})
            tfpad.in_port(1).get_connection().set_destination(transposed_pads.in_port(0))
            split_pads = create_op_with_const_inputs(graph, Split, {1: int64_array(0)}, {'num_splits': 2})
            transposed_pads.out_port(0).connect(split_pads.in_port(0))
            for port_ind in range(2):
                split_pads.add_output_port(port_ind, skip_if_exist=True)
                new_pad.in_port(port_ind + 1).connect(split_pads.out_port(port_ind))
                new_pad.in_port(port_ind + 1).get_connection().insert_node(
                    create_op_with_const_inputs(graph, Squeeze, {1: int64_array([0])}))

            tfpad.out_port(0).get_connection().set_source(new_pad.out_port(0))
            graph.remove_node(tfpad.id)
Example #9
    def test_two_inputs(self):
        graph = build_graph(
            self.node_attrs,
            self.edge_attrs + [('pads_begin', 'pad'), ('pads_end', 'pad')],
            nodes_with_edges_only=True,
        )
        pad_node = Node(graph, 'pad')
        Pad.infer(pad_node)
        self.assertTrue(
            np.array_equal(
                Node(graph, 'data_out').shape,
                np.array([1, 3, 100 + 1 + 3, 200 + 2 + 4])))
Example #10
    def extract(node):
        attrs = get_mxnet_layer_attrs(node.symbol_dict)
        pads = np.array(list(attrs.tuple('pad_width', int, None)))
        pads = pads.reshape([-1, 2])
        value = attrs.float('constant_value', 0.0)

        node_attrs = {
            'pads': pads,
            'mode': attrs.str('mode', None),
            'fill_value': value,
        }

        Pad.update_node_stat(node, node_attrs)
        return __class__.enabled
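Note the contrast with the ONNX extractor in Example #1: MXNet's pad_width interleaves the values per dimension as (b0, e0, b1, e1, ...), as the reshape above implies, so a plain reshape to [-1, 2] already yields the D x 2 layout and no transpose is needed. A standalone sketch with made-up values:

import numpy as np

# MXNet pad_width: begin/end pairs per dimension, interleaved
pad_width = np.array([0, 0, 0, 0, 1, 3, 2, 4], dtype=np.int64)

mo_pads = pad_width.reshape([-1, 2])
print(mo_pads)  # [[0 0], [0 0], [1 3], [2 4]] -- (begin, end) rows, in order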
Example #11
    def test_two_inputs_and_pads(self):
        graph = build_graph(
            self.node_attrs,
            self.edge_attrs + [('data_pads', 'pad')],
            {
                'pad': {
                    'pads':
                    np.array([[0, 0], [0, 0], [1, 3], [2, 4]], dtype=np.int64)
                }
            },
            nodes_with_edges_only=True,
        )
        pad_node = Node(graph, 'pad')
        with self.assertRaisesRegex(
                AssertionError, ".*unexpected additional input argument.*"):
            Pad.infer(pad_node)
Example #12
    def convert_fft_to_dft(self, graph: Graph, mx_fft: Node):
        mx_fft_name = mx_fft.soft_get('name', mx_fft.id)
        unsqueeze_node = create_op_with_const_inputs(
            graph, Unsqueeze, {1: int64_array([-1])},
            {'name': mx_fft_name + '/Unsqueeze'})
        rank_node = Rank(graph, {'name': mx_fft_name + '/Rank'}).create_node()

        mx_fft_connection = mx_fft.in_port(0).get_connection()
        mx_fft_connection.set_destination(unsqueeze_node.in_port(0))
        mx_fft_connection.get_source().connect(rank_node.in_port(0))

        add_node = create_op_with_const_inputs(graph, Add, {1: int64_array(1)},
                                               {'name': mx_fft_name + '/Add'},
                                               rank_node)
        broadcast_node1 = create_op_with_const_inputs(
            graph, Broadcast, {0: int64_array(0)},
            {'name': mx_fft_name + '/Pad_broadcast'})
        add_node.out_port(0).connect(broadcast_node1.in_port(1))

        scatter_node = create_op_with_const_inputs(
            graph, ScatterUpdate, {
                2: int64_array(1),
                3: int64_array(0)
            }, {'name': mx_fft_name + '/ScatterUpdate'})
        broadcast_node1.out_port(0).connect(scatter_node.in_port(0))
        rank_node.out_port(0).connect(scatter_node.in_port(1))

        pad_node = Pad(graph, {
            'name': mx_fft_name + '/Pad',
            'mode': 'constant'
        }).create_node([unsqueeze_node, broadcast_node1, scatter_node])

        dft_node = create_op_with_const_inputs(
            graph, DFT, {1: int64_array([-1])}, {
                'name': mx_fft_name + '/DFT',
                'in_ports_count': 2
            }, pad_node)

        sub_node = create_op_with_const_inputs(graph, Sub, {1: int64_array(1)},
                                               {'name': mx_fft_name + '/Sub'})
        rank_node.out_port(0).connect(sub_node.in_port(0))
        broadcast_node2 = create_op_with_const_inputs(
            graph, Broadcast, {0: int64_array(0)},
            {'name': mx_fft_name + '/Reshape_broadcast'})
        sub_node.out_port(0).connect(broadcast_node2.in_port(1))
        concat_node = create_op_with_const_inputs(
            graph, Concat, {1: int64_array([-1, 2])}, {
                'name': mx_fft_name + '/New_shape',
                'in_ports_count': 2,
                'axis': 0
            }, broadcast_node2)

        reshape_node = Reshape(graph, {}).create_node([dft_node, concat_node])

        mx_fft.out_port(0).get_connection().set_source(
            reshape_node.out_port(0))
        rename_nodes([(mx_fft, mx_fft_name + '/to_be_removed'),
                      (reshape_node, mx_fft_name)])
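The Broadcast/ScatterUpdate pair above builds, at runtime, a pads_end vector of rank + 1 zeros with a single 1 in the last position, so the Pad node grows the unsqueezed trailing axis from 1 to 2 elements (room for the imaginary part the DFT expects). A hedged NumPy sketch of the value those nodes compute, assuming a rank-3 input:

import numpy as np

rank = 3  # rank of the original input; after Unsqueeze the data has rank + 1 dims

pads_begin = np.zeros(rank + 1, dtype=np.int64)  # Broadcast of scalar 0
pads_end = pads_begin.copy()
pads_end[rank] = 1  # ScatterUpdate: indices=rank, updates=1, axis=0
print(pads_begin, pads_end)  # [0 0 0 0] [0 0 0 1] -- pad only the new last axis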
Example #13
    def replace_op(self, graph: Graph, node: Node):
        # save the original node name to use it in the new Pad op instance
        original_name = node.soft_get('name', node.id)
        rename_node(node, original_name + '/TBR')

        new_pad = Pad(graph, {
            'mode': node.soft_get('mode', None)
        }).create_node()
        rename_node(new_pad, original_name)

        node.in_port(0).get_connection().set_destination(new_pad.in_port(0))

        if node.soft_get('mode') == 'constant':
            # the input with fill value is an optional third input in ONNX
            if not node.in_port(2).disconnected():
                node.in_port(2).get_connection().set_destination(
                    new_pad.in_port(3))
            else:
                new_pad.in_port(3).connect(
                    Const(graph, {
                        'value': 0.0
                    }).create_node().out_port(0))

        # convert ONNX representation of the pads as [2 * N] to MO representation: [N] and [N]
        split_pads = create_op_with_const_inputs(graph, Split,
                                                 {1: int64_array(0)},
                                                 {'num_splits': 2})
        node.in_port(1).get_connection().set_destination(split_pads.in_port(0))
        split_pads.out_port(0).connect(new_pad.in_port(1))
        split_pads.out_port(1).connect(new_pad.in_port(2))

        return [new_pad.id]
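Unlike the attribute-based extractor in Example #1, newer ONNX Pad versions carry the pads as a [2 * N] input (all begin values, then all end values), so splitting it in half along axis 0 directly yields pads_begin and pads_end. A standalone sketch with made-up values:

import numpy as np

onnx_pads = np.array([0, 0, 1, 2, 0, 0, 3, 4], dtype=np.int64)  # [2 * N]

pads_begin, pads_end = np.split(onnx_pads, 2, axis=0)  # Split, num_splits=2
print(pads_begin, pads_end)  # [0 0 1 2] [0 0 3 4]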
Example #14
    def test_one_input(self):
        graph = build_graph(
            self.node_attrs,
            self.edge_attrs,
            {
                'pad': {
                    'pads':
                    np.array([[0, 0], [0, 0], [1, 3], [2, 4]], dtype=np.int64)
                }
            },
            nodes_with_edges_only=True,
        )
        pad_node = Node(graph, 'pad')
        Pad.infer(pad_node)
        self.assertTrue(
            np.array_equal(
                Node(graph, 'data_out').shape,
                np.array([1, 3, 100 + 1 + 3, 200 + 2 + 4])))
Example #15
    def replace_op(self, graph: Graph, node: Node):
        # Note that PyTorch paddings are reversed
        pads_begin = node.module.pad[::2][::-1]
        pads_end = node.module.pad[1::2][::-1]

        pads_begin = Const(graph, {
            'name': node.name + '/pads_begin',
            'value': pads_begin
        }).create_node()
        pads_end = Const(graph, {
            'name': node.name + '/pads_end',
            'value': pads_end
        }).create_node()
        pad_value = Const(graph, {
            'name': node.name + '/pad_value',
            'value': 0.0
        }).create_node()

        pad = Pad(graph, dict(name=node.name)).create_node(
            [node.in_node(0), pads_begin, pads_end, pad_value])
        return [pad.id]
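PyTorch's pad tuple lists the last dimension first, interleaved as (last_begin, last_end, ..., first_begin, first_end); the two stride-then-reverse slices above recover the begins and ends in forward dimension order. A standalone sketch (the tuple value is made up):

# F.pad-style tuple padding the last two dims of a tensor: (w_begin, w_end, h_begin, h_end)
torch_pad = (2, 4, 1, 3)

pads_begin = torch_pad[::2][::-1]  # begins, reversed into forward dim order
pads_end = torch_pad[1::2][::-1]   # ends, likewise
print(pads_begin, pads_end)        # (1, 2) (3, 4) -- h then w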
Example #16
    def extract(node):
        Pad.update_node_stat(node, {'mode': node.pb.attr['mode'].s.decode('utf-8').lower()})
        return __class__.enabled
Example #17
    def extract(node):
        Pad.update_node_stat(node)
        return __class__.enabled
Example #18
    def extract(cls, node):
        Pad.update_node_stat(node)
        return cls.enabled
Example #19
    def find_and_replace_pattern(self, graph: Graph):
        for attr_pad in graph.get_op_nodes(op='AttributedPad'):
            # save the original node name to use it in the new Pad op instance
            original_name = attr_pad.soft_get('name', attr_pad.id)

            new_pad = Pad(graph, {
                'mode': attr_pad.soft_get('mode', None),
            }).create_node()
            rename_nodes([(attr_pad, original_name + '/to_be_removed'),
                          (new_pad, original_name)])

            attr_pad.in_port(0).get_connection().set_destination(
                new_pad.in_port(0))
            new_pad.in_port(1).connect(
                Const(graph, {
                    'value': attr_pad.pads[:, 0]
                }).create_node().out_port(0))
            new_pad.in_port(2).connect(
                Const(graph, {
                    'value': attr_pad.pads[:, 1]
                }).create_node().out_port(0))
            if attr_pad.soft_get('mode') == 'constant':
                # cast the fill value to the data type of the Pad data input
                # (ConvertLike takes the value and a tensor whose type to match)
                convert_pad_value = create_op_with_const_inputs(
                    graph, ConvertLike, {0: attr_pad.fill_value},
                    {'name': original_name + '/pad_value_convert'})
                convert_pad_value.in_port(1).connect(
                    new_pad.in_port(0).get_source())
                new_pad.in_port(3).connect(convert_pad_value.out_port(0))

            attr_pad.out_port(0).get_connection().set_source(
                new_pad.out_port(0))
            graph.remove_node(attr_pad.id)