Code example #1
 def extract(cls, node):
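     # build the Reshape attributes: the operation type and the target shape ('dim')
     # taken from the node's module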
     attrs = {
         'op': __class__.op,
         'dim': node.module.shape,
     }
     Reshape.update_node_stat(node, attrs)
     return cls.enabled
Code example #2
    def find_and_replace_pattern(self, graph: Graph):
        for roll_node in graph.get_op_nodes(op='Roll'):
            if not roll_node.in_port(2).disconnected():
                return
            node_name = roll_node.soft_get('name', roll_node.id)

            # reshape to 1d tensor
            reshape_to_1d = create_op_node_with_second_input(
                graph, Reshape, int64_array([-1]),
                {'name': node_name + '/reshape'})
            roll_node.in_port(0).get_connection().insert_node(reshape_to_1d)

            # add zero const as axes input to roll
            const_zero = Const(graph, {
                'value': int64_array([0]),
                'name': node_name + '/axes'
            }).create_node()
            const_zero.out_port(0).connect(roll_node.in_port(2))

            # reshape to original shape
            shape_of = Shape(graph, {
                'name': node_name + '/shape_of'
            }).create_node()
            reshape_to_1d.in_port(0).get_connection().add_destination(
                shape_of.in_port(0))
            reshape_to_orig_shape = Reshape(graph, {}).create_node()
            rename_nodes([(roll_node, node_name + '/roll'),
                          (reshape_to_orig_shape, node_name)])
            shape_of.out_port(0).connect(reshape_to_orig_shape.in_port(1))
            roll_node.out_port(0).get_connection().insert_node(
                reshape_to_orig_shape)
Code example #3
    def replace_op(self, graph: Graph, node: Node):
        node_name = node.soft_get('name', node.id)
        assert node.has_valid(
            'axis'
        ), 'The node "{}" does not have mandatory attribute "axis"'.format(
            node_name)

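        # decompose into FlattenONNX -> LogSoftmax(axis=1) -> Reshape, where the Reshape
        # restores the original shape captured by a separate ShapeOf node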
        flatten_node = FlattenONNX(graph, {
            'name': node_name + '/FlattenONNX_',
            'axis': node.axis
        }).create_node()
        shape_node = Shape(graph, {
            'name': node_name + '/ShapeOf_'
        }).create_node()
        logsoftmax_node = LogSoftmax(graph, {
            'name': node_name + '/LogSoftmax_',
            'axis': 1
        }).create_node()
        reshape_node = Reshape(graph, {}).create_node()

        rename_nodes([(node, node_name + '/delete'),
                      (reshape_node, node_name)])

        shape_node.out_port(0).connect(reshape_node.in_port(1))
        logsoftmax_node.out_port(0).connect(reshape_node.in_port(0))
        flatten_node.out_port(0).connect(logsoftmax_node.in_port(0))

        source = node.in_port(0).get_source()

        flatten_node.in_port(0).connect(source)
        shape_node.in_port(0).connect(source)

        return [reshape_node.id]
Code example #4
File: reshape_test.py  Project: yury-intel/openvino
 def test_reshape_infer(self, input_value, input_shape, output_shape,
                        ref_value, ref_shape):
     graph = build_graph(
         nodes_attributes, [('input', 'data'), ('data', 'reshape'),
                            ('output_shape', 'output_shape_data'),
                            ('output_shape_data', 'reshape'),
                            ('reshape', 'reshape_out')], {
                                'data': {
                                    'shape': input_shape,
                                    'value': input_value
                                },
                                'output_shape': {
                                    'value': output_shape,
                                    'shape': output_shape.shape
                                },
                                'output_shape_data': {
                                    'value': output_shape,
                                    'shape': output_shape.shape
                                },
                            })
     node = Node(graph, 'reshape')
     Reshape.infer(node)
     if ref_value is not None:
         self.assertTrue(
             strict_compare_tensors(
                 node.out_port(0).data.get_value(), shape_array(ref_value)))
     self.assertTrue(
         strict_compare_tensors(
             node.out_port(0).data.get_shape(), shape_array(ref_shape)))
Code example #5
File: reshape_ext.py  Project: mikhailk62/openvino
 def extract(cls, node):
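     # the target shape may be given as the 'shape' attribute (legacy ONNX Reshape);
     # if it is absent, the shape comes from the second input instead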
     dim = onnx_attr(node, 'shape', 'ints', None)
     if dim is not None:
         dim = int64_array(dim)
         Reshape.update_node_stat(node, {'dim': dim})
     else:
         Reshape.update_node_stat(node)
     return cls.enabled
Code example #6
File: MXFFTToDFT.py  Project: yury-intel/openvino
    def convert_fft_to_dft(self, graph: Graph, mx_fft: Node):
        mx_fft_name = mx_fft.soft_get('name', mx_fft.id)
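        # Emulate the MXNet FFT with DFT: Unsqueeze adds a trailing axis, Pad (whose pads are
        # built from the input rank via Broadcast/ScatterUpdate) extends it to hold the
        # imaginary part, DFT is applied over the last axis, and a final Reshape restores the
        # layout expected by the consumers of the original node.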
        unsqueeze_node = create_op_with_const_inputs(
            graph, Unsqueeze, {1: int64_array([-1])},
            {'name': mx_fft_name + '/Unsqueeze'})
        rank_node = Rank(graph, {'name': mx_fft_name + '/Rank'}).create_node()

        mx_fft_connection = mx_fft.in_port(0).get_connection()
        mx_fft_connection.set_destination(unsqueeze_node.in_port(0))
        mx_fft_connection.get_source().connect(rank_node.in_port(0))

        add_node = create_op_with_const_inputs(graph, Add, {1: int64_array(1)},
                                               {'name': mx_fft_name + '/Add'},
                                               rank_node)
        broadcast_node1 = create_op_with_const_inputs(
            graph, Broadcast, {0: int64_array(0)},
            {'name': mx_fft_name + '/Pad_broadcast'})
        add_node.out_port(0).connect(broadcast_node1.in_port(1))

        scatter_node = create_op_with_const_inputs(
            graph, ScatterUpdate, {
                2: int64_array(1),
                3: int64_array(0)
            }, {'name': mx_fft_name + '/ScatterUpdate'})
        broadcast_node1.out_port(0).connect(scatter_node.in_port(0))
        rank_node.out_port(0).connect(scatter_node.in_port(1))

        pad_node = Pad(graph, {
            'name': mx_fft_name + '/Pad',
            'mode': 'constant'
        }).create_node([unsqueeze_node, broadcast_node1, scatter_node])

        dft_node = create_op_with_const_inputs(
            graph, DFT, {1: int64_array([-1])}, {
                'name': mx_fft_name + '/DFT',
                'in_ports_count': 2
            }, pad_node)

        sub_node = create_op_with_const_inputs(graph, Sub, {1: int64_array(1)},
                                               {'name': mx_fft_name + '/Sub'})
        rank_node.out_port(0).connect(sub_node.in_port(0))
        broadcast_node2 = create_op_with_const_inputs(
            graph, Broadcast, {0: int64_array(0)},
            {'name': mx_fft_name + '/Reshape_broadcast'})
        sub_node.out_port(0).connect(broadcast_node2.in_port(1))
        concat_node = create_op_with_const_inputs(
            graph, Concat, {1: int64_array([-1, 2])}, {
                'name': mx_fft_name + '/New_shape',
                'in_ports_count': 2,
                'axis': 0
            }, broadcast_node2)

        reshape_node = Reshape(graph, {}).create_node([dft_node, concat_node])

        mx_fft.out_port(0).get_connection().set_source(
            reshape_node.out_port(0))
        rename_nodes([(mx_fft, mx_fft_name + '/to_be_removed'),
                      (reshape_node, mx_fft_name)])
Code example #7
    def add_output_reshape(graph: Graph, match: dict):
        """
        Since the MXNet Y output shape is [batch_size, seq_len, hidden_size * num_directions], we need to add a reshape
        from the common format above, [batch_size, num_directions, seq_len, hidden_size], to the MXNet format.
        """
        lstm = match['rnn_layer']
        input = match['input']
        if not lstm.has_num_directions:
            return
        old_data_node = lstm.out_node(0)
        num_directions = 2 if lstm.direction in ['bidirectional'] else 1
        mxnet_shape = lstm.out_node(0).shape.copy()

        if lstm.batch_dim == 0:
            mo_shape = shape_array([
                input.shape[lstm.batch_dim], input.shape[lstm.sequence_dim],
                lstm.hidden_size
            ])
        else:
            mo_shape = shape_array([
                input.shape[lstm.sequence_dim], input.shape[lstm.batch_dim],
                lstm.hidden_size
            ])

        if lstm.has_num_directions:
            mo_shape = shape_insert(mo_shape, 1, np.int64(num_directions))

        lstm_name = lstm.soft_get('name', lstm.id)

        new_data = Op._create_data_node(graph,
                                        name=lstm_name +
                                        '/Data/Reshape_mxnet/',
                                        attrs={'shape': mo_shape})
        graph.remove_edge(lstm.id, old_data_node.id)
        graph.add_edge(lstm.id, new_data.id, key=0, out=0)

        # Add Transpose
        permute_order = Const(
            graph, {
                'name': lstm_name + '/Transpose_mxnet_order',
                'value': int64_array([0, 2, 1, 3])
            }).create_node_with_data()
        permute_data = Transpose(graph, {
            'name': lstm_name + '/Transpose_mxnet/'
        }).create_node_with_data([new_data, permute_order])

        # Add Reshape
        reshape = Reshape(graph, {'name': lstm_name + '/Reshape_mxnet/'})
        reshape_dim_data = Const(
            graph, {
                'name': lstm_name + '/Reshape_mxnet_dim',
                'value': int64_array(unmask_shape(mxnet_shape))
            }).create_node_with_data()

        reshape.create_node_with_data([permute_data, reshape_dim_data],
                                      dict(),
                                      data_nodes=[old_data_node])
Code example #8
    def generate_sub_graph(self, graph: Graph, match: SubgraphMatch):
        # IE DetectionOutput layer consumes flattened confidences and locations tensors.
        # That is why we add reshapes before them.
        locs_node = match.single_input_node(0)
        conf_node = match.single_input_node(1)
        prior_boxes_node = match.single_input_node(2)

        locs_out_nodes = locs_node[0].out_nodes()
        assert len(locs_out_nodes) == 1
        locs_out_node = locs_out_nodes[list(locs_out_nodes.keys())[0]]
        assert locs_out_node.op == "Result", locs_out_node.op
        graph.remove_node(locs_out_node.id)

        conf_out_nodes = conf_node[0].out_nodes()
        assert len(conf_out_nodes) == 1
        conf_out_node = conf_out_nodes[list(conf_out_nodes.keys())[0]]
        assert conf_out_node.op == "Result", conf_out_node.op
        graph.remove_node(conf_out_node.id)

        # reshape operation to flatten locations tensor
        const = Const(graph, {'value': int64_array([0, -1])}).create_node()
        reshape_loc_node = Reshape(graph, {}).create_node(
            [locs_node, const], dict(name='DetectionOutput_Reshape_loc_'))

        # reshape operation to flatten confidence tensor
        reshape_conf_node = Reshape(graph, {}).create_node(
            [conf_node, const], dict(name='DetectionOutput_Reshape_conf_'))

        # remove the Result node after the priors node
        assert prior_boxes_node[0].out_node().op == "Result"
        graph.remove_node(prior_boxes_node[0].out_node().id)

        # reshape operation for prior boxes tensor
        const = Const(graph, {'value': int64_array([1, 2, -1])}).create_node()
        reshape_priors_node = Reshape(graph, {}).create_node(
            [prior_boxes_node, const],
            dict(name='DetectionOutput_Reshape_priors_'))
        # create Detection Output node with three inputs: locations, confidences and prior boxes
        detection_output_op = DetectionOutput(
            graph, match.custom_replacement_desc.custom_attributes)
        detection_output_node = detection_output_op.create_node(
            [reshape_loc_node, reshape_conf_node, reshape_priors_node],
            dict(name=detection_output_op.attrs['type'] + '_'))
        PermuteAttrs.set_permutation(reshape_priors_node,
                                     detection_output_node, None)

        # create Output node to mark DetectionOutput as a graph output operation
        output_op = Result(graph)
        output_op.create_node([detection_output_node], dict(name='sink_'))
        return {}
Code example #9
File: reshape_ext.py  Project: mikhailk62/openvino
    def extract(cls, node):
        attrs = get_mxnet_layer_attrs(node.symbol_dict)
        dim = attrs.tuple("shape", int, None)
        reverse = attrs.bool("reverse", False)
        update_attrs = {
            'dim': int64_array(dim),
            'reverse': reverse
        }
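        # MXNet shape codes -2/-3/-4 and the 'reverse' flag cannot be expressed by a plain
        # Reshape, so the MXReshape operation is kept in those cases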
        for d in dim:
            if d in [-2, -3, -4] or reverse:
                MXReshape.update_node_stat(node, update_attrs)
                return cls.enabled

        # update the attributes of the node
        Reshape.update_node_stat(node, update_attrs)
        return cls.enabled
Code example #10
File: MXFFTToDFT.py  Project: yury-intel/openvino
    def convert_ifft_to_dft(self, graph: Graph, mx_fft: Node):
        mx_fft_name = mx_fft.soft_get('name', mx_fft.id)

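        # reshape the packed real/imaginary input into [..., 2] pairs, apply IDFT over the
        # last axis, then Split and Squeeze to keep only the real component of the result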
        rank_node = Rank(graph, {'name': mx_fft_name + '/rank'}).create_node()
        sub_node = create_op_with_const_inputs(graph, Sub, {1: int64_array(1)},
                                               {'name': mx_fft_name + '/Sub'})
        rank_node.out_port(0).connect(sub_node.in_port(0))
        broadcast_node0 = create_op_with_const_inputs(
            graph, Broadcast, {0: int64_array(0)},
            {'name': mx_fft_name + '/broadcast'})
        sub_node.out_port(0).connect(broadcast_node0.in_port(1))
        concat_node = create_op_with_const_inputs(
            graph, Concat, {1: int64_array([-1, 2])}, {
                'name': mx_fft_name + '/new_shape',
                'in_ports_count': 2,
                'axis': 0
            }, broadcast_node0)

        reshape_node = Reshape(graph, {
            'name': mx_fft_name + '/reshape'
        }).create_node()
        concat_node.out_port(0).connect(reshape_node.in_port(1))

        mx_fft_connection = mx_fft.in_port(0).get_connection()
        mx_fft_connection.set_destination(reshape_node.in_port(0))
        mx_fft_connection.get_source().connect(rank_node.in_port(0))

        dft_node = create_op_with_const_inputs(
            graph, IDFT, {1: int64_array([-1])}, {
                'name': mx_fft_name + '/idft',
                'in_ports_count': 2
            }, reshape_node)

        split_node = create_op_with_const_inputs(
            graph, Split, {1: int64_array(-1)}, {
                'name': mx_fft_name + '/split',
                'num_splits': 2
            }, dft_node)
        squeeze_node = create_op_with_const_inputs(graph, Squeeze,
                                                   {1: int64_array([-1])}, {},
                                                   split_node)

        mx_fft.out_port(0).get_connection().set_source(
            squeeze_node.out_port(0))
        rename_nodes([(mx_fft, mx_fft_name + '/to_be_removed'),
                      (squeeze_node, mx_fft_name)])
Code example #11
    def extract(cls, node):
        param = node.pb.reshape_param

        if param.axis != 0:
            log.error(
                'The operation "Reshape" has attribute "axis" with unsupported value "{}"'
                .format(param.axis))
            return False

        if param.num_axes != -1:
            log.error(
                'The operation "Reshape" has attribute "num_axes" with unsupported value "{}"'
                .format(param.num_axes))
            return False

        Reshape.update_node_stat(node, {
            'dim': list(param.shape.dim),
        })
        return cls.enabled
Code example #12
    def squeeze_initial_states(graph: Graph, match: dict):
        """
        Squeeze input initial states of recurrent node to 2-D shape.
        """
        hidden_init_port = 5
        cell_init_port = 6

        rnn_layer = match['rnn_layer']
        # Add input ports to rnn_layer
        rnn_layer.add_sequence_of_ports(type='in', rng=range(7))
        rnn_layer_name = rnn_layer.soft_get('name', rnn_layer.id)

        assert hidden_init_port in rnn_layer.in_nodes()
        hidden_size = rnn_layer.hidden_size
        shape = Shape(graph, dict(name=rnn_layer_name + '/ShapeOf')).create_node()
        rnn_layer.in_port(0).get_source().connect(shape.in_port(0))

        batch = node_to_get_shape_value_of_indices(shape, int64_array([rnn_layer.batch_dim]))
        new_dim = create_op_node_with_second_input(graph, Concat, second_input_value=int64_array([hidden_size]),
                                                   op_attrs=dict(name=rnn_layer_name + '/HiddenStateResizeDim',
                                                                 in_ports_count=2, axis=0), input_node=batch)
        reshape_h = Reshape(graph, dict(name=rnn_layer_name + '/HiddenStateResize', override_output_shape=True)).create_node()
        new_dim.out_port(0).connect(reshape_h.in_port(1))
        rnn_layer.in_port(hidden_init_port).get_connection().insert_node(reshape_h)

        if rnn_layer.op == 'LSTM':
            assert cell_init_port in rnn_layer.in_nodes()

            reshape_c = Reshape(graph, dict(name=rnn_layer_name + '/CellStateResize', override_output_shape=True)).create_node()
            new_dim.out_port(0).connect(reshape_c.in_port(1))
            rnn_layer.in_port(cell_init_port).get_connection().insert_node(reshape_c)
Code example #13
    def replace_sub_graph(self, graph: Graph, match: dict):
        node = match['mxreshape']

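        # build the target shape dimension by dimension from a ShapeOf of the input (via
        # self.resolve), concatenate the pieces and feed the result into a new Reshape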
        input_index = 0
        reshape_index = 0
        shape_node = Shape(graph, dict(name=node.id +
                                       '/ShapeMXReshape')).create_node()
        shape_node.in_port(0).connect(node.in_port(0).get_source())
        output_dims_nodes = []
        for d in node.dim:
            if reshape_index < len(node.dim):
                input_index, reshape_index, output_dims_nodes = self.resolve(
                    input_index, reshape_index, node.dim, shape_node,
                    output_dims_nodes)

        concat_node = Concat(
            shape_node.graph,
            dict(name=shape_node.id + '/ConcatMXReshape_',
                 axis=0,
                 in_ports_count=len(output_dims_nodes))).create_node()

        for in_port_index, dim_node in enumerate(output_dims_nodes):
            concat_node.in_port(in_port_index).connect(dim_node.out_port(0))

        reshape_node = Reshape(graph,
                               dict(name=node.id + '/Reshape_')).create_node()
        reshape_node.in_port(1).connect(concat_node.out_port(0))
        node.in_port(0).get_connection().set_destination(
            reshape_node.in_port(0))
        node.out_port(0).get_connection().set_source(reshape_node.out_port(0))
Code example #14
    def replace_sub_graph(self, graph: Graph, match: dict):
        node = match['flatten']
        name = node.soft_get('name', node.id)

        assert node.has_valid(
            'axis'
        ), 'Flatten {} should have the `axis` attribute extracted, but it is missing'.format(
            name)
        axis = node.axis

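        # axis == 0 flattens everything into [1, -1]; axis == 1 keeps the batch dimension
        # ([0, -1]); any other axis requires the input shape to compute the product of the
        # collapsed dimensions at run time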
        reshape_node = Reshape(graph, {
            'name': node.id + '/Reshape'
        }).create_node()

        if axis == 0:
            dim = Const(
                graph, {
                    'value': int64_array([1, -1]),
                    'name': reshape_node.name + '/shape'
                }).create_node()
        elif axis == 1:
            dim = Const(
                graph, {
                    'value': int64_array([0, -1]),
                    'name': reshape_node.name + '/shape'
                }).create_node()
        else:
            shape = Shape(graph, {'name': name + '/input_shape'}).create_node()

            idxs = list(range(axis)) if axis > 0 else list(range(axis, 0))

            axis_shape_portion = node_to_get_shape_value_of_indices(
                shape, idxs)
            first_dims = create_op_node_with_second_input(
                graph, ReduceProd, int64_array([0]), {
                    'name': name + '/first_dims',
                    'keep_dims': True
                })
            second_dims = Const(graph, {
                'value': int64_array([-1]),
                'name': name + '/second_dims'
            }).create_node()

            node.in_port(0).get_source().connect(shape.in_port(0))
            axis_shape_portion.out_port(0).connect(first_dims.in_port(0))

            order_of_dims = [first_dims, second_dims
                             ] if axis > 0 else [second_dims, first_dims]

            dim = new_shape_node_from_shape_nodes(order_of_dims)

        reshape_node.in_port(1).connect(dim.out_port(0))

        node.out_port(0).get_connection().set_source(reshape_node.out_port(0))
        node.in_port(0).get_connection().set_destination(
            reshape_node.in_port(0))
Code example #15
    def find_and_replace_pattern(self, graph: Graph):
        layout = graph.graph['layout']
        for eltwise_op_node in graph.get_op_nodes(is_eltwise=True):
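            # for 4D/5D eltwise outputs, reshape 1-D inputs whose length equals the number of
            # features to a layout-aware broadcastable shape (e.g. [1, C, 1, 1] for NCHW)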
            out_shape = eltwise_op_node.out_port().data.get_shape()
            if 4 <= len(out_shape) <= 5:
                out_features = out_shape[get_features_dim(layout, len(out_shape))]
                for port, node in eltwise_op_node.in_nodes().items():
                    if len(node.shape) != len(out_shape) and len(node.shape) == 1 and out_features == node.shape[0]:
                        new_shape = shape_for_layout(layout, batch=1, features=out_features, height=1, width=1,
                                                     depth=1 if len(out_shape) == 5 else None)
                        dim_const = Const(graph, {'value': new_shape, 'name': node.id + '/Dim'}).create_node()
                        reshape_op = Reshape(graph, attrs={'dim': new_shape, 'name': node.id + '/Broadcast'}).create_node()

                        eltwise_op_node.in_port(port).get_source().node.out_port(0).get_connection().set_destination(reshape_op.in_port(0))
                        reshape_op.in_port(1).connect(dim_const.out_port(0))

                        reshape_op.out_port(0).connect(eltwise_op_node.in_port(port))
Code example #16
    def replace_sub_graph(self, graph: Graph, match: dict):
        node = match['flatten']
        name = node.soft_get('name', node.id)

        assert node.has_valid('axis'), 'Flatten {} has no mandatory `axis` attribute'.format(name)
        assert node.has_valid('end_axis'), 'Flatten {} has no mandatory `end_axis` attribute'.format(name)

        axis = node.axis
        end_axis = node.end_axis

        if end_axis == -1 and axis >= 0:
            begin_dims = Const(graph, {'value': int64_array([0] * axis)}).create_node()
            middle_dim = Const(graph, {'value': int64_array([-1])}).create_node()
            end_dims = Const(graph, {'value': int64_array([])}).create_node()
        else:
            rank = Rank(graph, {'name': name + '/input_rank'}).create_node()
            node.in_port(0).get_source().connect(rank.in_port(0))

            shape = Shape(graph, {'name': name + '/input_shape'}).create_node()
            node.in_port(0).get_source().connect(shape.in_port(0))

            begin_dims = get_shape_values_by_range_idxs(
                shape=shape, rank=rank, begin=0, end=axis)
            middle_dims = get_shape_values_by_range_idxs(
                shape=shape, rank=rank, begin=axis, end=end_axis, include_end=True)
            end_dims = get_shape_values_by_range_idxs(
                shape=shape, rank=rank, begin=end_axis, end=-1, include_begin=False, include_end=True)

            middle_dim = create_op_node_with_second_input(graph, ReduceProd, int64_array([0]), {'keep_dims': True})
            middle_dims.out_port(0).connect(middle_dim.in_port(0))

        dim = new_shape_node_from_shape_nodes([begin_dims, middle_dim, end_dims])

        original_name = node.soft_get('name')
        abandoned_name = original_name + '/ShouldBeDeleted'
        reshape_node = Reshape(graph, {}).create_node()
        # Keep node with the same name to avoid confuse with renaming
        rename_nodes([(node, abandoned_name), (reshape_node, original_name)])
        reshape_node.in_port(1).connect(dim.out_port(0))

        node.out_port(0).get_connection().set_source(reshape_node.out_port(0))
        node.in_port(0).get_connection().set_destination(reshape_node.in_port(0))
Code example #17
    def replace_pattern(self, graph: Graph, match: dict):
        node = match['op']
        node_name = node.soft_get('name', node.id)
        node.is_training = False

        shape = node.in_port(1).data.get_shape()
        assert shape is not None, 'The shape of scale input of the BatchNorm node {} is not defined'.format(
            node.name)

        bn_mean = Const(
            graph, {
                'name': node_name + '/mean',
                'value': np.zeros(shape, dtype=np.float32),
                'override_output_shape': True
            }).create_node()
        bn_std = Const(
            graph, {
                'name': node_name + '/std',
                'value': np.ones(shape, dtype=np.float32),
                'override_output_shape': True
            }).create_node()
        node.in_port(3).get_connection().set_source(bn_mean.out_port(0))
        node.in_port(4).get_connection().set_source(bn_std.out_port(0))

        # save the original shape
        original_shape = Shape(
            graph, {
                'name': node.in_port(0).get_source().node.soft_get('name')
            }).create_node()
        original_shape.in_port(0).connect(node.in_port(0).get_source())

        input_rank = len(node.in_port(0).data.get_shape())
        rng = create_op_with_const_inputs(graph, Range, {
            0: int64_array(1),
            1: int64_array(input_rank - 1),
            2: int64_array(1)
        }, {
            'name': node_name + '/Range',
            'output_type': np.int64
        })
        mvn = MVN(
            graph, {
                'name': node_name + '/mvn_',
                'eps': node.soft_get('eps', 1e-6),
                'eps_mode': 'inside_sqrt',
                'normalize_variance': 1,
                'override_output_shape': True
            }).create_node()
        node.in_port(0).get_connection().insert_node(mvn)
        mvn.in_port(1).connect(rng.out_port(0))

        reshape_4d = create_op_node_with_second_input(
            graph, Reshape, int64_array([1, -1, 0, 0]), {
                'override_output_shape': True,
                'name': node_name + '/fused_batch_and_channels'
            })
        mvn.in_port(0).get_connection().insert_node(reshape_4d)

        # restore original shape
        reshape_back = Reshape(graph, {
            'name': node_name + '/restore_shape',
            'override_output_shape': True
        }).create_node()
        reshape_back.in_port(1).connect(original_shape.out_port(0))
        mvn.out_port(0).get_connection().insert_node(reshape_back)
Code example #18
    def append_variances(priors_scale_node: Node, variance: list):
        graph = priors_scale_node.graph
        name = priors_scale_node.name

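        # tile the variance values so that there is one set per prior box, then stack the
        # reshaped priors and the tiled variances into a single [1, 2, -1] tensor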
        sp_shape = Shape(graph, {'name': name + '/shape'}).create_node()
        priors_scale_node.out_port(0).connect(sp_shape.in_port(0))

        begin = Const(graph, {'value': int64_array([-2])}).create_node()
        end = Const(graph, {'value': int64_array([-1])}).create_node()
        stride = Const(graph, {'value': int64_array([1])}).create_node()
        shape_part_for_tiling = StridedSlice(graph, {'name': name + '/get_-2_dim', 'begin_mask': int64_array([1]),
                                                     'end_mask': int64_array([1]), 'new_axis_mask': int64_array([0]),
                                                     'shrink_axis_mask': int64_array([0]),
                                                     'ellipsis_mask': int64_array([0])}).create_node()

        sp_shape.out_port(0).connect(shape_part_for_tiling.in_port(0))
        begin.out_port(0).connect(shape_part_for_tiling.in_port(1))
        end.out_port(0).connect(shape_part_for_tiling.in_port(2))
        stride.out_port(0).connect(shape_part_for_tiling.in_port(3))

        shape_concat = create_op_node_with_second_input(graph, Concat, int64_array([4]),
                                                        {'name': name + '/shape_for_tiling', 'in_ports_count': 2,
                                                         'axis': int64_array(0)},
                                                        shape_part_for_tiling)

        variance = Const(graph, {'name': name + '/variance', 'value': float32_array(variance)}).create_node()
        tile = Broadcast(graph, {'name': name + '/variance_tile'}).create_node()
        variance.out_port(0).connect(tile.in_port(0))
        shape_concat.out_port(0).connect(tile.in_port(1))

        reshape_dim = Const(graph, {'value': int64_array([-1, 4])}).create_node()
        sp_reshape = Reshape(graph, {'name': name + '/reshape'}).create_node()
        sp_reshape.in_port(0).connect(priors_scale_node.out_port(0))
        sp_reshape.in_port(1).connect(reshape_dim.out_port(0))

        concat = Concat(graph,
                        {'name': name + '/priors_concat', 'axis': int64_array(0), 'in_ports_count': 2}).create_node()
        sp_reshape.out_port(0).connect(concat.in_port(0))
        tile.out_port(0).connect(concat.in_port(1))

        output_dims = Const(graph, {'value': int64_array([1, 2, -1])}).create_node()
        output_node = Reshape(graph, {'name': name + '/3D_priors_wth_variances'}).create_node()
        concat.out_port(0).connect(output_node.in_port(0))
        output_dims.out_port(0).connect(output_node.in_port(1))

        return output_node
Code example #19
    def transform_graph(self, graph: Graph, replacement_descriptions: dict):
        parameter_node = graph.get_op_nodes(op='Parameter')[0]
        parameter_node['data_type'] = data_type_str_to_np(
            parameter_node.graph.graph['cmd_params'].data_type)

        # remove existing Result operations to remove unsupported sub-graph
        graph.remove_nodes_from(
            [node.id
             for node in graph.get_op_nodes(op='Result')] + ['detections'])

        # find the node that produces the final result of applying the mean value and scale to the input tensor,
        # then connect it to the input of the first convolution of the model, so that the image pre-processing
        # (padding and resizing) is removed from the model
        preprocessing_input_node_id = replacement_descriptions[
            'preprocessing_input_node']
        assert preprocessing_input_node_id in graph.nodes, 'The node with name "{}" is not found in the graph. This ' \
                                                           'should be the last node before image normalization and is specified' \
                                                           ' in the json file.'.format(preprocessing_input_node_id)
        preprocessing_input_node = Node(graph, preprocessing_input_node_id)
        consumer_node = preprocessing_input_node.out_port(
            0).get_connection().get_destination().node
        consumer_node.in_port(0).get_connection().set_source(
            parameter_node.out_port(0))

        preprocessing_output_node_id = replacement_descriptions[
            'preprocessing_output_node']
        assert preprocessing_output_node_id in graph.nodes, 'The node with name "{}" is not found in the graph. This ' \
                                                            'node should provide scaled image output and is specified' \
                                                            ' in the json file.'.format(preprocessing_output_node_id)
        preprocessing_output_node = Node(graph, preprocessing_output_node_id)
        preprocessing_output_node.out_port(0).disconnect()

        convolution_nodes = [
            n for n in graph.pseudo_topological_sort()
            if n.soft_get('type') == 'Convolution'
        ]
        convolution_nodes[0].in_port(0).get_connection().set_source(
            preprocessing_output_node.out_port(0))

        # create prior boxes (anchors) generator
        aspect_ratios = replacement_descriptions['aspect_ratios']
        assert len(aspect_ratios) % 2 == 0
        aspect_ratios = list(zip(aspect_ratios[::2], aspect_ratios[1::2]))
        priors_generator = self.AnchorGenerator(
            min_level=int(replacement_descriptions['min_level']),
            aspect_ratios=aspect_ratios,
            num_scales=int(replacement_descriptions['num_scales']),
            anchor_scale=replacement_descriptions['anchor_scale'])

        prior_boxes = []
        for i in range(100):
            inp_name = 'box_net/box-predict{}/BiasAdd'.format('_%d' %
                                                              i if i else '')
            if inp_name not in graph:
                break
            widths, heights = priors_generator.get(i)
            prior_box_op = PriorBoxClusteredOp(
                graph, {
                    'width': mo_array(widths),
                    'height': mo_array(heights),
                    'clip': 0,
                    'flip': 0,
                    'variance': replacement_descriptions['variance'],
                    'offset': 0.5
                })
            prior_boxes.append(
                prior_box_op.create_node(
                    [Node(graph, inp_name), parameter_node]))

        # concatenate prior box operations
        concat_prior_boxes = Concat(graph, {'axis': -1}).create_node()
        for idx, node in enumerate(prior_boxes):
            concat_prior_boxes.add_input_port(idx)
            concat_prior_boxes.in_port(idx).connect(node.out_port(0))

        conf = Sigmoid(graph, dict(name='concat/sigmoid')).create_node(
            [Node(graph, 'concat')])
        reshape_size_node = Const(graph, {
            'value': int64_array([0, -1])
        }).create_node([])
        logits = Reshape(graph, dict(name=conf.name + '/Flatten')).create_node(
            [conf, reshape_size_node])
        deltas = Reshape(graph, dict(name='concat_1/Flatten')).create_node(
            [Node(graph, 'concat_1'), reshape_size_node])

        # revert convolution boxes prediction weights from yxYX to xyXY (convolutions share weights and bias)
        weights = Node(graph, 'box_net/box-predict/pointwise_kernel')
        weights.value = weights.value.reshape(-1, 4)[:, [1, 0, 3, 2]].reshape(
            weights.shape)
        bias = Node(graph, 'box_net/box-predict/bias')
        bias.value = bias.value.reshape(-1,
                                        4)[:, [1, 0, 3, 2]].reshape(bias.shape)

        detection_output_node = DetectionOutput(
            graph,
            dict(
                name='detections',
                share_location=1,
                background_label_id=int(
                    replacement_descriptions['num_classes']) + 1,
                nms_threshold=replacement_descriptions['nms_threshold'],
                confidence_threshold=replacement_descriptions[
                    'confidence_threshold'],
                top_k=100,
                keep_top_k=100,
                code_type='caffe.PriorBoxParameter.CENTER_SIZE',
            )).create_node([deltas, logits, concat_prior_boxes])

        output_op = Result(graph, dict(name='output'))
        output_op.create_node([detection_output_node])
Code example #20
    def replace_sub_graph(self, graph: Graph, match: dict):
        node = match['op']
        name = node.soft_get('name', node.id)
        axis = node.axis
        input_shape_node = Shape(graph, {
            'name': name + '/ShapeOf'
        }).create_node()
        range_node = create_op_with_const_inputs(graph, Range, {
            0: mo_array(node.start),
            2: mo_array(node.step)
        }, {'name': name + '/Range'})
        node.in_port(0).get_connection().set_destination(
            input_shape_node.in_port(0))

        if axis is not None:
            '''
            Replace the arange_like op with the sub-graph:
            Shape - Gather - Range
            '''
            gather_node = create_op_with_const_inputs(graph, Gather, {
                1: int64_array([axis]),
                2: int64_array(0)
            }, {'name': name + '/Gather'})
            input_shape_node.out_port(0).connect(gather_node.in_port(0))
            gather_node.out_port(0).connect(range_node.in_port(1))
            node.out_port(0).get_connection().set_source(
                range_node.out_port(0))
            rename_nodes([(node, name + '/ShouldBeDeleted'),
                          (range_node, name)])
        else:
            r'''
            Replace the arange_like op with the sub-graph:
                    |
                 ShapeOf -----------|
                    |               |
                 ReduceProd         |
                    |               |
                  Range             |
                    |               |
                 Reshape -----------|
                    |
            '''

            flattened_shape_node = create_op_with_const_inputs(
                graph, ReduceProd, {1: int64_array([0])}, {
                    'name': input_shape_node.name + '/ReduceProd',
                    'keep_dims': True
                })
            reshape_backward_node = Reshape(graph, {
                'name': name + '/Reshape_backward'
            }).create_node()

            input_shape_node.out_port(0).connect(
                flattened_shape_node.in_port(0))
            flattened_shape_node.out_port(0).connect(range_node.in_port(1))
            range_node.out_port(0).connect(reshape_backward_node.in_port(0))
            input_shape_node.out_port(0).connect(
                reshape_backward_node.in_port(1))
            node.out_port(0).get_connection().set_source(
                reshape_backward_node.out_port(0))
            rename_nodes([(node, name + '/ShouldBeDeleted'),
                          (reshape_backward_node, name)])

        if node.repeat != 1:
            r"""
            First, we generate the correct stop value for Range as new_stop_value = stop_value // repeat + 1.
            Then each value of the interval is repeated using Tile. Since this can produce a longer interval
            than needed, it is trimmed with Slice.

            The sub-graph after the Range node will look like:

            Range - Reshape([-1, 1]) - Tile([1, repeat]) - Reshape(-1) - Slice

            """

            if node.repeat < 1:
                raise Error(
                    "Unexpected value {} of the attribute 'repeat' for the node {}"
                    .format(node.repeat, name))

            div_node = create_op_with_const_inputs(
                graph, Div, {1: int64_array([node.repeat])},
                {'name': name + '/Divide'})
            add_node = create_op_with_const_inputs(
                graph, Add, {1: int64_array([1])},
                {'name': div_node.name + '/Add'})
            cast_node = Cast(graph, {
                'name': name + '/ConvertToI64',
                'dst_type': np.int64
            }).create_node()

            cast_node.out_port(0).connect(div_node.in_port(0))
            div_node.out_port(0).connect(add_node.in_port(0))
            range_node.in_port(1).get_connection().set_destination(
                cast_node.in_port(0))
            add_node.out_port(0).connect(range_node.in_port(1))

            tile_forward_reshape = create_op_with_const_inputs(
                graph, Reshape, {1: int64_array([-1, 1])},
                {'name': range_node.name + '/ForwardReshape'})
            tile = create_op_with_const_inputs(
                graph, Tile, {1: int64_array([1, node.repeat])},
                {'name': tile_forward_reshape.name + '/Tile'})
            tile_backward_reshape = create_op_with_const_inputs(
                graph, Reshape, {1: int64_array([-1])},
                {'name': tile.name + '/BackwardReshape'})
            slice_node = create_op_with_const_inputs(
                graph, Slice, {
                    1: int64_array([0]),
                    3: int64_array([0]),
                    4: int64_array([1])
                }, {'name': tile_backward_reshape.name + '/Slice'})

            tile_forward_reshape.out_port(0).connect(tile.in_port(0))
            tile.out_port(0).connect(tile_backward_reshape.in_port(0))
            tile_backward_reshape.out_port(0).connect(slice_node.in_port(0))
            slice_node.in_port(2).connect(div_node.in_port(0).get_source())

            range_node.out_port(0).get_connection().set_source(
                slice_node.out_port(0))
            range_node.out_port(0).connect(tile_forward_reshape.in_port(0))

            if axis is not None:
                rename_nodes([(range_node, name + '/Range'),
                              (slice_node, name)])

        # MXNet arange_like op has no stop attribute and the result tensor always matches the input shape, so
        # we have to correct the stop value for the Range node if step != 1 or start != 0
        if node.step != 1:
            # If step attribute is not integer, we will generate an interval with a larger size and then reduce it
            # using Slice
            true_elements_count_port = range_node.in_port(1).get_source()
            mul_value = np.ceil(node.step) if node.step > 0 else np.floor(
                node.step)
            stop_value = create_op_with_const_inputs(
                graph,
                Mul,
                port_value_dict={1: mo_array(np.ceil(mul_value))},
                op_attrs={'name': range_node.name + '/Stop'})
            range_node.in_port(1).get_connection().insert_node(stop_value)

            slice_range_values = create_op_with_const_inputs(
                graph, Slice, {
                    1: int64_array([0]),
                    3: int64_array([0]),
                    4: int64_array([1])
                }, {'name': range_node.name + '/Slice'})
            slice_range_values.in_port(2).connect(true_elements_count_port)
            range_node.out_port(0).get_connection().insert_node(
                slice_range_values)

            if axis is not None and node.repeat == 1:
                rename_nodes([(range_node, name + '/Range'),
                              (slice_range_values, name)])

        if node.start != 0:
            correct_stop_value = create_op_with_const_inputs(
                graph,
                Add,
                port_value_dict={1: mo_array(node.start)},
                op_attrs={'name': range_node.name + '/Correct_Stop'})
            range_node.in_port(1).get_connection().insert_node(
                correct_stop_value)

        # Range node supports only scalar inputs
        squeeze_node = create_op_with_const_inputs(
            graph,
            Squeeze,
            port_value_dict={1: int64_array(0)},
            op_attrs={"name": range_node.name + '/Stop/Squeeze'})
        range_node.in_port(1).get_connection().insert_node(squeeze_node)
Code example #21
    def replace_pattern(self, graph: Graph, match: dict):
        assert match['operator'].has('multiplication_transparent_ports')

        port = match['operator'].input_ports_with(match['quantized'])
        assert len(port) >= 1
        if len(port) > 1:
            log.debug(
                'BinarizeWeightsM1P1 cannot apply transformation for data {} because it is consumed more'
                ' than once'.format(match['quantized'].name))
            return

        assert len(port) == 1
        port = port[0]
        applicable = [
            pair for pair in match['operator'].multiplication_transparent_ports
            if pair[0] == port
        ]
        if len(applicable) == 0:
            return

        # Look at the 3rd and 4th inputs of FakeQuantize -- they hold the constants that should be passed through.
        # Assume that the constant that should be passed through is a scalar.
        quantize = match['quantize']
        output_low = quantize.in_node(3)
        output_high = quantize.in_node(4)

        quantize_name = quantize.soft_get('name', quantize.id)

        if not output_low.has_valid('value') and not output_high.has_valid(
                'value'):
            return

        output_low = output_low.value
        output_high = output_high.value

        # This pass is applicable for binarization only. Other intX variants are not relevant.
        if quantize.levels != 2:
            return

        # Recognize two cases: 0/+1 and -1/+1.
        zp1 = np.all(output_low == 0) or np.all(output_high == 0)
        m1p1 = np.all(-output_low == output_high)
        if (not zp1 and not m1p1) or (zp1 and m1p1):
            log.debug(
                'BinarizeWeightsM1P1 cannot apply transformation for data {} because it doesn\'t have one of'
                ' the 0/+1 or -1/+1 forms.'.format(match['quantized'].name))
            return

        # TODO: Extract real scalar from 3rd and 4th inputs; reusing original tensors is dangerous because
        #       it may have incompatible shape.

        mult_term = quantize.in_node(3) if np.all(
            output_high == 0) else quantize.in_node(4)

        new_shape = Const(
            graph, {
                'name': quantize_name + '/Reshape/Shape',
                'value': int64_array([-1, 1, 1])
            }).create_node_with_data()
        reshape = Reshape(graph, {
            'name': quantize_name + '/Reshape'
        }).create_node_with_data([mult_term, new_shape])

        # Patch inflow path (by dividing by mult_term)
        # Put a new Pow/Mul combination here:
        #       ---->---- (here)---> data ---> [3rd/4th ports]quantize ---> quantized ---> operator

        if len(match['quantized'].out_nodes()) > 1:
            log.debug(
                'BinarizeWeightsM1P1: len(match[\'quantized\'].out_nodes()) > 1'
            )
            return
        power_of_exponent = Const(graph, {
            'name': quantize_name + '/DivNormalize/Power',
            'value': mo_array(-1.0)
        }).create_node_with_data()
        div_op = Pow(graph, {'name': quantize_name + '/DivNormalize'})
        div_output = div_op.create_node_with_data(
            [mult_term, power_of_exponent])

        for i in [3, 4]:
            match['quantize'].insert_node_with_data_before(
                match['quantize'].in_node(i),
                Mul,
                dict(name=quantize_name + '/MulNormalize'),
                additional_inputs=[div_output],
            )

        match['quantized'].value = None  # reset value because it will be recomputed
        match['quantize'].infer(match['quantize'])

        # Put a complimentary new Mul node here:   operator -->---(here)-----> operator.out_node()

        match['operator'].insert_node_with_data_after(
            match['operator'].out_node(),
            Mul,
            dict(name=match['operator'].name + '/MulNormalize'),
            [reshape],
        )

        # Disable 'operator' fusion with linear ops, otherwise it will annihilate changes that we just made
        match['operator']['can_be_fused'] = False
Code example #22
    def replace_pattern(self, graph: Graph, match: Dict[str, Node]):
        group_norm_node = match['op']
        group_norm_num_input_dims = len(group_norm_node.in_port(0).data.get_shape())

        # node computing initial GroupNorm input shape
        initial_shape_op_node = Shape(graph, {'name': group_norm_node.name + '/Shape'}).create_node()
        initial_shape_op_node.in_port(0).connect(group_norm_node.in_port(0).get_source())

        initial_shape_op_node_float = Cast(
            graph, {'name': initial_shape_op_node.name + '/to_float',
                    'dst_type': data_type_str_to_np(graph.graph['cmd_params'].data_type)}).create_node()
        initial_shape_op_node.out_port(0).connect(initial_shape_op_node_float.in_port(0))

        initial_batch_dim_node = node_to_get_batch_value(initial_shape_op_node_float)
        initial_features_dim_node = node_to_get_features_dimension_value(initial_shape_op_node_float)
        initial_spatial_dims_node_int = node_to_get_spatial_dimensions_value(initial_shape_op_node)
        initial_spatial_dims_node = Cast(
            graph, {'name': initial_spatial_dims_node_int.name + '/to_float',
                    'dst_type': data_type_str_to_np(graph.graph['cmd_params'].data_type)}).create_node()
        initial_spatial_dims_node_int.out_port(0).connect(initial_spatial_dims_node.in_port(0))

        group_size_node = Const(graph, {'value': int64_array([group_norm_node.num_groups]),
                                        'name': group_norm_node.name + '/GroupSize'}).create_node()

        # calculate "features // group_size" value
        reciprocal_group_size_node = Const(graph, {'value': np.array([1.0 / group_norm_node.num_groups]),
                                                   'name': group_norm_node.name + '/ReciprocalGroupSize'}).create_node()

        c_div_g_node = Mul(graph, {}).create_node()
        c_div_g_node.in_port(0).connect(initial_features_dim_node.out_port(0))
        c_div_g_node.in_port(1).connect(reciprocal_group_size_node.out_port(0))

        batch_mul_group_size_node = Mul(graph, {}).create_node()
        batch_mul_group_size_node.in_port(0).connect(initial_batch_dim_node.out_port(0))
        batch_mul_group_size_node.in_port(1).connect(group_size_node.out_port(0))

        # create new node which concatenates several dims to one
        new_shape_node_float = new_shape_node_from_shape_nodes([batch_mul_group_size_node, c_div_g_node,
                                                                initial_spatial_dims_node])
        new_shape_node = Cast(graph,
                              {'name': new_shape_node_float.name + '/to_int64', 'dst_type': np.int64}).create_node()
        new_shape_node_float.out_port(0).connect(new_shape_node.in_port(0))

        reshape_for_mvn_node = Reshape(graph, {}).create_node()

        group_norm_node.in_port(0).get_connection().set_destination(reshape_for_mvn_node.in_port(0))
        reshape_for_mvn_node.in_port(1).connect(new_shape_node.out_port(0))

        # Reshape the gamma and beta constants to correct layout from [C] to [1,C], [1,C,1], [1,C,1,1] etc
        gamma_beta_shape = np.ones([group_norm_num_input_dims], dtype=np.int64)
        gamma_beta_shape[1] = -1

        gamma_value = group_norm_node.in_port(1).get_source().data.get_value()
        beta_value = group_norm_node.in_port(2).get_source().data.get_value()
        assert gamma_value is not None, 'The gamma should be constant'
        assert beta_value is not None, 'The beta should be constant'
        gamma_value = np.reshape(gamma_value, gamma_beta_shape)
        group_norm_node.in_port(1).get_source().data.set_value(gamma_value)
        beta_value = np.reshape(beta_value, gamma_beta_shape)
        group_norm_node.in_port(2).get_source().data.set_value(beta_value)

        # MVN
        mvn_node = MVN(graph, {'name': group_norm_node.name + '/MVN',
                               'normalize_variance': 1,
                               'eps': group_norm_node.eps,
                               'eps_mode': 'inside_sqrt'}).create_node()
        mvn_node.in_port(0).connect(reshape_for_mvn_node.out_port(0))

        # MVN axes
        _, rank = get_shape_and_rank_nodes_by_port(mvn_node.in_port(0).get_connection().get_source(),
                                                   return_as_a_scalar=True)
        rng = create_op_with_const_inputs(graph, Range, {0: int64_array(1), 2: int64_array(1)},
                                          {'name': group_norm_node.name + '/Range', 'output_type': np.int64})
        mvn_node.in_port(1).connect(rng.out_port(0))
        rng.in_port(1).connect(rank.out_port(0))

        # reshape to the initial shape before multiplying with gamma and adding beta
        reshape_to_initial_shape_node = Reshape(graph, {}).create_node()
        reshape_to_initial_shape_node.in_port(0).connect(mvn_node.out_port(0))
        reshape_to_initial_shape_node.in_port(1).connect(initial_shape_op_node.out_port(0))

        mul_node = Mul(graph, {'name': mvn_node.name + '/Mul'}).create_node()
        mul_node.in_port(0).connect(reshape_to_initial_shape_node.out_port(0))
        group_norm_node.in_port(1).get_connection().set_destination(mul_node.in_port(1))

        add_node = Add(graph, {'name': mul_node.name + '/Add'}).create_node()
        add_node.in_port(0).connect(mul_node.out_port(0))
        group_norm_node.in_port(2).get_connection().set_destination(add_node.in_port(1))

        group_norm_node.out_port(0).get_connection().set_source(add_node.out_port(0))
Code example #23
    def replace_pattern(graph: Graph, match: dict):
        node = match['pool']
        node_name = node.soft_get('name', node.id)

        if node.pool_step is None:
            node.stride = int64_array([1, 1, node.window[-1], node.window[-1]])

        # create Reshape before convolution
        # shape = [in_shape[0], pool_stride, 1, in_shape[1]/pool_stride]
        i_shape = Shape(graph, {'name': node_name + '/Shape'}).create_node()

        dst_dtype = np.float32  # even if data_type=FP16 use float32 for shape values
        shape = Cast(graph, {
            'name': node_name + '/to_float',
            'dst_type': dst_dtype
        }).create_node()
        i_shape.in_port(0).connect(node.in_port(0).get_source())
        shape.in_port(0).connect(i_shape.out_port(0))

        N, H = node_to_get_shape_value_of_indices(
            shape, [0]), node_to_get_shape_value_of_indices(shape, [1])

        div = create_op_with_const_inputs(
            graph, Div, {1: float32_array([node.pool_stride])},
            {'name': node_name + '/div_stride_h'})
        div.in_port(0).connect(H.out_port(0))

        concat = create_op_with_const_inputs(
            graph, Concat, {
                1: float32_array([node.pool_stride]),
                2: float32_array([1])
            }, {
                'name': node_name + '/concat_all_dims',
                'in_ports_count': 4,
                'axis': 0
            })
        concat.in_port(0).connect(N.out_port(0))
        concat.in_port(3).connect(div.out_port(0))

        reshape_pattern = Cast(graph, {
            'name': node_name + '/to_int',
            'dst_type': np.int64
        }).create_node()
        concat.out_port(0).connect(reshape_pattern.in_port(0))

        reshape_in = Reshape(graph, {
            'name': node_name + '/reshape_in'
        }).create_node()
        reshape_in.in_port(1).connect(reshape_pattern.out_port(0))

        # create Reshape after Convolution
        reshape_out = create_op_node_with_second_input(
            graph, Reshape, int64_array([0, -1]),
            {'name': node_name + '/reshape_out'})

        # connect input_reshape_node
        source = node.in_port(0).get_source()
        node.in_port(0).get_connection().set_source(reshape_in.out_port(0))
        reshape_in.in_port(0).connect(source)
        # connect output_reshape_node
        node.out_port(0).get_connection().set_source(reshape_out.out_port(0))
        node.out_port(0).connect(reshape_out.in_port(0))
Code example #24
def resolve_convolution_with_group(node: Node, group: int, ir_version: str):
    node_name = node.soft_get('name', node.id)
    input_shape = node.in_port(0).data.get_shape()
    assert len(input_shape) in [3, 4, 5]

    weights_shape = node.in_port(1).data.get_shape()
    assert weights_shape is not None
    assert len(weights_shape) in [3, 4, 5]
    group = int64_array(group).item()
    assert weights_shape[0] % group == 0

    if ir_version == 'V7':
        if weights_shape[0] == node.output:
            # weights are already in [G*O I X Y] format
            return

        num_spatial_dims = len(weights_shape[2:])
        # Reshape has special_zero=True, if zeros are set then original shapes are copied
        zeros_to_copy_spatial_dims = np.zeros(num_spatial_dims)
        new_shape = shape_array([node.output, -1, *zeros_to_copy_spatial_dims])
        reshape = create_op_node_with_second_input(
            node.graph, Reshape, new_shape, {'override_output_shape': True})
    elif ir_version == 'V10':
        I = input_shape[1]
        if is_fully_defined(weights_shape[2:]) and is_fully_defined(I):
            new_shape = shape_array(
                [group, node.output // group, I // group, *weights_shape[2:]])
            assert np.prod(weights_shape) == np.prod(new_shape), 'Initial weights shape {}, grouped weights shape {}' \
                                                                 ''.format(weights_shape, new_shape)
            reshape = create_op_node_with_second_input(
                node.graph, Reshape, new_shape, {
                    'override_output_shape': True,
                    'special_zero': True
                })
        else:
            # if the weights or the input channel dimension is dynamic, new_shape has to be computed in a subgraph
            weights_node = node.in_port(1).get_source().node
            input_node = node.in_port(0).get_source().node

            weights_shape = Shape(node.graph, {
                'name': node_name + '/ShapeOfWeights'
            }).create_node()
            weights_shape.in_port(0).connect(weights_node.out_port(0))

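            # slice out the spatial part (indices 2 to the end) of the weights shape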
            weights_spatial_shape = create_op_with_const_inputs(
                node.graph,
                StridedSlice,
                port_value_dict={
                    1: int64_array([2]),
                    2: int64_array([-1])
                },
                op_attrs={
                    'begin_mask': [1],
                    'end_mask': [0],
                    'new_axis_mask': [0],
                    'shrink_axis_mask': [0],
                    'ellipsis_mask': [0]
                },
                input_node=weights_shape)

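            # constant [group, output_channels // group] head of the new weights shape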
            const_part_of_shape = Const(
                node.graph,
                attrs=dict(name=node_name + '/GroupsAndOutputChannelsSize',
                           value=int64_array([group, node.output // group
                                              ]))).create_node()

            input_shape_node = Shape(node.graph, {
                'name': node_name + '/ShapeOfInput'
            }).create_node()
            input_shape_node.in_port(0).connect(input_node.out_port(0))

            input_num_channels = create_op_with_const_inputs(
                node.graph,
                Gather,
                port_value_dict={
                    1: int64_array([1]),
                    2: int64_array(0)
                },
                op_attrs={'name': node_name + '/GatherInputNumChannels'},
                input_node=input_shape_node)

            # the number of input channels is divided by the number of groups to align the weights shape to [GROUPS C_OUT C_IN X Y]
            C_IN = create_op_with_const_inputs(
                node.graph,
                Div,
                port_value_dict={1: int64_array(group)},
                op_attrs={'name': node_name + '/Div'},
                input_node=input_num_channels)

            new_shape_node = Concat(node.graph, {
                'axis': 0,
                'in_ports_count': 3
            }).create_node()
            new_shape_node.in_port(0).connect(const_part_of_shape.out_port(0))
            new_shape_node.in_port(1).connect(C_IN.out_port(0))
            new_shape_node.in_port(2).connect(
                weights_spatial_shape.out_port(0))
            reshape = Reshape(node.graph, {
                'override_output_shape': True,
                'special_zero': True
            }).create_node()
            reshape.in_port(1).connect(new_shape_node.out_port(0))

        del node['group']
        node['type'] = 'GroupConvolution'
    else:
        raise Error("Unknown IR version: {}".format(ir_version))

    node.in_port(1).get_connection().insert_node(reshape)
コード例 #25
 def extract(cls, node: Node):
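     # special_zero=False: zeros in the target shape are literal dimensions, not "copy the input dimension"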
     Reshape.update_node_stat(node, {'special_zero': False})
     return cls.enabled
コード例 #26
    def find_and_replace_pattern(graph: Graph):
        conv_pool_nodes = graph.get_op_nodes(op='Convolution')
        conv_pool_nodes.extend(graph.get_op_nodes(op='Pooling'))

        if len(conv_pool_nodes) == 0:
            return

        set_time_dim(graph)

        for node in conv_pool_nodes:
            node_name = node.soft_get('name', node.id)

            # create Reshape before convolution
            # shape = [in_shape[0], t, patch_stride, C = in_shape[1]/(patch_stride*t)],
            # where patch_stride is an attribute of the Convolution taken from Kaldi
            # or before pooling
            # shape = [in_shape[0], t, in_shape[1]/(pool_stride*t), pool_stride]
            # where pool_stride is an attribute of the Pooling taken from Kaldi
            # adapt time_dim so it can be used as a kernel dimension
            time_dim = node.in_port(0).get_source().node.time_dim + 1
            if node.op == 'Convolution':
                frame_height = node.patch_stride if node.has_valid(
                    'patch_stride') else node.height_in
                # set time t instead of 1 as the kernel H dimension and update C to preserve the kernel size
                if node.kernel[2] != time_dim:
                    assert node.kernel[2] == 1
                    node.kernel[2] = time_dim
                    assert node.kernel[1] % time_dim == 0
                    node.kernel[1] = node.kernel[1] // time_dim
                    node.kernel_spatial = node.kernel[2:]
                index_const = 2
                index_div = 3
            else:
                frame_height = node.pool_stride
                if node.pool_step is None:
                    node.stride = int64_array(
                        [1, 1, node.window[-1], node.window[-1]])
                index_const = 3
                index_div = 2

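            # derive the Reshape target shape at runtime from the input shape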
            i_shape = Shape(graph, {
                'name': node_name + '/Shape'
            }).create_node()
            i_shape.in_port(0).connect(node.in_port(0).get_source())

            N, H = node_to_get_shape_value_of_indices(
                i_shape,
                [0]), node_to_get_shape_value_of_indices(i_shape, [1])

            # H / (patch_stride * t)
            H_div_stride_t = create_op_with_const_inputs(
                graph, Div, {1: int64_array([frame_height * time_dim])},
                {'name': node_name + '/div_stride_h'})
            H_div_stride_t.in_port(0).connect(H.out_port(0))

            # gather all dims
            concat_dims = create_op_with_const_inputs(
                graph, Concat, {
                    index_const: int64_array([frame_height]),
                    1: int64_array([time_dim])
                }, {
                    'name': node_name + '/concat_all_dims',
                    'in_ports_count': 4,
                    'axis': 0
                })
            concat_dims.in_port(0).connect(N.out_port(0))
            concat_dims.in_port(index_div).connect(H_div_stride_t.out_port(0))

            reshape_in = Reshape(graph, {
                'name': node_name + '/reshape_in',
                'time_dim': time_dim - 1
            }).create_node()
            reshape_in.in_port(1).connect(concat_dims.out_port(0))

            # change layout from NHWC to NCHW
            # should be replaced by common Permute logic in future
            direct_transpose = create_op_node_with_second_input(
                graph, Transpose, int64_array([0, 3, 1, 2]), {
                    'name': node_name + '/Transpose',
                    'time_dim': time_dim - 1
                }, reshape_in)
            # after convolution/pooling time_dim becomes 0
            inverse_transpose = create_op_node_with_second_input(
                graph, Transpose, int64_array([0, 2, 3, 1]), {
                    'name': node_name + '/Transpose_back',
                    'time_dim': 0
                })

            # create Reshape after Convolution
            reshape_out = create_op_node_with_second_input(
                graph, Reshape, int64_array([0, -1]), {
                    'name': node_name + '/reshape_out',
                    'special_zero': True,
                    'time_dim': 0
                })

            # connect input_reshape_node
            source = node.in_port(0).get_source()
            node.in_port(0).get_connection().set_source(
                direct_transpose.out_port(0))
            reshape_in.in_port(0).connect(source)
            # connect output_reshape_node
            node.out_port(0).get_connection().set_source(
                reshape_out.out_port(0))
            node.out_port(0).connect(inverse_transpose.in_port(0))
            reshape_out.in_port(0).connect(inverse_transpose.out_port(0))
            rename_nodes([(node, node_name + '/' + node.op),
                          (reshape_out, node_name)])

        for node in graph.get_op_nodes():
            if 'time_dim' in node:
                del node['time_dim']
コード例 #27
    def mxrepeat_decomposition(node: Node):
        graph = node.graph
        name = node.soft_get('name', node.id)

        rename_node(node, name + '/to_be_removed')

        # Unsqueeze
        input_rank = Rank(graph, {'name': name + '/Rank'}).create_node()
        node.in_port(0).get_source().connect(input_rank.in_port(0))

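        # canonicalize a possibly negative repeat axis against the input rank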
        axis = get_canonical_axis_index_node(input_rank, node.axis)
        unsqueeze_axis = create_op_node_with_second_input(
            graph,
            Add,
            int64_array([1]), {'name': name + '/Unsqueeze/Axis'},
            input_node=axis)

        unsqueeze = Unsqueeze(graph, {
            'name': name + '/Unsqueeze'
        }).create_node()
        unsqueeze.in_port(1).connect(unsqueeze_axis.out_port(0))

        # Tile (1, 1, ..., repeats, ..., 1)
        # we generate tile array according to the following table:

        # parts:       |      first      |  repeats |  second     |
        # i:           | 0, 1, ..., axis,| axis + 1,| ..., rank+1 |
        # tile_array:  | 1, 1, ...,  1  ,| repeats ,| ...,   1    |

        one = Const(graph, {
            'name': name + '/Broadcast/One',
            'value': int64_array([1])
        }).create_node()
        first_ones = Broadcast(graph, {
            'name': name + '/Broadcast/Ones_first_part'
        }).create_node()
        first_ones.in_port(0).connect(one.out_port(0))
        first_ones.in_port(1).connect(unsqueeze_axis.out_port(0))

        repeats = Const(graph, {
            'name': name + '/repeats',
            'value': int64_array([node.repeats])
        }).create_node()

        second_ones = Broadcast(graph, {
            'name': name + '/Broadcast/Ones_second_part'
        }).create_node()
        second_part_broadcast_shape = Sub(
            graph, {
                'name': name + '/Broadcast/Shape/second_part'
            }).create_node()
        second_part_broadcast_shape.in_port(0).connect(input_rank.out_port(0))
        second_part_broadcast_shape.in_port(1).connect(
            unsqueeze_axis.out_port(0))
        second_ones.in_port(0).connect(one.out_port(0))
        second_ones.in_port(1).connect(second_part_broadcast_shape.out_port(0))

        tile_repeats = new_shape_node_from_shape_nodes(
            [first_ones, repeats, second_ones])
        tile = Tile(graph, {'name': name + '/Tile'}).create_node()
        tile.in_port(1).connect(tile_repeats.out_port(0))

        # Reshape (input_shape[:axis], input_shape[axis] * repeats, input_shape[axis+1:])
        # we generate reshape dim array according to the following table:

        # parts:       |    first   |                rep           |  second   |
        # i:           | 0, 1, ... ,|               axis,          | ..., rank |
        # dim_array:   | inp_sh[i] ,| input_shape[axis] * repeats ,| inp_sh[i] |

        input_shape = Shape(graph, {'name': name + '/Shape'}).create_node()
        node.in_port(0).get_source().connect(input_shape.in_port(0))

        first_input_shape_part = get_shape_values_by_range_idxs(
            input_shape,
            input_rank,
            begin=0,
            end=node.axis,
            include_begin=True,
            include_end=False)

        original_axis_dim = create_op_with_const_inputs(
            graph,
            Gather, {2: int64_array(0)}, {'name': name + '/OriginalDim'},
            input_node=input_shape)
        original_axis_dim.in_port(1).connect(axis.out_port(0))

        repeated_dimension = Mul(graph, {
            'name': name + '/RepeatedDim'
        }).create_node()
        repeated_dimension.in_port(0).connect(original_axis_dim.out_port(0))
        repeated_dimension.in_port(1).connect(repeats.out_port(0))

        second_input_shape_part = get_shape_values_by_range_idxs(
            input_shape,
            input_rank,
            begin=node.axis,
            end=-1,
            include_begin=False,
            include_end=True)

        output_shape = new_shape_node_from_shape_nodes([
            first_input_shape_part, repeated_dimension, second_input_shape_part
        ])

        reshape = Reshape(graph, {'name': name}).create_node()
        rename_node(reshape, name)
        reshape.in_port(1).connect(output_shape.out_port(0))

        # Final connections
        node.in_port(0).get_connection().set_destination(unsqueeze.in_port(0))
        tile.in_port(0).connect(unsqueeze.out_port(0))
        reshape.in_port(0).connect(tile.out_port(0))
        node.out_port(0).get_connection().set_source(reshape.out_port(0))
コード例 #28
    def replace_sub_graph(self, graph: Graph, match: dict):
        mxreshape = match['op']
        if not mxreshape.reverse:
            return

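        # reverse=True: build the first Reshape target as the reversed input shape (Shape -> Unsqueeze -> Reverse -> Squeeze)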
        shape_node = Shape(graph,
                           dict(name=mxreshape.id + '/Shape')).create_node()
        forward_reverse_unsqueeze_node = create_op_node_with_second_input(
            graph, Unsqueeze, int64_array([0]),
            dict(name=str(mxreshape.id) + '/ForwardUnsqueeze'))
        forward_reverse_node = Reverse(
            graph, dict(name=mxreshape.id + '/ForwardReverse',
                        axis=1)).create_node()

        forward_reverse_squeeze_node = create_op_node_with_second_input(
            graph, Squeeze, int64_array([0]),
            dict(name=str(mxreshape.id) + '/ForwardSqueeze'))
        reshape_node = Reshape(graph, dict(name=mxreshape.id +
                                           '/Reshape')).create_node()
        shape_node.in_port(0).connect(mxreshape.in_port(0).get_source())
        mxreshape.in_port(0).get_connection().set_destination(
            reshape_node.in_port(0))

        forward_reverse_unsqueeze_node.in_port(0).connect(
            shape_node.out_port(0))
        forward_reverse_node.in_port(0).connect(
            forward_reverse_unsqueeze_node.out_port(0))
        forward_reverse_squeeze_node.in_port(0).connect(
            forward_reverse_node.out_port(0))
        reshape_node.in_port(1).connect(
            forward_reverse_squeeze_node.out_port(0))

        reshape_shape_node = create_op_node_with_second_input(
            graph, Reshape, int64_array(np.flip(mxreshape.dim, 0)),
            dict(name=str(mxreshape.id) + '/ReshapeShape'))
        if np.sum(np.in1d([-2, -3, -4], mxreshape.dim), axis=0):
            reshape_shape_node = MXReshape(
                graph,
                dict(name=mxreshape.id + '/Reshape',
                     dim=int64_array(np.flip(mxreshape.dim,
                                             0)))).create_node()

        reshape_shape_node.in_port(0).connect(reshape_node.out_port(0))

        backward_shape_node = Shape(graph,
                                    dict(name=mxreshape.id +
                                         '/BackwardShape')).create_node()
        backward_reverse_unsqueeze_node = create_op_node_with_second_input(
            graph, Unsqueeze, int64_array([0]),
            dict(name=str(mxreshape.id) + '/BackwardUnsqueeze'))
        backward_reverse_node = Reverse(
            graph, dict(name=mxreshape.id + '/BackwardReverse',
                        axis=1)).create_node()
        backward_reverse_squeeze_node = create_op_node_with_second_input(
            graph, Squeeze, int64_array([0]),
            dict(name=str(mxreshape.id) + '/BackwardSqueeze'))
        backward_reshape_node = Reshape(
            graph, dict(name=mxreshape.id + '/BackwardReshape')).create_node()

        backward_shape_node.in_port(0).connect(reshape_shape_node.out_port(0))
        backward_reverse_unsqueeze_node.in_port(0).connect(
            backward_shape_node.out_port(0))
        backward_reverse_node.in_port(0).connect(
            backward_reverse_unsqueeze_node.out_port(0))
        backward_reverse_squeeze_node.in_port(0).connect(
            backward_reverse_node.out_port(0))

        backward_reshape_node.in_port(0).connect(
            reshape_shape_node.out_port(0))
        backward_reshape_node.in_port(1).connect(
            backward_reverse_squeeze_node.out_port(0))

        mxreshape.out_port(0).get_connection().set_source(
            backward_reshape_node.out_port(0))
コード例 #29
    def replace_sub_graph(self, graph: Graph, match: dict):
        if not check_applicability(match):
            return

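        # rebuild the matched sub-graph as Reshape -> Transpose -> MVN -> Transpose -> Reshape, then re-apply gamma (Mul) and beta (Add)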
        reshape = match['reshape']
        div_name = match['division'].name

        input_shape = Shape(graph, dict(name=div_name +
                                        '/shape/MVN_T_')).create_node()
        shape_of_reshape = reshape.in_port(
            1).get_connection().get_source().node.value
        c1, c2 = shape_of_reshape[1], shape_of_reshape[2]
        c = c1 * c2

        new_reshape = create_op_node_with_second_input(
            graph, Reshape, int64_array([0, 0, 0, c1, c2]),
            dict(name=div_name + '/first_reshape/MVN_T_'))
        permute_order = int64_array([0, 1, 2, 4, 3])
        first_permute = create_op_node_with_second_input(
            graph, Transpose, permute_order,
            dict(name=div_name + '/first_permute/MVN_T_'), new_reshape)

        add = match['add']
        variance = match['variance']
        eps_port_num = 0 if add.in_port(
            0).get_connection().get_source().node.id != variance.id else 1
        eps = add.in_port(eps_port_num).get_connection().get_source().node
        mvn_node = create_op_with_const_inputs(
            graph, MVN, {1: int64_array([1, 2, 3])},
            dict(name=div_name + '/MVN/MVN_T_',
                 eps=eps.value,
                 normalize_variance=1,
                 eps_mode='inside_sqrt'))
        first_permute.out_port(0).connect(mvn_node.in_port(0))

        second_permute = create_op_node_with_second_input(
            graph, Transpose, permute_order,
            dict(name=div_name + '/second_permute/MVN_T_'), mvn_node)
        new_reshape2 = Reshape(graph,
                               dict(name=div_name +
                                    '/second_reshape/MVN_T_')).create_node()
        second_permute.out_port(0).connect(new_reshape2.in_port(0))
        gamma_val = np.reshape(
            match['gamma_identity'].in_port(
                0).get_connection().get_source().node.value,
            int64_array([1, 1, 1, c]))
        new_mul = create_op_node_with_second_input(
            graph, Mul, gamma_val, dict(name=match['mul'].name + '/MVN_T_'),
            new_reshape2)
        beta_val = np.reshape(
            match['beta_identity'].in_port(
                0).get_connection().get_source().node.value,
            int64_array([1, 1, 1, c]))
        new_add2 = create_op_node_with_second_input(
            graph, Add, beta_val, dict(name=match['add2'].name + '/MVN_T_'),
            new_mul)

        transpose_connection = match['transpose'].in_port(0).get_connection()
        before_transpose = transpose_connection.get_source().node
        transpose_connection.set_destination(new_reshape.in_port(0))
        input_shape.out_port(0).connect(new_reshape2.in_port(1))
        before_transpose.out_port(0).connect(input_shape.in_port(0))
        match['transpose2'].out_port(0).get_connection().set_source(
            new_add2.out_port(0))
コード例 #30
    def replace_pattern(graph: Graph, match: dict):
        node = match['conv']
        node_name = node.soft_get('name', node.id)

        # create Reshape before convolution
        # if transpose will be applied (new models)
        #   shape = [in_shape[0], t= in_shape[1]/(patch_stride*t), patch_stride, C=1]
        # else (for old models, to avoid failures on GNA; should be removed as soon as GNA is changed)
        #   shape = [in_shape[0], t= in_shape[1]/(patch_stride*t), C=1, patch_stride]
        sp_dim_1 = 1
        if node.has_valid('patch_stride'):
            channel_dim = 2
            sp_dim_2 = 3
            frame_height = node.patch_stride
        else:
            channel_dim = 3
            sp_dim_2 = 2
            frame_height = node.height_in

        i_shape = Shape(graph, {'name': node_name + '/Shape'}).create_node()
        i_shape.in_port(0).connect(node.in_port(0).get_source())

        N, H = node_to_get_shape_value_of_indices(
            i_shape, [0]), node_to_get_shape_value_of_indices(i_shape, [1])

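        # t = H / (frame_height * kernel[1])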
        div = create_op_with_const_inputs(
            graph, Div, {1: int64_array([frame_height * node.kernel[1]])},
            {'name': node_name + '/div_stride_h'})
        div.in_port(0).connect(H.out_port(0))

        concat = create_op_with_const_inputs(
            graph, Concat, {
                sp_dim_2: int64_array([frame_height]),
                channel_dim: int64_array([node.kernel[1]])
            }, {
                'name': node_name + '/concat_all_dims',
                'in_ports_count': 4,
                'axis': 0
            })
        concat.in_port(0).connect(N.out_port(0))
        concat.in_port(sp_dim_1).connect(div.out_port(0))

        reshape_in = Reshape(graph, {
            'name': node_name + '/reshape_in'
        }).create_node()
        reshape_in.in_port(1).connect(concat.out_port(0))

        # change layout from NHWC to NCHW
        # should be replaced by common Permute logic in future
        transpose = None
        if channel_dim == 3 and node.channel_dims == 1:
            transpose = create_op_node_with_second_input(
                graph, Transpose, int64_array([0, 3, 1, 2]),
                {'name': node_name + '/Transpose'}, reshape_in)

        # create Reshape after Convolution
        reshape_out = create_op_node_with_second_input(
            graph, Reshape, int64_array([0, -1]),
            {'name': node_name + '/reshape_out'})

        # connect input_reshape_node
        source = node.in_port(0).get_source()
        node.in_port(0).get_connection().set_source(
            transpose.out_port(0) if transpose else reshape_in.out_port(0))
        reshape_in.in_port(0).connect(source)
        # connect output_reshape_node
        node.out_port(0).get_connection().set_source(reshape_out.out_port(0))
        node.out_port(0).connect(reshape_out.in_port(0))