def replace_pattern(self, graph: Graph, match: dict):
        if graph.graph['layout'] != "NCHW":
            return

        node = match['op']

        in_node = node.in_node(0)
        out_node = node.out_node(0)
        group = int(node['group'])

        rows = group
        cols = in_node.shape[1] // group

        if rows * cols != in_node.shape[1]:
            raise Error(
                "Group {} should divide input channels number {} without reminder for node {}"
                "".format(group, in_node.shape[1], node.id))

        reshape_split_node = create_op_node_with_second_input(
            graph, Reshape, int64_array([in_node.shape[0], rows, cols, -1]),
            {'name': node.id + '/Reshape_split_'})

        transpose_node = create_op_node_with_second_input(
            graph, Transpose, int64_array([0, 2, 1, 3]),
            {'name': node.id + '/Transpose_'}, reshape_split_node)

        reshape_concat = create_op_node_with_second_input(
            graph, Reshape, out_node.shape,
            {'name': node.id + '/Reshape_concat_'}, transpose_node)

        node.in_port(0).get_connection().set_destination(
            reshape_split_node.in_port(0))
        node.out_port(0).get_connection().set_source(
            reshape_concat.out_port(0))
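Every snippet on this page revolves around the create_op_node_with_second_input helper. As a reading aid, here is a minimal sketch of its assumed behavior, expressed with the same primitives the snippets themselves use (Const, create_node, port connections); the name with the _sketch suffix is hypothetical, and this is an illustration, not the helper's actual source:

def create_op_node_with_second_input_sketch(graph, op, second_input_value, op_attrs=None, input_node=None):
    # create the target op node and a Const feeding its second input port (assumed behavior)
    op_node = op(graph, op_attrs or {}).create_node()
    second_input = Const(graph, {'value': second_input_value}).create_node()
    second_input.out_port(0).connect(op_node.in_port(1))
    # optionally wire an existing node into input port 0
    if input_node is not None:
        input_node.out_port(0).connect(op_node.in_port(0))
    return op_node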
Example #2
    def replace_pattern(self, graph: Graph, match: Dict[str, Node]):
        concat_node = match['concat']
        concat_node['axis'] = 1
        concat_name = concat_node.soft_get('name', concat_node.id)

        concat_reshape = create_op_node_with_second_input(graph, Reshape, int64_array([1, 2, -1]), op_attrs=dict(
            name=concat_name + '/Reshape'))
        split_node = create_op_node_with_second_input(graph, Split, int64_array(1), op_attrs=dict(
            name=concat_name + '/Split', num_splits=2), input_node=concat_reshape)
        split_node_reshape = create_op_node_with_second_input(graph, Reshape, int64_array([-1, 4]), op_attrs=dict(
            name=split_node.name + '/Reshape'))
        split_node.out_port(0).connect(split_node_reshape.in_port(0))
        value = create_op_node_with_second_input(graph, Split, int64_array(1), op_attrs=dict(
            name=split_node_reshape.name + '/Split', num_splits=4), input_node=split_node_reshape)

        xmin, xmax = calculate_prior_box_value(value, value_to_div=value.out_port(2), value_to_add=value.out_port(0))
        ymin, ymax = calculate_prior_box_value(value, value_to_div=value.out_port(3), value_to_add=value.out_port(1))

        concat_slice_value = Concat(graph, dict(name=value.name + '/Concat', in_ports_count=4, axis=1)).create_node()
        for ind, node in enumerate([xmin, ymin, xmax, ymax]):
            concat_slice_value.in_port(ind).connect(node.out_port(0))

        reshape_concat_values = create_op_node_with_second_input(graph, Reshape, int64_array([1, 1, -1]),
                                                                 op_attrs=dict(name=concat_slice_value.name + '/Reshape'),
                                                                 input_node=concat_slice_value)
        concat = Concat(graph, dict(name=reshape_concat_values.name + '/Concat', in_ports_count=2, axis=1)).create_node()
        concat.in_port(0).connect(reshape_concat_values.out_port(0))
        concat.in_port(1).connect(split_node.out_port(1))

        match['detection_output'].in_port(2).get_connection().set_source(concat.out_port(0))
        concat_node.out_port(0).get_connection().set_destination(concat_reshape.in_port(0))
    def replace_op(self, graph: Graph, node: Node):
        if node.has_and_set('inputs_preprocessed'):
            log.debug('Node "{}" has already been preprocessed'.format(
                node.soft_get('name')))
            return []
        # reshape tensor with batch indices to 2d
        unsqueeze_node = create_op_node_with_second_input(
            graph, Unsqueeze, int64_array([1]),
            {'name': node.name + '/Unsqueeze'}, node.in_node(2))

        concat_op = Concat(
            graph, {
                'axis': 1,
                'name': node.name + '/concat_batch_indices_and_boxes',
                'in_ports_count': 2
            })
        concat_node = concat_op.create_node([unsqueeze_node, node.in_node(1)])

        # do not remove the edge with crop_size because it is needed during partial inference
        graph.remove_edge(node.in_node(1).id, node.id)

        # input to the CropAndResize contains boxes coordinates in YXYX layout. But IE layer ROIPooling expects
        # coordinates in the XYXY layout, so convolution is added here to swap coordinates
        swapped_box_coordinates_node = add_convolution_to_swap_xy_coordinates(
            graph, concat_node, 5)

        # reshape the locations tensor to 2D so it can be passed to Eltwise, which will be converted to ScaleShift
        reshape_2d_node = create_op_node_with_second_input(
            graph, Reshape, int64_array([-1, 5]),
            dict(name=swapped_box_coordinates_node.id + '/reshape_2d_'),
            swapped_box_coordinates_node)
        graph.create_edge(reshape_2d_node, node, 0, 1)

        # do not replace any output edge
        return []
    def add_leading_and_trailing_reshape(graph: Graph, nodes: list):
        """
        When the first operation in the matched list is a Transpose, add a Reshape operation that reshapes to the
        Transpose input shape. This Reshape op is needed for the optimization pass. If the optimization is not
        applied, this dummy Reshape is removed by the "RemoveRedundantReshapes" pass.

        :param graph: the graph with nodes
        :param nodes: the sequence of Transpose and ReshapeFF nodes
        :return: None
        """
        # add leading Reshape
        if nodes[0].type == 'Transpose':
            dummy_reshape_node = create_op_node_with_second_input(
                graph, Reshape, nodes[0].in_port(0).data.get_shape().copy(),
                {'name': nodes[0].in_port(0).get_connection().get_source().node.id + '/Reshape'})
            dummy_reshape_node[__class__.OPTIMIZED_NODE_FLAG] = True
            nodes[0].in_port(0).get_connection().insert_node(dummy_reshape_node)
            nodes.insert(0, dummy_reshape_node)
            log.debug('Added Reshape op "{}" in the beginning of the permute-reshape sequence'.format(
                dummy_reshape_node.soft_get('name')))

        # similarly add the Reshape op after the last Transpose op which reshapes to the Transpose output shape
        if nodes[-1].type == 'Transpose':
            dummy_reshape_node = create_op_node_with_second_input(
                graph, Reshape, nodes[-1].out_port(0).data.get_shape().copy(),
                {'name': nodes[-1].out_port(0).get_connection().get_destination().node.id + '/Reshape'})
            dummy_reshape_node[__class__.OPTIMIZED_NODE_FLAG] = True
            nodes[-1].out_port(0).get_connection().insert_node(dummy_reshape_node)
            nodes.append(dummy_reshape_node)
            log.debug('Added Reshape op "{}" in the end of the permute-reshape sequence'.format(
                dummy_reshape_node.soft_get('name')))
Example #5
    def replace_pattern(graph: Graph, match: dict):
        node = match['op']
        shape = node.in_port(0).data.get_shape().copy()

        assert shape[1] % node.group == 0

        power_node = create_op_node_with_second_input(
            graph, Pow, node.p, {'name': node.id + '_power'})

        reshape_node = create_op_node_with_second_input(
            graph, Reshape,
            int64_array([shape[0], shape[1] // node.group, node.group]),
            {'name': node.id + '_reshape'})
        reshape_node.in_port(0).connect(power_node.out_port(0))

        reducesum_node = create_op_node_with_second_input(
            graph, ReduceSum, int64_array([2]), {
                'name': node.id + '_sum',
                'keep_dims': False
            })
        reducesum_node.in_port(0).connect(reshape_node.out_port(0))

        invpower_node = create_op_node_with_second_input(
            graph, Pow, 1.0 / node.p, {'name': node.id + '_invpower'})

        invpower_node.in_port(0).connect(reducesum_node.out_port(0))

        node.in_port(0).get_connection().set_destination(power_node.in_port(0))
        node.out_port(0).get_connection().set_source(invpower_node.out_port(0))
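The decomposition above is a grouped Lp reduction. A minimal NumPy sketch of the same arithmetic, with hypothetical sizes, for illustration:

import numpy as np

# Pow -> Reshape -> ReduceSum(axis=2) -> inverse Pow, as in the transformation above
x = np.abs(np.random.rand(1, 6)).astype(np.float32)   # shape [N, C], hypothetical
p, group = 2.0, 3
grouped = (x ** p).reshape(1, 6 // group, group)      # [N, C/group, group]
result = grouped.sum(axis=2) ** (1.0 / p)             # keep_dims=False -> shape [N, C/group]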
    def replace_pattern(graph: Graph, match: dict):
        mxrepeat = match['repeat']

        out_shape = mxrepeat.out_node(0).shape
        unsqueeze_node = create_op_node_with_second_input(
            graph, Unsqueeze, int64_array([mxrepeat.axis + 1]),
            dict(name=mxrepeat.id + "/ExpandDims",
                 expand_axis=int64_array([mxrepeat.axis])))

        tile_array = np.ones([len(mxrepeat.in_node().shape) + 1],
                             dtype=np.int64)
        tile_array[mxrepeat.axis + 1] = mxrepeat.repeats
        tile_node = create_op_node_with_second_input(
            graph, Tile, int64_array(tile_array),
            dict(name=mxrepeat.id + "/Tile", axis=mxrepeat.axis + 1))

        reshape_node = create_op_node_with_second_input(
            graph, Reshape, int64_array(out_shape),
            dict(name=mxrepeat.id + "/Reshape"))

        mxrepeat.in_port(0).get_connection().set_destination(
            unsqueeze_node.in_port(0))
        tile_node.in_port(0).connect(unsqueeze_node.out_port(0))
        reshape_node.in_port(0).connect(tile_node.out_port(0))
        mxrepeat.out_port(0).get_connection().set_source(
            reshape_node.out_port(0))
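A minimal NumPy sketch of the Unsqueeze -> Tile -> Reshape decomposition of MXNet's Repeat used above (hypothetical sizes):

import numpy as np

x = np.arange(6).reshape(2, 3)
axis, repeats = 1, 2
y = np.expand_dims(x, axis + 1)              # Unsqueeze at axis + 1 -> shape (2, 3, 1)
y = np.tile(y, [1, 1, repeats])              # Tile along the new axis -> shape (2, 3, 2)
y = y.reshape(2, 3 * repeats)                # Reshape to the Repeat output shape
assert np.array_equal(y, np.repeat(x, repeats, axis=axis))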
Example #7
    def find_and_replace_pattern(self, graph: Graph):
        for node in graph.get_op_nodes(op='SpaceToBatch') + graph.get_op_nodes(op='BatchToSpace'):
            node.add_input_port(3, skip_if_exist=True)

            # convert TF representation of the pads/crops as [N, 2] to IE representation: [N] and [N]
            transposed_pads = create_op_with_const_inputs(graph, Transpose, {1: int64_array([1, 0])})
            node.in_port(2).get_connection().set_destination(transposed_pads.in_port(0))
            split_pads = create_op_with_const_inputs(graph, Split, {1: int64_array(0)}, {'num_splits': 2})
            transposed_pads.out_port(0).connect(split_pads.in_port(0))
            for port_ind in range(2):
                node.in_port(port_ind + 2).connect(split_pads.out_port(port_ind))
                node.in_port(port_ind + 2).get_connection().insert_node(
                    create_op_with_const_inputs(graph, Squeeze, {1: int64_array([0])}))

            # add zeros/ones to the related inputs to align them with the data input
            in0_rank = Rank(graph, {'name': node.name + '/rank_0'}).create_node()
            in1_rank = Shape(graph, {'name': node.name + '/rank_1'}).create_node()

            diff_size = Sub(graph, {'name': node.name + '/sub_0'}).create_node()
            diff = Sub(graph, {'name': node.name + '/sub_1'}).create_node()

            const_begin = Const(graph, {'value': int64_array([1])}).create_node()
            const_pad_val = Const(graph, {'value': int64_array([1])}).create_node()

            block_shape = Pad(graph, {'name': node.name + '/aligned_block_shape', 'mode': 'constant'}).create_node()

            # in case of SpaceToBatch begin = pads_begin, end = pads_end
            # in case of BatchToSpace begin = crops_begin, end = crops_end
            new_begin_name = '/aligned_pads_begin'
            new_end_name = '/aligned_pads_end'
            if node.type == 'BatchToSpace':
                new_begin_name = '/aligned_crops_begin'
                new_end_name = '/aligned_crops_end'

            begin = Pad(graph, {'name': node.name + new_begin_name, 'mode': 'constant'}).create_node()
            end = Pad(graph, {'name': node.name + new_end_name, 'mode': 'constant'}).create_node()

            in0_rank_1d = create_op_node_with_second_input(graph, Unsqueeze, int64_array([0]),
                                                           {'name': node.name + '/1d_rank_of_0'}, in0_rank)
            in1_rank_1d = create_op_node_with_second_input(graph, Unsqueeze, int64_array([0]),
                                                           {'name': node.name + '/1d_rank_of_1'}, in1_rank)

            node.in_port(0).get_source().connect(in0_rank.in_port(0))
            node.in_port(1).get_source().connect(in1_rank.in_port(0))
            in0_rank_1d.out_port(0).connect(diff_size.in_port(0))
            in1_rank_1d.out_port(0).connect(diff_size.in_port(1))
            diff_size.out_port(0).connect(diff.in_port(0))
            const_begin.out_port(0).connect(diff.in_port(1))
            const_pad_val.out_port(0).connect(block_shape.in_port(3))

            inputs_array = [block_shape, begin, end]
            for idx, input_to_node in enumerate(inputs_array):
                node.in_port(idx + 1).get_connection().set_destination(input_to_node.in_port(0))
                const_begin.out_port(0).connect(input_to_node.in_port(1))
                diff.out_port(0).connect(input_to_node.in_port(2))
                input_to_node.out_port(0).connect(node.in_port(idx + 1))
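The pads/crops conversion at the top of this transformation is easiest to see in NumPy: the TF-style [N, 2] tensor is transposed and split into the two [N] vectors IE expects, with the Squeeze nodes dropping the leftover unit dimension. A small illustration with made-up values:

import numpy as np

pads_tf = np.array([[1, 2], [3, 4]], dtype=np.int64)   # TF layout: [N, 2]
pads_begin, pads_end = np.transpose(pads_tf, [1, 0])   # Transpose + Split(axis=0) + Squeeze
# pads_begin == [1, 3], pads_end == [2, 4]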
Example #8
    def decompose_shuffle_channel(node: Node):
        graph = node.graph
        name = node.soft_get('name', node.id)

        rename_node(node, name + '/to_be_removed')

        shape = Shape(graph, dict(name=name + '/InputShape')).create_node()
        shape.in_port(0).connect(node.in_port(0).get_source())

        # Reshape [input_batch, group, input_channels/group, -1]
        batch = node_to_get_batch_value(shape)
        group = Const(
            graph, dict(name=name + '/Rows',
                        value=int64_array([node.group]))).create_node()
        const = Const(graph, dict(name=name + '/Const',
                                  value=int64_array([-1]))).create_node()

        input_channels = node_to_get_features_dimension_value(shape)
        output_channels = create_op_node_with_second_input(
            graph,
            Div,
            np.int64(node.group), {'name': name + '/Cols'},
            input_node=input_channels)
        i_output_channels = Cast(graph, {
            'name': output_channels.name + '/Convert',
            'dst_type': np.int64
        }).create_node()
        output_channels.out_port(0).connect(i_output_channels.in_port(0))

        reshape_split_dim = new_shape_node_from_shape_nodes(
            [batch, group, i_output_channels, const])
        reshape_split_node = Reshape(
            graph, dict(name=name + '/Reshape_split_')).create_node()
        reshape_split_dim.out_port(0).connect(reshape_split_node.in_port(1))

        # Transpose(0, 2, 1, 3)
        transpose_node = create_op_node_with_second_input(
            graph,
            Transpose,
            int64_array([0, 2, 1, 3]), {'name': name + '/Transpose_'},
            input_node=reshape_split_node)

        # Reshape back to input shape
        reshape_concat = Reshape(graph, dict(name=name)).create_node()
        rename_node(reshape_concat, name)

        shape.out_port(0).connect(reshape_concat.in_port(1))
        transpose_node.out_port(0).connect(reshape_concat.in_port(0))

        # Final connections
        node.in_port(0).get_connection().set_destination(
            reshape_split_node.in_port(0))
        node.out_port(0).get_connection().set_source(
            reshape_concat.out_port(0))
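Both ShuffleChannel decompositions on this page (the static-shape one at the top and this dynamic-shape one) implement the usual reshape -> transpose -> reshape trick. A NumPy sketch with hypothetical sizes:

import numpy as np

x = np.arange(2 * 6 * 2 * 2).reshape(2, 6, 2, 2)   # NCHW input, C = 6
group = 3
n, c, h, w = x.shape
y = x.reshape(n, group, c // group, -1)            # Reshape [N, group, C/group, -1]
y = y.transpose(0, 2, 1, 3)                        # Transpose(0, 2, 1, 3)
shuffled = y.reshape(n, c, h, w)                   # Reshape back to the input shape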
    def find_and_replace_pattern(self, graph: Graph):
        for nms in graph.get_op_nodes(op='NonMaxSuppression'):
            # prepare inputs to the NonMaximumSuppression Node
            unsqueeze_boxes = create_op_node_with_second_input(
                graph, Unsqueeze, int64_array([0]),
                {'name': nms.soft_get('name') + '/Unsqueeze_0'})
            nms.in_port(0).get_connection().insert_node(unsqueeze_boxes)

            unsqueeze_box_scores = create_op_node_with_second_input(
                graph, Reshape, int64_array([1, 1, -1]),
                {'name': nms.soft_get('name') + '/Unsqueeze_1'})
            nms.in_port(1).get_connection().insert_node(unsqueeze_box_scores)

            nms_name = nms.soft_get('name', nms.id)

            # prepare output #0
            crop_box_indices_name = nms_name + '/Crop_boxes_'
            crop_box_indices = Crop(
                graph, {
                    'name': crop_box_indices_name,
                    'axis': int64_array([1]),
                    'offset': int64_array([2]),
                    'dim': int64_array([1])
                }).create_node()
            nms.out_port(0).get_connection().insert_node(crop_box_indices)
            squeeze_output_boxes = create_op_node_with_second_input(
                graph, Squeeze, int64_array([1]),
                {'name': crop_box_indices_name + '/Squeeze'})
            crop_box_indices.out_port(0).get_connection().insert_node(
                squeeze_output_boxes)

            num_of_outputs = len([
                port for port in nms.out_ports().values()
                if not port.disconnected()
            ])

            if num_of_outputs == 1:
                continue

            # prepare output #1
            crop_score_indices_name = nms_name + '/Crop_scores_'
            crop_score_indices = Crop(
                graph, {
                    'name': crop_score_indices_name,
                    'axis': int64_array([1]),
                    'offset': int64_array([2]),
                    'dim': int64_array([1])
                }).create_node()
            nms.out_port(1).get_connection().insert_node(crop_score_indices)
            squeeze_output_scores = create_op_node_with_second_input(
                graph, Squeeze, int64_array([1]),
                {'name': crop_score_indices_name + '/Squeeze'})
            crop_score_indices.out_port(0).get_connection().insert_node(
                squeeze_output_scores)
Example #10
    def replace_pattern(graph: Graph, match: dict):
        node = match['pool']

        if node.pool_step is None:
            node.stride = int64_array([1, 1, node.window[-1], node.window[-1]])

        # create Reshape before the pooling
        # shape = [in_shape[0], in_shape[1]/patch_stride, 1, patch_stride]
        shape = Shape(graph, {}).create_node()
        shape.in_port(0).connect(node.in_port(0).get_source())

        split = create_op_with_const_inputs(graph, VariadicSplit, {
            1: int64_array(0),
            2: int64_array([1, -1])
        }, {'out_ports_count': 2}, shape)
        node_pool_stride = Const(graph, {
            'value': int64_array([node.pool_stride])
        }).create_node()
        pow_node = create_op_node_with_second_input(graph, Pow,
                                                    int64_array([-1]))
        pow_node.in_port(0).connect(node_pool_stride.out_port(0))

        mul = Mul(graph, {}).create_node()
        mul.in_port(0).connect(split.out_port(1))
        mul.in_port(1).connect(pow_node.out_port(0))

        const_1 = Const(graph, {'value': int64_array([1])}).create_node()

        concat = Concat(graph, {'in_ports_count': 4, 'axis': 0}).create_node()
        concat.in_port(0).connect(split.out_port(0))
        concat.in_port(3).connect(mul.out_port(0))
        concat.in_port(2).connect(const_1.out_port(0))
        concat.in_port(1).connect(node_pool_stride.out_port(0))

        reshape_in = Reshape(graph, {
            'name': '/Reshape/' + node.name
        }).create_node()
        reshape_in.in_port(1).connect(concat.out_port(0))

        # create Reshape after the pooling
        reshape_out = create_op_node_with_second_input(
            graph, Reshape, int64_array([0, -1]),
            {'name': node.name + '/Reshape/'})

        # connect input_reshape_node
        source = node.in_port(0).get_source()
        node.in_port(0).get_connection().set_source(reshape_in.out_port(0))
        reshape_in.in_port(0).connect(source)
        # connect output_reshape_node
        node.out_port(0).get_connection().set_source(reshape_out.out_port(0))
        node.out_port(0).connect(reshape_out.in_port(0))
Example #11
    def replace_pattern(self, graph: Graph, match: dict):
        node = match['op']
        node.is_training = False

        shape = node.in_port(1).data.get_shape()
        assert shape is not None, 'The shape of scale input of the BatchNorm node {} is not defined'.format(node.name)

        bn_mean = Const(graph, {'name': node.name + '/mean', 'value': np.zeros(shape, dtype=np.float32),
                                'override_output_shape': True}).create_node()
        bn_std = Const(graph, {'name': node.name + '/std', 'value': np.ones(shape, dtype=np.float32),
                               'override_output_shape': True}).create_node()
        node.in_port(3).get_connection().set_source(bn_mean.out_port(0))
        node.in_port(4).get_connection().set_source(bn_std.out_port(0))

        # save the original shape
        original_shape = Shape(graph, {'name': node.in_port(0).get_source().node.soft_get('name')}).create_node()
        original_shape.in_port(0).connect(node.in_port(0).get_source())

        mvn = MVN(graph, {'name': node.name + '/mvn_', 'eps': node.soft_get('eps', 1e-6),
                          'override_output_shape': True}).create_node()
        node.in_port(0).get_connection().insert_node(mvn)

        reshape_4d = create_op_node_with_second_input(graph, Reshape, int64_array([1, -1, 0, 0]),
                                                      {'override_output_shape': True,
                                                       'name': node.soft_get('name') + '/fused_batch_and_channels'})
        mvn.in_port(0).get_connection().insert_node(reshape_4d)

        # restore original shape
        reshape_back = Reshape(graph, {'name': mvn.soft_get('name') + '/restore_shape',
                                       'override_output_shape': True}).create_node()
        reshape_back.in_port(1).connect(original_shape.out_port(0))
        mvn.out_port(0).get_connection().insert_node(reshape_back)
Example #12
    def find_and_replace_pattern(self, graph: Graph):
        for roll_node in graph.get_op_nodes(op='Roll'):
            if not roll_node.in_port(2).disconnected():
                continue
            node_name = roll_node.soft_get('name', roll_node.id)

            # reshape to 1d tensor
            reshape_to_1d = create_op_node_with_second_input(
                graph, Reshape, int64_array([-1]),
                {'name': node_name + '/reshape'})
            roll_node.in_port(0).get_connection().insert_node(reshape_to_1d)

            # add zero const as axes input to roll
            const_zero = Const(graph, {
                'value': int64_array([0]),
                'name': node_name + '/axes'
            }).create_node()
            const_zero.out_port(0).connect(roll_node.in_port(2))

            # reshape to original shape
            shape_of = Shape(graph, {
                'name': node_name + '/shape_of'
            }).create_node()
            roll_node.in_port(0).get_connection().add_destination(
                shape_of.in_port(0))
            reshape_to_orig_shape = Reshape(graph, {}).create_node()
            rename_nodes([(roll_node, node_name + '/roll'),
                          (reshape_to_orig_shape, node_name)])
            shape_of.out_port(0).connect(reshape_to_orig_shape.in_port(1))
            roll_node.out_port(0).get_connection().insert_node(
                reshape_to_orig_shape)
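The transformation relies on the identity that a Roll without axes equals rolling the flattened tensor and restoring the original shape, which is also NumPy's axis=None behavior; a quick check:

import numpy as np

x = np.arange(12).reshape(3, 4)
shift = 5
rolled = np.roll(x.reshape(-1), shift, axis=0).reshape(x.shape)   # Reshape -> Roll(axes=[0]) -> Reshape
assert np.array_equal(rolled, np.roll(x, shift))                  # np.roll with axis=None flattens first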
    def replace_sub_graph(self, graph: Graph, match: dict):
        """
        Finds the pattern: SoftmaxActivation -> DetectionOutput.
        DetectionOutput in IE expects a flattened input from SoftMax, which is why a flattening Reshape layer
        has to be added.

        Parameters
        ----------
        graph : Graph
            Graph with loaded model.
        match : dict
            Patterns which were found in graph structure.
        """
        softmax_activation = match['softmax_activation']
        multi_box_detection = match['multi_box_detection']
        softmax_activation['axis'] = -1
        edge_data = graph.get_edge_data(softmax_activation.id, multi_box_detection.id)
        out_port = edge_data[0]['out']
        in_port = edge_data[0]['in']
        graph.remove_edge(softmax_activation.id, multi_box_detection.id)
        new_reshape_node = create_op_node_with_second_input(graph, Reshape, int64_array([0, -1]),
                                                            dict(op='Reshape',
                                                                 name=multi_box_detection.name + '/Reshape_'),
                                                            softmax_activation)
        graph.create_edge(new_reshape_node, multi_box_detection, in_port=in_port, out_port=out_port)
    def squeeze_initial_states(graph: Graph, match: dict):
        """
        Squeeze input initial states of recurrent node to 2-D shape.
        """
        hidden_init_port = 5
        cell_init_port = 6

        rnn_layer = match['rnn_layer']
        # Add input ports to rnn_layer
        rnn_layer.add_sequence_of_ports(type='in', rng=range(7))
        rnn_layer_name = rnn_layer.soft_get('name', rnn_layer.id)

        assert hidden_init_port in rnn_layer.in_nodes()
        hidden_size = rnn_layer.hidden_size
        shape = Shape(graph, dict(name=rnn_layer_name + '/ShapeOf')).create_node()
        rnn_layer.in_port(0).get_source().connect(shape.in_port(0))

        batch = node_to_get_shape_value_of_indices(shape, int64_array([rnn_layer.batch_dim]))
        new_dim = create_op_node_with_second_input(graph, Concat, second_input_value=int64_array([hidden_size]),
                                                   op_attrs=dict(name=rnn_layer_name + '/HiddenStateResizeDim',
                                                                 in_ports_count=2, axis=0), input_node=batch)
        reshape_h = Reshape(graph, dict(name=rnn_layer_name + '/HiddenStateResize', override_output_shape=True)).create_node()
        new_dim.out_port(0).connect(reshape_h.in_port(1))
        rnn_layer.in_port(hidden_init_port).get_connection().insert_node(reshape_h)

        if rnn_layer.op == 'LSTM':
            assert cell_init_port in rnn_layer.in_nodes()

            reshape_c = Reshape(graph, dict(name=rnn_layer_name + '/CellStateResize', override_output_shape=True)).create_node()
            new_dim.out_port(0).connect(reshape_c.in_port(1))
            rnn_layer.in_port(cell_init_port).get_connection().insert_node(reshape_c)
Example #15
    def replace_pattern(graph: Graph, match: dict):
        node = match['op']
        if node.has_port('in', 2) and not node.in_port(2).disconnected() and not node.has_and_set('shape_input'):
            bias_name = node.name
            new_node_name = node.name + '/WithoutBiases'
            add = Add(graph, dict(name=bias_name)).create_node()
            rename_nodes([(node, new_node_name), (add, bias_name)])
            node.out_port(0).get_connection().set_source(add.out_port(0))
            node.out_port(0).connect(add.in_port(0))
            node.in_port(2).get_connection().set_destination(add.in_port(1))

            bias = add.in_port(1).get_source().node
            if bias.has_valid("type") and bias.type == "Const":
                input_shape = add.in_port(0).data.get_shape()
                if len(input_shape) > 2:
                    dims_to_add = len(input_shape) - 2 if graph.graph['layout'] == 'NCHW' else 0
                    if dims_to_add > 0:
                        reshape = create_op_node_with_second_input(
                            graph, Reshape,
                            np.array([input_shape[1]] + [1] * dims_to_add,
                                     dtype=np.int64),
                            {'name': node.id + '/Dims'})
                        add.in_port(1).get_connection().set_destination(
                            reshape.in_port(0))
                        reshape.out_port(0).connect(add.in_port(1))
    def reshape_priorboxes(self, concat):
        for i, node in concat.in_nodes().items():
            reshape_node = create_op_node_with_second_input(concat.graph, Reshape, int64_array([1, -1]),
                                                            dict(name=concat.name + str(i) + '/PriorBoxReshape_'))
            node.out_port(0).disconnect()
            node.out_port(0).connect(reshape_node.in_port(0))
            concat.in_port(i).connect(reshape_node.out_port(0))
    def replace_pattern(graph: Graph, match: dict):
        node = match['fc']
        name = node.soft_get('name', node.id)
        out_size = node.soft_get('out-size', None)

        assert out_size is not None, \
            "FullyConnected should have `out-size` parameter, but it doesn't for node {}".format(name)

        in_ports = node.in_ports()

        # [I, K] * [O, K] = [I, O]
        if 1 not in in_ports or node.in_port(1).disconnected():
            # add fake weights
            input_shape = node.in_port(0).data.get_shape()
            assert input_shape is not None
            K = input_shape[-1]
            node.add_input_port(1, skip_if_exist=True)
            const = Const(graph, {'value': np.ones([out_size, K])}).create_node()
            node.in_port(1).connect(const.out_port(0))
            node.in_port(1).bin = 'weights'

        if 2 not in in_ports or node.in_port(2).disconnected():
            # add fake biases
            node.add_input_port(2, skip_if_exist=True)
            const = Const(graph, {'value': np.zeros([out_size])}).create_node()
            node.in_port(2).connect(const.out_port(0))
            node.in_port(2).bin = 'biases'

        bias_reshape = create_op_node_with_second_input(
            graph, Reshape, int64_array([-1]), {'name': name + '/1D_bias_', 'override_output_shape': True},
            node.in_port(2).get_source().node
        )
        node.in_port(2).get_connection().set_source(bias_reshape.out_port(0))
Example #18
    def replace_pattern(self, graph: Graph, match: dict):
        y = match['maximum'].in_port(0).data.get_value()
        if y is None:
            y = match['maximum'].in_port(1).data.get_value()

        if y is None or y.shape != ():
            log.debug('The value of the "maximum_y_data" is not defined or is not constant')
            return

        # rename the l2_normalize node since it will no longer be an output after the transformation
        output_name = match['l2_normalize'].soft_get('name', match['l2_normalize'].id)
        normalizel2_name = output_name + '/normalizel2'
        rename_node(match['l2_normalize'], normalizel2_name)

        normalize_node = create_op_node_with_second_input(graph, NormalizeOp,
                                                          np.ones(shape=int64_array([match['input'].shape[-1]]),
                                                                  dtype=match['input'].data_type),
                                                          {'name': output_name, 'eps': y,
                                                           'across_spatial': 0, 'channel_shared': 0})
        rename_node(normalize_node, output_name)

        match['square'].in_port(0).get_source().connect(normalize_node.in_port(0))

        match['square'].in_port(0).disconnect()
        if match['l2_normalize'].in_port(0).get_source().node.id == match['rsqrt'].id:
            match['l2_normalize'].in_port(1).disconnect()
        else:
            match['l2_normalize'].in_port(0).disconnect()

        match['l2_normalize'].out_port(0).get_connection().set_source(normalize_node.out_port(0))
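The subgraph fused here (Square -> ReduceSum -> Maximum -> Rsqrt -> Mul) is the standard L2 normalization. A NumPy sketch of the arithmetic that NormalizeOp takes over, assuming the reduction runs over the last axis (consistent with the weights shape above):

import numpy as np

x = np.random.rand(1, 4).astype(np.float32)
eps = 1e-12   # stands in for the scalar matched as the 'maximum' input y
l2 = x * (1.0 / np.sqrt(np.maximum(np.sum(x * x, axis=-1, keepdims=True), eps)))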
def add_output_in_body(node,
                       port_num,
                       cur_graph,
                       cur_max_layer_id,
                       tracks,
                       track_index,
                       add_unsqueeze=True):
    port = node.out_port(port_num)
    if add_unsqueeze:
        unsq_name = port.node.soft_get('name', port.node.id) + "/Unsqueeze"
        unsq_node = create_op_node_with_second_input(cur_graph, Unsqueeze,
                                                     int64_array([0]),
                                                     {'name': unsq_name})
        port.connect(unsq_node.in_port(0))
        unsq_node['internal_layer_id'] = cur_max_layer_id + 1
        cur_max_layer_id += 1
        tracks.insert(track_index, {'node': unsq_node, 'graph': cur_graph})
        port = unsq_node.out_port(0)

    out_name = port.node.soft_get('name', port.node.id) + ":" + str(port_num)
    res_node = Result(cur_graph, {'name': out_name}).create_node()
    port.connect(res_node.in_port(0))
    res_node['internal_layer_id'] = cur_max_layer_id + 1
    cur_max_layer_id += 1
    tracks.insert(track_index, {'node': res_node, 'graph': cur_graph})

    return res_node
Example #20
    def find_and_replace_pattern(self, graph: Graph):
        graph.strict_mode = False
        # key is the type of the operation. The value is a list of ports to convert from 0D to 1D
        rules = {
            'Broadcast': [0],
            'Unsqueeze': [1],
            'Squeeze': [1],
            'Eltwise': [1],
            'Range': [0, 1, 2],
            'FakeQuantize': [1, 2, 3, 4]
        }
        for node in graph.get_op_nodes():
            if node.has_and_set('type') and node.type in rules:
                for port in rules[node.type]:
                    if port in node.in_ports() and not node.in_port(port).disconnected():
                        src_node = node.in_port(port).get_connection().get_source().node
                        if src_node is not None and src_node.has_and_set('type') and src_node.type == 'Const' and \
                                src_node.value.ndim == 0:
                            log.info('Converting constant node "{}" from 0D to 1D'.format(src_node.soft_get('name')))
                            reshape = create_op_node_with_second_input(
                                graph, Reshape, int64_array([1]),
                                {'name': src_node.id + '/Dims'})
                            src_node.out_port(0).get_connection().set_source(reshape.out_port(0))
                            src_node.out_port(0).connect(reshape.in_port(0))
                            reshape.infer(reshape)
        graph.strict_mode = True
Example #21
    def replace_pattern(self, graph: Graph, match: dict):
        node = match['pb']
        name = node.soft_get('name', node.id)

        graph.graph['cmd_params'].static_shape = False

        assert len(node.in_ports()) == 2

        begin = Const(graph, {'value': np.array([2], dtype=np.int32), 'name': name + '/ss_begin'}).create_node()
        end = Const(graph, {'value': np.array([4], dtype=np.int32), 'name': name + '/ss_end'}).create_node()
        stride = Const(graph, {'value': np.array([1], dtype=np.int32), 'name': name + '/ss_stride'}).create_node()

        shape_0 = Shape(graph, {'name': name + '/0_port'}).create_node()
        ss_0 = StridedSlice(graph, {'name': name + '/ss_0_port',
                                    'begin_mask': np.array([1], dtype=np.int32),
                                    'end_mask': np.array([0], dtype=np.int32),
                                    'new_axis_mask': np.array([0], dtype=np.int32),
                                    'shrink_axis_mask': np.array([0], dtype=np.int32),
                                    'ellipsis_mask': np.array([0], dtype=np.int32)}).create_node()

        shape_0.out_port(0).connect(ss_0.in_port(0))
        begin.out_port(0).connect(ss_0.in_port(1))
        end.out_port(0).connect(ss_0.in_port(2))
        stride.out_port(0).connect(ss_0.in_port(3))

        source = node.in_port(0).get_connection().get_source()
        node.in_port(0).disconnect()
        source.connect(shape_0.in_port(0))
        ss_0.out_port(0).connect(node.in_port(0))

        shape_1 = Shape(graph, {'name': name + '/1_port'}).create_node()
        ss_1 = StridedSlice(graph, {'name': name + '/ss_1_port',
                                    'begin_mask': np.array([1], dtype=np.int32),
                                    'end_mask': np.array([0], dtype=np.int32),
                                    'new_axis_mask': np.array([0], dtype=np.int32),
                                    'shrink_axis_mask': np.array([0], dtype=np.int32),
                                    'ellipsis_mask': np.array([0], dtype=np.int32)}).create_node()

        shape_1.out_port(0).connect(ss_1.in_port(0))
        begin.out_port(0).connect(ss_1.in_port(1))
        end.out_port(0).connect(ss_1.in_port(2))
        stride.out_port(0).connect(ss_1.in_port(3))

        source = node.in_port(1).get_connection().get_source()
        node.in_port(1).disconnect()
        source.connect(shape_1.in_port(0))
        ss_1.out_port(0).connect(node.in_port(1))

        ss_0['force_precision_in_ports'] = {1: 'int64', 2: 'int64', 3: 'int64'}
        ss_1['force_precision_in_ports'] = {1: 'int64', 2: 'int64', 3: 'int64'}

        node['need_shape_inference'] = True
        node['override_output_shape'] = True
        node['V10_infer'] = True
        unsqueeze = create_op_node_with_second_input(graph, Unsqueeze, int64_array([0]), {'name': name + '/unsqueeze'})
        naked_priorbox_name = name + '/naked_not_unsqueezed'
        rename_nodes([(node, naked_priorbox_name), (unsqueeze, name)])

        node.out_port(0).get_connection().set_source(unsqueeze.out_port(0))
        node.out_port(0).connect(unsqueeze.in_port(0))
Example #22
    def insert_transpose(node, in_port_idx):
        graph = node.graph
        name = node.soft_get('name', node.id)

        assert in_port_idx in node.in_ports() and not node.in_port(in_port_idx).disconnected(), \
            'Input port with index {} should be connected for node {}'.format(in_port_idx, name)

        in_port = node.in_port(in_port_idx)
        port_shape = in_port.data.get_shape()
        assert port_shape is not None, \
            'Shape is unknown for input port with index {} for node {}'.format(in_port_idx, name)

        transpose_order = list(range(port_shape.size))
        transpose_order[-1], transpose_order[-2] = transpose_order[-2], transpose_order[-1]

        transpose = create_op_node_with_second_input(
            graph, Transpose, int64_array(transpose_order),
            {'name': name + '/{}_port_transpose'.format(in_port_idx)})

        port_source = in_port.get_source()
        in_port.get_connection().set_source(transpose.out_port(0))
        transpose.in_port(0).connect(port_source)

        transpose['override_output_shape'] = True
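A quick NumPy check of the transpose order built above, which swaps the two innermost dimensions:

import numpy as np

x = np.zeros((2, 3, 4))
order = list(range(x.ndim))
order[-1], order[-2] = order[-2], order[-1]    # [0, 2, 1] for a rank-3 tensor
assert x.transpose(order).shape == (2, 4, 3)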
Example #23
    def replace_pattern(graph: Graph, match: dict):
        node = match['proposal']
        assert len(node.in_ports()) == 3, "Proposal op must have exactly 3 input ports"
        im_info_shape = node.in_port(2).data.get_shape()
        assert im_info_shape is not None

        if np.array_equal(im_info_shape, [1, 6]):
            log.error('The model contains Proposal layer "{}" with input of shape [1, 6]. Inference Engine '
                      'implementation of the Proposal layer uses only 4 first values (indices 0, 1, 2 and 3). '
                      'Elements with indices 4 and 5 will be ignored.'.format(node.soft_get('name', node.id)),
                      extra={'is_warning': True})

            cropped_im_info = create_op_with_const_inputs(graph, StridedSlice, {1: np.array([0, 0], dtype=np.int32),
                                                                                2: np.array([1, 3], dtype=np.int32),
                                                                                3: np.array([1, 1], dtype=np.int32)},
                                                          {'name': 'cropped_im_info',
                                                           'begin_mask': int64_array([1, 1]),
                                                           'end_mask': int64_array([1, 1]),
                                                           'new_axis_mask': int64_array([0, 0]),
                                                           'shrink_axis_mask': int64_array([0, 0]),
                                                           'ellipsis_mask': int64_array([0, 0]),
                                                           'override_output_shape': True,
                                                           })

            node.in_port(2).get_connection().insert_node(cropped_im_info)

            # update im_info_shape so that the next 'if' statement becomes true
            im_info_shape = int64_array([1, 3])

        if np.array_equal(im_info_shape, [1, 3]) or np.array_equal(im_info_shape, [1, 4]):
            reshape = create_op_node_with_second_input(graph, Reshape, int64_array([im_info_shape[1]]), {'name': 'im_info/Reshape'})
            node.in_port(2).get_connection().set_destination(reshape.in_port(0))
            reshape.out_port(0).connect(node.in_port(2))
Example #24
    def replace_op(self, graph: Graph, node: Node):
        name = node.soft_get('name', node.id)
        axis = node.soft_get('axis', 0)

        rename_node(node=node, name=name + '/to_be_removed')
        cumsum_node = create_op_node_with_second_input(graph, CumSum,
                                                       int64_array(axis), {
                                                           'name': name,
                                                           'reverse': False,
                                                           'exclusive': False
                                                       })
        rename_node(cumsum_node, name)

        node.in_port(0).get_connection().set_destination(
            cumsum_node.in_port(0))
        if node.has_valid('mx_out_type') and node['mx_out_type'] is not None:
            rename_node(node=cumsum_node, name=name + '/CumSum')
            convert = Cast(graph, {
                'name': name,
                'dst_type': node['mx_out_type']
            }).create_node()
            rename_node(convert, name)
            cumsum_node.out_port(0).connect(convert.in_port(0))
            return [convert.id]
        else:
            return [cumsum_node.id]
Example #25
def resolve_convolution_with_group(node: Node, group: int, ir_version: str):
    input_shape = node.in_port(0).data.get_shape()
    assert len(input_shape) in [3, 4, 5]

    weights_shape = node.in_port(1).data.get_shape()
    assert weights_shape is not None
    assert len(weights_shape) in [3, 4, 5]
    assert weights_shape[0] % group == 0

    if ir_version == 'V7':
        if weights_shape[0] == node.output:
            # weights are already in [G*O, I, X, Y] format
            return
        new_shape = shape_array([node.output, -1, *weights_shape[2:]])
    elif ir_version == 'V10':
        # TODO rewrite this transformation to generate a shape-computing sub-graph. Ticket 62076
        I = input_shape[1]
        new_shape = shape_array(
            [group, node.output // group, I // group, *weights_shape[2:]])
        assert is_fully_defined(weights_shape[2:]) and is_fully_defined(I) and \
               np.prod(weights_shape) == np.prod(new_shape), 'Initial weights shape {}, grouped weights shape {}' \
                                                             ''.format(weights_shape, new_shape)
        del node['group']
        node['type'] = 'GroupConvolution'
    else:
        raise Error("Unknown IR version: {}".format(ir_version))

    reshape = create_op_node_with_second_input(node.graph, Reshape,
                                               int64_array(new_shape),
                                               {'override_output_shape': True})

    node.in_port(1).get_connection().insert_node(reshape)
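For the V10 branch, a NumPy sanity check of the weights regrouping with hypothetical sizes: [O, I/G, kH, kW] weights are viewed as [G, O/G, I/G, kH, kW] without changing the element count, which is what the assert in the transformation verifies:

import numpy as np

group, output, I = 2, 8, 6                      # hypothetical sizes
w = np.zeros((output, I // group, 3, 3))        # original weights: [O, I/G, kH, kW]
gw = w.reshape(group, output // group, I // group, 3, 3)
assert w.size == gw.size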
Example #26
    def replace_sub_graph(self, graph: Graph, match: dict):
        argmax_node = match['op']
        if not argmax_node.has_valid('axis'):
            flatten_node = create_op_node_with_second_input(graph, Reshape, int64_array([0, 1, -1]),
                                                            dict(name=argmax_node.name + '/Flatten'))
            argmax_node.in_port(0).get_connection().insert_node(flatten_node)
            argmax_node.axis = 2
    def replace_pattern(graph: Graph, match: dict):
        node = match['conv']
        node_name = node.soft_get('name', node.id)

        # create Reshape before convolution
        # shape = [in_shape[0], in_shape[1]/patch_stride, 1, patch_stride]
        i_shape = Shape(graph, {'name': node_name + '/Shape'}).create_node()
        shape = Cast(graph, {
            'name': node_name + '/to_float',
            'dst_type': data_type_str_to_np(graph.graph['cmd_params'].data_type)
        }).create_node()
        i_shape.in_port(0).connect(node.in_port(0).get_source())
        shape.in_port(0).connect(i_shape.out_port(0))

        N, H = node_to_get_shape_value_of_indices(
            shape, [0]), node_to_get_shape_value_of_indices(shape, [1])

        div = create_op_with_const_inputs(
            graph, Div, {1: float_array([node.patch_stride])},
            {'name': node_name + '/div_stride_h'})
        div.in_port(0).connect(H.out_port(0))

        concat = create_op_with_const_inputs(
            graph, Concat, {
                2: float_array([1]),
                3: float_array([node.patch_stride])
            }, {
                'name': node_name + '/concat_all_dims',
                'in_ports_count': 4,
                'axis': 0
            })
        concat.in_port(0).connect(N.out_port(0))
        concat.in_port(1).connect(div.out_port(0))

        reshape_pattern = Cast(graph, {
            'name': node_name + '/to_int',
            'dst_type': np.int64
        }).create_node()
        concat.out_port(0).connect(reshape_pattern.in_port(0))

        reshape_in = Reshape(graph, {
            'name': node_name + '/reshape_in'
        }).create_node()
        reshape_in.in_port(1).connect(reshape_pattern.out_port(0))

        # create Reshape after Convolution
        reshape_out = create_op_node_with_second_input(
            graph, Reshape, int64_array([0, -1]),
            {'name': node_name + '/reshape_out'})

        # connect input_reshape_node
        source = node.in_port(0).get_source()
        node.in_port(0).get_connection().set_source(reshape_in.out_port(0))
        reshape_in.in_port(0).connect(source)
        # connect output_reshape_node
        node.out_port(0).get_connection().set_source(reshape_out.out_port(0))
        node.out_port(0).connect(reshape_out.in_port(0))
Example #28
    def replace_sub_graph(self, graph: Graph, match: dict):
        source_connection = match['split'].in_port(0).get_connection()
        source_node = source_connection.get_source().node
        cast_node = match['cast']

        range_node = Range(graph, {
            'name': source_node.id + '/Range'
        }).create_node()
        start_node = Const(graph, {
            'name': range_node.id + '/Start',
            'value': int64_array(0)
        }).create_node()

        step_node = Const(graph, {
            'name': range_node.id + '/Step',
            'value': int64_array(1)
        }).create_node()
        input_shape_node = Shape(graph, {
            'name': start_node.id + '/Shape'
        }).create_node()
        input_shape_node.in_port(0).connect(source_node.out_port(0))

        limit_node_1D = node_to_get_batch_value(input_shape_node)
        limit_node = create_op_node_with_second_input(
            graph, Squeeze, int64_array([0]),
            {'name': source_node.id + '/batch_0D_value'}, limit_node_1D)

        range_node.in_port(0).connect(start_node.out_port(0))
        range_node.in_port(1).connect(limit_node.out_port(0))
        range_node.in_port(2).connect(step_node.out_port(0))
        cast_node.out_port(0).get_connection().set_source(
            range_node.out_port(0))

        graph.remove_nodes_from([node.id for node in match.values()])
Example #29
    def replace_pattern(self, graph: Graph, match: dict):
        node = match['node']

        group = node.group
        assert group > 1

        weights_shape = node.in_port(1).data.get_shape()
        assert weights_shape is not None
        assert weights_shape[0] % group == 0
        I = node.in_port(0).data.get_shape()[1]

        new_shape = int64_array(
            [group, node.output // group, I // group, *weights_shape[2:]])

        assert np.prod(weights_shape) == np.prod(new_shape), \
            'Initial weights shape {}, grouped weights shape {}'.format(weights_shape, new_shape)

        del node['group']
        node['type'] = 'GroupConvolution'

        reshape = create_op_node_with_second_input(
            graph, Reshape, int64_array(new_shape),
            {'override_output_shape': True})

        node.in_port(1).get_connection().insert_node(reshape)
Example #30
    def replace_pattern(self, graph: Graph, match: dict):
        FQ = match['FQ']
        reshape = match['reshape']
        conv = match['node']

        rank_reshape = reshape.in_port(0).data.get_shape().size != reshape.out_port(0).data.get_shape().size

        if not all([np.prod(FQ.in_port(i).data.get_shape()) == 1 for i in range(1, 5)]):
            #  FakeQuantize has limit inputs with more than one value, which would have to be reshaped too.
            #  Pulling a Reshape through such an FQ is a complex procedure because of broadcasting rules.
            return

        new_rank = reshape.out_port(0).data.get_shape().size

        reshape.in_port(0).disconnect()
        reshape.out_port(0).disconnect()

        FQ.out_port(0).connect(conv.in_port(1))
        FQ.in_port(0).get_connection().insert_node(reshape)

        reshape['need_shape_inference'] = True
        reshape['override_output_shape'] = True
        FQ['need_shape_inference'] = True
        FQ['override_output_shape'] = True

        if rank_reshape:
            # force rank of limit inputs to match the 0-input rank
            # reshaping to a lower rank needs it the most due to FQ inner broadcast semantics
            for i in range(1, 5):
                reshape = create_op_node_with_second_input(graph, Reshape, int64_array([1] * new_rank),
                                                           {'override_output_shape': True})
                FQ.in_port(i).get_connection().insert_node(reshape)