Example #1
    def replace_pattern(self, graph: Graph, match: Dict[str, Node]):
        concat_node = match['concat']
        concat_node['axis'] = 1
        concat_name = concat_node.soft_get('name', concat_node.id)

        concat_reshape = create_op_node_with_second_input(graph, Reshape, int64_array([1, 2, -1]), op_attrs=dict(
            name=concat_name + '/Reshape'))
        split_node = create_op_node_with_second_input(graph, Split, int64_array(1), op_attrs=dict(
            name=concat_name + '/Split', num_splits=2), input_node=concat_reshape)
        split_node_reshape = create_op_node_with_second_input(graph, Reshape, int64_array([-1, 4]), op_attrs=dict(
            name=split_node.name + '/Reshape'))
        split_node.out_port(0).connect(split_node_reshape.in_port(0))
        value = create_op_node_with_second_input(graph, Split, int64_array(1), op_attrs=dict(
            name=split_node_reshape.name + '/Split', num_splits=4), input_node=split_node_reshape)

        xmin, xmax = calculate_prior_box_value(value, value_to_div=value.out_port(2), value_to_add=value.out_port(0))
        ymin, ymax = calculate_prior_box_value(value, value_to_div=value.out_port(3), value_to_add=value.out_port(1))

        concat_slice_value = Concat(graph, dict(name=value.name + '/Concat', in_ports_count=4, axis=1)).create_node()
        for ind, node in enumerate([xmin, ymin, xmax, ymax]):
            concat_slice_value.in_port(ind).connect(node.out_port(0))

        reshape_concat_values = create_op_node_with_second_input(graph, Reshape, int64_array([1, 1, -1]),
                                                                 op_attrs=dict(name=concat_slice_value.name + '/Reshape'),
                                                                 input_node=concat_slice_value)
        concat = Concat(graph, dict(name=reshape_concat_values.name + '/Concat', in_ports_count=2, axis=1)).create_node()
        concat.in_port(0).connect(reshape_concat_values.out_port(0))
        concat.in_port(1).connect(split_node.out_port(1))

        match['detection_output'].in_port(2).get_connection().set_source(concat.out_port(0))
        concat_node.out_port(0).get_connection().set_destination(concat_reshape.in_port(0))
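Example #2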
    def add_leading_and_trailing_reshape(graph: Graph, nodes: list):
        """
        When the first operation in the matched list is a Transpose, add a Reshape operation which reshapes to the
        Transpose input shape. This Reshape op is needed for the optimization pass. If the optimization is not
        applied, this dummy Reshape will be removed by the "RemoveRedundantReshapes" pass.

        :param graph: the graph with nodes
        :param nodes: the sequence of Transpose and ReshapeFF nodes
        :return: None
        """
        # add leading Reshape
        if nodes[0].type == 'Transpose':
            dummy_reshape_node = create_op_node_with_second_input(
                graph, Reshape, nodes[0].in_port(0).data.get_shape().copy(),
                {'name': nodes[0].in_port(0).get_connection().get_source().node.id + '/Reshape'})
            dummy_reshape_node[__class__.OPTIMIZED_NODE_FLAG] = True
            nodes[0].in_port(0).get_connection().insert_node(dummy_reshape_node)
            nodes.insert(0, dummy_reshape_node)
            log.debug('Added Reshape op "{}" in the beginning of the permute-reshape sequence'.format(
                dummy_reshape_node.soft_get('name')))

        # similarly add the Reshape op after the last Transpose op which reshapes to the Transpose output shape
        if nodes[-1].type == 'Transpose':
            dummy_reshape_node = create_op_node_with_second_input(
                graph, Reshape, nodes[-1].out_port(0).data.get_shape().copy(),
                {'name': nodes[-1].out_port(0).get_connection().get_destination().node.id + '/Reshape'})
            dummy_reshape_node[__class__.OPTIMIZED_NODE_FLAG] = True
            nodes[-1].out_port(0).get_connection().insert_node(dummy_reshape_node)
            nodes.append(dummy_reshape_node)
            log.debug('Added Reshape op "{}" in the end of the permute-reshape sequence'.format(
                dummy_reshape_node.soft_get('name')))
Example #3
    def replace_pattern(graph: Graph, match: dict):
        node = match['op']
        shape = node.in_port(0).data.get_shape().copy()

        assert shape[1] % node.group == 0

        power_node = create_op_node_with_second_input(
            graph, Pow, node.p, {'name': node.id + '_power'})

        reshape_node = create_op_node_with_second_input(
            graph, Reshape,
            int64_array([shape[0], shape[1] / node.group, node.group]),
            {'name': node.id + '_reshape'})
        reshape_node.in_port(0).connect(power_node.out_port(0))

        reducesum_node = create_op_node_with_second_input(
            graph, ReduceSum, int64_array([2]), {
                'name': node.id + '_sum',
                'keep_dims': False
            })
        reducesum_node.in_port(0).connect(reshape_node.out_port(0))

        invpower_node = create_op_node_with_second_input(
            graph, Pow, 1.0 / node.p, {'name': node.id + '_invpower'})

        invpower_node.in_port(0).connect(reducesum_node.out_port(0))

        node.in_port(0).get_connection().set_destination(power_node.in_port(0))
        node.out_port(0).get_connection().set_source(invpower_node.out_port(0))
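
A NumPy sketch of what the Pow -> Reshape -> ReduceSum -> Pow sub-graph above computes may make the decomposition easier to follow; the shapes and the values of p and group below are illustrative assumptions, not taken from the example:

import numpy as np

x = np.random.rand(2, 8).astype(np.float32)    # input of shape [N, C]
p, group = 2.0, 4                              # assumed attribute values
# Pow -> Reshape([N, C/group, group]) -> ReduceSum(axis=2, keep_dims=False) -> Pow(1/p)
y = np.power(x, p).reshape(2, 8 // group, group).sum(axis=2) ** (1.0 / p)
assert y.shape == (2, 8 // group)

Example #4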
    def find_and_replace_pattern(self, graph: Graph):
        for nms in graph.get_op_nodes(op='NonMaxSuppression'):
            # prepare inputs to the NonMaximumSuppression Node
            unsqueeze_boxes = create_op_node_with_second_input(
                graph, Unsqueeze, int64_array([0]),
                {'name': nms.soft_get('name') + '/Unsqueeze_0'})
            nms.in_port(0).get_connection().insert_node(unsqueeze_boxes)

            unsqueeze_box_scores = create_op_node_with_second_input(
                graph, Reshape, int64_array([1, 1, -1]),
                {'name': nms.soft_get('name') + '/Unsqueeze_1'})
            nms.in_port(1).get_connection().insert_node(unsqueeze_box_scores)

            nms_name = nms.soft_get('name', nms.id)

            # prepare output #0
            crop_box_indices_name = nms_name + '/Crop_boxes_'
            crop_box_indices = Crop(
                graph, {
                    'name': crop_box_indices_name,
                    'axis': int64_array([1]),
                    'offset': int64_array([2]),
                    'dim': int64_array([1])
                }).create_node()
            nms.out_port(0).get_connection().insert_node(crop_box_indices)
            squeeze_output_boxes = create_op_node_with_second_input(
                graph, Squeeze, int64_array([1]),
                {'name': crop_box_indices_name + '/Squeeze'})
            crop_box_indices.out_port(0).get_connection().insert_node(
                squeeze_output_boxes)

            num_of_outputs = len([
                port for port in nms.out_ports().values()
                if not port.disconnected()
            ])

            if num_of_outputs == 1:
                continue

            # prepare output #1
            crop_score_indices_name = nms_name + '/Crop_scores_'
            crop_score_indices = Crop(
                graph, {
                    'name': crop_score_indices_name,
                    'axis': int64_array([1]),
                    'offset': int64_array([2]),
                    'dim': int64_array([1])
                }).create_node()
            nms.out_port(1).get_connection().insert_node(crop_score_indices)
            squeeze_output_scores = create_op_node_with_second_input(
                graph, Squeeze, int64_array([1]),
                {'name': crop_score_indices_name + '/Squeeze'})
            crop_score_indices.out_port(0).get_connection().insert_node(
                squeeze_output_scores)
Example #5
    def find_and_replace_pattern(self, graph: Graph):
        reverse_nodes = graph.get_op_nodes(op='Reverse')
        for reverse in reverse_nodes:
            reverse_name = reverse.soft_get('name', reverse.id)

            assert reverse.in_port(1).disconnected()
            assert reverse.has_valid('axis')

            in_shape_rank = len(reverse.in_port(0).data.get_shape())
            # 1. Add new dimension as batch for rank = 1 to have batch != seq_axis
            if in_shape_rank == 1:
                unsq_node = create_op_node_with_second_input(graph, Unsqueeze, int64_array([0]),
                                                             {'name': reverse_name+"/Unsqueeze"})
                reverse.in_port(0).get_source().connect(unsq_node.in_port(0))
                new_in = unsq_node.out_port(0)
                batch_axis = 0
                seq_axis = 1
            else:
                new_in = reverse.in_port(0).get_source()
                seq_axis = reverse['axis']
                batch_axis = 0 if seq_axis != 0 else 1

            # 2. For ReverseSequence 1-port input is seq_lengths => create this input node as
            # shape[seq_axis] broadcasted to shape[batch_axis]
            # in ---> ShapeOf ----> Gather(seq_axis)  ----> Broadcast----->
            #            |                                      |
            #            | -------> Gather(batch_axis)----------|
            shape_node = Shape(graph, {'name': reverse_name + "/Shape"}).create_node()
            new_in.connect(shape_node.in_port(0))
            seq_axis_node = node_to_get_shape_value_of_indices(shape_node, [seq_axis])
            batch_node = node_to_get_shape_value_of_indices(shape_node, [batch_axis])
            broadcast_node = Broadcast(graph, {'name': reverse_name + "/Broadcast"}).create_node()
            broadcast_node.in_port(0).connect(seq_axis_node.out_port(0))
            broadcast_node.in_port(1).connect(batch_node.out_port(0))

            # 3. Create new ReverseSequence node and reconnect all inputs/outputs to it
            rename_node(reverse, reverse_name + '/to_delete')
            reverse_sequence = ReverseSequence(graph, {'name':  reverse_name, 'seq_axis': seq_axis,
                                                       'batch_axis': batch_axis}).create_node()
            reverse_sequence.in_port(0).connect(new_in)
            reverse_sequence.in_port(1).connect(broadcast_node.out_port(0))

            # 4. remove added dimension for rank = 1
            if in_shape_rank == 1:
                rename_node(reverse_sequence, reverse_name + '/ReverseSequence')
                squeeze_node = create_op_node_with_second_input(graph, Squeeze, int64_array([0]),
                                                                {'name': reverse_name})
                squeeze_node.in_port(0).connect(reverse_sequence.out_port(0))
                reverse.out_port(0).get_connection().set_source(squeeze_node.out_port(0))
            else:
                reverse.out_port(0).get_connection().set_source(reverse_sequence.out_port(0))

        # 5. Delete old Reverse node
        graph.remove_nodes_from([reverse.id for reverse in reverse_nodes])
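
Step 2 above builds seq_lengths as shape[seq_axis] broadcast to a vector of length shape[batch_axis]. A NumPy sketch of that construction under illustrative shapes (with full-length sequences, ReverseSequence degenerates to a plain flip):

import numpy as np

x = np.arange(12).reshape(3, 4)                           # batch_axis = 0, seq_axis = 1
seq_lengths = np.broadcast_to(x.shape[1], [x.shape[0]])   # Gather + Broadcast -> [4, 4, 4]
rev = np.stack([np.concatenate([row[:n][::-1], row[n:]]) for row, n in zip(x, seq_lengths)])
np.testing.assert_array_equal(rev, np.flip(x, axis=1))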
Example #6
    def find_and_replace_pattern(self, graph: Graph):
        for node in graph.get_op_nodes(op='LayerNorm'):
            node_name = node.soft_get('name', node.id)

            if node.output_mean_var is True:
                if not node.out_port(1).disconnected() or not node.out_port(2).disconnected():
                    raise Error("Node {} is supported with only one output".format(node_name))
                log.error('LayerNorm node {} with attribute "output_mean_var" = True is not supported. '
                          'But since the node has one output, the conversion will continue.'.format(node_name),
                          extra={'is_warning': True})

            input_shape = node.in_port(0).data.get_shape()
            assert node.has_valid('axis'), 'Incorrect axis value for the node {}'.format(node_name)
            axis = node.axis

            mvn = create_op_node_with_second_input(graph, MVN, int64_array([axis]),
                                                   dict(eps=node.epsilon, name=node_name + '/LayerNorm/MVN_',
                                                        across_channels=1, normalize_variance=1, eps_mode='inside_sqrt'))

            mul = Mul(graph, {'name': node_name + '/LayerNorm/mul_'}).create_node()
            add = Add(graph, {'name': mul.name + '/LayerNorm/add_'}).create_node()

            node.in_port(0).get_connection().set_destination(mvn.in_port(0))
            node.in_port(1).get_connection().set_destination(mul.in_port(1))
            node.in_port(2).get_connection().set_destination(add.in_port(1))

            mvn.out_port(0).connect(mul.in_port(0))
            mul.out_port(0).connect(add.in_port(0))
            node.out_port(0).get_connection().set_source(add.out_port(0))

            # MXNet LayerNorm gamma and beta attributes are 1D tensors with shape = [input_shape[axis]]
            # We have to unsqueeze values for Mul and Add operations to avoid shapes incompatibility problems
            # if axis != -1
            canonical_axis = get_canonical_axis_index(input_shape, axis)
            unsqueeze_value = []
            for idx, val in enumerate(input_shape):
                if idx != canonical_axis:
                    unsqueeze_value.append(idx)

            mul_const_unsqueeze = create_op_node_with_second_input(graph, Unsqueeze,
                                                                   int64_array(unsqueeze_value),
                                                                   dict(name=mul.name + '/Unsqueeze',
                                                                        override_output_shape=True))
            add_const_unsqueeze = create_op_node_with_second_input(graph, Unsqueeze,
                                                                   int64_array(unsqueeze_value),
                                                                   dict(name=add.name + '/Unsqueeze',
                                                                        override_output_shape=True))

            mul.in_port(1).get_connection().insert_node(mul_const_unsqueeze)
            add.in_port(1).get_connection().insert_node(add_const_unsqueeze)

            rename_nodes([(node, node_name + '/ShouldBeDeleted'), (add, node_name)])
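
A NumPy sketch of the MVN -> Mul -> Add decomposition above for the axis = -1 case; the shapes and the eps value are illustrative assumptions ('inside_sqrt' means eps is added under the square root):

import numpy as np

x = np.random.rand(2, 3, 4).astype(np.float32)
gamma, beta, eps = np.ones(4), np.zeros(4), 1e-5
mvn = (x - x.mean(axis=-1, keepdims=True)) / np.sqrt(x.var(axis=-1, keepdims=True) + eps)
y = mvn * gamma + beta    # for axis != -1, gamma and beta are unsqueezed as in the code above
assert y.shape == x.shape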
Example #7
    def squeeze_initial_states(graph: Graph, match: dict):
        """
        Squeeze input initial states of recurrent node to 2-D shape.
        """
        hidden_init_port = 5
        cell_init_port = 6

        rnn_layer = match['rnn_layer']
        # Add input ports to rnn_layer
        rnn_layer.add_sequence_of_ports(type='in', rng=range(7))
        rnn_layer_name = rnn_layer.soft_get('name', rnn_layer.id)

        assert hidden_init_port in rnn_layer.in_nodes()
        hidden_size = rnn_layer.hidden_size
        shape = Shape(graph, dict(name=rnn_layer_name + '/ShapeOf')).create_node()
        rnn_layer.in_port(0).get_source().connect(shape.in_port(0))

        batch = node_to_get_shape_value_of_indices(shape, int64_array([rnn_layer.batch_dim]))
        new_dim = create_op_node_with_second_input(graph, Concat, second_input_value=int64_array([hidden_size]),
                                                   op_attrs=dict(name=rnn_layer_name + '/HiddenStateResizeDim',
                                                                 in_ports_count=2, axis=0), input_node=batch)
        reshape_h = Reshape(graph, dict(name=rnn_layer_name + '/HiddenStateResize', override_output_shape=True)).create_node()
        new_dim.out_port(0).connect(reshape_h.in_port(1))
        rnn_layer.in_port(hidden_init_port).get_connection().insert_node(reshape_h)

        if rnn_layer.op == 'LSTM':
            assert cell_init_port in rnn_layer.in_nodes()

            reshape_c = Reshape(graph, dict(name=rnn_layer_name + '/CellStateResize', override_output_shape=True)).create_node()
            new_dim.out_port(0).connect(reshape_c.in_port(1))
            rnn_layer.in_port(cell_init_port).get_connection().insert_node(reshape_c)
Example #8
    def replace_pattern(graph: Graph, match: dict):
        node = match['result']

        reshape = create_op_node_with_second_input(graph, Reshape, int64_array([]), {'override_output_shape': True})
        node.in_port(1).get_connection().insert_node(reshape)

        TopKNormalizer.normalize_outputs(node)
Example #9
def add_output_in_body(node,
                       port_num,
                       cur_graph,
                       cur_max_layer_id,
                       tracks,
                       track_index,
                       add_unsqueeze=True):
    port = node.out_port(port_num)
    if add_unsqueeze:
        unsq_name = port.node.soft_get('name', port.node.id) + "/Unsqueeze"
        unsq_node = create_op_node_with_second_input(cur_graph, Unsqueeze,
                                                     int64_array([0]),
                                                     {'name': unsq_name})
        port.connect(unsq_node.in_port(0))
        unsq_node['internal_layer_id'] = cur_max_layer_id + 1
        cur_max_layer_id += 1
        tracks.insert(track_index, {'node': unsq_node, 'graph': cur_graph})
        port = unsq_node.out_port(0)

    out_name = port.node.soft_get('name', port.node.id) + ":" + str(port_num)
    res_node = Result(cur_graph, {'name': out_name}).create_node()
    port.connect(res_node.in_port(0))
    res_node['internal_layer_id'] = cur_max_layer_id + 1
    cur_max_layer_id += 1
    tracks.insert(track_index, {'node': res_node, 'graph': cur_graph})

    return res_node
Example #10
    def replace_pattern(graph: Graph, match: dict):
        node = match['proposal']
        assert len(node.in_ports()) == 3, "Proposal op must have exactly 3 input ports"
        im_info_shape = node.in_port(2).data.get_shape()
        assert im_info_shape is not None

        if np.array_equal(im_info_shape, [1, 6]):
            log.error('The model contains Proposal layer "{}" with input of shape [1, 6]. Inference Engine '
                      'implementation of the Proposal layer uses only the first 4 values (indices 0, 1, 2 and 3). '
                      'Elements with indices 4 and 5 will be ignored.'.format(node.soft_get('name', node.id)),
                      extra={'is_warning': True})

            cropped_im_info = create_op_with_const_inputs(graph, StridedSlice, {1: np.array([0, 0], dtype=np.int32),
                                                                                2: np.array([1, 3], dtype=np.int32),
                                                                                3: np.array([1, 1], dtype=np.int32)},
                                                          {'name': 'cropped_im_info',
                                                           'begin_mask': int64_array([1, 1]),
                                                           'end_mask': int64_array([1, 1]),
                                                           'new_axis_mask': int64_array([0, 0]),
                                                           'shrink_axis_mask': int64_array([0, 0]),
                                                           'ellipsis_mask': int64_array([0, 0]),
                                                           'override_output_shape': True,
                                                           })

            node.in_port(2).get_connection().insert_node(cropped_im_info)

            # update the im_info_shape so the next 'if' statement becomes true
            im_info_shape = int64_array([1, 3])

        if np.array_equal(im_info_shape, [1, 3]) or np.array_equal(im_info_shape, [1, 4]):
            reshape = create_op_node_with_second_input(graph, Reshape, [im_info_shape[1]], {'name': 'im_info/Reshape'})
            node.in_port(2).get_connection().set_destination(reshape.in_port(0))
            reshape.out_port(0).connect(node.in_port(2))
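
A NumPy sketch of what the inserted StridedSlice and trailing Reshape do to a [1, 6] im_info input (the concrete values are illustrative):

import numpy as np

im_info = np.array([[600.0, 800.0, 1.0, 1.0, 0.0, 0.0]])   # shape [1, 6]
cropped = im_info[0:1, 0:3]       # begin=[0, 0], end=[1, 3], strides=[1, 1]
assert cropped.shape == (1, 3)
flattened = cropped.reshape(3)    # the Reshape to [im_info_shape[1]]
assert flattened.shape == (3,)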
Example #11
    def replace_op(self, graph: Graph, node: Node):
        name = node.soft_get('name', node.id)
        axis = node.soft_get('axis', 0)

        rename_node(node=node, name=name + '/to_be_removed')
        cumsum_node = create_op_node_with_second_input(graph, CumSum,
                                                       int64_array(axis), {
                                                           'name': name,
                                                           'reverse': False,
                                                           'exclusive': False
                                                       })
        rename_node(cumsum_node, name)

        node.in_port(0).get_connection().set_destination(
            cumsum_node.in_port(0))
        if node.has_valid('mx_out_type') and node['mx_out_type'] is not None:
            rename_node(node=cumsum_node, name=name + '/CumSum')
            convert = Cast(graph, {
                'name': name,
                'dst_type': node['mx_out_type']
            }).create_node()
            rename_node(convert, name)
            cumsum_node.out_port(0).connect(convert.in_port(0))
            return [convert.id]
        else:
            return [cumsum_node.id]
Example #12
def resolve_convolution_with_group(node: Node, group: int, ir_version: str):
    input_shape = node.in_port(0).data.get_shape()
    assert len(input_shape) in [3, 4, 5]

    weights_shape = node.in_port(1).data.get_shape()
    assert weights_shape is not None
    assert len(weights_shape) in [3, 4, 5]
    assert weights_shape[0] % group == 0

    if ir_version == 'V7':
        if weights_shape[0] == node.output:
            # weights are already in [G*O I X Y] format
            return
        new_shape = shape_array([node.output, -1, *weights_shape[2:]])
    elif ir_version == 'V10':
        # TODO rewrite this transformation to generate a shape-computing sub-graph. Ticket 62076
        I = input_shape[1]
        new_shape = shape_array(
            [group, node.output // group, I // group, *weights_shape[2:]])
        assert is_fully_defined(weights_shape[2:]) and is_fully_defined(I) and \
               np.prod(weights_shape) == np.prod(new_shape), 'Initial weights shape {}, grouped weights shape {}' \
                                                             ''.format(weights_shape, new_shape)
        del node['group']
        node['type'] = 'GroupConvolution'
    else:
        raise Error("Unknown IR version: {}".format(ir_version))

    reshape = create_op_node_with_second_input(node.graph, Reshape,
                                               int64_array(new_shape),
                                               {'override_output_shape': True})

    node.in_port(1).get_connection().insert_node(reshape)
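
A NumPy sketch of the V10 branch above: [O, I/G, kH, kW] convolution weights are regrouped into the GroupConvolution layout [G, O/G, I/G, kH, kW] (the sizes below are illustrative assumptions):

import numpy as np

O, I, G, kH, kW = 8, 4, 2, 3, 3
weights = np.zeros((O, I // G, kH, kW))
new_shape = (G, O // G, I // G, kH, kW)
assert np.prod(weights.shape) == np.prod(new_shape)   # same element count, new grouping
grouped = weights.reshape(new_shape)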
Example #13
def calculate_prior_box_value(value: Node, value_to_div: Port,
                              value_to_add: Port):
    """
    :param value: Node with value; a node with op='Split' is expected here
    :param value_to_div: Output port with values to be divided by 2
    :param value_to_add: Output port with values to be added to values from value_to_div port
    :return: Sub and Add nodes

    The sub-graph can be described by formulas:
    min = value[value_to_add] - (value[value_to_div] / 2)
    max = value[value_to_add] + (value[value_to_div] / 2)
    """
    graph = value.graph
    dtype = data_type_str_to_np(graph.graph['cmd_params'].data_type)
    _min = Sub(graph, dict(name=value.name + '/Sub')).create_node()
    div = create_op_node_with_second_input(graph,
                                           Div,
                                           mo_array([2], dtype=dtype),
                                           op_attrs=dict(name=value.name +
                                                         '/Div'))
    div.in_port(0).connect(value_to_div)
    _min.in_port(0).connect(value_to_add)
    _min.in_port(1).connect(div.out_port(0))

    _max = Add(graph, dict(name=value.name + '/Add')).create_node()
    _max.in_port(0).connect(div.out_port(0))
    _max.in_port(1).connect(value_to_add)

    return _min, _max
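
A NumPy sketch of the docstring formulas above (the values are illustrative):

import numpy as np

to_add = np.array([10.0, 20.0])   # value[value_to_add]
to_div = np.array([4.0, 6.0])     # value[value_to_div]
half = to_div / 2                 # the Div node
_min = to_add - half              # the Sub node
_max = to_add + half              # the Add node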
Example #14
    def insert_reduce(self,
                      model_graph,
                      insert_op,
                      node,
                      granularity,
                      type_stat,
                      node_name,
                      axis=1):
        axis_const = self.find_axis(node, granularity, axis)
        if isinstance(axis_const, str):
            return (True, node.name)

        out_port = self.get_out_port(node_name)
        if out_port is not None:
            node_name = f'{node_name[0]}.{out_port}'
        reduce_op = create_op_node_with_second_input(
            node.graph, insert_op, int64_array(axis_const),
            dict(name=f'{type_stat}_{node_name}'))
        reduce_op['fullname'] = reset_node_fullname(node.fullname,
                                                    reduce_op.name)
        if node.graph != model_graph:
            Op.create_data_node(reduce_op.graph, reduce_op, {'shape': [1]})

        node.out_port(out_port if out_port else 0).connect(
            reduce_op.in_port(0))
        return self.insert_result(model_graph, node, reduce_op, type_stat,
                                  out_port)
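Example #15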
    def replace_sub_graph(self, graph: Graph, match: dict):
        """
        Need to find the pattern: SoftmaxActivation -> DetectionOutput
        DetectionOutput in IE expects a flattened input from SoftMax, which is why a Flatten layer
        has to be added

        Parameters
        ----------
        graph : Graph
           Graph with loaded model.
        match : dict
           Patterns which were found in graph structure.
        """
        softmax_activation = match['softmax_activation']
        multi_box_detection = match['multi_box_detection']
        softmax_activation['axis'] = -1
        edge_data = graph.get_edge_data(softmax_activation.id,
                                        multi_box_detection.id)
        out_port = edge_data[0]['out']
        in_port = edge_data[0]['in']
        graph.remove_edge(softmax_activation.id, multi_box_detection.id)
        new_reshape_node = create_op_node_with_second_input(
            graph, Reshape, int64_array([0, -1]),
            dict(op='Reshape', name=multi_box_detection.name + '/Reshape_'),
            softmax_activation)
        graph.create_edge(new_reshape_node,
                          multi_box_detection,
                          in_port=in_port,
                          out_port=out_port)
Example #16
    def replace_pattern(graph: Graph, match: dict):
        node = match['op']
        if node.has_port('in', 2) and not node.in_port(2).disconnected() and \
                not node.has_and_set('shape_input'):
            bias_name = node.name
            new_node_name = node.name + '/WithoutBiases'
            add = Add(graph, dict(name=bias_name)).create_node()
            rename_nodes([(node, new_node_name), (add, bias_name)])
            node.out_port(0).get_connection().set_source(add.out_port(0))
            node.out_port(0).connect(add.in_port(0))
            node.in_port(2).get_connection().set_destination(add.in_port(1))

            bias = add.in_port(1).get_source().node
            if bias.has_valid("type") and bias.type == "Const":
                input_shape = add.in_port(0).data.get_shape()
                if len(input_shape) > 2:
                    dims_to_add = len(input_shape) - 2 if graph.graph['layout'] == 'NCHW' else 0
                    if dims_to_add > 0:
                        reshape = create_op_node_with_second_input(
                            graph, Reshape,
                            int64_array([input_shape[1]] + [1] * dims_to_add),
                            {'name': node.id + '/Dims'})
                        add.in_port(1).get_connection().set_destination(
                            reshape.in_port(0))
                        reshape.out_port(0).connect(add.in_port(1))
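
When the layout is NCHW and the data rank exceeds 2, the inserted Reshape turns the 1-D bias [C] into [C, 1, ..., 1] so it broadcasts over the spatial dimensions. A NumPy sketch with illustrative sizes:

import numpy as np

data = np.zeros((1, 16, 5, 5))   # N, C, H, W
bias = np.ones(16)
dims_to_add = data.ndim - 2      # 2 trailing singleton dims for rank-4 NCHW
out = data + bias.reshape([16] + [1] * dims_to_add)
assert out.shape == data.shape

Example #17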
    def find_and_replace_pattern(self, graph: Graph):
        for roll_node in graph.get_op_nodes(op='Roll'):
            if not roll_node.in_port(2).disconnected():
                continue  # the axes input is already provided, nothing to normalize
            node_name = roll_node.soft_get('name', roll_node.id)

            # reshape to 1d tensor
            reshape_to_1d = create_op_node_with_second_input(
                graph, Reshape, int64_array([-1]),
                {'name': node_name + '/reshape'})
            roll_node.in_port(0).get_connection().insert_node(reshape_to_1d)

            # add zero const as axes input to roll
            const_zero = Const(graph, {
                'value': int64_array([0]),
                'name': node_name + '/axes'
            }).create_node()
            const_zero.out_port(0).connect(roll_node.in_port(2))

            # reshape to original shape
            shape_of = Shape(graph, {
                'name': node_name + '/shape_of'
            }).create_node()
            reshape_to_1d.in_port(0).get_connection().add_destination(
                shape_of.in_port(0))
            reshape_to_orig_shape = Reshape(graph, {}).create_node()
            rename_nodes([(roll_node, node_name + '/roll'),
                          (reshape_to_orig_shape, node_name)])
            shape_of.out_port(0).connect(reshape_to_orig_shape.in_port(1))
            roll_node.out_port(0).get_connection().insert_node(
                reshape_to_orig_shape)
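
Roll without an explicit axes input operates on the flattened tensor, which is exactly what the Reshape([-1]) / Roll(axes=[0]) / Reshape-back chain above expresses. A NumPy sketch of the equivalence (illustrative input):

import numpy as np

x = np.arange(12).reshape(3, 4)
shift = 3
expected = np.roll(x, shift)   # axis=None rolls the flattened array
actual = np.roll(x.reshape(-1), shift, axis=0).reshape(x.shape)
np.testing.assert_array_equal(actual, expected)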
Example #18
    def insert_transpose(node, in_port_idx):
        graph = node.graph
        name = node.soft_get('name', node.id)

        assert in_port_idx in node.in_ports() and not node.in_port(in_port_idx).disconnected(), \
            'Input port with index {} should be connected for node {}'.format(in_port_idx, name)

        in_port = node.in_port(in_port_idx)
        port_shape = in_port.data.get_shape()
        assert port_shape is not None, \
            'Shape is unknown for input port with index {} for node {}'.format(in_port_idx, name)

        transpose_order = list(range(port_shape.size))
        transpose_order[-1], transpose_order[-2] = transpose_order[-2], transpose_order[-1]

        transpose = create_op_node_with_second_input(
            graph, Transpose, int64_array(transpose_order),
            {'name': name + '/{}_port_transpose'.format(in_port_idx)})

        port_source = in_port.get_source()
        in_port.get_connection().set_source(transpose.out_port(0))
        transpose.in_port(0).connect(port_source)

        transpose['override_output_shape'] = True
Example #19
    def replace_sub_graph(self, graph: Graph, match: dict):
        argmax_node = match['op']
        if not argmax_node.has_valid('axis'):
            flatten_node = create_op_node_with_second_input(
                graph, Reshape, int64_array([0, 1, -1]),
                dict(name=argmax_node.name + '/Flatten'))
            argmax_node.in_port(0).get_connection().insert_node(flatten_node)
            argmax_node.axis = 2
Example #20
    def reshape_priorboxes(self, concat):
        for i, node in concat.in_nodes().items():
            reshape_node = create_op_node_with_second_input(
                concat.graph, Reshape, int64_array([1, -1]),
                dict(name=concat.name + str(i) + '/PriorBoxReshape_'))
            node.out_port(0).disconnect()
            node.out_port(0).connect(reshape_node.in_port(0))
            concat.in_port(i).connect(reshape_node.out_port(0))
Example #21
    def replace_sub_graph(self, graph: Graph, match: dict):
        mxreshape = match['op']
        if not mxreshape.reverse:
            return

        shape_node = Shape(graph, dict(name=mxreshape.id + '/Shape')).create_node()
        forward_reverse_unsqueeze_node = create_op_node_with_second_input(graph, Unsqueeze, int64_array([0]),
                                                                          dict(name=str(mxreshape.id) + '/ForwardUnsqueeze'))
        forward_reverse_node = Reverse(graph, dict(name=mxreshape.id + '/ForwardReverse', axis=1)).create_node()

        forward_reverse_squeeze_node = create_op_node_with_second_input(graph, Squeeze, int64_array([0]),
                                                                        dict(name=str(mxreshape.id) + '/ForwardSqueeze'))
        reshape_node = Reshape(graph, dict(name=mxreshape.id + '/Reshape')).create_node()
        shape_node.in_port(0).connect(mxreshape.in_port(0).get_source())
        mxreshape.in_port(0).get_connection().set_destination(reshape_node.in_port(0))

        forward_reverse_unsqueeze_node.in_port(0).connect(shape_node.out_port(0))
        forward_reverse_node.in_port(0).connect(forward_reverse_unsqueeze_node.out_port(0))
        forward_reverse_squeeze_node.in_port(0).connect(forward_reverse_node.out_port(0))
        reshape_node.in_port(1).connect(forward_reverse_squeeze_node.out_port(0))

        reshape_shape_node = create_op_node_with_second_input(graph, Reshape, int64_array(np.flip(mxreshape.dim, 0)),
                                                              dict(name=str(mxreshape.id) + '/ReshapeShape'))
        if np.sum(np.in1d([-2, -3, -4], mxreshape.dim), axis=0):
            reshape_shape_node = MXReshape(graph, dict(name=mxreshape.id + '/Reshape',
                                     dim=int64_array(np.flip(mxreshape.dim, 0)))).create_node()

        reshape_shape_node.in_port(0).connect(reshape_node.out_port(0))

        backward_shape_node = Shape(graph, dict(name=mxreshape.id + '/BackwardShape')).create_node()
        backward_reverse_unsqueeze_node = create_op_node_with_second_input(graph, Unsqueeze, int64_array([0]),
                                                                           dict(name=str(mxreshape.id) + '/BackwardUnsqueeze'))
        backward_reverse_node = Reverse(graph, dict(name=mxreshape.id + '/BackwardReverse', axis=1)).create_node()
        backward_reverse_squeeze_node = create_op_node_with_second_input(graph, Squeeze, int64_array([0]),
                                                                         dict(name=str(mxreshape.id) + '/BackwardSqueeze'))
        backward_reshape_node = Reshape(graph, dict(name=mxreshape.id + '/BackwardReshape')).create_node()

        backward_shape_node.in_port(0).connect(reshape_shape_node.out_port(0))
        backward_reverse_unsqueeze_node.in_port(0).connect(backward_shape_node.out_port(0))
        backward_reverse_node.in_port(0).connect(backward_reverse_unsqueeze_node.out_port(0))
        backward_reverse_squeeze_node.in_port(0).connect(backward_reverse_node.out_port(0))

        backward_reshape_node.in_port(0).connect(reshape_shape_node.out_port(0))
        backward_reshape_node.in_port(1).connect(backward_reverse_squeeze_node.out_port(0))

        mxreshape.out_port(0).get_connection().set_source(backward_reshape_node.out_port(0))
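Example #22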
    def replace_op(self, graph: Graph, node: Node):
        if node.has_and_set('inputs_preprocessed'):
            log.debug('Node "{}" has already been preprocessed'.format(
                node.soft_get('name')))
            return []
        # reshape tensor with batch indices to 2d
        unsqueeze_node = create_op_node_with_second_input(
            graph, Unsqueeze, int64_array([1]),
            {'name': node.name + '/Unsqueeze'}, node.in_node(2))

        convert_node = Cast(
            graph, {
                'name': unsqueeze_node.name + '/ToFloat',
                'dst_type': data_type_str_to_np(graph.graph['cmd_params'].data_type)
            }).create_node()

        convert_node.in_port(0).connect(unsqueeze_node.out_port(0))

        concat_op = Concat(
            graph, {
                'axis': 1,
                'name': node.name + '/concat_batch_indices_and_boxes',
                'in_ports_count': 2
            })
        concat_node = concat_op.create_node([convert_node, node.in_node(1)])

        # do not remove edge with crop_size because it is needed in the partial infer
        graph.remove_edge(node.in_node(1).id, node.id)

        # input to the CropAndResize contains boxes coordinates in YXYX layout. But IE layer ROIPooling expects
        # coordinates in the XYXY layout, so convolution is added here to swap coordinates
        swapped_box_coordinates_node = add_convolution_to_swap_xy_coordinates(
            graph, concat_node, 5)

        # reshape locations tensor to 2D so it could be passed to Eltwise which will be converted to ScaleShift
        reshape_2d_node = create_op_node_with_second_input(
            graph, Reshape, int64_array([-1, 5]),
            dict(name=swapped_box_coordinates_node.id + '/reshape_2d_'),
            swapped_box_coordinates_node)
        graph.create_edge(reshape_2d_node, node, 0, 1)

        # do not replace any output edge
        return []
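Example #23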
    def find_and_replace_pattern(self, graph: Graph):
        for nms in graph.get_op_nodes(op='NonMaxSuppression'):
            # make inputs 2 to 5 have shape [1] instead of [0] (convert 0D to 1D)
            nms_name = nms.soft_get('name', nms.id)
            for port_id in range(2, 6):
                if port_id in nms.in_ports() and not nms.in_port(port_id).disconnected():
                    reshape_1d = create_op_node_with_second_input(graph, Reshape, int64_array([1]),
                                                                  {'name': nms_name + '/Reshape_1D_{}'.format(port_id)})
                    nms.in_port(port_id).get_connection().insert_node(reshape_1d)
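Example #24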
def insert_do(graph: Graph, replacement_descriptions: dict):
    do_outputs = replacement_descriptions['do_outputs']
    prior_boxes_node = Node(graph, 'ROIFeatureExtractor_2')
    num_classes = 81
    box_regressions_input_node = Node(
        graph, replacement_descriptions['box_regressions_input_node'])
    box_regressions_node = create_op_node_with_second_input(
        graph, Reshape, int64_array([-1, 4 * num_classes]),
        dict(name='box_regressions'), box_regressions_input_node)

    class_predicitons_node = Node(
        graph, replacement_descriptions['class_predicitons_node'])
    im_info_node = Parameter(graph, {
        "name": 'im_info',
        'shape': int64_array([1, 3])
    }).create_node()

    do_node = ExperimentalDetectronDetectionOutput(
        graph, {
            'name': 'DetectionOutput',
            'class_agnostic_box_regression': 0,
            'deltas_weights': np.array([10.0, 10.0, 5.0, 5.0]),
            'max_delta_log_wh': replacement_descriptions['max_delta_log_wh'],
            'nms_threshold': replacement_descriptions['nms_threshold'],
            'score_threshold': replacement_descriptions['score_threshold'],
            'num_classes': num_classes,
            'max_detections_per_image': replacement_descriptions['max_detections_per_image'],
            'post_nms_count': replacement_descriptions['post_nms_count']
        }).create_node()
    prior_boxes_node.out_port(1).connect(do_node.in_port(0))
    box_regressions_node.out_port(0).connect(do_node.in_port(1))
    class_predicitons_node.out_port(0).connect(do_node.in_port(2))
    im_info_node.out_port(0).connect(do_node.in_port(3))

    do_output_ports = [
        do_node.out_port(0),
        do_node.out_port(1),
        do_node.out_port(2)
    ]
    old_do_output_nodes = [Node(graph, node_id) for node_id in do_outputs]
    for old_node, new_port in zip(old_do_output_nodes, do_output_ports):
        old_node.out_port(0).get_connection().set_source(new_port)
    # the consumer of the second output port of the ExperimentalDetectronDetectionOutput is the Mul node whose second
    # input is of type int64, so a Cast has to be inserted to make the data types match
    do_node.out_port(1).get_connection().insert_node(
        Cast(graph, {'dst_type': np.int64}).create_node())
Example #25
def apply_biases_to_last_layer(graph, counts):
    r"""
    When the user provides a counts file, it contains log a priori probabilities;
    technically they should be subtracted from the biases of the last layer unless it is a SoftMax.

    Case 1:
        weights ---\
        biases  ---\
    some layer  ---> AffineTransform ---> SoftMax

    Then, counts are applied to biases of Affine Transform:

        weights             ---\
        (biases - counts)   ---\
    some layer              ---> AffineTransform ---> SoftMax

    Case 2:
        weights ---\
        biases  ---\
    some layer  ---> AffineTransform

    Just takes the last layer and updates biases:

        weights             ---\
        (biases - counts)   ---\
    some layer              ---> AffineTransform

    Parameters
    ----------
    graph
    counts

    Returns
    -------

    """ ""
    outputs_ids = find_outputs(graph)
    for output in outputs_ids.copy():
        node = Node(graph, output)
        if node.op != 'Assign' and node.op != "MemoryOffset":
            continue
        outputs_ids.remove(output)

    if len(outputs_ids) > 1:
        raise Error('Ambiguity in applying counts to several outputs.')
    elif len(outputs_ids) == 0:
        raise Error('No outputs were found')

    target_node = Node(graph, outputs_ids[0])
    if target_node.op == 'SoftMax':
        target_node = target_node.in_port(0).get_source().node

    sub_node = create_op_node_with_second_input(graph, Add, -counts,
                                                {'name': 'sub_counts'})
    target_node.out_port(0).get_connection().set_source(sub_node.out_port(0))
    sub_node.in_port(0).connect(target_node.out_port(0))
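
Appending an Add node with value -counts to the last layer's output is equivalent to folding the counts into its biases, as the docstring describes. A NumPy sketch of that equivalence (illustrative sizes):

import numpy as np

x, W, b = np.ones(3), np.random.rand(4, 3), np.random.rand(4)
counts = np.random.rand(4)
np.testing.assert_allclose((W @ x + b) - counts, W @ x + (b - counts))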
Example #26
    def replace_sub_graph(self, graph: Graph, match: dict):
        node = match['flatten']
        name = node.soft_get('name', node.id)

        assert node.has_valid('axis'), \
            'Flatten {} should have `axis` attribute extracted, but it\'s not'.format(name)
        axis = node.axis

        reshape_node = Reshape(graph, {
            'name': node.id + '/Reshape'
        }).create_node()

        if axis == 0:
            dim = Const(
                graph, {
                    'value': int64_array([1, -1]),
                    'name': reshape_node.name + '/shape'
                }).create_node()
        elif axis == 1:
            dim = Const(
                graph, {
                    'value': int64_array([0, -1]),
                    'name': reshape_node.name + '/shape'
                }).create_node()
        else:
            shape = Shape(graph, {'name': name + '/input_shape'}).create_node()

            idxs = list(range(axis)) if axis > 0 else list(range(axis, 0))

            axis_shape_portion = node_to_get_shape_value_of_indices(
                shape, idxs)
            first_dims = create_op_node_with_second_input(
                graph, ReduceProd, int64_array([0]), {
                    'name': name + '/first_dims',
                    'keep_dims': True
                })
            second_dims = Const(graph, {
                'value': int64_array([-1]),
                'name': name + '/second_dims'
            }).create_node()

            node.in_port(0).get_source().connect(shape.in_port(0))
            axis_shape_portion.out_port(0).connect(first_dims.in_port(0))

            order_of_dims = [first_dims, second_dims] if axis > 0 else [second_dims, first_dims]

            dim = new_shape_node_from_shape_nodes(order_of_dims)

        reshape_node.in_port(1).connect(dim.out_port(0))

        node.out_port(0).get_connection().set_source(reshape_node.out_port(0))
        node.in_port(0).get_connection().set_destination(
            reshape_node.in_port(0))
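
For a positive axis the generic branch above builds the output shape [prod(shape[:axis]), -1]. A NumPy sketch with illustrative shapes:

import numpy as np

x = np.zeros((2, 3, 4, 5))
axis = 2
first_dims = int(np.prod(x.shape[:axis]))   # the ReduceProd over shape[:axis]
y = x.reshape(first_dims, -1)               # second dim is the -1 placeholder
assert y.shape == (6, 20)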
Example #27
    def replace_pattern(self, graph: Graph, match: dict):
        node = match['cell']
        cell_name = node.soft_get('name', node.id)
        cell_type = node.soft_get('type')
        WR_input_id = node.soft_get('wr_input_id')
        hidden_size_coef = node.soft_get('gates_count')
        hidden_size = node.get_attrs()["hidden_size"]

        # default values for RNNCell/GRUCell
        additional_port_id = 4
        if cell_type == "LSTMCell":
            additional_port_id = 5

        WR_shape = node.in_port(WR_input_id).data.get_shape()
        assert WR_shape is not None, "Undefined 'WR' input shape for Cell node '{}'".format(cell_name)
        assert is_fully_defined(WR_shape), \
            'Not fully defined shape for WR for Cell node "{}"'.format(cell_name)

        num_elements_in_WR = np.prod(WR_shape)
        input_size = (num_elements_in_WR /
                      (hidden_size_coef * hidden_size)) - hidden_size

        # Reshape
        reshape = create_op_node_with_second_input(
            graph, Reshape,
            int64_array(
                [hidden_size_coef * hidden_size, hidden_size + input_size]),
            {'name': cell_name + '/Dims'})

        # VariadicSplit
        split = create_op_with_const_inputs(
            graph, VariadicSplit, {
                1: int64_array(1),
                2: int64_array([input_size, hidden_size])
            }, {
                'out_ports_count': 2,
                'name': cell_name + '/Split'
            }, reshape)

        # Cell
        node.in_port(WR_input_id).get_connection().set_destination(
            reshape.in_port(0))

        node.add_input_port(additional_port_id, skip_if_exist=True)
        assert node.in_port(additional_port_id).disconnected()

        # (x, y, WR, B) -> (x, y, W, R, B(additional_port))
        node.in_port(additional_port_id - 1).get_connection().set_destination(
            node.in_port(additional_port_id))
        split.out_port(0).connect(node.in_port(additional_port_id - 2))
        split.out_port(1).connect(node.in_port(additional_port_id - 1))
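
The WR input concatenates W (shape [gates_count * hidden_size, input_size]) and R (shape [gates_count * hidden_size, hidden_size]) along the last axis, so input_size can be recovered from the element count. A worked example with illustrative sizes (LSTM, gates_count = 4):

hidden_size, gates_count, input_size = 128, 4, 40
WR_shape = (gates_count * hidden_size, input_size + hidden_size)
num_elements_in_WR = WR_shape[0] * WR_shape[1]
recovered = num_elements_in_WR / (gates_count * hidden_size) - hidden_size
assert recovered == input_size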
Example #28
def replace_strided_slice(node: Node, mask: np.ndarray, op: callable):
    node_name = node.soft_get('name', node.id)
    axes = np.where(mask == 1)[0]
    new_node = create_op_node_with_second_input(node.graph, op,
                                                int64_array(axes))
    node.in_port(0).get_connection().set_destination(new_node.in_port(0))
    node.out_port(0).get_connection().set_source(new_node.out_port(0))

    rename_nodes([(node, node_name + '/ShouldBeDeleted'),
                  (new_node, node_name)])
    node.graph.remove_node(node.id)
Example #29
    def find_and_replace_pattern(self, graph: Graph):
        for node in graph.get_op_nodes(type='GatherTree'):
            name = node.soft_get('name', node.id)
            assert 3 in node.in_ports() and not node.in_port(3).disconnected()

            end_token_shape = node.in_port(3).data.get_shape()
            assert end_token_shape is not None
            if end_token_shape.size == 1 and end_token_shape.ndim == 1:
                squeeze = create_op_node_with_second_input(graph, Squeeze, int64_array([0]),
                                                           {'name': name + '/Squeeze', 'override_output_shape': True})
                node.in_port(3).get_connection().insert_node(squeeze)
Example #30
    def replace(node: Node, const: Node):
        graph = node.graph
        shape = const.shape
        const_name = const.soft_get('name', const.id)

        non_one_dims = np.argwhere(shape != 1).flatten()
        one_dims = np.argwhere(shape == 1).flatten()

        if not (non_one_dims.size == 1 and 5 < np.prod(shape) < 500):
            # the (5; 500) range was chosen to affect fewer models
            return

        value = const.value
        if not np.array_equal(np.arange(0, np.prod(shape), 1).reshape(shape), value):
            return

        positive_idx = non_one_dims.item(0)
        negative_idx = positive_idx - len(shape)

        node_name = node.soft_get('name', node.id)
        gather = create_op_with_const_inputs(graph, Gather, {1: int64_array(negative_idx), 2: int64_array(0)},
                                             {'name': node_name + '/BroadcastingDim'})
        gather_for_const = create_op_with_const_inputs(graph, Gather, {1: int64_array(negative_idx), 2: int64_array(0)},
                                                       {'name': const_name + '/BroadcastingDim'})
        shapeof_node = Shape(graph, {'name': const_name + '/ShapeOf'}).create_node()
        shapeof_node.out_port(0).connect(gather_for_const.in_port(0))

        equal_node = create_op_with_const_inputs(graph, Equal, {1: int64_array(1)}, {'name': node_name + '/ConstOne'})
        gather.out_port(0).connect(equal_node.in_port(0))

        select_node = Select(graph, {'name': node_name + '/Select',
                                     'auto_broadcast': 'numpy'}).create_node([equal_node, gather_for_const, gather])

        const.out_port(0).connect(shapeof_node.in_port(0))

        range_node = create_op_with_const_inputs(graph, Range,
                                                 {0: mo_array(0, dtype=value.dtype),
                                                  2: mo_array(1, dtype=value.dtype)},
                                                 {'name': const_name + '/Range', 'dtype': value.dtype})
        select_node.out_port(0).connect(range_node.in_port(1))

        node.in_port(1).get_connection().add_destination(gather.in_port(0))

        node.in_port(0).get_connection().set_source(range_node.out_port(0))

        if one_dims.size:
            unsqueeze = create_op_node_with_second_input(graph, Unsqueeze, one_dims,
                                                         {'name': const_name + '/KeepShape'})
            range_node.out_port(0).get_connection().insert_node(unsqueeze)
            rename_nodes([(const, const_name + '/ToBeDeleted'), (unsqueeze, const_name)])
        else:
            rename_nodes([(const, const_name + '/ToBeDeleted'), (range_node, const_name)])