    def extract(cls, node: Node):
        attrs = {'axis': node.pb.bias_param.axis}
        embed_input(attrs, 1, 'bias', node.model_pb.blobs[0].data, 'biases')

        Add.update_node_stat(node, attrs)

        return cls.enabled
Example #2
    def replace_pattern(graph: Graph, match: dict):
        node = match['op']
        if node.has_port('in', 2) and not node.in_port(
                2).disconnected() and not node.has_and_set('shape_input'):
            bias_name = node.name
            new_node_name = node.name + '/WithoutBiases'
            add = Add(graph, dict(name=bias_name)).create_node()
            rename_nodes([(node, new_node_name), (add, bias_name)])
            node.out_port(0).get_connection().set_source(add.out_port(0))
            node.out_port(0).connect(add.in_port(0))
            node.in_port(2).get_connection().set_destination(add.in_port(1))

            bias = add.in_port(1).get_source().node
            if bias.has_valid("type") and bias.type == "Const":
                input_shape = add.in_port(0).data.get_shape()
                if len(input_shape) > 2:
                    dims_to_add = len(input_shape) - 2 if graph.graph[
                        'layout'] == 'NCHW' else 0
                    if dims_to_add > 0:
                        reshape = create_op_node_with_second_input(
                            graph, Reshape,
                            int64_array([input_shape[1]] + [1] * dims_to_add),
                            {'name': node.id + '/Dims'})
                        add.in_port(1).get_connection().set_destination(
                            reshape.in_port(0))
                        reshape.out_port(0).connect(add.in_port(1))
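
For NCHW graphs the Add broadcasts correctly only if the 1D bias is reshaped to [C, 1, ..., 1]; below is a minimal NumPy sketch of that shape arithmetic (sizes are illustrative, not from the source):

import numpy as np

x = np.zeros((2, 3, 4, 5))            # NCHW input
bias = np.arange(3, dtype=float)      # 1D bias, len == C
dims_to_add = x.ndim - 2              # number of trailing spatial dims
bias_nchw = bias.reshape([3] + [1] * dims_to_add)   # shape (3, 1, 1)
assert (x + bias_nchw).shape == x.shape             # broadcasts over H and W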
Example #3
    def replace_sub_graph(self, graph: Graph, match: dict):
        op = match['op']
        out_port = op.in_port(0).get_source()

        if op.soft_get('scale', 1) != 1:
            const = Const(graph, {'value': np.array(op.scale)}).create_node()
            mul = Mul(graph, {'name': op.name + '/mul_'}).create_node()
            const.out_port(0).connect(mul.in_port(1))
            out_port.connect(mul.in_port(0))
            out_port = mul.out_port(0)

        if op.soft_get('shift', 0) != 0:
            const = Const(graph, {'value': np.array(op.shift)}).create_node()
            add = Add(graph, {'name': op.name + '/add_'}).create_node()
            const.out_port(0).connect(add.in_port(1))
            out_port.connect(add.in_port(0))
            out_port = add.out_port(0)

        if op.soft_get('power', 1) != 1:
            const = Const(graph, {'value': np.array(op.power)}).create_node()
            pow = Pow(graph, {'name': op.name + '/pow_'}).create_node()
            const.out_port(0).connect(pow.in_port(1))
            out_port.connect(pow.in_port(0))
            out_port = pow.out_port(0)

        op.out_port(0).get_connection().set_source(out_port)
Example #4
    def replace_sub_graph(self, graph: Graph, match: [dict, SubgraphMatch]):
        cmp = match['complex']
        complex_abs = match['abs']
        complex_abs_name = complex_abs.soft_get('name', complex_abs.id)

        power_type = data_type_str_to_np(graph.graph['cmd_params'].data_type)

        pow0 = create_op_with_const_inputs(
            graph, Pow, {1: power_type(2.0)},
            {'name': complex_abs_name + '/real_part_squared'})
        pow1 = create_op_with_const_inputs(
            graph, Pow, {1: power_type(2.0)},
            {'name': complex_abs_name + '/imag_part_squared'})

        cmp.in_port(0).get_connection().set_destination(pow0.in_port(0))
        cmp.in_port(1).get_connection().set_destination(pow1.in_port(0))

        add = Add(graph, {
            'name': complex_abs_name + '/squared_abs'
        }).create_node([pow0, pow1])
        sqrt = create_op_with_const_inputs(graph, Pow, {1: power_type(0.5)},
                                           {})
        add.out_port(0).connect(sqrt.in_port(0))

        complex_abs.out_port(0).get_connection().set_source(sqrt.out_port(0))

        rename_nodes([(complex_abs, complex_abs_name + '/to_be_removed'),
                      (sqrt, complex_abs_name)])
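
The subgraph computes |z| = sqrt(re^2 + im^2) from Pow and Add nodes alone; the same arithmetic in NumPy as a sanity check:

import numpy as np

re, im = np.array([3.0, 0.0]), np.array([4.0, 2.0])
squared_abs = re ** 2.0 + im ** 2.0    # pow0 and pow1 feeding Add
abs_val = squared_abs ** 0.5           # the final Pow(0.5) acts as sqrt
assert np.allclose(abs_val, np.hypot(re, im))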
Example #5
    def replace_pattern(self, graph: Graph, match: dict):
        node = match['op']
        if (node.data_format != b'NHWC' or len(node.in_nodes()) != 5
                or node.in_node(0).value is not None or  # input
                node.in_node(1).value is None or  # scale
                node.in_node(2).value is None or  # offset
                node.in_node(3).value is not None or  # mean
                node.in_node(4).value is not None or  # variance
                node.in_node(1).value.ndim != 1 or
                node.in_node(2).value.ndim != 1):
            return

        scale_mul = Mul(graph, dict(name=node.name + '/scale_mul_'))
        shift_add = Add(graph, dict(name=node.name + '/shift_add_'))
        mean_add = Add(graph, dict(name=node.name + '/mean_add_'))
        variance_mul = Mul(graph, dict(name=node.name + '/variance_mul_'))

        neg_const = Const(
            graph, dict(value=np.array(-1), name=node.name + '/mean_negate_const_'))
        mean_negate = Mul(graph, dict(name=node.name + '/mean_negate_'))
        mean_arg = mean_add.create_node_with_data([
            node.in_node(0),
            mean_negate.create_node_with_data(
                [node.in_node(3),
                 neg_const.create_node_with_data()])
        ])

        shift_const = Const(
            graph,
            dict(value=node.eps,
                 name=node.name + '/variance_denom_shift_const_'))
        power_const = Const(
            graph,
            dict(value=-0.5, name=node.name + '/variance_denom_power_const_'))
        variance_denom_shift = Add(
            graph, dict(name=node.name + '/variance_denom_shift_'))
        variance_denom_power = Pow(
            graph, dict(name=node.name + '/variance_denom_power_'))
        variance_arg = variance_mul.create_node_with_data([
            mean_arg,
            variance_denom_power.create_node_with_data([
                variance_denom_shift.create_node_with_data(
                    [node.in_node(4),
                     shift_const.create_node_with_data()]),
                power_const.create_node_with_data()
            ])
        ])

        shift_add.create_node_with_data(
            [scale_mul.create_node_with_data([variance_arg, node.in_node(1)]),
             node.in_node(2)],
            data_nodes=node.out_node())

        node.graph.remove_node(node.id)
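
The chain built above is the textbook batch-norm formula out = (x - mean) * (variance + eps)^-0.5 * scale + offset, expressed with Mul/Add/Pow primitives; a NumPy restatement (NHWC shapes assumed for illustration):

import numpy as np

x = np.random.rand(2, 4, 4, 3)                    # NHWC input
scale, offset = np.ones(3), np.zeros(3)
mean, variance, eps = x.mean((0, 1, 2)), x.var((0, 1, 2)), 1e-3

shifted = x + mean * -1.0                         # mean_add with the negated mean
denom = (variance + eps) ** -0.5                  # variance_denom_shift + Pow(-0.5)
out = shifted * denom * scale + offset            # variance_mul, scale_mul, shift_add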
Example #6
    def replace_sub_graph(self, graph: Graph, match: [dict, SubgraphMatch]):
        node = match['op']
        name = node.soft_get('name', node.id)

        # biases normalization
        if 2 in node.in_ports() and not node.in_port(2).disconnected():
            bias_node = Add(graph, {'name': name + '/Bias_'}).create_node()
            node_name = node.name + '/WithoutBiases'
            bias_node_name = node.name
            rename_nodes([(node, node_name), (bias_node, bias_node_name)])
            node.out_port(0).get_connection().set_source(bias_node.out_port(0))
            node.in_port(2).get_connection().set_destination(
                bias_node.in_port(1))
            node.out_port(0).connect(bias_node.in_port(0))

        # weights normalization
        assert node.has_valid('out-size')
        out_size = node['out-size']
        reshape_dim = int64_array([-1, out_size])
        if node.has_and_set('transpose_weights'):
            reshape_dim = int64_array([out_size, -1])
        node.insert_op_on_input_port(
            in_port_idx=1,
            new_op_class=Reshape,
            new_op_attrs={'name': name + '/weights_reshape'},
            value=reshape_dim)
        if node.has_and_set('transpose_weights'):
            node.insert_op_on_input_port(
                in_port_idx=1,
                new_op_class=Transpose,
                new_op_attrs={'name': name + '/weights_transpose'},
                value=int64_array([1, 0]))

        # input normalization for 4D Caffe and MXNet FullyConnected
        if graph.graph['fw'] == 'caffe':
            node.insert_op_on_input_port(in_port_idx=0,
                                         new_op_class=Reshape,
                                         new_op_attrs={
                                             'name':
                                             name + '/flatten_fc_input',
                                             'special_zero': True
                                         },
                                         value=int64_array([0, -1]))

        if graph.graph['fw'] == 'mxnet':
            if node.flatten is not False:
                node.insert_op_on_input_port(in_port_idx=0,
                                             new_op_class=Reshape,
                                             new_op_attrs={
                                                 'name':
                                                 name + '/flatten_fc_input',
                                                 'special_zero': True
                                             },
                                             value=int64_array([0, -1]))

        MatMul.update_node_stat(node, {})
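
The weights normalization flattens the weights to 2D, either directly as [-1, out_size] or as [out_size, -1] followed by a [1, 0] Transpose when transpose_weights is set; a NumPy sketch with an assumed 3D weight tensor:

import numpy as np

out_size = 8
w = np.random.rand(8, 2, 3)                     # raw FC weights (assumed shape)
w2d = w.reshape(-1, out_size)                   # default reshape_dim = [-1, out_size]
wt = w.reshape(out_size, -1).transpose(1, 0)    # transpose_weights path
assert w2d.shape == (6, 8) and wt.shape == (6, 8)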
Example #7
    def replace_op(self, graph: Graph, node: Node):
        in_node_0 = node.in_node(0)
        in_node_1 = node.in_node(1)
        in_node_2 = node.in_node(2)

        ss = ScaleShiftOp(graph, {'name': node.id + "/ScaleShift_", 'axis': 0})
        scale_shift = ss.create_node(inputs=[in_node_1, in_node_0])

        el = Add(graph, {'name': node.id + "/Add_"})
        el_node = el.create_node(inputs=[scale_shift, in_node_2])

        return [el_node.id]
Example #8
    def replace_pattern(self, graph: Graph, match: dict):
        bias_add = match['BiasAdd']

        # Replace BiasAdd by Add operation
        new_add = Add(graph, {'name': bias_add.id + '/Add'}).create_node()

        bias_add.in_port(0).get_connection().set_destination(new_add.in_port(0))
        bias_add.in_port(1).get_connection().set_destination(new_add.in_port(1))
        bias_add.out_port(0).get_connection().set_source(new_add.out_port(0))

        if bias_add.data_format != 'NCHW':
            return

        input_shape = new_add.in_port(0).data.get_shape()
        bias_shape = new_add.in_port(1).data.get_shape()
        assert len(bias_shape) == 1

        unsqueeze_dims = np.arange(len(input_shape))
        channel_dim = get_features_dim('NCHW', len(input_shape))
        unsqueeze_dims = np.delete(unsqueeze_dims, channel_dim, 0)

        unsqueeze_node = Unsqueeze(graph, {'name': new_add.id + '/BiasUnsqueeze'}).create_node()
        unsqueeze_dims_node = Const(graph, {'name': new_add.id + '/Dims',
                                            'value': unsqueeze_dims}).create_node()
        # Reconnecting nodes
        unsqueeze_node.in_port(1).connect(unsqueeze_dims_node.out_port(0))
        unsqueeze_node['override_output_shape'] = True

        new_add.in_port(1).get_connection().insert_node(unsqueeze_node)
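
unsqueeze_dims holds every axis except the channel one, so the 1D bias gains singleton dims everywhere else and broadcasts over NCHW; in NumPy terms (shapes illustrative):

import numpy as np

input_shape = (2, 3, 4, 5)                       # NCHW; channel_dim == 1
bias = np.arange(3, dtype=float)
unsqueeze_dims = np.delete(np.arange(len(input_shape)), 1)
bias_expanded = np.expand_dims(bias, tuple(unsqueeze_dims))   # shape (1, 3, 1, 1)
assert (np.zeros(input_shape) + bias_expanded).shape == input_shape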
Example #9
    def replace_op(self, graph: Graph, node: Node):
        matmul = MatMul(graph, dict(name=node.name, transpose_b=True)).create_node([node.in_node(0), node.in_node(1)])

        # Bias
        if len(node.in_nodes()) > 2:
            matmul = Add(graph, dict(name=node.name + '/bias')).create_node([matmul, node.in_node(2)])

        return [matmul.id]
Example #10
def create_bias_node(graph: Graph, src_node):
    logger.debug('Creating new bias for {}'.format(src_node.name))
    destination_ports = []
    for dest_port in src_node.out_port(0).get_destinations():
        destination_ports.append(dest_port)

    # Create Add and constant with zero bias
    bias_shape = src_node.out_port(0).data.get_shape()
    add_bias_shape = [1] * len(bias_shape)
    add_bias_shape[1] = bias_shape[1]
    weights = get_weights_for_node(src_node)
    bias_dtype = np.float32
    if weights and weights.out_port(0).is_data_type_defined():
        bias_dtype = weights.out_port(0).get_data_type()
    add_bias = Const(
        graph, {
            'value': np.zeros(add_bias_shape, dtype=bias_dtype),
            'shape': add_bias_shape,
            'need_shape_inference': True
        }).create_node()
    add_op = Add(graph, {
        'name': src_node.name + '/add_',
        'need_shape_inference': True
    }).create_node()

    # Connect Const to Add node
    add_op.in_port(1).connect(add_bias.out_port(0))

    # Reconnect src_node -> output to src_node -> Add -> output
    src_node.out_port(0).disconnect()
    src_node.out_port(0).get_connection().set_destination(add_op.in_port(0))

    for destination_port in destination_ports:
        add_op.out_port(0).connect(destination_port)
    add_bias.out_node(0)['Insert_Convert_operation_after'] = True
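
The zero bias gets shape [1, C, 1, ...] to match the producer's channel dim, so the inserted Add is a no-op until the quantization tooling fills in real values; a sketch of the shape computation with an assumed output shape:

import numpy as np

out_shape = (1, 16, 28, 28)             # src_node output shape (assumed)
add_bias_shape = [1] * len(out_shape)
add_bias_shape[1] = out_shape[1]        # keep the channel dimension
zero_bias = np.zeros(add_bias_shape, dtype=np.float32)
assert (np.zeros(out_shape) + zero_bias).shape == out_shape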
Example #11
    def replace_op(self, graph: Graph, node: Node):
        weight = node.module.weight.detach().numpy()
        bias = node.module.bias.detach().numpy()

        weight = Const(graph, {'value': weight}).create_node()
        bias = Const(graph, {'value': bias}).create_node()
        matmul = MatMul(graph, dict(name=node.name)).create_node([node.in_node(0), weight])
        matmul = Add(graph, dict(name=node.name + '/bias')).create_node([matmul, bias])
        return [matmul.id]
Example #12
    def replace_pattern(self, graph: Graph, match: dict):
        quantize = match['quantize']

        sum_node = Add(graph, dict()).create_node()
        const = Const(graph, {'value': mo_array(0.5)}).create_node()
        mul_node = Mul(graph, dict()).create_node()

        mul_node.in_port(0).connect(sum_node.out_port(0))
        mul_node.in_port(1).connect(const.out_port(0))

        quantize.in_port(1).get_connection().get_source().connect(sum_node.in_port(0))
        quantize.in_port(2).get_connection().get_source().connect(sum_node.in_port(1))

        quantize.in_port(1).disconnect()
        quantize.in_port(2).disconnect()

        mul_node.out_port(0).connect(quantize.in_port(1))
        mul_node.out_port(0).connect(quantize.in_port(2))
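
The inserted pair feeds the midpoint (min + max) * 0.5 into both range inputs of the quantize node; numerically:

min_val, max_val = -1.0, 3.0
mid = (min_val + max_val) * 0.5    # sum_node followed by the Mul with the 0.5 const
assert mid == 1.0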
Example #13
def get_range_node_of_idxs(rank: Node,
                           begin: int,
                           end: int,
                           include_begin: bool = True,
                           include_end: bool = False) -> Node:
    """
    Returns a node that produces a 1D output holding the range of values from begin to end,
    including or excluding the begin/end points according to the flags

    :param rank: 0D node that produces the rank of the tensor
    :param begin: integer value from [-rank; rank - 1]
    :param end: integer value from [-rank; +rank]
    :param include_begin: boolean flag to include or exclude the start point in the output range
    :param include_end: boolean flag to include or exclude the end point in the output range
    :return: range node producing 1D output
    """
    graph = rank.graph
    name = rank.soft_get('name', rank.id)

    start_idx = get_canonical_axis_index_node(rank, begin)
    end_idx = get_canonical_axis_index_node(rank, end)

    if not include_begin:
        const = Const(graph, {
            'value': int64_array(1),
            'name': name + '/exclude_begin/value'
        }).create_node()
        add = Add(graph, {'name': name + '/exclude_begin'}).create_node()
        start_idx.out_port(0).connect(add.in_port(0))
        const.out_port(0).connect(add.in_port(1))
        start_idx = add

    if include_end:
        const = Const(graph, {
            'value': int64_array(1),
            'name': name + '/including_end/value'
        }).create_node()
        add = Add(graph, {'name': name + '/including_end'}).create_node()
        end_idx.out_port(0).connect(add.in_port(0))
        const.out_port(0).connect(add.in_port(1))
        end_idx = add

    delta = Const(graph, {
        'name': name + '/delta',
        'value': int64_array(1)
    }).create_node()
    range_node = Range(graph, {'name': name + '/range_idxs'}).create_node()

    start_idx.out_port(0).connect(range_node.in_port(0))
    end_idx.out_port(0).connect(range_node.in_port(1))
    delta.out_port(0).connect(range_node.in_port(2))

    return range_node
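
A plain-Python model of the range these nodes produce, assuming canonicalization wraps negative indices by the rank (as get_canonical_axis_index_node in Example #20 does):

import numpy as np

def range_of_idxs(rank, begin, end, include_begin=True, include_end=False):
    start = begin + rank if begin < 0 else begin    # canonical begin
    stop = end + rank if end < 0 else end           # canonical end
    if not include_begin:
        start += 1                                  # the '/exclude_begin' Add
    if include_end:
        stop += 1                                   # the '/including_end' Add
    return np.arange(start, stop, 1)                # Range with delta == 1

assert list(range_of_idxs(4, 1, -1, include_end=True)) == [1, 2, 3]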
Example #14
def calculate_prior_box_value(value: Node, value_to_div: Port,
                              value_to_add: Port):
    """
    :param value: Node with value. Here is supposed the node with op='Split'
    :param value_to_div: Output port with values to be divided by 2
    :param value_to_add: Output port with values to be added to values from value_to_div port
    :return: Sub and Add nodes

    The sub-graph can be described by formulas:
    min = value[value_to_add] - (value[value_to_div] / 2)
    max = value[value_to_add] + (value[value_to_div] / 2)
    """
    graph = value.graph
    dtype = data_type_str_to_np(graph.graph['cmd_params'].data_type)
    _min = Sub(graph, dict(name=value.name + '/Sub')).create_node()
    div = create_op_node_with_second_input(graph,
                                           Div,
                                           mo_array([2], dtype=dtype),
                                           op_attrs=dict(name=value.name +
                                                         '/Div'))
    div.in_port(0).connect(value_to_div)
    _min.in_port(0).connect(value_to_add)
    _min.in_port(1).connect(div.out_port(0))

    _max = Add(graph, dict(name=value.name + '/Add')).create_node()
    _max.in_port(0).connect(div.out_port(0))
    _max.in_port(1).connect(value_to_add)

    return _min, _max
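
The docstring's two formulas restated in NumPy, with assumed 1D center/size values:

import numpy as np

size, center = np.array([10.0, 4.0]), np.array([5.0, 3.0])
half = size / 2                # the Div-by-2 node
box_min = center - half        # the Sub node (_min)
box_max = half + center        # the Add node (_max)
assert np.all(box_min <= box_max)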
Example #15
    def replace_op(self, graph: Graph, node: Node):
        name = node.soft_get('name', node.id)

        # create range of axes for MVN based on `start_axis` and rank of input
        rank = Rank(graph, {'name': name + '/Rank'}).create_node()
        rng = create_op_with_const_inputs(graph, Range, {
            0: int64_array(2),
            2: int64_array(1)
        }, {
            'name': name + '/Range',
            'output_type': np.int64
        })
        mvn = MVN(
            graph, {
                'eps': node.epsilon,
                'eps_mode': 'inside_sqrt',
                'normalize_variance': 1,
                'name': name + '/Ins_Norm/MVN_',
            }).create_node()
        node.in_port(0).get_connection().set_destination(mvn.in_port(0))
        rng.out_port(0).connect(mvn.in_port(1))
        mul = Mul(graph, {
            'axis': 1,
            'name': name + '/Ins_Norm/mul_'
        }).create_node()
        mvn.out_port(0).connect(mul.in_port(0))
        node.in_port(1).get_connection().set_destination(mul.in_port(1))
        add = Add(graph, {
            'axis': 1,
            'name': name + '/Ins_Norm/add_'
        }).create_node()
        mul.out_port(0).connect(add.in_port(0))
        node.in_port(2).get_connection().set_destination(add.in_port(1))

        mvn.in_port(0).get_connection().add_destination(rank.in_port(0))
        rng.in_port(1).connect(rank.out_port(0))

        rename_nodes([(node, name + '/TBD'), (add, name)])

        return [add.id]
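
InstanceNormalization normalizes over the spatial axes only, which is why the MVN axes input is Range(2, rank, 1); a NumPy model of the decomposition (eps value assumed):

import numpy as np

x = np.random.rand(2, 3, 8, 8)
axes = tuple(range(2, x.ndim))                   # the Range(2, rank, 1) node
mean = x.mean(axis=axes, keepdims=True)
std = np.sqrt(x.var(axis=axes, keepdims=True) + 1e-5)
scale = np.ones((1, 3, 1, 1))
bias = np.zeros((1, 3, 1, 1))
out = (x - mean) / std * scale + bias            # MVN -> Mul -> Add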
Example #16
def add_constant_to_negative_values(node: Node, port_idx: int, added_value: np.array):
    """
    Adds the given value to the negative elements of the tensor arriving at the given input port.
    :param node: node whose input at port port_idx receives the corrected values
    :param port_idx: index of the input port with the values to correct
    :param added_value: the value to add
    :return: None
    """
    negative_values_source = node.in_port(port_idx).get_source()
    negative_values_node = node.in_port(port_idx).get_source().node
    negative_values_node_name = negative_values_node.soft_get('name', negative_values_node.id)

    graph = node.graph

    less_node = create_op_with_const_inputs(graph, Less,
                                            {1: mo_array(0, dtype=added_value.dtype)},
                                            {'name': negative_values_node_name + '/Less'})
    mul_node = create_op_with_const_inputs(graph, Mul, {1: added_value}, {'name': negative_values_node_name + '/Mul'})

    node.in_port(port_idx).get_connection().set_destination(less_node.in_port(0))
    less_node.out_port(0).connect(mul_node.in_port(0))

    add_node = Add(graph, {}).create_node()
    mul_node.out_port(0).connect(add_node.in_port(1))
    negative_values_source.connect(add_node.in_port(0))
    add_node.out_port(0).connect(node.in_port(port_idx))
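
The Less -> Mul -> Add chain implements corrected = x + added_value * (x < 0) element-wise; a NumPy check of the identity:

import numpy as np

x = np.array([-2.0, 1.0, -0.5])
added_value = np.array(10.0)
corrected = x + added_value * (x < 0)    # Less -> Mul -> Add
assert np.allclose(corrected, [8.0, 1.0, 9.5])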
Example #17
    def sub_to_add_replacement(sub: Node):
        # we also run this transformation for V10 IR later, in the middle phase, despite graph_condition,
        # so we prevent Sub replacement on shape-calculating sub-graphs
        if sub.in_port(0).data.get_value() is not None and sub.in_port(
                1).data.get_value() is not None:
            return

        graph = sub.graph
        name = sub.soft_get('name', sub.id)

        # keep Add name the same as Sub -- because of mathematical equality of output tensors
        rename_node(node=sub, name=name + '/to_be_removed')

        # reconnect Sub in(out)puts to Add
        add = Add(graph, {'name': name}).create_node()
        rename_node(add, name)

        sub.in_port(0).get_connection().set_destination(add.in_port(0))
        sub.in_port(1).get_connection().set_destination(add.in_port(1))
        sub.out_port(0).get_connection().set_source(add.out_port(0))

        # restore mathematical equivalence to Sub operation: Sub(A, B) = Add(A, Mul(B, -1))
        const_dtype = sub.soft_get('data_type', np.float32)
        negate = create_op_with_const_inputs(
            graph, Mul, {1: np.array(-1, dtype=const_dtype)},
            {'name': name + '/neg_'})
        add.in_port(1).get_connection().insert_node(negate)
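
The transformation rests on the identity Sub(A, B) = Add(A, Mul(B, -1)), which is trivial to verify:

import numpy as np

a, b = np.array([3.0, 1.0]), np.array([5.0, -2.0])
assert np.allclose(a - b, a + b * -1.0)   # Sub == Add with the negated input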
Example #18
    def replace_sub_graph(self, graph: Graph, match: dict):
        fbn = match['fbn']
        input = fbn.in_node(0)
        log.debug('Found potential MVN pattern after {} with name {}'.format(
            input.op, input.name))
        if input.id != match['mean'].in_node(
                0).id or input.id != match['sqdiff'].in_node(0).id:
            return

        log.debug('Confirmed MVN pattern after {} with name {}'.format(
            input.op, input.name))

        mvn = MVN(
            graph,
            dict(name=fbn.name + '/MVN_',
                 eps=fbn.eps,
                 eps_mode='outside_sqrt',
                 normalize_variance=1))
        mvn.attrs['old_infer'] = mvn.attrs['infer']
        mvn.attrs['infer'] = __class__.infer

        mul = Mul(graph, dict(operation='mul', name=fbn.name + '/Mul_'))
        add = Add(graph, dict(operation='sum', name=fbn.name + '/Add_'))

        input_gamma = fbn.in_node(1)
        input_beta = fbn.in_node(2)

        mean_reduction = match['mean'].in_node(1)
        variance_reduction = match['variance'].in_node(1)

        new_subgraph = add.create_node([
            mul.create_node([
                mvn.create_node([input, mean_reduction, variance_reduction]),
                input_gamma
            ]), input_beta
        ])
        fbn.replace_node(new_subgraph)
Example #19
    def find_and_replace_pattern(self, graph: Graph):
        for node in graph.get_op_nodes(op='LayerNorm'):
            node_name = node.soft_get('name', node.id)

            if node.output_mean_var is True:
                if not node.out_port(1).disconnected() or not node.out_port(2).disconnected():
                    raise Error("Node {} is supported with only one output".format(node_name))
                log.error('LayerNorm node {} with attribute "output_mean_var" = True is not supported. '
                          'But since the node has one output, the conversion will continue.'.format(node_name),
                          extra={'is_warning': True})

            input_shape = node.in_port(0).data.get_shape()
            assert node.has_valid('axis'), 'Incorrect axis value for the node {}'.format(node_name)
            axis = node.axis

            mvn = create_op_node_with_second_input(graph, MVN, int64_array([axis]),
                                                   dict(eps=node.epsilon, name=node_name + '/LayerNorm/MVN_',
                                                        across_channels=1, normalize_variance=1, eps_mode='inside_sqrt'))

            mul = Mul(graph, {'name': node_name + '/LayerNorm/mul_'}).create_node()
            add = Add(graph, {'name': mul.name + '/LayerNorm/add_'}).create_node()

            node.in_port(0).get_connection().set_destination(mvn.in_port(0))
            node.in_port(1).get_connection().set_destination(mul.in_port(1))
            node.in_port(2).get_connection().set_destination(add.in_port(1))

            mvn.out_port(0).connect(mul.in_port(0))
            mul.out_port(0).connect(add.in_port(0))
            node.out_port(0).get_connection().set_source(add.out_port(0))

            # MXNet LayerNorm gamma and beta attributes are 1D tensors with shape = [input_shape[axis]]
            # We have to unsqueeze values for Mul and Add operations to avoid shapes incompatibility problems
            # if axis != -1
            canonical_axis = get_canonical_axis_index(input_shape, axis)
            unsqueeze_value = []
            for idx, val in enumerate(input_shape):
                if idx != canonical_axis:
                    unsqueeze_value.append(idx)

            mul_const_unsqueeze = create_op_node_with_second_input(graph, Unsqueeze,
                                                                   int64_array(unsqueeze_value),
                                                                   dict(name=mul.name + '/Unsqueeze',
                                                                        override_output_shape=True))
            add_const_unsqueeze = create_op_node_with_second_input(graph, Unsqueeze,
                                                                   int64_array(unsqueeze_value),
                                                                   dict(name=add.name + '/Unsqueeze',
                                                                        override_output_shape=True))

            mul.in_port(1).get_connection().insert_node(mul_const_unsqueeze)
            add.in_port(1).get_connection().insert_node(add_const_unsqueeze)

            rename_nodes([(node, node_name + '/ShouldBeDeleted'), (add, node_name)])
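
The unsqueeze axes are every axis except the canonical normalization axis, so the 1D gamma/beta broadcast against the full input shape; in NumPy terms (shapes illustrative):

import numpy as np

input_shape, axis = (2, 5, 7), 1
canonical_axis = axis + len(input_shape) if axis < 0 else axis
unsqueeze_value = [i for i in range(len(input_shape)) if i != canonical_axis]
gamma = np.ones(input_shape[canonical_axis])
gamma = np.expand_dims(gamma, tuple(unsqueeze_value))   # shape (1, 5, 1)
assert (np.zeros(input_shape) * gamma).shape == input_shape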
Example #20
def get_canonical_axis_index_node(rank: Node, axis: int) -> Node:
    """
    Returns positive axis value

    :param rank: the node of 0D output shape to get rank of tensor from
    :param axis: integer value from [-rank; rank - 1]
    :return: node producing positive integer value of axis
    """
    graph = rank.graph
    name = rank.soft_get('name', rank.id)
    if axis < 0:
        axis = Const(graph, {
            'name': name + '/negative_axis',
            'value': int64_array(axis)
        }).create_node()
        add = Add(graph, {'name': name + '/positive_axis'}).create_node()
        rank.out_port(0).connect(add.in_port(0))
        axis.out_port(0).connect(add.in_port(1))
        return add
    else:
        return Const(graph, {
            'name': name + '/positive_axis',
            'value': int64_array(axis)
        }).create_node()
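
For a negative axis the subgraph adds the rank to it; the scalar equivalent of the two branches:

def canonical_axis(axis, rank):
    return axis + rank if axis < 0 else axis   # Add node vs. plain positive Const

assert canonical_axis(-1, 4) == 3 and canonical_axis(2, 4) == 2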
Example #21
    def replace_sub_graph(self, graph: Graph, match: dict):
        # This replacer replaces the ImageScaler operation with a Mul->Add sequence
        # It also checks whether the weights and biases are trivial and skips the corresponding nodes
        op = match['op']

        # Check that weights and biases are not useless
        has_bias, has_weights = True, True
        if all([x == 1 for x in np.nditer(op.scale)]):
            has_weights = False
        if all([x == 0 for x in np.nditer(op.bias)]):
            has_bias = False

        assert len(op.in_ports()) == 1

        last_port = op.in_port(0).get_source()

        # Create Mul & Add nodes
        if has_weights:
            mul_weights = Const(graph,
                                dict(value=op.scale,
                                     shape=op.scale.shape)).create_node()
            mul_op = Mul(graph, dict(name=op.id + '/mul_')).create_node()
            op.in_port(0).get_connection().set_destination(mul_op.in_port(0))
            mul_weights.out_port(0).connect(mul_op.in_port(1))
            last_port = mul_op.out_port(0)

        if has_bias:
            add_bias = Const(graph, dict(value=op.bias,
                                         shape=op.bias.shape)).create_node()
            add_op = Add(graph, dict(name=op.id + '/add_')).create_node()
            last_port.get_connection().set_destination(add_op.in_port(0))
            add_bias.out_port(0).connect(add_op.in_port(1))
            last_port = add_op.out_port(0)

        op.in_port(0).disconnect()
        op.out_port(0).get_connection().set_source(last_port)
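
The whole replacement computes y = x * scale + bias, skipping either node when its constant is the identity; as plain NumPy (values illustrative):

import numpy as np

x = np.ones((1, 3, 2, 2))
scale, bias = np.float32(0.5), np.zeros((3, 1, 1))
has_weights = not np.all(scale == 1)     # mirrors the usefulness checks above
has_bias = not np.all(bias == 0)
y = (x * scale if has_weights else x) + (bias if has_bias else 0)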
Example #22
    def replace_op(self, graph: Graph, node: Node):
        axis = Const(graph, {'value': int64_array([-1])}).create_node()
        mvn = MVN(
            graph,
            dict(name=node.name + '/mvn',
                 eps=node.module.eps,
                 normalize_variance=True,
                 eps_mode='inside_sqrt')).create_node([node.in_node(0), axis])

        weight = node.module.weight.detach().numpy()
        bias = node.module.bias.detach().numpy()

        w = Const(graph, {'value': weight}).create_node()
        b = Const(graph, {'value': bias}).create_node()
        mul = Mul(graph, dict(name=node.name + '/mul')).create_node([mvn, w])
        add = Add(graph, dict(name=node.name + '/add')).create_node([mul, b])
        return [add.id]
Example #23
    def replace_op(self, graph: Graph, node: Node):
        mean = node.module.running_mean.detach().numpy()
        var = node.module.running_var.detach().numpy()
        weight = node.module.weight.detach().numpy()
        bias = node.module.bias.detach().numpy()

        w = weight / np.sqrt(var + node.module.eps)
        b = bias - w * mean

        shape = np.ones(node.module.dims, dtype=np.int32)
        shape[1] = -1  # channels

        w = Const(graph, {'value': w.reshape(shape)}).create_node()
        b = Const(graph, {'value': b.reshape(shape)}).create_node()
        mul = Mul(graph, dict(name=node.name + '/mul')).create_node(
            [node.in_node(0), w])
        add = Add(graph, dict(name=node.name + '/add')).create_node([mul, b])
        return [add.id]
Example #24
    def find_and_replace_pattern(self, graph: Graph):
        for node in graph.get_op_nodes(op='Gemm'):
            name = node.soft_get('name', node.id)
            node_output_port = node.out_port(0)
            if node.has_valid('alpha') and not math.isclose(node.alpha, 1):
                mul_alpha = create_op_with_const_inputs(
                    graph, Mul, {1: mo_array(node.alpha)}, {
                        'name': name + '/Alpha',
                        'can_be_scaleshift': False
                    })
                node_output_port.get_connection().insert_node(mul_alpha)
                node_output_port = mul_alpha.out_port(0)
                del node['alpha']

            if node.is_in_port_connected(2):
                # biases normalization
                bias_node = Add(graph, {
                    'name': name + '/Bias_',
                    'can_be_scaleshift': False
                }).create_node()
                without_biases_node_name = name + '/WithoutBiases'
                rename_nodes([(node, without_biases_node_name),
                              (bias_node, name)])
                node_output_port.get_connection().set_source(
                    bias_node.out_port(0))
                node.in_port(2).get_connection().set_destination(
                    bias_node.in_port(1))
                node_output_port.connect(bias_node.in_port(0))
                if node.has_valid('beta') and not math.isclose(node.beta, 1):
                    bias_node.insert_op_on_input_port(in_port_idx=1,
                                                      new_op_class=Mul,
                                                      value=mo_array(
                                                          node.beta),
                                                      new_op_attrs={
                                                          'name':
                                                          name + '/Beta',
                                                          'can_be_scaleshift':
                                                          False
                                                      })
                    del node['beta']

            MatMul.update_node_stat(
                node, {
                    'transpose_a': node.has_and_set('transpose_a'),
                    'transpose_b': node.has_and_set('transpose_b'),
                })
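
Gemm decomposes as Y = alpha * (A @ B) + beta * C, with the alpha/beta Muls inserted only when those scalars differ from 1; a NumPy restatement (shapes illustrative):

import numpy as np

A, B, C = np.ones((2, 3)), np.ones((3, 4)), np.ones((1, 4))
alpha, beta = 2.0, 0.5
Y = alpha * (A @ B) + beta * C    # '/Alpha' Mul, MatMul, '/Beta' Mul, bias Add
assert Y.shape == (2, 4)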
Example #25
    def replace_op(self, graph: Graph, node: Node):
        node_name = node.soft_get('name', node.id)
        const_dtype = np.float32
        if node.has_valid('data_type'):
            const_dtype = node.data_type
        const = Const(graph, {'value': mo_array([1], dtype=const_dtype)}).create_node()
        add = Add(graph, {'name': node.name + '/Add_'}).create_node()
        log = Log(graph, {'name': node.name + '/Log_'}).create_node()

        # Connect nodes: input -> Add -> Log
        const.out_port(0).connect(add.in_port(0))
        node.in_port(0).get_connection().set_destination(add.in_port(1))
        add.out_port(0).connect(log.in_port(0))
        rename_nodes([(node, node_name + '/delete'), (log, node_name)])

        # The "explicit" version of the return value is: [(out_node.id, 0)])
        return [log.id]
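
The replacement computes log(1 + x) as an Add with a one-constant followed by Log; NumPy confirms this matches log1p for well-conditioned inputs:

import numpy as np

x = np.array([0.0, 0.5, 2.0])
assert np.allclose(np.log(1.0 + x), np.log1p(x))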
Example #26
    def replace_op(self, graph: Graph, node: Node):
        ss_node = create_op_with_const_inputs(graph, Split, {1: int64_array(1)}, {'name': 'Split_eltwise_' + node.name,
                                                                                  'num_splits': node['num_inputs']})

        inp = node.get_inputs()
        in_node = inp[0][0]
        edge_attrs = inp[0][1]
        graph.add_edge(in_node, ss_node.id, **edge_attrs)
        if ss_node.num_splits == 2:
            if node['operation'] == 'mul':
                eltwise_node = Mul(graph, attrs={'name': 'Eltwise_' + node.name}).create_node()
            elif node['operation'] == 'sum':
                eltwise_node = Add(graph, attrs={'name': 'Eltwise_' + node.name}).create_node()
            else:
                raise Error('Error on replacing Kaldi eltwise: unknown type ' + node['operation'])
        elif ss_node.num_splits > 2:
            eltwise_node = EltwiseN(graph, attrs={'name': 'Eltwise_' + node.name,
                                                  'operation': node['operation']}).create_node()
        else:
            raise Error('Error on replacing Kaldi eltwise')
        for i in range(ss_node.num_splits):
            ss_node.out_port(i).get_connection().set_destination(eltwise_node.in_port(i))
        return [eltwise_node.id]
Example #27
    def replace_sub_graph(self, graph: Graph, match: dict):
        tf_slice_node = match['op']
        slice_name = tf_slice_node.soft_get('name', tf_slice_node.id)
        slice_node = Slice(graph).create_node()
        rename_nodes([(tf_slice_node, slice_name + '/to_be_removed'),
                      (slice_node, slice_name)])
        ends_node = Add(graph, {'name': slice_name + '/ends'}).create_node()

        # reconnect input, begin, and size from TFSlice to the subgraph with Slice
        tf_slice_node.in_port(0).get_connection().set_destination(
            slice_node.in_port(0))
        tf_slice_node.in_port(1).get_connection().set_destination(
            slice_node.in_port(1))
        tf_slice_node.in_port(2).get_connection().set_destination(
            ends_node.in_port(0))
        slice_node.in_port(1).get_connection().add_destination(
            ends_node.in_port(1))

        max_ends = Shape(graph, {
            'name': slice_name + '/ShapeOf'
        }).create_node()
        slice_node.in_port(0).get_connection().add_destination(
            max_ends.in_port(0))

        # check if size[i] == -1; the comparison is applied element-wise: len(size) == len(begin) == input_rank
        where_max_ends_is_needed = create_op_with_const_inputs(
            graph, Equal, {0: int64_array(-1)},
            {'name': slice_name + '/where_max_ends_is_needed'})
        ends_node.in_port(0).get_connection().add_destination(
            where_max_ends_is_needed.in_port(1))
        # select requires equal dtypes, need to convert ends to I64
        ends_casted_to_i64 = Cast(graph, {
            'name': slice_name + '/CastToI64',
            'dst_type': np.int64
        }).create_node([ends_node])
        # if size[i] == -1 then take max_ends values
        correct_ends = Select(graph, {
            'name': slice_name + '/chosen_ends'
        }).create_node(
            [where_max_ends_is_needed, max_ends, ends_casted_to_i64])
        correct_ends.out_port(0).connect(slice_node.in_port(2))

        tf_slice_node.out_port(0).get_connection().set_source(
            slice_node.out_port(0))
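
TFSlice's size input becomes Slice's ends via ends = begin + size, except where size == -1, which means 'up to the end of the dim' and is replaced by the input shape; in NumPy:

import numpy as np

input_shape = np.array([8, 10, 3])        # max_ends (ShapeOf of the input)
begin = np.array([0, 2, 0])
size = np.array([8, -1, 3])
ends = begin + size                       # the '/ends' Add node
ends = np.where(size == -1, input_shape, ends).astype(np.int64)   # Equal + Select
assert list(ends) == [8, 10, 3]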
Example #28
    def replace_sub_graph(self, graph: Graph, match: dict):
        node = match['op']

        if 1 not in node.in_ports() or node.in_port(1).disconnected():

            if node.has_valid('factor') and not node.has_valid('width') and not node.has_valid('height'):
                factor = Const(graph, {'value': np.array(node.factor)}).create_node()

                shape = Shape(graph, {'name': node.name + '/shape'}).create_node()

                begin = Const(graph, {'value': np.array([2])}).create_node()
                end = Const(graph, {'value': np.array([4])}).create_node()
                stride = Const(graph, {'value': np.array([1])}).create_node()
                ss = StridedSlice(graph, {'name': node.name + '/ss_0_port', 'begin_mask': np.array([1]),
                                          'end_mask': np.array([0]), 'new_axis_mask': np.array([0]),
                                          'shrink_axis_mask': np.array([0]),
                                          'ellipsis_mask': np.array([0])}).create_node()

                mul = Mul(graph, {'name': node.name + '/factor_mul_'}).create_node()

                source = node.in_port(0).get_connection().get_source()
                source.connect(shape.in_port(0))
                shape.out_port(0).connect(ss.in_port(0))
                begin.out_port(0).connect(ss.in_port(1))
                end.out_port(0).connect(ss.in_port(2))
                stride.out_port(0).connect(ss.in_port(3))
                ss.out_port(0).connect(mul.in_port(0))
                factor.out_port(0).connect(mul.in_port(1))

                node.add_input_port(1, skip_if_exist=True)
                assert node.in_port(1).disconnected()
                mul.out_port(0).connect(node.in_port(1))

            else:
                shape = Shape(graph, {'name': node.name + '/shape'}).create_node()

                begin = Const(graph, {'value': np.array([2])}).create_node()
                end = Const(graph, {'value': np.array([4])}).create_node()
                stride = Const(graph, {'value': np.array([1])}).create_node()
                ss = StridedSlice(graph, {'name': node.name + '/ss_0_port', 'begin_mask': np.array([1]),
                                          'end_mask': np.array([0]), 'new_axis_mask': np.array([0]),
                                          'shrink_axis_mask': np.array([0]),
                                          'ellipsis_mask': np.array([0])}).create_node()

                source = node.in_port(0).get_connection().get_source()
                source.connect(shape.in_port(0))
                shape.out_port(0).connect(ss.in_port(0))
                begin.out_port(0).connect(ss.in_port(1))
                end.out_port(0).connect(ss.in_port(2))
                stride.out_port(0).connect(ss.in_port(3))

                pads_value = node.pads_begin + node.pads_end
                pads_const = Const(graph, {'value': np.array(pads_value)}).create_node()
                add = Add(graph, {'name': node.name + '/pad_add'}).create_node()
                ss.out_port(0).connect(add.in_port(0))
                add.in_port(1).connect(pads_const.out_port(0))

                if node.soft_get('shrink_factor') != 1 and node.soft_get('zoom_factor') == 1:
                    shrink_factor = node.shrink_factor
                    if shrink_factor < 1:
                        log.error('Shrink factor should be positive in node {}'.format(node.id))
                        return None

                    const = Const(graph, {'name': node.name + '/pre_shrink_sub_const',
                                          'value': np.array(-1)}).create_node()
                    sub = Add(graph, {'name': node.name + '/pre_shrink_sub'}).create_node()
                    add.out_port(0).connect(sub.in_port(0))
                    sub.in_port(1).connect(const.out_port(0))

                    const = Const(graph, {'value': np.array(1 / shrink_factor),
                                          'name': node.name + '/shrink_factor_div_const'}).create_node()
                    div = Mul(graph, {'name': node.name + '/shrink_factor_div'}).create_node()
                    sub.out_port(0).connect(div.in_port(0))
                    div.in_port(1).connect(const.out_port(0))

                    const = Const(graph, {'name': node.name + '/shrink_factor_add_one_const', 'value': np.array(1)
                                          }).create_node()
                    add = Add(graph, {'name': node.name + '/shrink_factor_add_one'}).create_node()
                    div.out_port(0).connect(add.in_port(0))
                    const.out_port(0).connect(add.in_port(1))

                    node.add_input_port(1, skip_if_exist=True)
                    assert node.in_port(1).disconnected()
                    add.out_port(0).connect(node.in_port(1))

                elif node.soft_get('shrink_factor') == 1 and node.soft_get('zoom_factor') != 1:
                    zoom_factor = node.zoom_factor
                    if zoom_factor < 1:
                        log.error('Zoom factor should be positive in node {}'.format(node.id))
                        return None

                    node['debug_message'] = 'Interpolate layer replacer may be wrong, please, try to update it in the' \
                                            ' file (openvino/tools/mo/front/InterpolateNormalizer.py at the line {}).' \
                                            ''.format(inspect.currentframe().f_lineno) + refer_to_faq_msg(100)

                    # Reshape methods can be different in some cases
                    # Commented out section represents reshape that used in deeplab-caffe
                    # Uncomment the following lines, if your model was trained with deeplab-caffe
                    # or have the same reshape method
                    # const = Const(graph, {'value': np.array(-1),
                    #                       'name': node.name + 'zoom_factor_deeplab-caffe_sub_const'}).create_node()
                    # sub = Add(graph, {'name': node.name + 'zoom_factor_deeplab-caffe_sub'}).create_node()
                    # add.out_port(0).connect(sub.in_port(0))
                    # const.out_port(0).connect(sub.in_port(1))
                    #
                    # const = Const(graph, {'value': np.array(zoom_factor - 1),
                    #                       'name': node.name + 'zoom_factor_deeplab-caffe_mul_const'}).create_node()
                    # mul = Mul(graph, {'name': node.name + 'zoom_factor_deeplab-caffe_mul'}).create_node()
                    # sub.out_port(0).connect(mul.in_port(0))
                    # const.out_port(0).connect(mul.in_port(1))
                    #
                    # sum = Add(graph, {'name': node.name + 'zoom_factor_deeplab-caffe_sum'}).create_node()
                    # add.out_port(0).connect(sum.in_port(0))
                    # mul.out_port(0).connect(sum.in_port(1))
                    #
                    # node.add_input_port(1, skip_if_exist=True)
                    # assert node.in_port(1).disconnected()
                    # sum.out_port(0).connect(node.in_port(1))

                    # Comment out the following lines if you use the reshape method from previous section
                    const = Const(graph, {'value': np.array(zoom_factor),
                                          'name': node.name + '/zoom_factor_mul_const'}).create_node()
                    mul = Mul(graph, {'name': node.name + '/zoom_factor_mul'}).create_node()

                    add.out_port(0).connect(mul.in_port(0))
                    const.out_port(0).connect(mul.in_port(1))

                    node.add_input_port(1, skip_if_exist=True)
                    assert node.in_port(1).disconnected()
                    mul.out_port(0).connect(node.in_port(1))

                elif node.soft_get('width') != 0 and node.soft_get('height') != 0:
                    const = Const(graph, {'value': np.array([node.height, node.width])}).create_node()
                    node.add_input_port(1, skip_if_exist=True)
                    assert node.in_port(1).disconnected()
                    const.out_port(0).connect(node.in_port(1))

                elif node.soft_get('shrink_factor') != 1 and node.soft_get('zoom_factor') != 1:
                    shrink_factor = node.shrink_factor
                    zoom_factor = node.zoom_factor
                    if shrink_factor < 1:
                        log.error('Shrink factor should be positive in node {}'.format(node.id))
                        return None
                    if zoom_factor < 1:
                        log.error('Zoom factor should be positive in node {}'.format(node.id))
                        return None

                    const = Const(graph, {'value': np.array(-1)}).create_node()
                    sub = Add(graph, {'name': node.name + '/shrink_zoom_factor_sub'}).create_node()
                    add.out_port(0).connect(sub.in_port(0))
                    const.out_port(0).connect(sub.in_port(1))

                    const = Const(graph, {'value': np.array(1 / (shrink_factor + 1))}).create_node()
                    div = Mul(graph, {'name': node.name + '/shrink_factor_div'}).create_node()
                    sub.out_port(0).connect(div.in_port(0))
                    const.out_port(0).connect(div.in_port(1))

                    const = Const(graph, {'value': np.array(-1),
                                          'name': node.name + 'shrink_zoom_factor_sum_const'}).create_node()
                    sum = Add(graph, {'name': node.name + '/shrink_zoom_factor_sum'}).create_node()
                    div.out_port(0).connect(sum.in_port(0))
                    const.out_port(0).connect(sum.in_port(1))

                    const = Const(graph, {'value': np.array(zoom_factor - 1)}).create_node()
                    mul = Mul(graph, {'name': node.name + '/zoom_factor_mul'}).create_node()
                    sum.out_port(0).connect(mul.in_port(0))
                    const.out_port(0).connect(mul.in_port(1))

                    sum = Add(graph, {'name': node.name + '/final_shrink_zoom_factor_sum'}).create_node()
                    div.out_port(0).connect(sum.in_port(0))
                    mul.out_port(0).connect(sum.in_port(1))

                    node.add_input_port(1, skip_if_exist=True)
                    assert node.in_port(1).disconnected()
                    sum.out_port(0).connect(node.in_port(1))
        else:
            if node.soft_get('fw') == 'caffe':
                shape = Shape(graph, {'name': node.name + '/shape'}).create_node()

                begin = Const(graph, {'value': np.array([2])}).create_node()
                end = Const(graph, {'value': np.array([4])}).create_node()
                stride = Const(graph, {'value': np.array([1])}).create_node()
                ss = StridedSlice(graph, {'name': node.name + '/ss_0_port', 'begin_mask': np.array([1]),
                                          'end_mask': np.array([0]), 'new_axis_mask': np.array([0]),
                                          'shrink_axis_mask': np.array([0]),
                                          'ellipsis_mask': np.array([0])}).create_node()

                source = node.in_port(1).get_connection().get_source()
                node.in_port(1).disconnect()
                source.connect(shape.in_port(0))
                shape.out_port(0).connect(ss.in_port(0))
                begin.out_port(0).connect(ss.in_port(1))
                end.out_port(0).connect(ss.in_port(2))
                stride.out_port(0).connect(ss.in_port(3))
                ss.out_port(0).connect(node.in_port(1))
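
For the shrink-only branch the nodes above assemble out = (in - 1) * (1 / shrink_factor) + 1; a scalar sketch of that arithmetic:

def shrink_out_dim(dim, shrink_factor):
    return (dim - 1) * (1 / shrink_factor) + 1   # pre_shrink_sub, div, add_one

assert shrink_out_dim(9, 2) == 5.0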
Example #29
    def extract(cls, node: Node):
        axis = onnx_attr(node, 'axis', 'i', default=None)
        Add.update_node_stat(node, {'axis': axis})
        return cls.enabled
Example #30
    def replace_pattern(self, graph: Graph, match: Dict[str, Node]):
        group_norm_node = match['op']
        group_norm_num_input_dims = len(group_norm_node.in_port(0).data.get_shape())

        # node computing initial GroupNorm input shape
        initial_shape_op_node = Shape(graph, {'name': group_norm_node.name + '/Shape'}).create_node()
        initial_shape_op_node.in_port(0).connect(group_norm_node.in_port(0).get_source())

        initial_shape_op_node_float = Cast(
            graph, {'name': initial_shape_op_node.name + '/to_float',
                    'dst_type': data_type_str_to_np(graph.graph['cmd_params'].data_type)}).create_node()
        initial_shape_op_node.out_port(0).connect(initial_shape_op_node_float.in_port(0))

        initial_batch_dim_node = node_to_get_batch_value(initial_shape_op_node_float)
        initial_features_dim_node = node_to_get_features_dimension_value(initial_shape_op_node_float)
        initial_spatial_dims_node_int = node_to_get_spatial_dimensions_value(initial_shape_op_node)
        initial_spatial_dims_node = Cast(
            graph, {'name': initial_spatial_dims_node_int.name + '/to_float',
                    'dst_type': data_type_str_to_np(graph.graph['cmd_params'].data_type)}).create_node()
        initial_spatial_dims_node_int.out_port(0).connect(initial_spatial_dims_node.in_port(0))

        group_size_node = Const(graph, {'value': int64_array([group_norm_node.num_groups]),
                                        'name': group_norm_node.name + '/GroupSize'}).create_node()

        # calculate "features // group_size" value
        reciprocal_group_size_node = Const(graph, {'value': np.array([1.0 / group_norm_node.num_groups]),
                                                   'name': group_norm_node.name + '/ReciprocalGroupSize'}).create_node()

        c_div_g_node = Mul(graph, {}).create_node()
        c_div_g_node.in_port(0).connect(initial_features_dim_node.out_port(0))
        c_div_g_node.in_port(1).connect(reciprocal_group_size_node.out_port(0))

        batch_mul_group_size_node = Mul(graph, {}).create_node()
        batch_mul_group_size_node.in_port(0).connect(initial_batch_dim_node.out_port(0))
        batch_mul_group_size_node.in_port(1).connect(group_size_node.out_port(0))

        # create new node which concatenates several dims to one
        new_shape_node_float = new_shape_node_from_shape_nodes([batch_mul_group_size_node, c_div_g_node,
                                                                initial_spatial_dims_node])
        new_shape_node = Cast(graph,
                              {'name': new_shape_node_float.name + '/to_int64', 'dst_type': np.int64}).create_node()
        new_shape_node_float.out_port(0).connect(new_shape_node.in_port(0))

        reshape_for_mvn_node = Reshape(graph, {}).create_node()

        group_norm_node.in_port(0).get_connection().set_destination(reshape_for_mvn_node.in_port(0))
        reshape_for_mvn_node.in_port(1).connect(new_shape_node.out_port(0))

        # Reshape the gamma and beta constants to correct layout from [C] to [1,C], [1,C,1], [1,C,1,1] etc
        gamma_beta_shape = np.ones([group_norm_num_input_dims], dtype=np.int64)
        gamma_beta_shape[1] = -1

        gamma_value = group_norm_node.in_port(1).get_source().data.get_value()
        beta_value = group_norm_node.in_port(2).get_source().data.get_value()
        assert gamma_value is not None, 'The gamma should be constant'
        assert beta_value is not None, 'The beta should be constant'
        gamma_value = np.reshape(gamma_value, gamma_beta_shape)
        group_norm_node.in_port(1).get_source().data.set_value(gamma_value)
        beta_value = np.reshape(beta_value, gamma_beta_shape)
        group_norm_node.in_port(2).get_source().data.set_value(beta_value)

        # MVN
        mvn_node = MVN(graph, {'name': group_norm_node.name + '/MVN',
                               'normalize_variance': 1,
                               'eps': group_norm_node.eps,
                               'eps_mode': 'inside_sqrt'}).create_node()
        mvn_node.in_port(0).connect(reshape_for_mvn_node.out_port(0))

        # MVN axes
        _, rank = get_shape_and_rank_nodes_by_port(mvn_node.in_port(0).get_connection().get_source(),
                                                   return_as_a_scalar=True)
        rng = create_op_with_const_inputs(graph, Range, {0: int64_array(1), 2: int64_array(1)},
                                          {'name': group_norm_node.name + '/Range', 'output_type': np.int64})
        mvn_node.in_port(1).connect(rng.out_port(0))
        rng.in_port(1).connect(rank.out_port(0))

        # reshape to the initial shape before multiplying with gamma and adding beta
        reshape_to_initial_shape_node = Reshape(graph, {}).create_node()
        reshape_to_initial_shape_node.in_port(0).connect(mvn_node.out_port(0))
        reshape_to_initial_shape_node.in_port(1).connect(initial_shape_op_node.out_port(0))

        mul_node = Mul(graph, {'name': mvn_node.name + '/Mul'}).create_node()
        mul_node.in_port(0).connect(reshape_to_initial_shape_node.out_port(0))
        group_norm_node.in_port(1).get_connection().set_destination(mul_node.in_port(1))

        add_node = Add(graph, {'name': mul_node.name + '/Add'}).create_node()
        add_node.in_port(0).connect(mul_node.out_port(0))
        group_norm_node.in_port(2).get_connection().set_destination(add_node.in_port(1))

        group_norm_node.out_port(0).get_connection().set_source(add_node.out_port(0))
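
The core trick is reshaping [N, C, ...] to [N*G, C/G, ...] so that per-group statistics become per-sample MVN statistics, then reshaping back before gamma and beta are applied; a NumPy model (shapes illustrative):

import numpy as np

N, C, H, W, G, eps = 2, 8, 4, 4, 4, 1e-5
x = np.random.rand(N, C, H, W)

g = x.reshape(N * G, C // G, H, W)               # reshape_for_mvn
mean = g.mean(axis=(1, 2, 3), keepdims=True)     # MVN over all non-batch axes
var = g.var(axis=(1, 2, 3), keepdims=True)
normed = ((g - mean) / np.sqrt(var + eps)).reshape(N, C, H, W)

gamma = np.ones((1, C, 1, 1))                    # gamma/beta reshaped to [1, C, 1, ...]
beta = np.zeros((1, C, 1, 1))
out = normed * gamma + beta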