Example #1
def fuse_Transpose_into_Constant(g):
    """
    Fuse Transpose layers into the preceding Constant layers.

    :param g: the onnx graph
    """
    node_to_remove = []
    for node in g.node:
        if node.op_type != 'Transpose':
            continue
        prev_node = helper.find_node_by_output_name(g, node.input[0])
        if prev_node is None or prev_node.op_type != 'Constant':
            continue
        
        pre_shape, data_list = helper.constant_to_list(prev_node)
        w = np.reshape(data_list, pre_shape)
        w = w.transpose(node.attribute[0].ints)
        new_shape = w.shape
        w = w.flatten()
        
        new_tensor = onnx.helper.make_tensor(
            name=prev_node.name+'_data',
            data_type=prev_node.attribute[0].t.data_type,
            dims=new_shape,
            vals=w.tolist()
        )
        new_node = onnx.helper.make_node(
            'Constant',
            [],
            [node.output[0]],
            name=node.output[0],
            value=new_tensor
        )
        
        value_between = helper.find_value_by_name(g, prev_node.output[0])
        value_type = value_between.type.tensor_type.elem_type
        g.value_info.remove(value_between)

        g.node.extend([new_node])
        node_to_remove.append(node)
        node_to_remove.append(prev_node)
        
        if new_node.output[0] not in [i.name for i in g.value_info]:
            new_value = onnx.helper.make_tensor_value_info(
                name=new_node.output[0],
                elem_type=value_type,
                shape=new_shape
            )
            g.value_info.extend([new_value])
    
    for node in node_to_remove:
        g.node.remove(node)
    
    topological_sort(g)
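
The heart of this fusion is plain numpy: reshape the constant's flat data, apply the Transpose node's perm attribute, and flatten again; only the dims and vals of the tensor change. A minimal sketch of that data movement on toy values (no ONNX objects involved):

import numpy as np

data_list = list(range(6))           # flat payload of the Constant
pre_shape, perm = [2, 3], (1, 0)     # original dims and the Transpose perm
w = np.reshape(data_list, pre_shape).transpose(perm)
print(w.shape)                       # (3, 2): dims of the fused Constant
print(w.flatten().tolist())          # [0, 3, 1, 4, 2, 5]: its vals
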
Example #2
def replace_Sum_with_Adds(g):
    node_to_del = []

    for node in g.node:
        # Check for sum
        if node.op_type != 'Sum':
            continue
        # Check for input number
        if len(node.input) == 1:
            # If input number is 1, delete the sum node.
            following_nodes = helper.find_following_nodes_by_input_value_name(g, node.output[0])
            for following_node in following_nodes:
                modhelper.replace_node_input(following_node, node.output[0], node.input[0])
            node_to_del.append(node)
            if helper.find_value_by_name(g, node.output[0]) is not None:
                g.value_info.remove(helper.find_value_by_name(g, node.output[0]))
        elif len(node.input) == 2:
            # If input number is 2, replace it with add.
            node.op_type = 'Add'
            continue
        elif len(node.input) > 2:
            # If input number is larger than 2, replace it with n-1 add.
            input_count = len(node.input)
            # First node has 2 inputs
            first_node = onnx.helper.make_node(
                "Add",
                [node.input[0], node.input[1]],
                [node.output[0] + '_replacement_1'],
                name=node.name + '_replacement_1'
            )
            # Last node has the same output as the original sum node
            last_node = onnx.helper.make_node(
                "Add",
                [node.output[0] + '_replacement_' + str(input_count - 2), node.input[input_count - 1]],
                [node.output[0]],
                name=node.name
            )
            g.node.extend([first_node, last_node])
            for i in range(2, input_count - 1):
                new_node = onnx.helper.make_node(
                    "Add",
                    [node.output[0] + '_replacement_' + str(i - 1), node.input[i]],
                    [node.output[0] + '_replacement_' + str(i)],
                    name=node.name + '_replacement_' + str(i)
                )
                g.node.extend([new_node])
            node_to_del.append(node)
        else:
            logging.error("Sum node must have at least 1 input.")
            quit(1)

    while node_to_del:
        g.node.remove(node_to_del.pop())

    topological_sort(g)
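
For more than two inputs the pass unrolls Sum into a left-to-right chain of n-1 Adds, with intermediates named `<output>_replacement_1 ... _replacement_{n-2}` and the last Add reusing the original output name. A quick numpy sanity check of the equivalence, on hypothetical values:

import numpy as np

inputs = [np.random.rand(2, 3) for _ in range(4)]   # a 4-input Sum
acc = inputs[0] + inputs[1]                         # '<out>_replacement_1'
for x in inputs[2:-1]:                              # middle Adds
    acc = acc + x
acc = acc + inputs[-1]                              # final Add keeps the name
assert np.allclose(acc, np.sum(inputs, axis=0))
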
Example #3
def inference_split_shape(g):
    processed = False
    for node in g.node:
        if node.op_type != 'Split':
            continue

        input_val_info = helper.find_value_by_name(g, node.input[0])
        if not input_val_info:
            input_val_info = helper.find_input_by_name(g, node.input[0])
        if not input_val_info:
            continue

        _, input_shape = helper.find_size_shape_from_value(input_val_info)
        if not input_shape:
            continue

        output_val_names = list(node.output)
        output_vals = [
            helper.find_value_by_name(g, val_name)
            for val_name in output_val_names
        ]

        output_shapes = [
            helper.find_size_shape_from_value(output_val)[1]
            for output_val in output_vals
        ]
        if not any([len(s) == 0 for s in output_shapes]):
            continue

        axis = 0        # ONNX default
        split = None
        for att in node.attribute:
            if att.name == 'axis':
                axis = att.i
            elif att.name == 'split':
                split = list(att.ints)
        if split is None:
            continue

        new_output_vals = []
        for i in range(len(output_val_names)):
            new_shape = list(input_shape)
            new_shape[axis] = split[i]
            new_output_val = onnx.helper.make_tensor_value_info(
                output_val_names[i], input_val_info.type.tensor_type.elem_type,
                new_shape)
            new_output_vals.append(new_output_val)

        for val in output_vals:
            if val is not None:
                g.value_info.remove(val)
        g.value_info.extend(new_output_vals)

        processed = True

    return processed
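
The shape rule being applied is simple: every output inherits the input shape with `shape[axis]` replaced by its entry in `split`. A numpy check of that rule on assumed example values:

import numpy as np

input_shape, axis, split = [1, 12, 8], 1, [4, 4, 4]   # assumed example
x = np.zeros(input_shape)
pieces = np.split(x, np.cumsum(split)[:-1], axis=axis)
for piece, s in zip(pieces, split):
    expected = list(input_shape)
    expected[axis] = s                # the rule the pass implements
    assert list(piece.shape) == expected
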
Example #4
def inference_cov_shape(g):
    processed = False
    for node in g.node:
        if node.op_type != 'Conv':
            continue
        input_value_info = helper.find_value_by_name(g, node.input[0])
        if not input_value_info:
            input_value_info = helper.find_input_by_name(g, node.input[0])
        if not input_value_info:
            continue

        kernel_value_info = helper.find_value_by_name(g, node.input[1])
        output_value_info = helper.find_value_by_name(g, node.output[0])
        if not output_value_info:
            output_value_info = helper.find_output_by_name(g, node.output[0])

        if output_value_info and \
            helper.get_shape_from_value_info(output_value_info):
            continue

        _, kernel_shape = helper.find_size_shape_from_value(kernel_value_info)
        _, input_shape = helper.find_size_shape_from_value(input_value_info)
        if not input_shape or not kernel_shape:
            continue
        strides = helper.get_attribute_by_name(node, 'strides').ints
        pads = helper.get_attribute_by_name(node, 'pads').ints
        dilation = helper.get_attribute_by_name(node, 'dilations').ints

        # PyTorch exports may carry one-element strides/dilations; pad them.
        if len(strides) == 1:
            strides.append(strides[0])
        if len(dilation) == 1:
            dilation.append(dilation[0])

        H = math.floor((input_shape[2]+pads[0]+pads[2]-\
            dilation[0]*(kernel_shape[2]-1)-1)/strides[0]+1)
        W = math.floor((input_shape[3]+pads[1]+pads[3]-\
            dilation[1]*(kernel_shape[3]-1)-1)/strides[1]+1)
        output_shape = [input_shape[0], kernel_shape[0], H, W]

        new_output_value_info = onnx.helper.make_tensor_value_info(
            node.output[0], input_value_info.type.tensor_type.elem_type,
            output_shape)

        processed = True

        if output_value_info:
            g.value_info.remove(output_value_info)
        g.value_info.extend([new_output_value_info])

    return processed
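
The H and W computations above are the standard convolution output-size formula, floor((in + pad_begin + pad_end - dilation*(kernel-1) - 1) / stride + 1). A small standalone version with hypothetical numbers:

import math

def conv_out_dim(size, pad_begin, pad_end, kernel, stride, dilation=1):
    # floor((size + pads - dilation*(kernel-1) - 1) / stride + 1)
    return math.floor(
        (size + pad_begin + pad_end - dilation * (kernel - 1) - 1)
        / stride + 1)

# e.g. a 224x224 input, 7x7 kernel, stride 2, pads 3 -> 112
print(conv_out_dim(224, 3, 3, 7, 2))  # 112
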
Example #5
def rename_all_node_name(g):
    """
    rename all nodes:

        new_name = old_name + "_kn"

    :param g: the onnx graph
    """

    for node in g.node:
        new_node_name = node.name + "_kn"
        new_node_output0_name = node.output[0] + "_kn"

        # To keep graph output names unchanged, skip nodes that produce outputs.
        output_value_info = helper.find_output_by_name(g, node.output[0])
        if output_value_info is not None:
            continue

        # rename the input of all the following nodes
        following_nodes = helper.find_following_nodes_by_input_value_name(
            g, node.output[0])
        for following_node in following_nodes:
            replace_node_input(following_node, node.output[0],
                               new_node_output0_name)

        # rename value info
        value_info = helper.find_value_by_name(g, node.output[0])
        if value_info is not None:
            value_info.name = new_node_output0_name

        # rename node
        node.output[0] = new_node_output0_name
        node.name = new_node_name
Example #6
def replace_initializer_with_Constant(g):
    """
    Replace initializers with Constant and a corresponding value_info
    If the initializer has related input, remove it.

    :param g: the onnx graph
    """

    input_map = {i.name: i for i in g.input}
    for tensor in g.initializer:
        # Check for the initializer related input and remove it
        if tensor.name in input_map:
            value_info = input_map[tensor.name]
            g.input.remove(value_info)
        following_nodes = helper.find_nodes_by_input_name(g, tensor.name)
        for i, node in enumerate(following_nodes):
            new_name = tensor.name + "_duplicated_No" + str(
                i) if i > 0 else tensor.name
            modhelper.replace_node_input(node, tensor.name, new_name)
            new_node = onnx.helper.make_node("Constant", [], [new_name],
                                             name=new_name,
                                             value=tensor)
            # Add node to lists
            g.node.extend([new_node])

        # if value info already exists, remove it as well.
        value_info = helper.find_value_by_name(g, tensor.name)
        if value_info is not None:
            g.value_info.remove(value_info)

    # Remove original initializer
    while len(g.initializer) != 0:
        g.initializer.pop()

    topological_sort(g)
Example #7
def inference_resize_shape(g):
    for node in g.node:
        if node.op_type != 'Resize':
            continue

        output_value = helper.find_value_by_name(g, node.output[0])
        output_value = helper.find_output_by_name(
            g, node.output[0]) if output_value is None else output_value
        if output_value is not None:
            continue

        # currently, only the 4-input form is supported
        if len(node.input) == 4:  # input: X, roi, scales, sizes
            shape_node = helper.find_node_by_output_name(g, node.input[3])
            if shape_node is None or shape_node.op_type != 'Constant':
                continue

            _, shape_value = helper.constant_to_list(shape_node)
            output_value = onnx.helper.make_tensor_value_info(
                node.output[0], onnx.TensorProto.FLOAT,
                [int(v) for v in shape_value])
            g.value_info.extend([output_value])

            return True
    return False
Example #8
def replace_Squeeze_with_Reshape(g):
    """
    Replace Squeeze nodes with Reshape node.

    :param g: the input graph
    """
    node_to_remove = []
    for node in g.node:
        # Find Squeeze node
        if node.op_type != 'Squeeze':
            continue
        # Get the shape and Construct the shape
        output_value = helper.find_value_by_name(g, node.output[0])
        if output_value is None:
            output_value = helper.find_output_by_name(g, node.output[0])
        if output_value is None:
            raise RuntimeError("Cannot get shape for Squeeze")
        shape = [
            dim.dim_value for dim in output_value.type.tensor_type.shape.dim
        ]
        const_node = helper.list_to_constant(node.name + "_shape",
                                             [len(shape)], shape)
        # Construct the Reshape layer with same input, output and name.
        new_node = onnx.helper.make_node("Reshape",
                                         [node.input[0], node.name + "_shape"],
                                         node.output,
                                         name=node.name)
        # Append constructed nodes and append old node to remove_list
        g.node.extend([const_node, new_node])
        node_to_remove.append(node)
    # Remove old nodes
    for node in node_to_remove:
        g.node.remove(node)
    # Topological sort
    topological_sort(g)
Example #9
def add_nop_bn_after(g, value_names):
    """Add do-nothing BatchNormalization nodes after the given value info. It will\\
    take the given names as the inputs of the new node and replace the inputs\\
    of the following nodes.

    :param g: the graph\\
    :param value_names: a list of strings which are the names of value_info.
    """
    for value_name in value_names:
        # Find the value first
        value = helper.find_value_by_name(g, value_name)
        if value is None:
            value = helper.find_input_by_name(g, value_name)
        if value is None:
            value = helper.find_output_by_name(g, value_name)
        if value is None:
            print("Cannot find an value_info named {}".format(value_name))
            continue
        # Get the channel number from value info
        shape = helper.get_shape_from_value_info(value)
        channel = shape[1]
        # Construct 4 weights
        node_name = value_name + "_nop_bn"
        ones = [1.0] * channel
        zeros = [0.0] * channel
        scale_node = helper.list_to_constant(node_name + "_scale", [channel],
                                             ones)
        bias_node = helper.list_to_constant(node_name + "_bias", [channel],
                                            zeros)
        mean_node = helper.list_to_constant(node_name + "_mean", [channel],
                                            zeros)
        var_node = helper.list_to_constant(node_name + "_var", [channel], ones)
        # Construct BN node
        bn_node = onnx.helper.make_node("BatchNormalization", [
            value_name, scale_node.output[0], bias_node.output[0],
            mean_node.output[0], var_node.output[0]
        ], [node_name],
                                        name=node_name)
        # Reconnect the graph
        following_nodes = helper.find_following_nodes_by_input_value_name(
            g, value_name)
        if len(following_nodes) > 0:
            for following_node in following_nodes:
                replace_node_input(following_node, value_name, node_name)
        else:
            # If the node is the output, replace the output with the previous input.
            new_value = onnx.helper.make_tensor_value_info(
                node_name, value.type.tensor_type.elem_type, shape)
            output_values = []
            while len(g.output):
                output_values.append(g.output.pop())
            while output_values:
                output_value = output_values.pop()
                if output_value.name == value_name:
                    g.output.extend([new_value])
                else:
                    g.output.extend([output_value])
        # Add node to the graph
        g.node.extend([bn_node, scale_node, bias_node, mean_node, var_node])
    topological_sort(g)
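
These particular weights make the BatchNormalization a numerical no-op: with scale=1, bias=0, mean=0, var=1 it computes y = (x - 0) / sqrt(1 + eps), which differs from x only by the epsilon term. A quick sketch of that claim:

import numpy as np

x = np.random.rand(1, 8, 4, 4).astype(np.float32)   # NCHW
eps = 1e-5                                          # ONNX default epsilon
y = 1.0 * (x - 0.0) / np.sqrt(1.0 + eps) + 0.0
assert np.allclose(x, y, atol=1e-4)
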
Example #10
def polish_RESIZE_input_param_node(g, resize_node_name):
    resize_node = helper.find_node_by_output_name(g, resize_node_name)

    shape_data_node = helper.find_node_by_output_name(g, resize_node.input[3])
    shape_data = helper.constant_to_numpy(shape_data_node).astype(int)

    # handle 0 batch size which is invalid
    if shape_data[0] == 0:
        shape_data[0] = 1

    pre_node_output_value_info = helper.find_value_by_name(
        g, resize_node.input[0])
    ori_shape = np.array([
        pre_node_output_value_info.type.tensor_type.shape.dim[0].dim_value,
        pre_node_output_value_info.type.tensor_type.shape.dim[1].dim_value,
        pre_node_output_value_info.type.tensor_type.shape.dim[2].dim_value,
        pre_node_output_value_info.type.tensor_type.shape.dim[3].dim_value
    ])

    resize_node.input.remove(resize_node.input[3])

    resize_scales = np.array(shape_data / ori_shape).astype(float)
    resize_scale_node = helper.list_to_constant(
        'resize_scales_node_' + resize_node.name,
        resize_scales.shape,
        resize_scales,
        data_type=onnx.helper.TensorProto.FLOAT)

    resize_node.input[2] = resize_scale_node.name
    g.node.extend([resize_scale_node])

    other.topological_sort(g)
Example #11
def fuse_Add_into_Conv(g):
    """
    Fuse Add layers into the preceding Conv layers.

    :param g: the onnx graph
    """
    node_to_remove = []
    for node in g.node:
        if node.op_type != 'Add':
            continue
        conv_node = helper.find_node_by_output_name(g, node.input[0])
        cons_node = helper.find_node_by_output_name(g, node.input[1])
        if conv_node is None or cons_node is None:
            continue
        if conv_node.op_type != 'Conv' or cons_node.op_type != 'Constant':
            continue
        if len(conv_node.input) > 2:
            continue
        # This layer should be fused. Connect constant node into convolution node.
        add_node = node
        conv_node.input.extend([cons_node.output[0]])
        old_value = helper.find_value_by_name(g, conv_node.output[0])
        conv_node.output[0] = add_node.output[0]
        # Remove the original conv output value info
        if old_value is not None:
            g.value_info.remove(old_value)
        # Remove current node
        node_to_remove.append(add_node)
    # Apply changes to the model
    for node in node_to_remove:
        g.node.remove(node)
Example #12
def add_bn_after(prev_node):
    # Note: nested helper; `g` (the graph) and `n` (the following node)
    # are captured from the enclosing scope.
    # Get the channel number from value info
    value_name = prev_node.output[0]
    value = helper.find_value_by_name(g, value_name)
    shape = helper.get_shape_from_value_info(value)
    channel = shape[1]
    # Construct 4 weights
    node_name = value_name + "_nop_bn"
    ones = [1.0] * channel
    zeros = [0.0] * channel
    scale_node = helper.list_to_constant(node_name + "_scale", [channel], ones)
    bias_node = helper.list_to_constant(node_name + "_bias", [channel], zeros)
    mean_node = helper.list_to_constant(node_name + "_mean", [channel], zeros)
    var_node = helper.list_to_constant(node_name + "_var", [channel], ones)
    # Construct BN node
    bn_node = onnx.helper.make_node(
        "BatchNormalization",
        [value_name,
         scale_node.output[0],
         bias_node.output[0],
         mean_node.output[0],
         var_node.output[0]],
        [node_name],
        name=node_name
    )
    # Reconnect the graph
    replace_node_input(n, value_name, node_name)
    # Add node to the graph
    g.node.extend([bn_node, scale_node, bias_node, mean_node, var_node])
Example #13
def change_first_conv_from_bgr_to_rgb(m):
    """For input channel format BGR model, use this function to change the first
    conv weight to adapt the input into RGB.

    :param m: the model proto
    """
    # Check for first node.
    g = m.graph
    input_name = g.input[0].name
    first_nodes = helper.find_following_nodes_by_input_value_name(
        g, input_name)
    if len(first_nodes) > 1:
        return False
    first_node = first_nodes[0]
    # Now we have the first node. Check this first node.
    if first_node.op_type != 'Conv':
        return False
    weight_value = helper.find_value_by_name(g, first_node.input[1])
    weight_shape = helper.get_shape_from_value_info(weight_value)
    if weight_shape[1] != 3:
        return False
    # Do weight shuffle
    weight_node = helper.find_node_by_output_name(g, weight_value.name)
    weight_np = helper.constant_to_numpy(weight_node)
    b_channel = np.expand_dims(weight_np[:, 0, :, :], axis=1)
    g_channel = np.expand_dims(weight_np[:, 1, :, :], axis=1)
    r_channel = np.expand_dims(weight_np[:, 2, :, :], axis=1)
    new_np = np.concatenate((r_channel, g_channel, b_channel), axis=1)
    new_node = helper.numpy_to_constant(weight_value.name, new_np)
    # Replace the weight and topological sort
    g.node.remove(weight_node)
    g.node.extend([new_node])
    other.topological_sort(g)
    return True
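
The shuffle is valid because convolution is linear in its input channels: permuting the weight's input-channel axis together with the image channels leaves the output unchanged, so permuting only the weights turns a BGR-input network into an RGB-input one. A 1x1-conv sketch of that identity, on toy shapes:

import numpy as np

w = np.random.rand(16, 3)            # out_channels x in_channels, 1x1 kernel
x = np.random.rand(3, 5, 5)          # CHW input
perm = [2, 1, 0]                     # BGR <-> RGB
y_bgr = np.einsum('oc,chw->ohw', w, x)
y_rgb = np.einsum('oc,chw->ohw', w[:, perm], x[perm])
assert np.allclose(y_bgr, y_rgb)     # same output for the permuted pair
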
Example #14
def replace_Reshape_with_Flatten(g):
    """
    Replace Reshape nodes with Flatten nodes where applicable.

    :param g: the onnx graph
    """
    node_to_remove = []
    for node in g.node:
        if node.op_type != 'Reshape':
            continue
        found = False
        # Only replace a Reshape that is directly followed by a Gemm
        for i in g.node:
            if len(i.input) == 0 or i.input[0] != node.output[0]:
                continue
            if i.op_type == 'Gemm':
                found = True
                break
        if not found:
            continue
        shape_node = helper.find_node_by_output_name(g, node.input[1])
        if shape_node is None or shape_node.op_type != 'Constant':
            continue
        # Replace it
        node.op_type = "Flatten"
        for _ in range(len(node.attribute)):
            node.attribute.pop()
        shape_value = helper.find_value_by_name(g, shape_node.output[0])
        node.input.pop()
        node_to_remove.append(shape_node)
        if shape_value is not None:
            g.value_info.remove(shape_value)
    for node in node_to_remove:
        g.node.remove(node)
Example #15
def fuse_conv_and_add_into_conv(g):
    node_to_del = []
    for node in g.node:
        if node.op_type != 'Add':
            continue
        add_node = node
        add_const = helper.find_node_by_output_name(g, add_node.input[1])
        if not add_const or add_const.op_type != 'Constant':
            continue

        conv_node = helper.find_node_by_output_name(g, add_node.input[0])
        if not conv_node or conv_node.op_type != 'Conv':
            continue
        weight_node = helper.find_node_by_output_name(g, conv_node.input[1])
        if not weight_node or weight_node.op_type != 'Constant':
            continue

        m_dim = weight_node.attribute[0].t.dims[0]
        if add_const.attribute[0].t.dims != [1, m_dim, 1, 1]:
            continue
        for _ in range(3):
            add_const.attribute[0].t.dims.remove(1)

        conv_node.input.extend([add_const.output[0]])
        conv_node.output.pop()
        conv_node.output.extend([add_node.output[0]])

        node_to_del.append(add_node)

        old_add_const_val_info = helper.find_value_by_name(
            g, add_node.input[1])
        old_conv_output_val_info = helper.find_value_by_name(
            g, conv_node.output[0])
        if old_add_const_val_info:
            g.value_info.remove(old_add_const_val_info)
        if old_conv_output_val_info:
            g.value_info.remove(old_conv_output_val_info)

        new_add_const_val_info = onnx.helper.make_tensor_value_info(
            add_const.output[0], add_const.attribute[0].t.data_type,
            add_const.attribute[0].t.dims)
        g.value_info.extend([new_add_const_val_info])

    while node_to_del:
        g.node.remove(node_to_del.pop())

    topological_sort(g)
Example #16
def add_nop_conv_after(g, value_names):
    """Add do-nothing depthwise Conv nodes after the given value info. It will\\
    take the given names as the inputs of the new node and replace the inputs\\
    of the following nodes.

    :param g: the graph\\
    :param value_names: a list of strings which are the names of value_info.
    """
    for value_name in value_names:
        # Find the value first
        value = helper.find_value_by_name(g, value_name)
        if value is None:
            value = helper.find_input_by_name(g, value_name)
        if value is None:
            value = helper.find_output_by_name(g, value_name)
        if value is None:
            print("Cannot find an value_info named {}".format(value_name))
            continue
        # Get the channel number from value info
        shape = helper.get_shape_from_value_info(value)
        channel = shape[1]
        # Construct 4 weights
        node_name = value_name + "_nop_conv"
        ones = [1.0] * channel
        weight_node = helper.list_to_constant(node_name + "_weight",
                                              [channel, 1, 1, 1], ones)
        # Construct Conv node
        conv_node = onnx.helper.make_node("Conv",
                                          [value_name, weight_node.output[0]],
                                          [node_name],
                                          name=node_name,
                                          dilations=[1, 1],
                                          group=channel,
                                          kernel_shape=[1, 1],
                                          pads=[0, 0, 0, 0],
                                          strides=[1, 1])
        # Reconnect the graph
        following_nodes = helper.find_following_nodes_by_input_value_name(
            g, value_name)
        if len(following_nodes) > 0:
            for following_node in following_nodes:
                replace_node_input(following_node, value_name, node_name)
        else:
            # If the node is the output, replace the output with the previous input.
            new_value = onnx.helper.make_tensor_value_info(
                node_name, value.type.tensor_type.elem_type, shape)
            output_values = []
            while len(g.output):
                output_values.append(g.output.pop())
            while output_values:
                output_value = output_values.pop()
                if output_value.name == value_name:
                    g.output.extend([new_value])
                else:
                    g.output.extend([output_value])
        # Add node to the graph
        g.node.extend([conv_node, weight_node])
    topological_sort(g)
Example #17
def replace_depthwise_1x1_with_bn(g):
    """Replace 1x1 DepthwiseConv node into BN node if applicable.

    :param g: the onnx graph
    """
    node_to_remove = []
    for node in g.node:
        # Check op_type
        if node.op_type != 'Conv':
            continue
        # Check attributes
        attr_map = {attr.name: attr for attr in node.attribute}
        if "group" not in attr_map or attr_map["group"].i == 1:
            continue
        if attr_map["kernel_shape"].ints[0] != 1 or attr_map["kernel_shape"].ints[1] != 1:
            continue
        if "pads" in attr_map and sum(attr_map["pads"].ints) != 0:
            continue
        # Check scale
        scale_node = helper.find_node_by_output_name(g, node.input[1])
        if scale_node is None or scale_node.attribute[0].t.dims[1] != 1:
            continue
        scale_node.attribute[0].t.dims.pop()
        scale_node.attribute[0].t.dims.pop()
        scale_node.attribute[0].t.dims.pop()
        scale_info = helper.find_value_by_name(g, node.input[1])
        if scale_info is not None:
            scale_info.type.tensor_type.shape.dim.pop()
            scale_info.type.tensor_type.shape.dim.pop()
            scale_info.type.tensor_type.shape.dim.pop()
        # Check bias
        if len(node.input) == 3:
            bias_name = node.input[2]
        else:
            bias_name = node.name + "_bias"
            bias_node = helper.list_to_constant(bias_name, [attr_map["group"].i], [0.0] * attr_map["group"].i)
            g.node.extend([bias_node])
        # Construct mean and vars
        mean_name = node.name + "_mean"
        mean_node = helper.list_to_constant(mean_name, [attr_map["group"].i], [0.0] * attr_map["group"].i)
        var_name = node.name + "_var"
        var_node = helper.list_to_constant(var_name, [attr_map["group"].i], [1.0] * attr_map["group"].i)
        g.node.extend([mean_node, var_node])
        # Convert
        bn_node = onnx.helper.make_node(
            op_type='BatchNormalization',
            inputs=[node.input[0], node.input[1], bias_name, mean_name, var_name],
            outputs=node.output,
            name=node.name,
            epsilon=0.00001,
            momentum=0.9
            )
        g.node.extend([bn_node])
        node_to_remove.append(node)
    for node in node_to_remove:
        g.node.remove(node)
    topological_sort(g)
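
The rewrite is sound because a 1x1 Conv with group == channels computes y_c = w_c * x_c + b_c independently per channel, which is exactly BatchNormalization with scale=w, bias=b, mean=0, var=1 (up to the 1e-5 epsilon). A quick numeric confirmation:

import numpy as np

x = np.random.rand(1, 4, 3, 3)                 # NCHW
w = np.random.rand(4)                          # one 1x1 weight per channel
b = np.random.rand(4)
dw_conv = w[None, :, None, None] * x + b[None, :, None, None]
bn = w[None, :, None, None] * x / np.sqrt(1.0 + 1e-5) \
     + b[None, :, None, None]
assert np.allclose(dw_conv, bn, atol=1e-3)
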
Example #18
def add_output_to_value_info(g):
    """
    If an output is not present in value_info, copy it there.

    :param g: the onnx graph
    """
    for output in g.output:
        if helper.find_value_by_name(g, output.name) is None:
            g.value_info.extend([output])
Example #19
def inference_upsample_shape(g):
    """For onnx v1.4.1+, onnx cannot inference upsample output shape. Let's\\
    do it ourselves. This function only inference the next upsample without\\
    output shape each time.

    :param g: the graph\\
    :return: True if any Upsample shape is generated. Otherwise, False.
    """
    for node in g.node:
        if node.op_type != 'Upsample':
            continue
        output_value = helper.find_value_by_name(g, node.output[0])
        if output_value is None:
            output_value = helper.find_output_by_name(g, node.output[0])
        if output_value and helper.get_shape_from_value_info(output_value):
            continue
        # Get input shape
        input_value = helper.find_value_by_name(g, node.input[0])
        if input_value is None:
            continue
            #raise RuntimeError("Shape for {} has not been generated.".format(node.input[0]))
        if not helper.get_shape_from_value_info(input_value):
            continue
            #raise RuntimeError("Shape for {} is empty.".format(node.input[0]))
        input_shape = helper.get_shape_from_value_info(input_value)
        # Get upsample weight
        weight_node = helper.find_node_by_output_name(g, node.input[1])
        weight_shape, weight = helper.constant_to_list(weight_node)
        if len(input_shape) != weight_shape[0]:
            raise RuntimeError(
                "Unmatch input shape and weight shape: {} vs {}".format(
                    input_shape, weight_shape))
        # Calculate shape
        output_shape = list(input_shape)
        for i in range(len(output_shape)):
            output_shape[i] = int(input_shape[i] * weight[i])
        output_value = onnx.helper.make_tensor_value_info(
            node.output[0], input_value.type.tensor_type.elem_type,
            output_shape)
        g.value_info.extend([output_value])
        return True
    return False
Example #20
def replace_initializer_with_Constant(g):
    """
    Replace initializers with Constant and a corresponding value_info

    :param g: the onnx graph
    """
    # Create a set of existing node names
    node_names = set()
    for node in g.node:
        node_names.add(node.name)
    # Unused initializers should be removed
    unused_initializer = set()
    for tensor in g.initializer:
        unused_initializer.add(tensor.name)
    for node in g.node:
        for in_value in node.input:
            if in_value in unused_initializer:
                unused_initializer.remove(in_value)

    input_map = {i.name: i for i in g.input}
    for tensor in g.initializer:
        if tensor.name in unused_initializer:
            value_info = input_map.get(tensor.name)
            if value_info is not None:
                g.input.remove(value_info)
            continue
        # Convert init to a constant node
        if tensor.name not in node_names:
            new_name = tensor.name
        else:
            new_name = tensor.name + '_2'
            following_nodes = helper.find_nodes_by_input_name(g, tensor.name)
            for node in following_nodes:
                modhelper.replace_node_input(node, tensor.name, new_name)
        new_node = onnx.helper.make_node(
            "Constant",
            [],
            [new_name],
            name=new_name,
            value=tensor
        )
        # Add node to lists
        g.node.extend([new_node])
        # if a stale value info already exists, remove it first.
        old_value_info = helper.find_value_by_name(g, tensor.name)
        if old_value_info is not None:
            g.value_info.remove(old_value_info)
        # Move the matching input's value info into value_info
        value_info = input_map.get(tensor.name)
        if value_info is not None:
            g.value_info.extend([value_info])
            g.input.remove(value_info)
    # Remove original initializer
    while len(g.initializer) != 0:
        g.initializer.pop()
Example #21
def inference_resize_shape(g):
    for node in g.node:
        if node.op_type != 'Resize':
            continue

        output_value = helper.find_value_by_name(g, node.output[0])
        output_value = helper.find_output_by_name(
            g, node.output[0]) if output_value is None else output_value
        if output_value is not None:
            continue

        if len(node.input) == 4:  # input: X, roi, scales, sizes
            shape_node = helper.find_node_by_output_name(g, node.input[3])
            if shape_node is None or shape_node.op_type != 'Constant':
                continue

            _, shape_value = helper.constant_to_list(shape_node)
            output_value = onnx.helper.make_tensor_value_info(
                node.output[0], onnx.TensorProto.FLOAT,
                [int(v) for v in shape_value])
            g.value_info.extend([output_value])
            return True
        else:
            # If output shape is not given, inference from scales
            # Get the input shape
            input_value = helper.find_value_by_name(g, node.input[0])
            if input_value is None:
                continue
            shape_value = helper.get_shape_from_value_info(input_value)
            scales_node = helper.find_node_by_output_name(g, node.input[2])
            if scales_node is None or scales_node.op_type != 'Constant':
                continue
            _, scales_value = helper.constant_to_list(scales_node)
            for i in range(len(shape_value)):
                shape_value[i] *= scales_value[i]
            output_value = onnx.helper.make_tensor_value_info(
                node.output[0], onnx.TensorProto.FLOAT,
                [int(v) for v in shape_value])
            g.value_info.extend([output_value])
            return True
    return False
Example #22
def add_bn_on_skip_branch(g):
    for n in g.node:
        # Find merge node (Add)
        if n.op_type != 'Add':
            continue
        if len(n.input) != 2:
            continue
        # TODO: Still need to consider more cases
        # Check if skip branch exist
        input_node_a = helper.find_node_by_output_name(g, n.input[0])
        input_node_b = helper.find_node_by_output_name(g, n.input[1])
        if input_node_a is None or input_node_b is None:
            continue
        output_of_input_node_a = helper.find_nodes_by_input_name(
            g, input_node_a.output[0])
        output_of_input_node_b = helper.find_nodes_by_input_name(
            g, input_node_b.output[0])
        if len(output_of_input_node_a) == 1 and len(
                output_of_input_node_b) == 1:
            continue
        if len(output_of_input_node_a) == 2:
            split_node = input_node_a
        elif len(output_of_input_node_b) == 2:
            split_node = input_node_b
        else:
            continue
        # Get the channel number from value info
        value_name = split_node.output[0]
        value = helper.find_value_by_name(g, value_name)
        if value is None:
            continue
        shape = helper.get_shape_from_value_info(value)
        channel = shape[1]
        # Construct 4 weights
        node_name = value_name + "_nop_bn"
        ones = [1.0] * channel
        zeros = [0.0] * channel
        scale_node = helper.list_to_constant(node_name + "_scale", [channel],
                                             ones)
        bias_node = helper.list_to_constant(node_name + "_bias", [channel],
                                            zeros)
        mean_node = helper.list_to_constant(node_name + "_mean", [channel],
                                            zeros)
        var_node = helper.list_to_constant(node_name + "_var", [channel], ones)
        # Construct BN node
        bn_node = onnx.helper.make_node("BatchNormalization", [
            value_name, scale_node.output[0], bias_node.output[0],
            mean_node.output[0], var_node.output[0]
        ], [node_name],
                                        name=node_name)
        # Reconnect the graph
        replace_node_input(n, value_name, node_name)
        # Add node to the graph
        g.node.extend([bn_node, scale_node, bias_node, mean_node, var_node])
    topological_sort(g)
Example #23
def replace_shape_with_constant(g):
    """Replace Shape with Constant.\\
    This is the first step of reshape constant folding.

    :param g: the input graph\\
    :return: if anything modified, return true.
    """
    node_to_remove = []
    for node in g.node:
        # Find a Shape
        if node.op_type != 'Shape':
            continue
        # Check its input
        input_value = helper.find_value_by_name(g, node.input[0])
        if input_value is None:
            input_value = helper.find_input_by_name(g, node.input[0])
        if input_value is None or len(
                input_value.type.tensor_type.shape.dim) == 0:
            continue
        # Skip when any dimension is unknown, 0 or -1
        static = True
        for d in input_value.type.tensor_type.shape.dim:
            static = static and (d.dim_value > 0)
        if not static:
            continue
        # Replace it
        input_shape = [
            d.dim_value for d in input_value.type.tensor_type.shape.dim
        ]
        node_name = node.output[0]
        new_node = helper.list_to_constant(node_name, [len(input_shape)],
                                           input_shape)
        g.node.extend([new_node])
        node_to_remove.append(node)

        # if the input value_info is not used by other node
        # delete this input value_info
        val_info_used = sum(
            [input_value.name in node.input for node in g.node])
        if val_info_used == 1:
            g.value_info.remove(input_value)

    replaced = len(node_to_remove) > 0

    for node in node_to_remove:
        g.node.remove(node)

    topological_sort(g)

    return replaced
Example #24
def replace_ConstantOfShape_with_constant(g):
    """Replace Shape with Constant.\\
    This is the first step of reshape constant folding.

    :param g: the input graph\\
    :return: if anything modified, return true.
    """
    node_to_remove = []
    for node in g.node:
        # Find a ConstantOfShape
        if node.op_type != 'ConstantOfShape':
            continue
        # Check the input
        input_value = helper.find_value_by_name(g, node.input[0])
        if input_value is None:
            input_value = helper.find_input_by_name(g, node.input[0])
        if input_value is None or len(
                input_value.type.tensor_type.shape.dim) == 0:
            continue

        # Replace with a constant node
        pre_node = helper.find_node_by_output_name(g, node.input[0])
        _, target_shape = helper.constant_to_list(pre_node)

        # 'value' is a one-element tensor attribute; it defaults to float 0.
        value_attr = helper.get_attribute_by_name(node, 'value')
        if value_attr is None:
            value = 0.0
        else:
            value = onnx.numpy_helper.to_array(value_attr.t).item()

        node_name = node.output[0]
        new_node = helper.list_to_constant(node_name, [target_shape[0]],
                                           [value] * target_shape[0])

        g.node.extend([new_node])

        # remove old node
        node_to_remove.append(node)

        # delete value_info
        val_info_used = sum(
            [input_value.name in node.input for node in g.node])
        if val_info_used == 1:
            g.value_info.remove(input_value)

    replaced = len(node_to_remove) > 0

    for node in node_to_remove:
        g.node.remove(node)

    topological_sort(g)

    return replaced
Example #25
def rename_output_name(g, original_name, new_name):
    # Output
    output_value = helper.find_output_by_name(g, original_name)
    if output_value is None:
        logging.error("Cannot find output value named " + original_name)
        return
    output_value.name = new_name
    # Value Info
    value_info = helper.find_value_by_name(g, original_name)
    if value_info is not None:
        value_info.name = new_name
    # Node output
    node = helper.find_node_by_output_name(g, original_name)
    node.output[0] = new_name
    # Node input
    nodes = helper.find_nodes_by_input_name(g, original_name)
    for node in nodes:
        replace_node_input(node, original_name, new_name)
Example #26
def replace_average_pool_with_GAP(g):
    """
    Replace AveragePool nodes with GlobalAveragePool node when available.

    :param g: the input graph
    """
    node_to_remove = []
    for node in g.node:
        # Find an AveragePool layer
        if node.op_type != 'AveragePool':
            continue
        # Check attributes
        not_replace = False
        for attr in node.attribute:
            if attr.name == 'pads':
                if list(attr.ints) != [0, 0, 0, 0]:
                    not_replace = True
                    break
            if attr.name == 'kernel_shape':
                kernel_shape = list(attr.ints)
                value_info = helper.find_value_by_name(g, node.input[0])
                if value_info is None:
                    not_replace = True
                    break
                input_shape = []
                for dim in value_info.type.tensor_type.shape.dim:
                    input_shape.append(dim.dim_value)
                if input_shape[-2:] != kernel_shape:
                    not_replace = True
                    break
        if not_replace:
            continue
        # Replace it with GlobalAveragePool
        new_node = onnx.helper.make_node(
            "GlobalAveragePool",
            node.input,
            node.output,
            name=node.name
        )
        g.node.extend([new_node])
        node_to_remove.append(node)
    for node in node_to_remove:
        g.node.remove(node)
    topological_sort(g)
Example #27
def fuse_consecutive_reducemean(g):
    node_to_del = []
    for node in g.node:
        # Find consecutive ReduceMean
        if node.op_type != 'ReduceMean':
            continue
        pre_node = helper.find_node_by_output_name(g, node.input[0])
        if pre_node is None or pre_node.op_type != 'ReduceMean':
            continue
        # Check attributes
        pre_keepdims = helper.get_var_attribute_by_name(
            pre_node, 'keepdims', 'int')
        pre_axes = helper.get_list_attribute_by_name(pre_node, 'axes', 'int')
        cur_keepdims = helper.get_var_attribute_by_name(
            node, 'keepdims', 'int')
        cur_axes = helper.get_list_attribute_by_name(node, 'axes', 'int')
        if pre_keepdims != 0 or cur_keepdims != 0:
            continue
        axes = sorted(pre_axes + cur_axes)
        if axes != [2, 3]:
            continue
        # Merge two ReduceMean into GlobalAveragePool.
        new_gap_node = onnx.helper.make_node('GlobalAveragePool',
                                             [pre_node.input[0]],
                                             [node.output[0] + '_intermedia'],
                                             name=node.name + '_gap')
        new_flatten_node = onnx.helper.make_node(
            'Flatten', [node.output[0] + '_intermedia'], [node.output[0]],
            name=node.name + '_flatten',
            axis=1)

        # Clean up
        g.node.extend([new_gap_node, new_flatten_node])
        node_to_del.extend([pre_node, node])
        mid_val_info = helper.find_value_by_name(g, node.input[0])
        if mid_val_info:
            g.value_info.remove(mid_val_info)

    while node_to_del:
        node = node_to_del.pop()
        g.node.remove(node)

    topological_sort(g)
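
The pattern only matches when the two keepdims=0 reductions jointly remove the spatial axes of an NCHW tensor (e.g. axes=[3] followed by axes=[2] on the already reduced tensor), in which case the pair equals a GlobalAveragePool plus a Flatten to rank 2. Numerically:

import numpy as np

x = np.random.rand(2, 8, 5, 5)                  # NCHW
reduced = x.mean(axis=3).mean(axis=2)           # two keepdims=0 ReduceMeans
gap_flat = x.mean(axis=(2, 3)).reshape(2, -1)   # GlobalAveragePool + Flatten
assert np.allclose(reduced, gap_flat)
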
Example #28
def fuse_MatMul_and_Add_into_Gemm(g):
    """
    Fuse MatMul and Add layers into a new Gemm layers.

    :param g: the onnx graph
    :raises ValueError: MatMul must be followed by an Add node
    """
    node_to_remove = []
    node_to_add = []
    for node in g.node:
        if node.op_type != 'MatMul':
            continue
        add_node = None
        for i in g.node:
            if not i.input:
                continue
            if i.input[0] == node.output[0]:
                add_node = i
                break
        value_to_remove = helper.find_value_by_name(g, node.output[0])
        if add_node is None or value_to_remove is None or add_node.op_type != 'Add':
            continue
        input_list = list(node.input)
        input_list.append(add_node.input[1])
        new_node = onnx.helper.make_node(
            "Gemm",
            input_list,
            add_node.output,
            name=node.name,
            alpha=1.0,
            beta=1.0,
            transA=0,
            transB=0
        )
        node_to_add.append(new_node)
        node_to_remove.append(node)
        node_to_remove.append(add_node)
        g.value_info.remove(value_to_remove)
    for node in node_to_remove:
        g.node.remove(node)
    g.node.extend(node_to_add)
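
Gemm computes Y = alpha * A @ B + beta * C, so a MatMul followed by an Add of a constant is exactly a Gemm with alpha = beta = 1 and no transposes, which is what the pass emits. A one-line numpy check with hypothetical shapes:

import numpy as np

A, B = np.random.rand(4, 5), np.random.rand(5, 6)
C = np.random.rand(6)                         # the Add's other operand
assert np.allclose(A @ B + C, 1.0 * (A @ B) + 1.0 * C)  # alpha=beta=1
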
Example #29
def transpose_B_in_Gemm(g):
    """
    If transB is set in Gemm, transpose it

    :param g: the onnx graph
    """
    for node in g.node:
        if node.op_type != 'Gemm':
            continue
        do_it = False
        for attr in node.attribute:
            if attr.name == "transB":
                if attr.i == 1:
                    attr.i = 0
                    do_it = True
                    break
        if not do_it:
            continue
        # Transpose the weight and its output value
        w_node = helper.find_node_by_output_name(g, node.input[1])
        w_output = helper.find_value_by_name(g, node.input[1])
        dim_0 = w_output.type.tensor_type.shape.dim[0].dim_value
        dim_1 = w_output.type.tensor_type.shape.dim[1].dim_value
        w_output.type.tensor_type.shape.dim[0].dim_value = dim_1
        w_output.type.tensor_type.shape.dim[1].dim_value = dim_0
        w_node.attribute[0].t.dims[0] = dim_1
        w_node.attribute[0].t.dims[1] = dim_0
        if w_node.attribute[0].t.raw_data:
            raw_data = w_node.attribute[0].t.raw_data
            fl_data = [i[0] for i in struct.iter_unpack('f', raw_data)]
        else:
            fl_data = w_node.attribute[0].t.float_data
        w = np.reshape(fl_data, (dim_0, dim_1))
        w = w.transpose((1, 0)).flatten()
        if w_node.attribute[0].t.raw_data:
            buf = struct.pack('%sf' % len(w), *w)
            w_node.attribute[0].t.raw_data = buf
        else:
            for i in range(len(fl_data)):
                w_node.attribute[0].t.float_data[i] = w[i]
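
With transB=1, Gemm computes Y = A @ B^T, so storing the weight pre-transposed and clearing the flag must preserve the output. The flatten/reshape round trip below mirrors what the pass does to the raw weight data, on toy shapes:

import numpy as np

A = np.random.rand(4, 5)
B = np.random.rand(6, 5)                       # dims (dim_0, dim_1) = (6, 5)
folded = B.transpose((1, 0)).flatten().reshape(5, 6)  # new dims (dim_1, dim_0)
assert np.allclose(A @ B.T, A @ folded)        # transB=1 vs transB=0
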
Example #30
def fuse_consecutive_transposes(g):
    node_to_del = []
    for node in g.node:
        if node.op_type != 'Transpose':
            continue
        pre_node = helper.find_node_by_output_name(g, node.input[0])
        if pre_node is None or pre_node.op_type != 'Transpose':
            continue

        pre_permutation = list(pre_node.attribute[0].ints)
        cur_permutation = list(node.attribute[0].ints)
        if len(pre_permutation) != len(cur_permutation):
            continue

        new_permutation = []
        for ind in cur_permutation:
            new_permutation.append(pre_permutation[ind])
        
        new_trans_node = onnx.helper.make_node(
            'Transpose',
            [pre_node.input[0]],
            [node.output[0]],
            name=node.name,
            perm=new_permutation
        )
        
        g.node.extend([new_trans_node])
        node_to_del.extend([pre_node, node])
        
        mid_val_info = helper.find_value_by_name(g, node.input[0])
        if mid_val_info:
            g.value_info.remove(mid_val_info)
    
    while node_to_del:
        node = node_to_del.pop()
        g.node.remove(node)
    
    topological_sort(g)
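
The merge relies on how permutations compose under numpy-style transpose: applying `pre` then `cur` equals a single transpose with `new_perm[i] = pre[cur[i]]`, which is precisely the loop above. A quick check:

import numpy as np

x = np.random.rand(2, 3, 4, 5)
pre, cur = (0, 2, 3, 1), (0, 3, 1, 2)
composed = [pre[i] for i in cur]
assert np.allclose(x.transpose(pre).transpose(cur), x.transpose(composed))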