Example 1
def squeeze(xgraph: XGraph, node: Node,
            quant_config: NndctQuantInfo) -> NoReturn:
    if node.in_tensors[0].ndim == 4 and len(
            node.node_attr(node.op.AttrName.DIMS)) == 1:
        attrs: Dict[str, Any] = {}
        attrs["order"] = [0, 3, 1, 2]

        # restore the dimension order to NCHW
        input_ops: Dict[str, List[Op]] = {}
        input_list = []
        for input in node.in_nodes:
            input_op = xgraph.get_op_by_name(input)
            input_list.append(input_op)
        input_ops["input"] = input_list
        xgraph.create_fixed_normal_op(node.name + "_i0",
                                      "transpose",
                                      quant_config,
                                      attrs=attrs,
                                      input_ops=input_ops)

        attrs: Dict[str, Any] = {}
        dim = node.node_attr(node.op.AttrName.DIMS)[0]
        dim = transformed_axis("NHWC", "NCHW", ndim=4, dim=dim)
        attrs["axis"] = [dim]
        input_ops: Dict[str, List[Op]] = {}
        input_ops["input"] = [xgraph.get_op_by_name(node.name + "_i0")]
        xgraph.create_fixed_normal_op(node.name,
                                      "squeeze",
                                      quant_config,
                                      attrs=attrs,
                                      input_ops=input_ops)
    else:
        to_xir("squeeze")(xgraph, node, quant_config)
Example 2
def permute_invar_op(xop_type, xgraph: XGraph, node: Node,
                     quant_config: NndctQuantInfo) -> NoReturn:
    if not node.node_attr(node.op.AttrName.KEEP_DIMS) \
      and node.in_tensors[0].ndim == 4 \
      and len(node.node_attr(node.op.AttrName.DIMS)) == 1 \
      and node.node_attr(node.op.AttrName.DIMS)[0] != 3:
        layout = ["N", "H", "W", "C"]
        del layout[node.node_attr(node.op.AttrName.DIMS)[0]]
        # create the reduction op with keep_dims set to True
        attrs: Dict[str, Any] = {}
        attrs["axis"] = node.node_attr(node.op.AttrName.DIMS)
        attrs["keep_dims"] = True
        input_ops: Dict[str, List[Op]] = {}
        input_list = []
        for input in node.in_nodes:
            input_op = xgraph.get_op_by_name(input)
            input_list.append(input_op)
        input_ops["input"] = xgraph.create_input_fix_ops(
            input_list, node.name, quant_config)
        xgraph.create_fixed_normal_op(node.name + "_i0",
                                      xop_type,
                                      quant_config,
                                      attrs=attrs,
                                      input_ops=input_ops)

        attrs: Dict[str, Any] = {}
        if layout == ["N", "H", "C"]:
            attrs["order"] = [0, 3, 1, 2]
        else:
            attrs["order"] = [0, 3, 2, 1]

        # restore the dimension order to NCHW
        input_ops: Dict[str, List[Op]] = {}
        input_ops["input"] = [xgraph.get_op_by_name(node.name + "_i0")]
        xgraph.create_fixed_normal_op(node.name + "_i1",
                                      "transpose",
                                      quant_config,
                                      attrs=attrs,
                                      input_ops=input_ops)

        attrs: Dict[str, Any] = {}
        if layout == ["N", "H", "C"]:
            attrs["axis"] = [3]
        else:
            attrs["axis"] = [2]
        input_ops: Dict[str, List[Op]] = {}
        input_ops["input"] = [xgraph.get_op_by_name(node.name + "_i1")]
        xgraph.create_fixed_normal_op(node.name,
                                      "squeeze",
                                      quant_config,
                                      attrs=attrs,
                                      input_ops=input_ops)
    else:
        to_xir(xop_type)(xgraph, node, quant_config)
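The layout bookkeeping is easier to see in isolation. The snippet below (illustration only, not part of the converter) removes each possible reduced axis from NHWC and prints the remaining layout, which is what the ["N", "H", "C"] comparison distinguishes when choosing the transpose order and the squeeze axis.

# Illustration only: which 3-D layout remains after reducing one NHWC axis.
layout = ["N", "H", "W", "C"]
for dim in (0, 1, 2):  # dim == 3 (channels) falls through to to_xir above
    remaining = [axis for i, axis in enumerate(layout) if i != dim]
    print(dim, remaining)
# 0 ['H', 'W', 'C']
# 1 ['N', 'W', 'C']
# 2 ['N', 'H', 'C']   <- the only case that matches ["N", "H", "C"]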
Example 3
def avgpool(xgraph: XGraph, node: Node,
            quant_config: NndctQuantInfo) -> NoReturn:

    needScale = False
    scale = 1.0
    if node.node_attr(node.op.AttrName.KERNEL) == [3, 3]:
        needScale = True
        scale = 9.0 * 7.0 / 64.0
    elif node.node_attr(node.op.AttrName.KERNEL) == [5, 5]:
        needScale = True
        scale = 25.0 * 10.0 / 256.0
    elif node.node_attr(node.op.AttrName.KERNEL) in [[6, 6], [3, 6], [6, 3]]:
        needScale = True
        scale = 36.0 * 7.0 / 256.0
    elif node.node_attr(node.op.AttrName.KERNEL) == [7, 7]:
        needScale = True
        scale = 49.0 * 21.0 / 1024.0
    elif node.node_attr(node.op.AttrName.KERNEL) == [14, 14]:
        needScale = True
        scale = 196.0 * 21.0 / 4096.0

    if needScale:
        attrs = _get_attr_from_node(node)
        # attrs: Dict[str, Any] = {}
        # for attr_name, attr_value in node.op.attrs.items():
        #   attrs[attr_name.value] = _Converter.to_xir_attr_value(attr_name.value, attr_value.value)

        input_ops: Dict[str, List[Op]] = {}
        input_ops["input"] = [xgraph.get_op_by_name(node.in_nodes[0])]
        input_ops["input"] = xgraph.create_input_fix_ops(
            input_ops["input"], node.name, quant_config)
        xgraph.create_fixed_normal_op(node.name + "_i0",
                                      "avgpool2d",
                                      quant_config,
                                      attrs=attrs,
                                      input_ops=input_ops)

        scale = [scale]
        xgraph.create_fixed_const_op(name=node.name + "_i1",
                                     data=np.array(scale, dtype=np.float32),
                                     quant_info=quant_config)

        input_ops: Dict[str, List[Op]] = {}
        input_ops["input"] = [
            xgraph.get_op_by_name(node.name + "_i0"),
            xgraph.get_op_by_name(node.name + "_i1")
        ]
        xgraph.create_fixed_normal_op(node.name,
                                      "mul",
                                      quant_config,
                                      input_ops=input_ops)
    else:
        to_xir("avgpool2d")(xgraph, node, quant_config)
Example 4
def resize(xgraph: XGraph, node: Node,
           quant_config: NndctQuantInfo) -> NoReturn:
    """
  resize is a macro operator, including concat , resize
  """
    attrs: Dict[str, Any] = {}
    # attrs["scale"] = node.node_attr(node.op.AttrName.SCALE)

    attrs["align_corners"] = node.node_attr(node.op.AttrName.ALIGN_CORNERS)
    attrs["half_pixel_centers"] = node.node_attr(
        node.op.AttrName.HALF_PIXEL_CENTERS)
    attrs["mode"] = node.node_attr(node.op.AttrName.MODE)
    # attrs["mode"] = {0: "NEAREST", 3: "BILINEAR"}.get(attrs["mode"])
    size = node.node_attr(node.op.AttrName.SIZE)
    scale = node.node_attr(node.op.AttrName.SCALE)
    # if size[0] == 0 and size[1] == 0:
    if all([s == 0 for s in size]):
        attrs["scale"] = scale
        input_ops: Dict[str, List["xir.Op"]] = {}
        input_list = []
        for input in node.in_nodes:
            input_op = xgraph.get_op_by_name(input)
            input_list.append(input_op)
        input_ops["input"] = xgraph.create_input_fix_ops(
            input_list, node.name, quant_config)
        xgraph.create_fixed_normal_op(node.name,
                                      "resize",
                                      quant_config,
                                      attrs=attrs,
                                      input_ops=input_ops)
    else:
        sub_pack_op, pack_list = _pack(xgraph, node, "size", size,
                                       quant_config)
        input_ops: Dict[str, List["xir.Op"]] = {}
        input_ops["size"] = [sub_pack_op]
        input_list = []
        for input in node.in_nodes:
            input_op = xgraph.get_op_by_name(input)
            input_list.append(input_op)
        input_ops["input"] = input_list
        input_ops["input"] = [
            op for op in input_ops["input"]
            if op.get_name() not in [i.get_name() for i in pack_list]
        ]
        input_ops["input"] = xgraph.create_input_fix_ops(
            input_ops["input"], node.name, quant_config)
        xgraph.create_fixed_normal_op(node.name,
                                      "resize",
                                      quant_config,
                                      attrs=attrs,
                                      input_ops=input_ops)
Example 5
def shape(xgraph: XGraph, node: Node,
          quant_config: NndctQuantInfo) -> NoReturn:
    r""" nndct shape is a macro operator, including shape, stridedslice 
      """
    # raise NotImplementedError("shape")
    input_list = []
    shape_input_ops: Dict[str, List[Op]] = {}
    for input in node.in_nodes:
        input_op = xgraph.get_op_by_name(input)
        input_list.append(input_op)
    shape_input_ops["input"] = input_list

    sub_op_shape = xgraph.create_fixed_normal_op(node.name + "_i0",
                                                 "shape",
                                                 quant_config,
                                                 input_ops=shape_input_ops)

    attrs: Dict[str, Any] = {}
    strided_slice_input_ops: Dict[str, List[Op]] = {}
    strided_slice_input_ops["input"] = [sub_op_shape]
    dim = node.node_attr(node.op.AttrName.AXIS)
    attrs["begin"] = [dim]
    attrs["end"] = [dim + 1]
    xgraph.create_fixed_normal_op(node.name,
                                  "strided_slice",
                                  quant_config,
                                  attrs=attrs,
                                  input_ops=strided_slice_input_ops)
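What the shape + strided_slice macro produces can be sketched in plain NumPy (illustration only; in the real graph both steps are XIR ops):

import numpy as np

x = np.zeros((2, 8, 8, 3))
dim = 3
full_shape = np.array(x.shape)        # the "shape" sub-op (node.name + "_i0")
single_dim = full_shape[dim:dim + 1]  # "strided_slice" with begin=[dim], end=[dim + 1]
print(single_dim)                     # [3]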
Example 6
def zeros(xgraph: XGraph, node: Node,
          quant_config: NndctQuantInfo) -> NoReturn:
    shape = node.node_attr(node.op.AttrName.SHAPE)
    data = np.zeros(shape,
                    dtype=_Converter.to_numpy_dtype(node.out_tensors[0].dtype))
    xgraph.create_fixed_const_op(name=node.name,
                                 data=data,
                                 quant_info=quant_config)
Example 7
def avgpool(xgraph: XGraph, node: Node,
            quant_config: NndctQuantInfo) -> NoReturn:

    scale = 1.0
    if node.node_attr(node.op.AttrName.KERNEL) == [3, 3]:
        scale = 9.0 * 7.0 / 64.0
    elif node.node_attr(node.op.AttrName.KERNEL) == [5, 5]:
        scale = 25.0 * 10.0 / 256.0
    elif node.node_attr(node.op.AttrName.KERNEL) in [[6, 6], [3, 6], [6, 3]]:
        scale = 36.0 * 7.0 / 256.0
    elif node.node_attr(node.op.AttrName.KERNEL) == [7, 7]:
        scale = 49.0 * 21.0 / 1024.0
    elif node.node_attr(node.op.AttrName.KERNEL) == [14, 14]:
        scale = 196.0 * 21.0 / 4096.0
    else:
        kernel = node.node_attr(node.op.AttrName.KERNEL)
        rec = kernel[0] * kernel[1]
        max_factor = math.ceil(math.log(rec * 128, 2))
        diff = 1.0
        multi_factor = 0.0
        shift_factor = 0.0
        for shift_factor_ in range(max_factor):
            factor = round((2**shift_factor_) / rec)
            diff_ = abs(factor / (2**shift_factor_) - 1 / rec)
            if diff_ < diff:
                multi_factor = factor
                diff = diff_
                shift_factor = shift_factor_
        scale = rec * multi_factor / (2**shift_factor)

    attrs = _get_xir_attr_from_node(node)
    # attrs: Dict[str, Any] = {}
    # for attr_name, attr_value in node.op.attrs.items():
    #   attrs[attr_name.value] = _Converter.to_xir_attr_value(attr_name.value, attr_value.value)

    input_ops: Dict[str, List["xir.Op"]] = {}
    input_ops["input"] = [xgraph.get_op_by_name(node.in_nodes[0])]
    input_ops["input"] = xgraph.create_input_fix_ops(input_ops["input"],
                                                     node.name, quant_config)
    xgraph.create_fixed_normal_op(node.name + "_i0",
                                  "avgpool2d",
                                  quant_config,
                                  attrs=attrs,
                                  input_ops=input_ops)

    scale = [scale]
    xgraph.create_fixed_const_op(name=node.name + "_i1",
                                 data=np.array(scale, dtype=np.float32),
                                 quant_info=quant_config)

    input_ops: Dict[str, List["xir.Op"]] = {}
    input_ops["input"] = [
        xgraph.get_op_by_name(node.name + "_i0"),
        xgraph.get_op_by_name(node.name + "_i1")
    ]
    xgraph.create_fixed_normal_op(node.name,
                                  "mul",
                                  quant_config,
                                  input_ops=input_ops)
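The else branch searches for a multiplier and shift such that multi_factor / 2**shift_factor is the closest power-of-two fraction to 1/(kh * kw); the resulting scale (close to 1.0) is then applied through the trailing mul. Below is a standalone copy of that search, runnable outside the graph builder (illustration only):

import math

def avgpool_scale_sketch(kh: int, kw: int) -> float:
    # Same search as the else branch above: approximate 1/(kh*kw) by m / 2**s.
    rec = kh * kw
    max_factor = math.ceil(math.log(rec * 128, 2))
    diff, multi_factor, shift_factor = 1.0, 0.0, 0.0
    for shift in range(max_factor):
        factor = round((2 ** shift) / rec)
        d = abs(factor / (2 ** shift) - 1 / rec)
        if d < diff:
            multi_factor, diff, shift_factor = factor, d, shift
    return rec * multi_factor / (2 ** shift_factor)

print(avgpool_scale_sketch(4, 4))    # 1.0            (1/16 is exactly 1 / 2**4)
print(avgpool_scale_sketch(10, 10))  # 1.0009765625   (41/4096 ~= 1/100)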
Example 8
def const_xop(xgraph: XGraph, node: Node,
              quant_config: NndctQuantInfo) -> NoReturn:
    data = node.node_attr(node.op.AttrName.DATA)
    data_type = np.dtype(node.out_tensors[0].dtype)

    if not isinstance(data, list):
        data = [data]

    xgraph.create_fixed_const_op(name=node.name,
                                 data=np.array(data, dtype=data_type),
                                 quant_info=quant_config)
Example 9
def reshape(xgraph: XGraph, node: Node,
            quant_config: NndctQuantInfo) -> NoReturn:
    r""" nndct reshape is a macro operator, including pack, reshape
      """
    # raise NotImplementedError("reshape")

    if (node.in_tensors[0].ndim != 4
            or node.in_tensors[0].layout == Tensor.Layout.NHWC):
        shape = node.node_attr(node.op.AttrName.SHAPE)
        sub_op_pack, pack_list = _pack(xgraph, node, "shape", shape,
                                       quant_config)
        input_ops: Dict[str, List[Op]] = {}
        input_ops["shape"] = [sub_op_pack]
        input_ops["input"] = [xgraph.get_op_by_name(node.in_nodes[0])]
        xgraph.create_fixed_normal_op(node.name,
                                      "reshape",
                                      quant_config,
                                      input_ops=input_ops)

    else:
        shape = node.node_attr(node.op.AttrName.SHAPE)
        sub_op_pack, pack_list = _pack(xgraph, node, "shape", shape,
                                       quant_config)
        attrs: Dict[str, Any] = {}
        # NHWC -> NCHW
        attrs["order"] = [0, 3, 1, 2]
        input_ops: Dict[str, List[Op]] = {}
        input_ops["input"] = [xgraph.get_op_by_name(node.in_nodes[0])]
        xgraph.create_fixed_normal_op(node.name + "_i0",
                                      "transpose",
                                      quant_config,
                                      attrs=attrs,
                                      input_ops=input_ops)

        input_ops: Dict[str, List[Op]] = {}
        input_ops["shape"] = [sub_op_pack]
        input_ops["input"] = [xgraph.get_op_by_name(node.name + "_i0")]
        xgraph.create_fixed_normal_op(node.name,
                                      "reshape",
                                      quant_config,
                                      input_ops=input_ops)
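The transpose-before-reshape branch matters because reshape flattens elements in memory order. A hedged NumPy illustration (not part of nndct) of why an NHWC-laid-out tensor is transposed back to NCHW before the framework's target shape is applied:

import numpy as np

nchw = np.arange(2 * 3 * 4 * 4).reshape(2, 3, 4, 4)  # original framework layout
nhwc = nchw.transpose(0, 2, 3, 1)                    # layout used on the XIR side
target = (2, 48)

# Reshaping the NHWC tensor directly scrambles the element order...
assert not np.array_equal(nhwc.reshape(target), nchw.reshape(target))
# ...while transposing back with order [0, 3, 1, 2] first reproduces it.
assert np.array_equal(nhwc.transpose(0, 3, 1, 2).reshape(target), nchw.reshape(target))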
Example 10
def binary_op(op_type: str, xgraph: XGraph, node: Node,
              quant_config: NndctQuantInfo):
    input = node.node_attr(node.op.AttrName.INPUT)
    other = node.node_attr(node.op.AttrName.OTHER)
    if isinstance(input, Tensor) and (not isinstance(other, Tensor)):
        operand1 = xgraph.get_op_by_name(input.node.name)
        dtype = other.dtype if isinstance(other, np.ndarray) else type(other)
        operand2 = np.ones(input.shape, dtype=dtype) * other
        operand2 = xgraph.create_const_op(f"{node.name}_other", operand2)
    else:
        operand1 = xgraph.get_op_by_name(input.node.name)
        operand2 = xgraph.get_op_by_name(other.node.name)

    input_ops: Dict[str, List["xir.Op"]] = {}
    input_ops["input"] = [operand1, operand2]
    input_ops["input"] = xgraph.create_input_fix_ops(input_ops["input"],
                                                     node.name, quant_config)
    xgraph.create_fixed_normal_op(node.name,
                                  op_type,
                                  quant_config,
                                  input_ops=input_ops)
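When only one operand is a Tensor, the scalar operand is materialized as a full tensor of the input's shape before it is turned into a constant op. A minimal illustration of that branch (shape and value below are hypothetical):

import numpy as np

input_shape = (1, 16, 16, 8)   # hypothetical shape of node.in_tensors[0]
other = 0.5                    # plain Python scalar operand
dtype = other.dtype if isinstance(other, np.ndarray) else type(other)
operand2 = np.ones(input_shape, dtype=dtype) * other
print(operand2.shape, operand2.dtype)   # (1, 16, 16, 8) float64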
Example 11
def reshape(xgraph: XGraph, node: Node,
            quant_config: NndctQuantInfo) -> NoReturn:
    r""" nndct reshape is a macro operator, including pack, reshape
      """
    shape = node.node_attr(node.op.AttrName.SHAPE)
    sub_op_pack, pack_list = _pack(xgraph, node, "shape", shape, quant_config)
    input_ops: Dict[str, List["xir.Op"]] = {}
    input_ops["shape"] = [sub_op_pack]
    input_ops["input"] = [xgraph.get_op_by_name(node.in_nodes[0])]
    xgraph.create_fixed_normal_op(node.name,
                                  "reshape",
                                  quant_config,
                                  input_ops=input_ops)
Example 12
def reduction_mean(xgraph: XGraph, node: Node,
                   quant_config: NndctQuantInfo) -> NoReturn:

    attrs = _get_xir_attr_from_node(node)

    input_ops: Dict[str, List[Op]] = {}
    input_ops["input"] = [xgraph.get_op_by_name(node.in_nodes[0])]
    input_ops["input"] = xgraph.create_input_fix_ops(input_ops["input"],
                                                     node.name, quant_config)
    if len(node.node_attr(node.op.AttrName.DIMS)) == 1:
        xgraph.create_fixed_normal_op(node.name + "_i0",
                                      "reduction_mean",
                                      quant_config,
                                      attrs=attrs,
                                      input_ops=input_ops)
        scale = calculate_op_scale(
            node.in_tensors[0].shape[node.node_attr(node.op.AttrName.DIMS)[0]],
            node)
        scale = [scale]
        xgraph.create_fixed_const_op(name=node.name + "_i1",
                                     data=np.array(scale, dtype=np.float32),
                                     quant_info=quant_config)

        input_ops: Dict[str, List[Op]] = {}
        input_ops["input"] = [
            xgraph.get_op_by_name(node.name + "_i0"),
            xgraph.get_op_by_name(node.name + "_i1")
        ]
        xgraph.create_fixed_normal_op(node.name,
                                      "mul",
                                      quant_config,
                                      input_ops=input_ops)
    else:
        xgraph.create_fixed_normal_op(node.name,
                                      "reduction_mean",
                                      quant_config,
                                      attrs=attrs,
                                      input_ops=input_ops)
Example 13
def const_xop(xgraph: XGraph, node: Node,
              quant_config: NndctQuantInfo) -> NoReturn:
    data = node.node_attr(node.op.AttrName.DATA)
    data_type = np.dtype(node.out_tensors[0].dtype)
    data_type = np.float32 if data_type == np.float64 else data_type
    if not isinstance(data, list):
        data = [data]

    data = np.array(data, dtype=data_type)
    if node.transpose_out_order:
        data = np.transpose(data, node.transpose_out_order)
    xgraph.create_fixed_const_op(name=node.name,
                                 data=data,
                                 quant_info=quant_config)
Example 14
def sub(xgraph: XGraph, node: Node, quant_config: NndctQuantInfo) -> NoReturn:
    operand1 = node.node_attr(node.op.AttrName.INPUT)
    operand2 = node.node_attr(node.op.AttrName.OTHER)
    _sub(xgraph, node.name, operand1, operand2, quant_config)