Example #1
    def avgpool2d(self, node: onnx.NodeProto) -> spec.Spec:
        input_shapes, output_shapes, attributes = self.get_inputs_for_gen_spec(
            node)

        assert len(input_shapes) == len(output_shapes) == 1
        input_shape = input_shapes[0]
        output_shape = output_shapes[0]

        # ONNX AveragePool allows an n-d kernel; only 2-d pooling is
        # supported here.
        if node.op_type == 'AveragePool':
            assert len(attributes['kernel_shape']) == 2
        elif node.op_type == 'GlobalAveragePool':
            # GlobalAveragePool has no kernel_shape attribute; the kernel
            # spans the full spatial extent.
            attributes = {'kernel_shape': input_shape[2:]}

        operator_spec_option = spec.AveragePool2d(
            input=HeightWidth(input_shape[2], input_shape[3]),
            kernel=HeightWidth(*attributes['kernel_shape']),
            stride=HeightWidth(*attributes.get('strides', (1, 1))),
            dilation=HeightWidth(*attributes.get('dilations', (1, 1))),
            batch=input_shape[0],
            channel=output_shape[1],
            padding=Padding(*attributes.get('pads', (0, 0, 0, 0))),
        )
        return spec.Spec(spec_utils.node_identifier(node),
                         operator_spec_option)
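For GlobalAveragePool the handler synthesizes kernel_shape from the spatial dimensions, which is exactly the reduction that op performs. A quick numpy sanity check of the equivalence (independent of the spec classes):

import numpy as np

x = np.random.rand(1, 3, 4, 5).astype(np.float32)  # NCHW

# GlobalAveragePool: mean over all spatial positions.
global_avg = x.mean(axis=(2, 3), keepdims=True)

# AveragePool with kernel_shape == (H, W), stride 1, no padding has a
# single window per channel, so it computes the same thing.
h, w = x.shape[2:]
window_avg = x.reshape(1, 3, h * w).mean(axis=2).reshape(1, 3, 1, 1)

assert np.allclose(global_avg, window_avg)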
Example #2
 def gelu(self, node: onnx.NodeProto) -> spec.Spec:
     input_shapes, _, _ = self.get_inputs_for_gen_spec(node)
     assert len(input_shapes) == 1
     input_shape = input_shapes[0]
     operator_spec_option = spec.Gelu(shape=[*input_shape])
     return spec.Spec(spec_utils.node_identifier(node),
                      operator_spec_option)
Example #3
 def expand(self, node: onnx.NodeProto) -> spec.Spec:
     input_shapes, output_shapes, _ = self.get_inputs_for_gen_spec(node)
     input_shape = input_shapes[0]
     output_shape = output_shapes[0]
     operator_spec_option = spec.Expand(input_shape=[*input_shape],
                                        output_shape=[*output_shape])
     return spec.Spec(spec_utils.node_identifier(node),
                      operator_spec_option)
Example #4
 def matmul(self, node: onnx.NodeProto) -> spec.Spec:
     input_shapes, _, _ = self.get_inputs_for_gen_spec(node)
     assert len(input_shapes) == 2
     lhs_shape, rhs_shape = [*input_shapes[0]], [*input_shapes[1]]
     operator_spec_option = spec.MatMul(lhs_shape=lhs_shape,
                                        rhs_shape=rhs_shape)
     return spec.Spec(spec_utils.node_identifier(node),
                      operator_spec_option)
Example #5
 def concatenation(self, node: onnx.NodeProto) -> spec.Spec:
     input_shapes, _, attributes = self.get_inputs_for_gen_spec(node)
     operator_spec_option = spec.Concatenation(
         tensors=list(map(list, input_shapes)),
         axis=spec_utils.implicit_axis_to_explicit(attributes['axis'],
                                                   input_shapes[0]))
     return spec.Spec(spec_utils.node_identifier(node),
                      operator_spec_option)
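spec_utils.implicit_axis_to_explicit is used by several handlers here (Concatenation, Transpose, Flatten, Softmax) but its implementation is not shown. Judging from the call sites it normalizes negative ONNX axes against the rank of the given shape, and, since the Transpose handler passes a whole perm list, presumably accepts sequences as well. A minimal sketch under those assumptions:

def implicit_axis_to_explicit(axis, shape):
    # Hypothetical sketch: map negative (implicit) axes to explicit ones.
    rank = len(shape)
    if isinstance(axis, (list, tuple)):
        return [a if a >= 0 else a + rank for a in axis]
    return axis if axis >= 0 else axis + rank

assert implicit_axis_to_explicit(-1, (2, 3, 4)) == 2
assert implicit_axis_to_explicit([0, -1], (2, 3, 4)) == [0, 2]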
Example #6
 def pad(self, node: onnx.NodeProto) -> spec.Spec:
     input_shapes, _, _ = self.get_inputs_for_gen_spec(node)
     input_shape = input_shapes[0]
     assert len(input_shape) == 4
     pads = self.get_initializer_for_gen_spec(node.input[1])
     operator_spec_option = spec.Pad(shape=[*input_shape],
                                     pad=spec_utils.horizontal_pads(*pads))
     return spec.Spec(spec_utils.node_identifier(node),
                      operator_spec_option)
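spec_utils.horizontal_pads is not shown either. ONNX Pad stores pads as all begin values followed by all end values, so a 4-D NCHW tensor gets (n_begin, c_begin, h_begin, w_begin, n_end, c_end, h_end, w_end). A hypothetical sketch that keeps only the spatial entries; the Padding argument order is an assumption:

def horizontal_pads(*pads):
    # Hypothetical sketch; assumes the ONNX 4-D pads layout:
    # all begins first, then all ends, one entry per axis.
    n_b, c_b, h_b, w_b, n_e, c_e, h_e, w_e = pads
    # Padding batch or channel axes is assumed unsupported.
    assert n_b == c_b == n_e == c_e == 0
    return Padding(h_b, w_b, h_e, w_e)  # argument order assumed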
Example #7
 def slice(self, node: onnx.NodeProto) -> spec.Spec:
     input_shapes, _, _ = self.get_inputs_for_gen_spec(node)
     input_shape = input_shapes[0]
     starts = self.get_initializer_for_gen_spec(node.input[1])
     axes = self.get_initializer_for_gen_spec(node.input[3])
     operator_spec_option = spec.Slice(shape=[*input_shape],
                                       offset=spec_utils.slice_offset_dict(
                                           starts, axes, input_shape))
     return spec.Spec(spec_utils.node_identifier(node),
                      operator_spec_option)
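spec_utils.slice_offset_dict is likewise opaque here. Since the handler reads the 'starts' and 'axes' initializers, a plausible sketch maps each (possibly negative) axis to its start offset, clamped against the input shape:

def slice_offset_dict(starts, axes, input_shape):
    # Hypothetical sketch: per-axis start offsets for ONNX Slice.
    offsets = {}
    for start, axis in zip(starts, axes):
        axis = axis if axis >= 0 else axis + len(input_shape)
        dim = input_shape[axis]
        start = start if start >= 0 else start + dim
        offsets[axis] = max(0, min(start, dim))
    return offsets

assert slice_offset_dict([1, -2], [0, -1], [4, 6]) == {0: 1, 1: 4}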
Example #8
 def transpose(self, node: onnx.NodeProto) -> spec.Spec:
     input_shapes, _, attributes = self.get_inputs_for_gen_spec(node)
     assert len(input_shapes) == 1
     input_shape = input_shapes[0]
     operator_spec_option = spec.Transpose(
         shape=[*input_shape],
         permutation=spec_utils.implicit_axis_to_explicit(
             [*attributes['perm']], input_shape))
     return spec.Spec(spec_utils.node_identifier(node),
                      operator_spec_option)
Example #9
 def gemm(self, node: onnx.NodeProto) -> spec.Spec:
     input_shapes, _, attributes = self.get_inputs_for_gen_spec(node)
     alpha = attributes.get('alpha', 1.0)
     beta = attributes.get('beta', 1.0)
     m, k, n = spec_utils.gemm_shapes(input_shapes,
                                      attributes.get('transA', 0),
                                      attributes.get('transB', 0))
     operator_spec_option = spec.Gemm(alpha=alpha, beta=beta, m=m, k=k, n=n)
     return spec.Spec(spec_utils.node_identifier(node),
                      operator_spec_option)
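ONNX Gemm computes Y = alpha * A' @ B' + beta * C, where A' is A transposed when transA is set (likewise transB for B), so spec_utils.gemm_shapes only has to pick the right dimensions. A minimal sketch of that derivation:

def gemm_shapes(input_shapes, trans_a, trans_b):
    # Hypothetical sketch: (m, k, n) for Y[m, n] = A'[m, k] @ B'[k, n].
    a_shape, b_shape = input_shapes[0], input_shapes[1]
    m, k = (a_shape[1], a_shape[0]) if trans_a else (a_shape[0], a_shape[1])
    k2, n = (b_shape[1], b_shape[0]) if trans_b else (b_shape[0], b_shape[1])
    assert k == k2, 'inner dimensions must agree'
    return m, k, n

assert gemm_shapes([(8, 16), (16, 4)], 0, 0) == (8, 16, 4)
assert gemm_shapes([(16, 8), (4, 16)], 1, 1) == (8, 16, 4)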
Example #10
 def flatten(self, node: onnx.NodeProto) -> spec.Spec:
     input_shapes, _, attributes = self.get_inputs_for_gen_spec(node)
     assert len(input_shapes) == 1
     input_shape = input_shapes[0]
     operator_spec_option = spec.Flatten(
         shape=[*input_shape],
         axis=spec_utils.implicit_axis_to_explicit(attributes['axis'],
                                                   input_shape))
     return spec.Spec(spec_utils.node_identifier(node),
                      operator_spec_option)
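For reference, ONNX Flatten always yields a 2-D tensor: dimensions before the (explicit) axis collapse into rows and the rest into columns, so the resolved axis fully determines the output shape:

import numpy as np

x = np.zeros((2, 3, 4, 5))
axis = 2
rows = int(np.prod(x.shape[:axis]))  # 2 * 3 = 6
cols = int(np.prod(x.shape[axis:]))  # 4 * 5 = 20
assert x.reshape(rows, cols).shape == (6, 20)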
Example #11
    def softmax(self, node: onnx.NodeProto) -> spec.Spec:
        input_shapes, _, attributes = self.get_inputs_for_gen_spec(node)
        assert len(input_shapes) == 1
        input_shape = input_shapes[0]

        operator_spec_option = spec.Softmax(
            input_shape=[*input_shape],
            beta=attributes.get('beta', 1.0),
            axis=spec_utils.implicit_axis_to_explicit(attributes['axis'],
                                                      input_shape))
        return spec.Spec(spec_utils.node_identifier(node),
                         operator_spec_option)
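The standard ONNX Softmax has no 'beta' attribute, hence the defaulted lookup; presumably it is a logit scale in the style of TFLite's softmax, i.e. softmax(beta * x). A numpy rendition under that assumption:

import numpy as np

def softmax(x, beta=1.0, axis=-1):
    # Numerically stable beta-scaled softmax (assumed semantics).
    z = beta * x - (beta * x).max(axis=axis, keepdims=True)
    e = np.exp(z)
    return e / e.sum(axis=axis, keepdims=True)

p = softmax(np.array([[1.0, 2.0, 3.0]]), beta=2.0)
assert np.allclose(p.sum(axis=-1), 1.0)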
Example #12
    def resize(self, node: onnx.NodeProto) -> spec.Spec:
        input_shapes, _, _ = self.get_inputs_for_gen_spec(node)
        input_shape = input_shapes[0]
        roi = self.get_initializer_for_gen_spec(node.input[1])
        scales = self.get_initializer_for_gen_spec(node.input[2])
        try:
            sizes = self.get_initializer_for_gen_spec(node.input[3])
        except IndexError:
            # The optional 'sizes' input is absent; 'scales' is used instead.
            sizes = []

        operator_spec_option = spec.Resize(shape=[*input_shape],
                                           roi=roi,
                                           scales=scales,
                                           sizes=sizes)
        return spec.Spec(spec_utils.node_identifier(node),
                         operator_spec_option)
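ONNX Resize takes either 'scales' or 'sizes' (the fourth input is optional, which the handler tolerates above). When scales are given, output dimensions default to floor(input_dim * scale):

import math

input_shape = [1, 3, 32, 32]
scales = [1.0, 1.0, 2.0, 2.0]
output_shape = [math.floor(d * s) for d, s in zip(input_shape, scales)]
assert output_shape == [1, 3, 64, 64]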
Example #13
    def multi_node_lp_norm(
            self,
            node: onnx.NodeProto) -> Optional[Tuple[spec.Spec, List[str]]]:
        """
        Starts from 'Div', traverse up to find the form of l2norm.
        Returns all inputs of l2norm, consist of multi node

        LpNormalization is not defined in ONNX Operator spec, so that we should traverse the graph:

        Input --> ReduceL2 --> Clip --> Expand --> D
              -----------------------------------> iv --> Output
        """
        inputs_of_lp_norm: List[str] = []
        for input in node.input:
            # exclude input from initializer
            if input not in self.producer_map:
                continue

            prev_node = self.producer_map[input]
            if prev_node.op_type != 'Expand':
                continue

            pprev_node = self.producer_map[prev_node.input[0]]
            if pprev_node.op_type != 'Clip':
                continue

            ppprev_node = self.producer_map[pprev_node.input[0]]
            if ppprev_node.op_type != 'ReduceL2':
                continue
            p = 2  # ReduceL2 implies an L2 norm (p == 2)

            inputs_of_lp_norm.append(ppprev_node.input[0])
            input_shapes, _, attributes = self.get_inputs_for_gen_spec(
                ppprev_node)
            axis = attributes['axes'][0]

            operator_spec_option = spec.LpNorm(input_shape=[*input_shapes[0]],
                                               p=p,
                                               axis=axis)
            return spec.Spec(spec_utils.node_identifier(node),
                             operator_spec_option), inputs_of_lp_norm

        # The l2norm pattern was not found.
        return None
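The matched ReduceL2 --> Clip --> Expand --> Div subgraph is a common way for exporters to decompose an L2 normalization. A numpy rendition of the four nodes, checked against the closed form:

import numpy as np

x = np.random.rand(2, 8).astype(np.float32)
axis = 1

norm = np.sqrt((x ** 2).sum(axis=axis, keepdims=True))  # ReduceL2
norm = np.clip(norm, 1e-12, None)                       # Clip (no div by zero)
norm = np.broadcast_to(norm, x.shape)                   # Expand
out = x / norm                                          # Div

expected = x / np.maximum(np.linalg.norm(x, axis=axis, keepdims=True), 1e-12)
assert np.allclose(out, expected)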
Example #14
    def convtranspose2d(self, node: onnx.NodeProto) -> spec.Spec:
        input_shapes, output_shapes, attributes = self.get_inputs_for_gen_spec(
            node)
        input_shape = input_shapes[0]
        output_shape = output_shapes[0]

        # TODO assert -> warning. refer to https://docs.python.org/3/tutorial/errors.html#user-defined-exceptions
        # ONNX ConvTranspose allows an n-d kernel; only 2-d is supported here.
        assert len(attributes['kernel_shape']) == 2

        operator_spec_option = spec.TrasnposeConv(
            input=HeightWidth(input_shape[2], input_shape[3]),
            kernel=HeightWidth(*attributes['kernel_shape']),
            stride=HeightWidth(*attributes.get('strides', (1, 1))),
            dilation=HeightWidth(*attributes.get('dilations', (1, 1))),
            batch=input_shape[0],
            input_channel=input_shape[1],
            output_channel=output_shape[1],
            groups=attributes.get('group', 1),
            padding=Padding(*attributes.get('pads', (0, 0, 0, 0))),
        )
        return spec.Spec(spec_utils.node_identifier(node),
                         operator_spec_option)
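To cross-check the shapes above: the ONNX ConvTranspose output size per spatial dimension is stride * (in - 1) + output_padding + (kernel - 1) * dilation + 1 - pad_begin - pad_end:

def convtranspose_out_dim(in_dim, kernel, stride=1, dilation=1,
                          pad_begin=0, pad_end=0, output_padding=0):
    # ONNX ConvTranspose output-size formula.
    return (stride * (in_dim - 1) + output_padding
            + (kernel - 1) * dilation + 1 - pad_begin - pad_end)

# Doubling an 8x8 feature map with a 4x4 kernel, stride 2, pads (1, 1):
assert convtranspose_out_dim(8, kernel=4, stride=2, pad_begin=1, pad_end=1) == 16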
Example #15
    def clip(self, node: onnx.NodeProto) -> spec.Spec:
        input_shapes, _, _ = self.get_inputs_for_gen_spec(node)
        input_shape = input_shapes[0]

        kwargs = {}
        if node.attribute:
            # Clip-6 and earlier: min/max are attributes.
            for attr in node.attribute:
                if attr.name == "min":
                    kwargs['min'] = float(attr.f)
                elif attr.name == "max":
                    kwargs['max'] = float(attr.f)
        else:
            # Clip-11 and later: min/max are optional inputs backed by
            # initializers; a missing initializer means "unbounded".
            assert len(node.input) == 3
            for key, name in (('min', node.input[1]), ('max', node.input[2])):
                try:
                    kwargs[key] = float(
                        numpy_helper.to_array(self.initializer[name]))
                except KeyError:
                    kwargs[key] = None

        if not kwargs:
            raise ValueError('Empty min and/or max.')

        operator_spec_option = spec.Clip(input_shape=[*input_shape], **kwargs)
        return spec.Spec(spec_utils.node_identifier(node),
                         operator_spec_option)
Example #16
    def depthtospace(self, node: onnx.NodeProto) -> spec.Spec:
        input_shapes, _, attributes = self.get_inputs_for_gen_spec(node)

        assert len(input_shapes) == 1
        input_shape = input_shapes[0]

        mode = attributes.get('mode', 'DCR')
        if mode == 'CRD':
            mode = 'ColumnRowDepth'
        elif mode == 'DCR':
            mode = 'DepthColumnRow'
        else:
            raise ValueError(
                'Unknown mode: %s. Mode must be one of "DCR" or "CRD".' % mode)

        operator_spec_option = spec.DepthToSpace(
            batch=input_shape[0],
            height=input_shape[2],
            width=input_shape[3],
            channel=input_shape[1],
            block_size=attributes['blocksize'],
            mode=mode)
        return spec.Spec(spec_utils.node_identifier(node),
                         operator_spec_option)
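The DCR/CRD distinction is only the order in which the channel dimension is unpacked, which is what the 'DepthColumnRow' and 'ColumnRowDepth' names above encode. Per the ONNX spec, the two modes correspond to these reshape/transpose sequences:

import numpy as np

n, c, h, w, b = 1, 8, 2, 2, 2
x = np.arange(n * c * h * w).reshape(n, c, h, w)

# DCR ('DepthColumnRow'): block dims come first in the channel unpack.
dcr = (x.reshape(n, b, b, c // (b * b), h, w)
        .transpose(0, 3, 4, 1, 5, 2)
        .reshape(n, c // (b * b), h * b, w * b))

# CRD ('ColumnRowDepth'): block dims stay innermost in the channel unpack.
crd = (x.reshape(n, c // (b * b), b, b, h, w)
        .transpose(0, 1, 4, 2, 5, 3)
        .reshape(n, c // (b * b), h * b, w * b))

assert dcr.shape == crd.shape == (1, 2, 4, 4)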
Example #17
    def lp_norm(self, node: onnx.NodeProto) -> spec.Spec:
        input_shapes, _, attrs = self.get_inputs_for_gen_spec(node)
        assert len(input_shapes) == 1
        operator_spec_option = spec.LpNorm(input_shape=[*input_shapes[0]],
                                           **attrs)

        return spec.Spec(spec_utils.node_identifier(node),
                         operator_spec_option)
Example #18
 def layer_norm(self, node: onnx.NodeProto) -> spec.Spec:
     input_shapes, _, attributes = self.get_inputs_for_gen_spec(node)
     operator_spec_option = spec.LayerNorm(input_shape=[*input_shapes[0]],
                                           eps=attributes['epsilon'])
     return spec.Spec(spec_utils.node_identifier(node),
                      operator_spec_option)
Example #19
 def div(self, node: onnx.NodeProto) -> spec.Spec:
     input_shapes, _, _ = self.get_inputs_for_gen_spec(node)
     input_shape = input_shapes[0]
     operator_spec_option = spec.Div(shape=[*input_shape])
     return spec.Spec(spec_utils.node_identifier(node),
                      operator_spec_option)