Code Example #1
    def optimize(self, graph: Graph) -> Tuple[Graph, bool]:
        flag_changed = False
        for op in traverse.filter_nodes(traverse.listup_operators(graph),
                                        Linear):
            x = op.inputs["x"]
            w = op.inputs["w"]
            y = op.outputs["y"]

            flag_changed = True
            op.remove_all()
            a_filter = Axis()

            if x.ndim == 2:
                w, = ReinterpretAxis(None,
                                     in_order=OrderNC,
                                     out_order=Order([Axis.C, a_filter]))(w)
                new_y, = Tensordot(None, axes=[Axis.C, a_filter])(x, w)

            elif x.ndim == 4:
                w, = ReinterpretAxis(
                    None,
                    in_order=OrderNHWC,
                    out_order=Order([Axis.C, Axis.H, Axis.W, a_filter]))(w)
                new_y, = Tensordot(None,
                                   axes=[[Axis.H, Axis.W, Axis.C],
                                         [Axis.H, Axis.W, a_filter]])(x, w)

            else:
                raise NotImplementedError

            OptimizeRule.replace_variable(graph, new_y.transpose_like(y), y)

        return graph, flag_changed
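For intuition, the replacement works because a fully connected Linear layer is just a contraction over the channel axis; below is a NumPy sketch of the 2D branch above (illustration only, not the WebDNN API).

import numpy as np

x = np.random.rand(4, 16)                  # OrderNC input: (N, C)
w = np.random.rand(16, 10)                 # weight reinterpreted as (C, a_filter)
y = np.tensordot(x, w, axes=([1], [0]))    # contract C -> (4, 10), i.e. x @ w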
Code Example #2
    def optimize(self, graph: Graph) -> Tuple[Graph, bool]:
        flag_changed = False
        for op in traverse.filter_nodes(traverse.listup_operators(graph),
                                        Convolution2D):  # type: Convolution2D
            x = op.inputs["x"]
            w = op.inputs["w"]
            y = op.outputs["y"]
            flag_changed = True
            op.remove_all()

            a_filter, a_kh, a_kw = Axis(), Axis(), Axis()
            w, = ReinterpretAxis(None,
                                 in_order=OrderNHWC,
                                 out_order=Order(
                                     [Axis.C, a_kh, a_kw, a_filter]))(w)

            if op.WH == 1 and op.WW == 1 and op.stride == (
                    1, 1) and op.padding == (0, 0):
                # Projection
                col, = ReinterpretAxis(
                    None,
                    in_order=OrderNHWC,
                    out_order=Order([Axis.N, Axis.H, Axis.W, a_filter]))(x)

                new_y, = Tensordot(None,
                                   [[a_filter], [a_kh, a_kw, a_filter]])(col, w)

            elif op.WH == x.shape_dict[Axis.H] and op.WW == x.shape_dict[
                    Axis.W] and op.padding == (0, 0):
                # Global convolution
                col, = ReinterpretAxis(None,
                                       in_order=OrderNHWC,
                                       out_order=Order(
                                           [Axis.N, a_kh, a_kw, a_filter]))(x)

                new_y, = Tensordot(None, [[a_kh, a_kw, a_filter],
                                          [a_kh, a_kw, a_filter]])(col, w)

            else:
                # General convolution
                col, = Im2Col(None,
                              ksize=op.ksize,
                              stride=op.stride,
                              padding=op.padding,
                              dilation_rate=op.dilation_rate)(x)
                col, = ReinterpretAxis(
                    None,
                    in_order=OrderNHWC,
                    out_order=Order([Axis.N, Axis.H, Axis.W, a_filter]))(col)

                new_y, = Tensordot(None,
                                   [[a_filter], [a_kh, a_kw, a_filter]])(col, w)

            new_y = new_y.transpose(y.order)
            OptimizeRule.replace_variable(graph, new_y, y)

        return graph, flag_changed
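In the general branch, Im2Col lays out each output position's receptive field along one flattened filter axis, so the convolution reduces to a single tensor contraction; a NumPy sketch of the shapes involved (illustration only, not the WebDNN API):

import numpy as np

N, H, W, C, KH, KW, C_out = 1, 4, 4, 3, 3, 3, 8
col = np.random.rand(N, H, W, KH * KW * C)    # what Im2Col produces (stride 1, padding that keeps H and W)
w = np.random.rand(KH * KW * C, C_out)        # filter flattened to (KH*KW*C, C_out)
y = np.tensordot(col, w, axes=([3], [0]))     # contract the flattened filter axis -> (N, H, W, C_out)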
Code Example #3
    def optimize_operator(self, graph: Graph, op: Reshape):
        x = op.inputs["x"]
        y = op.outputs["y"]

        if x.order == y.order and x.shape == y.shape:
            _remove_unary_operator(graph, op)
            return True

        if x.shape == y.shape:
            op.remove_all()
            y_dummy, = ReinterpretAxis(None,
                                       in_order=x.order,
                                       out_order=y.order)(x)
            y_dummy.replace(y)
            return True

        if isinstance(x, ConstantVariable) and x.output_from is None:
            _remove_unary_operator(graph, op)
            x.change_order(y.order)
            return True

        if all([
                y not in graph.outputs,
                all(x.stride_dict[axis] == y.stride_dict[axis] for axis in
                    [axis for axis in x.order.axes if axis in y.order.axes]),
                all(isinstance(op2, Elementwise) for op2 in y.input_to)
        ]):
            _remove_unary_operator(graph, op)
            return True

        return False
Code Example #4
    def optimize(self, graph: Graph) -> Tuple[Graph, bool]:
        flag_changed = False
        for op in traverse.filter_nodes(
                traverse.listup_operators(graph),
                Deconvolution2D):  # type: Deconvolution2D
            x = op.inputs["x"]
            w = op.inputs["w"]
            y = op.outputs["y"]
            flag_changed = True
            op.remove_all()

            a_filter, a_kh, a_kw = Axis(), Axis(), Axis()
            w, = ReinterpretAxis(None,
                                 in_order=OrderNHWC,
                                 out_order=Order(
                                     [Axis.C, a_kh, a_kw, a_filter]))(w)
            x, = ReinterpretAxis(None,
                                 in_order=OrderNHWC,
                                 out_order=Order(
                                     [Axis.N, Axis.H, Axis.W, a_filter]))(x)

            col, = Tensordot(None, axes=a_filter)(x, w)
            col = col.transpose(
                Order([Axis.N, Axis.H, Axis.W, a_kh, a_kw, Axis.C]))
            col = col.reshape(shape=[*col.shape[0:3],
                                     mul(col.shape[3:6])],
                              order=OrderNHWC)

            new_y, = Col2Im(None,
                            ksize=op.ksize,
                            stride=op.stride,
                            padding=op.padding)(col)
            OptimizeRule.replace_variable(graph, new_y.transpose_like(y), y)

        return graph, flag_changed
Code Example #5
File: basic_math.py  Project: 255BITS/webdnn
def _convert_mul(converter: ChainerConverter, c_op: chainer.functions.math.basic_math.Mul):
    x1 = converter.get_variable(c_op.inputs[0])
    x2 = converter.get_variable(c_op.inputs[1])

    x1, = ReinterpretAxis(None, x1.order, OrderNC)(x1)
    x2, = ReinterpretAxis(None, x2.order, OrderCN)(x2)

    y, = Linear(None)(x1, x2)
    converter.set_variable(c_op.outputs[0](), y)
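The reinterpretation to OrderNC / OrderCN lines the two operands up as a matrix product over the shared C axis; a NumPy sketch of the resulting shapes (illustration only, not the WebDNN API):

import numpy as np

x1 = np.random.rand(4, 8)    # reinterpreted as OrderNC: (N, C)
x2 = np.random.rand(8, 5)    # reinterpreted as OrderCN: (C, N)
y = x1 @ x2                  # Linear contracts C -> (4, 5)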
Code Example #6
    def optimize_operator(self, graph: Graph, op: ReinterpretAxis):
        x = op.inputs["x"]
        y = op.outputs["y"]

        if len(x.input_to) == 1 and x.output_from is None:
            op.remove_all()

            if isinstance(x, ConstantVariable):
                x = ConstantVariable(x.data, y.order)

                if y in graph.outputs:
                    index = graph.outputs.index(y)
                    graph.outputs.remove(y)
                    graph.outputs.insert(index, x)

                else:
                    y.replace(x)
            else:
                assert x in graph.inputs

                index = graph.inputs.index(x)
                graph.inputs.remove(x)
                graph.inputs.insert(index, y)

            return True

        if op.parameters["in_order"] == op.parameters["out_order"]:
            _remove_unary_operator(graph, op)
            return True

        flag_changed = False
        for axis1, axis2 in zip(op.parameters["in_order"].axes,
                                op.parameters["out_order"].axes):
            is_resolved1 = not (isinstance(axis1, AxisVar)
                                and axis1.value is None)
            is_resolved2 = not (isinstance(axis2, AxisVar)
                                and axis2.value is None)

            if is_resolved1 and not is_resolved2:
                axis2.unify(axis1)
                flag_changed = True

            elif not is_resolved1 and is_resolved2:
                axis1.unify(axis2)
                flag_changed = True

        if flag_changed:
            return True

        return False
Code Example #7
File: gen_math_ops.py  Project: newpouy/webdnn
def matmul_handler(converter: TensorFlowConverter, tf_op: "tf.Operation"):
    a = converter.get_variable(tf_op.inputs[0])
    b = converter.get_variable(tf_op.inputs[1])
    transposed_a = tf_op.get_attr("transpose_a")
    transposed_b = tf_op.get_attr("transpose_b")

    if a.ndim > 2 or b.ndim > 2:
        raise NotImplementedError(
            "[TensorFlowConverter] Currently, MatMul is supported only 2D * 2D case."
        )

    c_axes = []
    if transposed_a:
        c_axes.append(a.order.axes[-1])
        a_axis_K = a.order.axes[-2]

        if a.order != OrderCN:
            a, = ReinterpretAxis(None, in_order=a.order, out_order=OrderCN)(a)

    else:
        c_axes.append(a.order.axes[-2])
        a_axis_K = a.order.axes[-1]

        if a.order != OrderNC:
            a, = ReinterpretAxis(None, in_order=a.order, out_order=OrderNC)(a)

    if transposed_b:
        b_axis_K = b.order.axes[-1]

        c_axes.append(AxisVar())
        if b.order != OrderNC:
            b, = ReinterpretAxis(None, in_order=b.order, out_order=OrderNC)(b)

    else:
        c_axes.append(AxisVar())
        if b.order != OrderCN:
            b, = ReinterpretAxis(None, in_order=b.order, out_order=OrderCN)(b)

        b_axis_K = b.order.axes[-2]

    if flags.AGGRESSIVE_ORDER_INFERENCE:
        # Assumption: 2 inner multiplied axes are same.
        unify(a_axis_K, b_axis_K)

    c_normalized, = Linear(None)(a, b)
    c, = ReinterpretAxis(None,
                         in_order=c_normalized.order,
                         out_order=Order(c_axes))(c_normalized)

    converter.set_variable(tf_op.outputs[0], c)
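The transpose_a / transpose_b flags only decide which axis of each operand plays the contracted K role; a NumPy sketch of the shapes (illustration only, not the WebDNN API):

import numpy as np

a = np.random.rand(7, 3)     # transpose_a=True: K (=7) is axes[-2], the kept axis is axes[-1]
b = np.random.rand(5, 7)     # transpose_b=True: K (=7) is axes[-1]
c = a.T @ b.T                # contract K -> (3, 5)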
Code Example #8
    def optimize_operator(self, graph: Graph, op: ReinterpretAxis):
        x = op.inputs["x"]
        y = op.outputs["y"]

        if len(x.input_to) == 1 and x.output_from is None:
            if x in graph.inputs:
                op.remove_all()
                index = graph.inputs.index(x)
                graph.inputs.remove(x)
                graph.inputs.insert(index, y)
                return True

        if op.parameters["in_order"] == op.parameters["out_order"]:
            _remove_unary_operator(graph, op)
            return True

        return False
Code Example #9
def _convert_embedding(converter: KerasConverter, k_op: keras.layers.Embedding):
    x = converter.get_variable(converter.get_input_tensor(k_op)[0])

    if x.order == OrderNC:
        x, = ReinterpretAxis(None, in_order=OrderNC, out_order=OrderNT)(x)

    w = converter.convert_to_constant_variable(k_op.embeddings, OrderCN)

    y, = Embedding(None)(x, w)
    converter.set_variable(converter.get_output_tensor(k_op)[0], y)
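Conceptually, Embedding is a row lookup into the weight matrix, which is why the input is reinterpreted as OrderNT (batch, time) token ids; a NumPy sketch (illustration only, not the WebDNN API):

import numpy as np

w = np.random.rand(1000, 64)       # Keras embeddings weight: (vocabulary_size, embedding_dim)
ids = np.array([[3, 14, 159]])     # OrderNT input: (batch, time) of token ids
y = w[ids]                         # -> (1, 3, 64), one embedding row per token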
Code Example #10
    def optimize_operator(self, graph: Graph, op: Reshape):
        x = op.inputs["x"]
        y = op.outputs["y"]

        if x.order == y.order and x.shape == y.shape:
            # no reshape occurs
            _remove_unary_operator(graph, op)
            return True

        if x.shape == y.shape:
            # only axis reinterpretation occurs
            op.remove_all()
            y_dummy, = ReinterpretAxis(None,
                                       in_order=x.order,
                                       out_order=y.order)(x)
            y_dummy.replace(y)
            return True

        return False
Code Example #11
File: connection.py  Project: newpouy/webdnn
def _convert_linear_function(
        converter: ChainerConverter,
        c_op: "chainer.functions.connection.linear.LinearFunction"):
    x = converter.get_variable(c_op.inputs[0])
    w = converter.get_variable(c_op.inputs[1])  # type: ConstantVariable

    x2, = Reshape(None,
                  in_order=x.order,
                  out_order=OrderNC,
                  out_shape=[x.shape[0], mul(x.shape[1:])])(x)
    w2, = ReinterpretAxis(None, in_order=w.order, out_order=OrderNC)(w)
    w2, = Transpose(None)(w2)
    w2.change_order(OrderCN)

    y, = Linear(None)(x2, w2)
    y, = ReinterpretAxis(None,
                         in_order=y.order,
                         out_order=Order([x.order.axes[0],
                                          w.order.axes[0]]))(y)

    if len(c_op.inputs) == 3:
        # with bias
        b = converter.get_variable(c_op.inputs[2])
        check_broadcast_constraints(y, b)
        y = y + b

    converter.set_variable(c_op.outputs[0](), y)
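The conversion flattens the input to (N, C), while the Chainer Linear weight, stored as (out_features, in_features), is reinterpreted and transposed so that Linear contracts the in_features axis; a NumPy shape sketch (illustration only, not the WebDNN API):

import numpy as np

x = np.random.rand(2, 3, 4, 5)        # arbitrary input with a leading batch axis
w = np.random.rand(10, 3 * 4 * 5)     # Chainer Linear weight: (out_features, in_features)
x2 = x.reshape(x.shape[0], -1)        # Reshape to OrderNC: (2, 60)
y = x2 @ w.T                          # contract in_features -> (2, 10)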
Code Example #12
    def optimize_operator(self, graph: Graph, op: ReinterpretAxis):
        x = op.inputs["x"]
        y = op.outputs["y"]

        if op.parameters["in_order"] == op.parameters["out_order"]:
            _remove_unary_operator(graph, op)
            return True

        if x in graph.inputs and len(x.input_to) == 1:
            # before)
            #
            # x[Graph Input] -{ReinterpretAxis}- h -{op}->
            #
            # after)
            #
            # h[Graph Input] -{op}->

            op.remove_all()
            OptimizeRule.replace_variable(graph, x, y, with_assert=False)
            return True

        return False
Code Example #13
    def optimize(self, graph: Graph) -> Tuple[Graph, bool]:
        flag_changed = False
        for op in traverse.filter_nodes(
                traverse.listup_operators(graph),
                Deconvolution2D):  # type: Deconvolution2D
            x = op.inputs["x"]
            w = op.inputs["w"]
            y = op.outputs["y"]
            flag_changed = True
            op.remove_all()

            a_filter = Axis()
            w, = ReinterpretAxis(
                None,
                in_order=Order([Axis.N, Axis.KH, Axis.KW, Axis.C]),
                out_order=Order([Axis.C, Axis.KH, Axis.KW, a_filter]))(w)

            if op.KH == 1 and op.KW == 1 and op.stride == (
                    1, 1) and op.padding == (0, 0):
                # Projection
                w = w.transpose(Order([Axis.C, Axis.KH, Axis.KW, a_filter]))
                w = w.reshape([w.shape_dict[Axis.C], w.shape_dict[a_filter]],
                              Order([Axis.C, a_filter]))
                new_y, = Tensordot(None, [Axis.C, a_filter])(x, w)

            else:
                # General deconvolution
                w = w.transpose(Order([a_filter, Axis.KH, Axis.KW, Axis.C]))
                col, = Tensordot(None, axes=[Axis.C, a_filter])(x, w)
                new_y, = Col2Im(None,
                                ksize=op.ksize,
                                stride=op.stride,
                                padding=op.padding)(col)

            OptimizeRule.replace_variable(graph, new_y.transpose_like(y), y)

        return graph, flag_changed
Code Example #14
def template(in_order, in_shape, out_order, out_shape):
    op = ReinterpretAxis(None, in_order=in_order, out_order=out_order)
    x = Variable([in_shape[a] for a in in_order.axes], in_order)
    y, = op(x)
    assert_shape(y, out_shape)
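A possible invocation of this test helper, assuming in_shape and out_shape are dicts keyed by Axis (as the indexing above implies) and that OrderNC, OrderNT and Axis are imported from webdnn.graph.order and webdnn.graph.axis; the concrete orders and sizes are illustrative:

template(OrderNC, {Axis.N: 2, Axis.C: 3},
         OrderNT, {Axis.N: 2, Axis.T: 3})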