Example #1
def _optimize_redundant_transposed_output(
        graph: Graph,
        op: Operator,
        var_name: str,
        target_orders: Optional[Union[Order, List[Order]]] = None):
    v = op.outputs[var_name]

    if len(v.input_to) != 1:
        return False

    op2 = list(v.input_to)[0]

    if not isinstance(op2, Transpose):
        return False

    if target_orders is not None:
        if isinstance(target_orders, Order):
            target_orders = [target_orders]

        if op2.outputs["y"].order not in target_orders:
            return False

    v2 = op2.outputs["y"]
    op2.remove_all()
    OptimizeRule.replace_variable(graph, v, v2, with_assert=False)
    return True
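
The `target_orders` argument accepts either a single Order or a list of Orders. A runnable sketch of that scalar-or-list normalization idiom, with plain strings standing in for Order objects (the function name is illustrative, not WebDNN's API):

def matches_target(order, target_orders=None):
    if target_orders is not None:
        if not isinstance(target_orders, list):
            target_orders = [target_orders]  # wrap a single value into a list
        if order not in target_orders:
            return False
    return True

assert matches_target("NHWC")                       # no filter: always passes
assert matches_target("NHWC", "NHWC")               # single value accepted
assert not matches_target("NCHW", ["NHWC", "NTC"])  # not in the allowed list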
Example #2
    def fold_constance(self, graph: Graph):
        x0 = self.inputs["x0"]  # type: ConstantVariable
        y = self.outputs["y"]
        self.remove_all()

        new_y = ConstantVariable(x0.copy().change_order(y.order).data ** self.value, y.order)
        OptimizeRule.replace_variable(graph, y, new_y)
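
A runnable NumPy sketch of what this folding computes: `change_order` amounts to transposing the constant's underlying array into the output's axis order before applying the elementwise power. The orders, data, and exponent below are illustrative, not taken from a real graph:

import numpy as np

x0_nchw = np.arange(24, dtype=np.float64).reshape(1, 2, 3, 4)  # constant in NCHW
value = 2.0
x0_nhwc = x0_nchw.transpose(0, 2, 3, 1)  # change_order: NCHW -> NHWC
folded = x0_nhwc ** value                # the precomputed constant for y
assert folded.shape == (1, 3, 4, 2)
assert np.allclose(folded, x0_nchw.transpose(0, 2, 3, 1) ** value)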
Example #3
def _split_im2col(graph: Graph, op: Im2Col, v: Variable, v_pair: Sequence[Variable], axis: Axis):
    s1 = v_pair[0].shape_dict[axis]
    im = op.inputs["im"]
    col = op.outputs["col"]

    op.remove_all()

    if v == col:
        """
        before)

        im -{Im2Col}- col

        after)

                            +- col_0
        im -{PartialIm2Col}-+
                            +- col_1
        """
        col_0, col_1 = PartialIm2Col(None,
                                     ksize=op.ksize, stride=op.stride, padding=op.padding, dilation_rate=op.dilation_rate,
                                     axis=axis, sections=[s1])(im)

        OptimizeRule.replace_variable(graph, col_0.transpose(v_pair[0].order), v_pair[0])
        OptimizeRule.replace_variable(graph, col_1.transpose(v_pair[1].order), v_pair[1])

    elif v == im:
        raise NotImplementedError(f"Variable is too large to handle in WebGL backend: {v}")

    else:
        raise UnexpectedAndPleaseReportError
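
The `sections=[s1]` argument mirrors NumPy's split points: a single cut at offset s1 yields two pieces along the chosen axis. A minimal sketch of that semantics (sizes are illustrative):

import numpy as np

col = np.arange(12).reshape(2, 6)
s1 = 4
col_0, col_1 = np.split(col, [s1], axis=1)  # one cut point -> two pieces
assert col_0.shape == (2, 4) and col_1.shape == (2, 2)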
Example #4
    def optimize(self, graph: Graph) -> Tuple[Graph, bool]:
        flag_changed = False
        for op in traverse.filter_nodes(
                traverse.listup_operators(graph),
                Deconvolution2D):  # type: Deconvolution2D
            x = op.inputs["x"]
            w = op.inputs["w"]
            y = op.outputs["y"]
            flag_changed = True
            op.remove_all()

            a_filter, a_kh, a_kw = Axis(), Axis(), Axis()
            w, = ReinterpretAxis(None,
                                 in_order=OrderNHWC,
                                 out_order=Order(
                                     [Axis.C, a_kh, a_kw, a_filter]))(w)
            x, = ReinterpretAxis(None,
                                 in_order=OrderNHWC,
                                 out_order=Order(
                                     [Axis.N, Axis.H, Axis.W, a_filter]))(x)

            col, = Tensordot(None, axes=a_filter)(x, w)
            col = col.transpose(
                Order([Axis.N, Axis.H, Axis.W, a_kh, a_kw, Axis.C]))
            col = col.reshape(shape=[*col.shape[0:3],
                                     mul(col.shape[3:6])],
                              order=OrderNHWC)

            new_y, = Col2Im(None,
                            ksize=op.ksize,
                            stride=op.stride,
                            padding=op.padding)(col)
            OptimizeRule.replace_variable(graph, new_y.transpose_like(y), y)

        return graph, flag_changed
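
A shape-level NumPy sketch of the Tensordot-and-reshape step above: contracting the shared filter axis of x[N, H, W, F] and w[C, KH, KW, F] leaves a col tensor with axes [N, H, W, C, KH, KW], which is transposed and flattened to [N, H, W, KH*KW*C] before Col2Im. Sizes are illustrative:

import numpy as np

N, H, W, F, C, KH, KW = 1, 4, 5, 8, 3, 2, 2
x = np.random.rand(N, H, W, F)
w = np.random.rand(C, KH, KW, F)
col = np.tensordot(x, w, axes=([3], [3]))  # -> (N, H, W, C, KH, KW)
col = col.transpose(0, 1, 2, 4, 5, 3)      # -> (N, H, W, KH, KW, C)
col = col.reshape(N, H, W, KH * KW * C)
assert col.shape == (1, 4, 5, 12)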
Example #5
    def fold_constance(self, graph: Graph):
        x0 = self.inputs["x0"]
        y = self.outputs["y"]

        OptimizeRule.replace_variable(graph, y,
                                      x0.copy().change_order(y.order))
        self.remove_all()
Example #6
    def optimize(self, graph: Graph) -> Tuple[Graph, bool]:
        flag_changed = False

        for x in graph.inputs:
            if len(x.input_to) != 1:
                continue

            op = list(x.input_to)[0]
            if isinstance(op, (ConvertRGBAtoR, ConvertRtoRGBA)):
                flag_changed = True
                y = op.outputs["y"]
                op.remove_all()
                OptimizeRule.replace_variable(graph, x, y)

        for y in graph.outputs:
            if isinstance(y.output_from, (ConvertRGBAtoR, ConvertRtoRGBA)):
                flag_changed = True

                x = y.output_from.inputs["x0"]
                i = graph.outputs.index(y)
                graph.outputs.remove(y)
                graph.outputs.insert(i, x)

                if len(y.input_to) == 0:
                    y.output_from.remove_all()

        return graph, flag_changed
Example #7
    def optimize(self, graph: Graph) -> Tuple[Graph, bool]:
        flag_changed = False
        for op in traverse.filter_nodes(traverse.listup_operators(graph),
                                        Convolution2D):  # type: Convolution2D
            x = op.inputs["x"]
            w = op.inputs["w"]
            y = op.outputs["y"]
            flag_changed = True
            op.remove_all()

            a_filter, a_kh, a_kw = Axis(), Axis(), Axis()
            w, = ReinterpretAxis(None,
                                 in_order=OrderNHWC,
                                 out_order=Order(
                                     [Axis.C, a_kh, a_kw, a_filter]))(w)

            if op.WH == 1 and op.WW == 1 and op.stride == (
                    1, 1) and op.padding == (0, 0):
                # Projection
                col, = ReinterpretAxis(
                    None,
                    in_order=OrderNHWC,
                    out_order=Order([Axis.N, Axis.H, Axis.W, a_filter]))(x)

                new_y, = Tensordot(None,
                                   [[a_filter], [a_kh, a_kw, a_filter]])(col,
                                                                         w)

            elif op.WH == x.shape_dict[Axis.H] and op.WW == x.shape_dict[
                    Axis.W] and op.padding == (0, 0):
                # Global convolution
                col, = ReinterpretAxis(None,
                                       in_order=OrderNHWC,
                                       out_order=Order(
                                           [Axis.N, a_kh, a_kw, a_filter]))(x)

                new_y, = Tensordot(
                    None, [[[a_kh, a_kw, a_filter], [a_kh, a_kw, a_filter]],
                           [a_kh, a_kw, a_filter]])(col, w)

            else:
                # General convolution
                col, = Im2Col(None,
                              ksize=op.ksize,
                              stride=op.stride,
                              padding=op.padding,
                              dilation_rate=op.dilation_rate)(x)
                col, = ReinterpretAxis(
                    None,
                    in_order=OrderNHWC,
                    out_order=Order([Axis.N, Axis.H, Axis.W, a_filter]))(col)

                new_y, = Tensordot(None,
                                   [[a_filter], [a_kh, a_kw, a_filter]])(col,
                                                                         w)

            new_y = new_y.transpose(y.order)
            OptimizeRule.replace_variable(graph, new_y, y)

        return graph, flag_changed
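
A runnable NumPy sketch of the general-convolution branch: Im2Col gathers each sliding window, and a Tensordot over the (KH, KW, Cin) axes reproduces direct convolution. Stride 1 and no padding are assumed for brevity, and the helper below is an illustration, not WebDNN's kernel:

import numpy as np

def im2col(x, kh, kw):  # x: (N, H, W, C), stride 1, no padding
    n, h, w, c = x.shape
    oh, ow = h - kh + 1, w - kw + 1
    col = np.empty((n, oh, ow, kh, kw, c))
    for i in range(kh):
        for j in range(kw):
            col[:, :, :, i, j, :] = x[:, i:i + oh, j:j + ow, :]
    return col

x = np.random.rand(1, 5, 5, 3)
w = np.random.rand(2, 2, 3, 4)                          # (KH, KW, Cin, Cout)
col = im2col(x, 2, 2)                                   # (1, 4, 4, 2, 2, 3)
y = np.tensordot(col, w, axes=([3, 4, 5], [0, 1, 2]))   # (1, 4, 4, 4)

# Reference: direct convolution evaluated at one output location.
ref = (x[0, 1:3, 2:4, :, None] * w).sum(axis=(0, 1, 2))
assert np.allclose(y[0, 1, 2], ref)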
Example #8
    def optimize(self, graph: Graph) -> Tuple[Graph, bool]:
        flag_changed = False
        for concat in traverse.filter_nodes(traverse.listup_operators(graph), Concat):
            if len(concat.inputs) == 2:
                # Unrolling is not needed
                continue

            flag_changed = True
            xs = [concat.inputs[f"x{i}"] for i in range(len(concat.inputs))]
            y = concat.outputs["y"]
            concat.remove_all()

            while len(xs) > 1:
                hs = []
                while len(xs) > 0:
                    if len(xs) == 1:
                        hs.append(xs.pop(0))

                    else:
                        x0, x1 = xs.pop(0), xs.pop(0)
                        h, = Concat(None, axis=concat.axis)(x0, x1)
                        hs.append(h)

                xs = hs

            OptimizeRule.replace_variable(graph, y, xs[0].transpose_like(y))

        return graph, flag_changed
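
The unrolling loop builds a balanced tree of two-input concats. A pure-Python sketch of the same pairing strategy, with list concatenation standing in for Concat:

def unroll(xs, join=lambda a, b: a + b):
    while len(xs) > 1:
        hs = []
        while xs:
            if len(xs) == 1:
                hs.append(xs.pop(0))  # odd leftover passes through unchanged
            else:
                hs.append(join(xs.pop(0), xs.pop(0)))
        xs = hs
    return xs[0]

assert unroll([[1], [2], [3], [4], [5]]) == [1, 2, 3, 4, 5]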
Example #9
    def fold_constance(self, graph: "graph.Graph"):
        x = self.inputs["x"]  # type: ConstantVariable
        y = self.outputs["y"]
        self.remove_all()

        new_y = ConstantVariable(x.data.copy(), y.order)
        OptimizeRule.replace_variable(graph, y, new_y)
Example #10
    def optimize(self, graph: Graph) -> Tuple[Graph, bool]:
        flag_changed = False
        for op in traverse.filter_nodes(traverse.listup_operators(graph),
                                        Linear):
            x = op.inputs["x"]
            w = op.inputs["w"]
            y = op.outputs["y"]

            flag_changed = True
            op.remove_all()
            a_filter = Axis()

            if x.ndim == 2:
                w, = ReinterpretAxis(None,
                                     in_order=OrderNC,
                                     out_order=Order([Axis.C, a_filter]))(w)
                new_y, = Tensordot(None, axes=[Axis.C, a_filter])(x, w)

            elif x.ndim == 4:
                w, = ReinterpretAxis(
                    None,
                    in_order=OrderNHWC,
                    out_order=Order([Axis.C, Axis.H, Axis.W, a_filter]))(w)
                new_y, = Tensordot(None,
                                   axes=[[Axis.H, Axis.W, Axis.C],
                                         [Axis.H, Axis.W, a_filter]])(x, w)

            else:
                raise NotImplementedError

            OptimizeRule.replace_variable(graph, new_y.transpose_like(y), y)

        return graph, flag_changed
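
A NumPy sketch of the 2-D branch: after ReinterpretAxis, the Linear layer is a tensordot over the feature axis, i.e. an ordinary matrix product. Shapes are illustrative:

import numpy as np

x = np.random.rand(8, 16)                  # (N, C)
w = np.random.rand(10, 16)                 # (Cout, Cin) after ReinterpretAxis
y = np.tensordot(x, w, axes=([1], [1]))    # contract C against a_filter
assert y.shape == (8, 10)
assert np.allclose(y, x @ w.T)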
Example #11
    def fold_constance(self, graph: Graph):
        x = self.inputs["x"]  # type: ConstantVariable
        y = self.outputs["y"]

        new_y = ConstantVariable(np.tile(x.data, self.multiplier), x.order)
        new_y.change_order(y.order)
        OptimizeRule.replace_variable(graph, y, new_y)
        self.remove_all()
Example #12
    def fold_constance(self, graph: Graph):
        xs = [self.inputs[f"x{i}"] for i in range(len(self.inputs))]  # type: List[ConstantVariable]
        y = self.outputs["y"]

        data = np.concatenate([x.copy().change_order(y.order).data for x in xs], axis=y.order.axes_dict[self.axis])
        new_y = ConstantVariable(data, y.order)
        OptimizeRule.replace_variable(graph, y, new_y)
        self.remove_all()
Example #13
    def fold_constance(self, graph: Graph):
        x = self.inputs["x"]  # type: ConstantVariable
        y = self.outputs["y"]
        self.remove_all()

        y_new = ConstantVariable(x.data, x.order).change_order(self.in_order)
        y_new = ConstantVariable(y_new.data.reshape(self.out_shape), self.out_order).change_order(y.order)
        OptimizeRule.replace_variable(graph, y, y_new)
Example #14
    def fold_constance(self, graph: Graph):
        x0 = self.inputs["x0"]  # type: ConstantVariable
        y = self.outputs["y"]
        self.remove_all()

        y_new = ConstantVariable(x0.data, x0.order).change_order(y.order)
        y_new.data = y_new.data**self.value
        OptimizeRule.replace_variable(graph, y, y_new)
Example #15
    def fold_constance(self, graph: Graph):
        x0 = self.inputs["x0"]  # type: ConstantVariable
        y = self.outputs["y"]

        new_y = ConstantVariable(1 / np.sqrt(x0.data), x0.order)
        new_y.change_order(y.order)
        OptimizeRule.replace_variable(graph, y, new_y)
        self.remove_all()
Example #16
def _split_pooling_2d(graph: Graph, op: Pooling2D, v: Variable,
                      v_pair: Sequence[Variable], axis: Axis):
    s1 = v_pair[0].shape_dict[axis]
    x = op.inputs["x"]
    y = op.outputs["y"]
    op.remove_all()

    if v == x:
        x_0, x_1 = v_pair
        s, k, p = (op.SH, op.KH, op.PH) if axis == Axis.H else (op.SW, op.KW,
                                                                op.PW)

        raise NotImplementedError

    elif v == y:
        y_0, y_1 = v_pair
        s, k, p = (op.SH, op.KH, op.PH) if axis == Axis.H else (op.SW, op.KW,
                                                                op.PW)

        x_0_range = (0 * s - k // 2, (y_0.shape_dict[axis] - 1) * s + k)
        x_1_range = (y_0.shape_dict[axis] * s - k // 2,
                     (y.shape_dict[axis] - 1) * s + k)

        indices = AxisKeyDict(OrderNHWC.axes,
                              [slice(None) for _ in OrderNHWC.axes])

        indices_0 = AxisKeyDict(indices)
        indices_0[axis] = slice(max(x_0_range[0], 0),
                                min(x_0_range[1], x.shape_dict[axis]))

        indices_1 = AxisKeyDict(indices)
        indices_1[axis] = slice(max(x_1_range[0], 0),
                                min(x_1_range[1], x.shape_dict[axis]))

        x_0, = Slice(None, indices=indices_0)(x)
        x_1, = Slice(None, indices=indices_1)(x)

        if p > 0:
            data = ConstantVariable(
                np.zeros([
                    p if a == axis else x.shape_dict[a] for a in x.order.axes
                ]), x.order)
            x_0, = Concat(None, axis=axis)(data, x_0)
            x_1, = Concat(None, axis=axis)(x_1, data)

        op_0, op_1 = op.copy(), op.copy()
        new_padding = (0, op.PW) if axis == Axis.H else (op.PH, 0)
        op_0.parameters["padding"] = new_padding
        op_1.parameters["padding"] = new_padding

        y_0_new, = op_0(x_0)
        y_1_new, = op_1(x_1)

        OptimizeRule.replace_variable(graph, y_0_new.transpose_like(y_0), y_0)
        OptimizeRule.replace_variable(graph, y_1_new.transpose_like(y_1), y_1)

    else:
        raise UnexpectedAndPleaseReportError()
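
A small arithmetic check of the range formulas above with concrete numbers (k, s, and the output row counts are illustrative): each range is later clamped to [0, H) before slicing, so a negative lower bound is harmless:

k, s = 3, 2
h_out0, h_out = 4, 7  # rows produced as y_0, and rows in y overall

x_0_range = (0 * s - k // 2, (h_out0 - 1) * s + k)
x_1_range = (h_out0 * s - k // 2, (h_out - 1) * s + k)
assert x_0_range == (-1, 9)   # clamped to [0, H) by the max/min below
assert x_1_range == (7, 15)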
Example #17
def _split_partial_im2col(graph: Graph, op: PartialIm2Col, v: Variable,
                          v_pair: Sequence[Variable], axis: Axis):
    s1 = v_pair[0].shape_dict[axis]
    im = op.inputs["im"]
    cols = [op.outputs[f"col{i}"] for i in range(len(op.outputs))]
    sections = op.sections

    if v == im:
        raise NotImplementedError(
            f"Variable is too large to handle in WebGL backend: {v}")

    elif v in cols:
        op.remove_all()

        if axis == op.axis:
            """
            before)
                                +- col0
                                |
            im -{PartialIm2Col}-+- col1
                                |
                                +- col2

            after)
                                +- col0
                                |
                                +- col1_0
            im -{PartialIm2Col}-+
                                +- col1_1
                                |
                                +- col2
            """
            target_i = cols.index(v)

            s_insert = (0 if target_i == 0 else sections[target_i - 1]) + s1
            new_sections = list(sections)
            new_sections.insert(target_i, s_insert)

            cols.pop(target_i)
            cols.insert(target_i + 0, v_pair[0])
            cols.insert(target_i + 1, v_pair[1])

            new_cols = PartialIm2Col(None,
                                     ksize=op.ksize,
                                     stride=op.stride,
                                     padding=op.padding,
                                     dilation_rate=op.dilation_rate,
                                     axis=axis,
                                     sections=new_sections)(im)
            for col, new_col in zip(cols, new_cols):
                OptimizeRule.replace_variable(graph, new_col, col)

        else:
            raise NotImplementedError(
                f"Variable is too large to handle in WebGL backend: {v}")

    else:
        raise UnexpectedAndPleaseReportError
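
A NumPy sketch of the sections bookkeeping above: splitting the piece col1 at offset s1 inserts one extra cut point into the sections list, and np.split reproduces the resulting piece sizes. Sizes are illustrative:

import numpy as np

col = np.arange(10)
sections = [3, 7]     # col0 = [0:3], col1 = [3:7], col2 = [7:10]
target_i, s1 = 1, 2   # split col1 two elements in

s_insert = (0 if target_i == 0 else sections[target_i - 1]) + s1
new_sections = list(sections)
new_sections.insert(target_i, s_insert)
assert new_sections == [3, 5, 7]

pieces = np.split(col, new_sections)
assert [len(p) for p in pieces] == [3, 2, 2, 3]  # col0, col1_0, col1_1, col2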
Example #18
    def optimize(self, graph: Graph) -> Tuple[Graph, bool]:
        flag_changed = False
        """
        before)

        v0[RGBA] -{ConvertRtoRGBA}- v1[RGBA]

        after)

        v0[RGBA] -{ConvertRGBAtoR}- v2[Order=v0.order][R] -{Transpose}- v3[Order=v1.order][R]-{ConvertRtoRGBA}- v1[RGBA]
        """
        matches = traverse.search_sub_structure(
            graph, [Variable, ConvertRtoRGBA, Variable])
        while len(matches) > 0:
            v0, r2rgba, v1 = matches.pop(
            )  # type: Variable, ConvertRtoRGBA, Variable
            if not (ChannelMode.get(v0) == ChannelMode.get(v1) ==
                    ChannelModeEnum.RGBA):
                continue

            flag_changed = True

            r2rgba.remove_all()

            v2 = convert_rgba_to_r(v0)
            v2.change_order(v0.order)

            v3 = v2.transpose(v1.order)

            v1_new = convert_r_to_rgba(v3)
            v1_new.change_order(v1.order)

            OptimizeRule.replace_variable(graph, v1_new, v1)
        """
        before)

        v0[R] -{ConvertRGBAtoR}- v1[R]

        after)

        v0[R] -{Transpose}- v1[R] 
        """
        matches = traverse.search_sub_structure(
            graph, [Variable, ConvertRGBAtoR, Variable])
        while len(matches) > 0:
            v0, rgba2r, v1 = matches.pop(
            )  # type: Variable, ConvertRGBAtoR, Variable
            if not (ChannelMode.get(v0) == ChannelMode.get(v1) ==
                    ChannelModeEnum.R):
                continue

            flag_changed = True

            rgba2r.remove_all()

            OptimizeRule.replace_variable(graph, v0.transpose(v1.order), v1)

        return graph, flag_changed
Example #19
    def optimize(self, graph: Graph) -> Tuple[Graph, bool]:
        flag_changed = False
        variables = traverse.listup_variables(graph)

        while len(variables) > 0:
            x = variables.pop()
            for op1, op2 in itertools.permutations(x.input_to, 2):
                if op2 is op1:
                    continue

                if op2.__class__ != op1.__class__:
                    # classes are not the same
                    continue

                if any((x_name not in op2.inputs) or (
                        op2.inputs[x_name] != op1.inputs[x_name])
                       for x_name in op1.inputs.keys()):
                    # inputs are not the same
                    continue

                if any((key not in op2.parameters) or (
                        op2.parameters[key] != op1.parameters[key])
                       for key in op1.parameters.keys()):
                    # parameters are not the same
                    continue

                flag_changed = True

                vs_1 = dict(op1.outputs)
                vs_2 = dict(op2.outputs)

                op2.remove_all()

                for v_name, v1 in vs_1.items():
                    v2 = vs_2[v_name]
                    if v1.order == v2.order:
                        """
                                    +-{op3}-
                        -{op1}- v1 -+
                                    +-{op4}-
                        """
                        OptimizeRule.replace_variable(graph, v2, v1)

                    else:
                        """
                                    +-{op3}-
                        -{op1}- v1 -+
                                    +-{Transpose}- v2 -{op4}-
                        """
                        v2_dummy, = Transpose(None)(v1)
                        v2_dummy.change_order(v2.order)
                        OptimizeRule.replace_variable(graph, v2_dummy, v2)

                variables = traverse.listup_variables(graph)
                break

        return graph, flag_changed
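
This rule is common-subexpression elimination at the operator level: two operators with the same class, the same inputs, and the same parameters compute the same values, so one is removed. A dict-based sketch of the idea (the op dictionaries are hypothetical stand-ins, not WebDNN's API):

def cse(ops):
    seen = {}   # (class, inputs, params) -> first operator kept
    kept = []
    for op in ops:
        key = (op["class"], tuple(sorted(op["inputs"].items())),
               tuple(sorted(op["params"].items())))
        if key not in seen:
            seen[key] = op
            kept.append(op)
    return kept

ops = [{"class": "Exp", "inputs": {"x": "v0"}, "params": {}},
       {"class": "Exp", "inputs": {"x": "v0"}, "params": {}},
       {"class": "Exp", "inputs": {"x": "v1"}, "params": {}}]
assert len(cse(ops)) == 2  # the duplicate Exp on v0 is removed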
Example #20
    def fold_constance(self, graph: Graph):
        x0 = self.inputs["x0"]  # type: ConstantVariable
        y = self.outputs["y"]
        self.remove_all()

        OptimizeRule.replace_variable(
            graph, y,
            ConstantVariable(np.arccosh(x0.copy().change_order(y.order).data),
                             y.order))
Example #21
    def fold_constance(self, graph: Graph):
        in_order = self.parameters["in_order"]
        out_shape = self.parameters["out_shape"]
        out_order = self.parameters["out_order"]

        x = self.inputs["x"]  # type: ConstantVariable
        y = self.outputs["y"]
        self.remove_all()

        new_y = ConstantVariable(x.copy().change_order(in_order).data.reshape(out_shape), out_order)
        OptimizeRule.replace_variable(graph, y, new_y)
Example #22
    def fold_constance(self, graph: "graph.Graph"):
        x = self.inputs["x"]  # type: ConstantVariable
        y = self.outputs["y"]
        self.remove_all()

        y_new = ConstantVariable(
            x.data,
            Order([
                self.out_order.axes[self.in_order.axes.index(a)]
                for a in x.order.axes
            ]))
        OptimizeRule.replace_variable(graph, y, y_new.change_order(y.order))
Example #23
    def fold_constance(self, graph: Graph):
        x = self.inputs["x"]  # type: ConstantVariable
        y = self.outputs["y"]

        remained_axes_in_x_order = [a for a in x.order.axes if a in y.order.axes]
        new_axes = [a for a in y.order.axes if a not in x.order.axes]
        # NumPy requires a tuple index here; None entries append new length-1 axes
        slices = tuple([self.indices[a] for a in x.order.axes] + [None] * len(new_axes))

        new_y = ConstantVariable(x.data[slices], Order(remained_axes_in_x_order + new_axes))
        new_y.change_order(y.order)
        OptimizeRule.replace_variable(graph, y, new_y)
        self.remove_all()
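
A NumPy sketch of the indexing above: slice entries select along existing axes, while None entries append new length-1 axes. Note that NumPy requires the index to be a tuple, not a list:

import numpy as np

x = np.arange(12).reshape(3, 4)
slices = (slice(0, 2), slice(1, 3), None)  # select [0:2, 1:3], append one axis
y = x[slices]
assert y.shape == (2, 2, 1)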
Example #24
    def fold_constance(self, graph: Graph):
        x = self.inputs["x"]  # type: ConstantVariable
        ys = [self.outputs[f"y{i}"] for i in range(len(self.outputs))]
        axis = self.parameters["axis"]
        sections = self.parameters["sections"]

        self.remove_all()

        y_datum = np.split(x.data, sections, x.order.axes_dict[axis])
        for i, y in enumerate(ys):
            y_new = ConstantVariable(y_datum[i], x.order).change_order(y.order)
            OptimizeRule.replace_variable(graph, y, y_new)
Example #25
    def fold_constance(self, graph: Graph):
        x = self.inputs["x"]  # type: ConstantVariable
        y = self.outputs["y"]

        new_axes = list(x.order.axes)
        new_axes.remove(self.axis)
        new_y = ConstantVariable(
            np.sum(x.data, axis=x.order.axes_dict[self.axis]), Order(new_axes))

        new_y.change_order(y.order)

        OptimizeRule.replace_variable(graph, y, new_y)
        self.remove_all()
Example #26
    def optimize(self, graph: Graph) -> Tuple[Graph, bool]:
        flag_changed = False
        matches = traverse.search_sub_structure(
            graph, [Tensordot, Variable, ElementwiseMul, Variable])
        while len(matches) > 0:
            tensordot, h, elementwise_mul, y = matches.pop(
            )  # type: Tensordot, Variable, ElementwiseMul, Variable
            if len(h.input_to) != 1:
                # h will be removed by this optimization rule
                continue

            if isinstance(tensordot.inputs["A"], ConstantVariable):
                w1 = tensordot.inputs["A"]
                reduced_axes = tensordot.axes[0]

            elif isinstance(tensordot.inputs["B"], ConstantVariable):
                w1 = tensordot.inputs["B"]
                reduced_axes = tensordot.axes[1]

            else:
                continue

            if isinstance(
                    elementwise_mul.inputs["x0"],
                    ConstantVariable) and elementwise_mul.inputs["x1"] == h:
                w2 = elementwise_mul.inputs["x0"]

            elif isinstance(
                    elementwise_mul.inputs["x1"],
                    ConstantVariable) and elementwise_mul.inputs["x0"] == h:
                w2 = elementwise_mul.inputs["x1"]

            else:
                continue

            if any(axis not in w1.order.axes for axis in w2.order.axes):
                continue

            if any(axis in reduced_axes for axis in w2.order.axes):
                continue

            flag_changed = True
            elementwise_mul.remove_all()
            OptimizeRule.replace_variable(graph,
                                          w1,
                                          w1.copy() * w2,
                                          with_assert=False)
            OptimizeRule.replace_variable(graph, h, y, with_assert=False)

        return graph, flag_changed
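
A NumPy check of the algebra behind this rule: when the scale w2 only touches axes that survive the contraction, (x . w1) * w2 equals x . (w1 * w2), so the multiplication folds into the constant weight. Shapes are illustrative:

import numpy as np

x = np.random.rand(5, 8)    # (N, C); C is the reduced axis
w1 = np.random.rand(8, 10)  # (C, F)
w2 = np.random.rand(10)     # scale over the surviving F axis only

lhs = np.tensordot(x, w1, axes=([1], [0])) * w2
rhs = np.tensordot(x, w1 * w2, axes=([1], [0]))
assert np.allclose(lhs, rhs)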
Example #27
    def optimize_operator(self, graph: Graph, op: ReinterpretAxis):
        x = op.inputs["x"]
        y = op.outputs["y"]

        if len(x.input_to) == 1 and x.output_from is None:
            op.remove_all()

            if isinstance(x, ConstantVariable):
                x = ConstantVariable(x.data, y.order)

                if y in graph.outputs:
                    index = graph.outputs.index(y)
                    graph.outputs.remove(y)
                    graph.outputs.insert(index, x)

                else:
                    OptimizeRule.replace_variable(graph, y, x)
            else:
                assert x in graph.inputs

                index = graph.inputs.index(x)
                graph.inputs.remove(x)
                graph.inputs.insert(index, y)

            return True

        if op.parameters["in_order"] == op.parameters["out_order"]:
            _remove_unary_operator(graph, op)
            return True

        flag_changed = False
        for axis1, axis2 in zip(op.parameters["in_order"].axes,
                                op.parameters["out_order"].axes):
            is_resolved1 = not (isinstance(axis1, AxisVar)
                                and axis1.value is None)
            is_resolved2 = not (isinstance(axis2, AxisVar)
                                and axis2.value is None)

            if is_resolved1 and not is_resolved2:
                axis2.unify(axis1)
                flag_changed = True

            elif not is_resolved1 and is_resolved2:
                axis1.unify(axis2)
                flag_changed = True

        if flag_changed:
            return True

        return False
Example #28
    def optimize_pair(self, graph: Graph, op1: ElementwiseAdd, op2: ElementwiseDiv):
        c1, v1 = _get_constant_and_variable(op1, "x0", "x1")
        if c1 is None:
            return False

        c2, v2 = _get_constant_and_variable(op2, "x0", "x1")
        if c2 is None:
            return False

        y2 = op2.outputs["y"]
        op2.remove_all()
        op1.remove_all()
        y = (v1 / c2) + (c1 / c2)
        OptimizeRule.replace_variable(graph, y2, y.change_order(y2.order))
        return True
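
A NumPy check of the rewrite: (v1 + c1) / c2 distributes into (v1 / c2) + (c1 / c2), leaving a single constant addend that later passes can fold. Values are illustrative:

import numpy as np

v1, c1, c2 = np.random.rand(4), 3.0, 2.0
assert np.allclose((v1 + c1) / c2, (v1 / c2) + (c1 / c2))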
Example #29
def _split_tensorwise(graph: Graph, op: Operator, v: Variable,
                      v_pair: Sequence[Variable], axis: Axis):
    s1 = v_pair[0].shape_dict[axis]
    s2 = v_pair[1].shape_dict[axis]
    xs = dict(op.inputs)
    ys = dict(op.outputs)
    op.remove_all()

    op_0 = op.copy()
    op_1 = op.copy()

    for key, x in xs.items():
        if x == v:
            x_0, x_1 = v_pair

        else:
            if axis in x.order.axes:
                x_0, x_1 = SplitAxis(None, axis=axis, sections=[s1])(x)

            else:
                # no split occurs along this axis
                x_0 = x_1 = x

        op_0.append_input(key, x_0)
        op_1.append_input(key, x_1)

    for key, y in ys.items():
        if y == v:
            y_0, y_1 = v_pair

        else:
            if axis in y.order.axes:
                # TODO (Kiikurage)
                # Attributes attached to "y" are copied to neither "y_0" nor "y_1"
                y_0 = Variable([
                    s1 if a == axis else y.shape_dict[a] for a in y.order.axes
                ], y.order)
                y_1 = Variable([
                    s2 if a == axis else y.shape_dict[a] for a in y.order.axes
                ], y.order)
                y_new, = Concat(None, axis=axis)(y_0, y_1)
                OptimizeRule.replace_variable(graph, y, y_new)

            else:
                raise UnexpectedAndPleaseReportError

        op_0.append_output(key, y_0)
        op_1.append_output(key, y_1)
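
A NumPy check of why tensorwise splitting is sound: for an operator that acts independently along `axis`, applying it to the two halves and concatenating matches applying it to the whole input. ReLU stands in for `op`, and the sizes are illustrative:

import numpy as np

x = np.random.randn(6, 4)
s1 = 2
x_0, x_1 = np.split(x, [s1], axis=0)
relu = lambda a: np.maximum(a, 0)
assert np.allclose(relu(x), np.concatenate([relu(x_0), relu(x_1)], axis=0))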
Example #30
    def optimize_operator(self, graph: Graph, op: Reshape):
        x = op.inputs["x"]
        y = op.outputs["y"]

        if x.order == y.order and x.shape == y.shape:
            # no reshape is required
            _remove_unary_operator(graph, op)
            return True

        if x.shape == y.shape:
            # only reinterpret_axis is required
            op.remove_all()
            y_dummy = x.reinterpret_axes(y.order)
            OptimizeRule.replace_variable(graph, y_dummy, y)
            return True

        return False