Example No. 1
    def __init__(self, name: Optional[str], n: float, k: float, alpha: float, beta: float):
        super().__init__(name)
        self.parameters["n"] = n
        self.parameters["k"] = k
        self.parameters["alpha"] = alpha
        self.parameters["beta"] = beta
        self.attributes.add(Tensorwise(Axis.N))
        self.attributes.add(Tensorwise(Axis.H))
        self.attributes.add(Tensorwise(Axis.W))
Example No. 2
    def __init__(self, name: Optional[str], ksize: IntOrTuple,
                 stride: IntOrTuple, padding: IntOrTuple,
                 dilation_rate: IntOrTuple):
        super().__init__(name)
        self.parameters["ksize"] = to_tuple(ksize)
        self.parameters["stride"] = to_tuple(stride)
        self.parameters["padding"] = to_tuple(padding)
        self.parameters["dilation_rate"] = to_tuple(dilation_rate)
        self.attributes.add(Tensorwise(Axis.N))
        self.attributes.add(Tensorwise(Axis.C))
Example No. 3
    def __init__(self, name: Optional[str], ksize: IntOrTuple,
                 stride: IntOrTuple, padding: IntOrTuple,
                 dilation_rate: IntOrTuple, sections: List[int], axis: Axis):
        super().__init__(name)
        self.parameters["ksize"] = to_tuple(ksize)
        self.parameters["stride"] = to_tuple(stride)
        self.parameters["padding"] = to_tuple(padding)
        self.parameters["dilation_rate"] = to_tuple(dilation_rate)
        self.parameters["sections"] = list(sections)
        self.parameters["axis"] = axis
        if axis != Axis.N:
            self.attributes.add(Tensorwise(self, Axis.N))
        if axis != Axis.C:
            self.attributes.add(Tensorwise(self, Axis.C))
Example No. 4
def _split_axis(v: Variable, axis: Axis, graph):
    """
    split variable by specified axis
    """
    s1 = v.shape_dict[axis] // 2
    s2 = v.shape_dict[axis] - s1

    if isinstance(v, ConstantVariable):
        v_datum = np.split(v.data, [s1], v.order.axes_dict[axis])
        v1 = ConstantVariable(v_datum[0], v.order)
        v2 = ConstantVariable(v_datum[1], v.order)

    else:
        v1 = Variable([s1 if a == axis else v.shape_dict[a] for a in v.order.axes], v.order)
        v2 = Variable([s2 if a == axis else v.shape_dict[a] for a in v.order.axes], v.order)

    ops = list(v.input_to)
    if v.output_from is not None:
        ops += [v.output_from]

    for op in ops:
        if all(isinstance(v, ConstantVariable) for v in op.inputs.values()):
            op.fold_constance(graph)

        elif isinstance(op, Tensordot):
            # NOTE:
            # "_split_tensordot" must be called before "_split_tensorwise".
            #
            # Consider the following case:
            #
            #   A.order = [Axis.X, Axis.Y]
            #   B.order = [Axis.Y, Axis.Z]
            #   C, = Tensordot(None, [Axis.Y, Axis.Z])(A, B)  # -> C.order = [Axis.X, Axis.Y]
            #
            # In this case, the tensordot operator has "Tensorwise[X]" and "Tensorwise[Y]" attributes, because "Tensordot" is a
            # tensorwise operation for each output axis. However, "Axis.Y" is also contained in the reduced axes of "A". Therefore,
            # "_split_tensorwise" would incorrectly split "A".
            #
            _split_tensordot(graph, op, v, [v1, v2], axis)

        elif Tensorwise.check_splittable(op, axis):
            _split_tensorwise(graph, op, v, [v1, v2], axis)

        elif isinstance(op, SplitAxis):
            _split_splitaxis(graph, op, v, [v1, v2], axis)

        elif isinstance(op, Concat):
            _split_concat(graph, op, v, [v1, v2], axis)

        elif isinstance(op, Im2Col):
            _split_im2col(graph, op, v, [v1, v2], axis)

        elif isinstance(op, PartialIm2Col):
            _split_partial_im2col(graph, op, v, [v1, v2], axis)

        elif isinstance(op, Reshape):
            _split_reshape(graph, op, v, [v1, v2], axis)

        else:
            raise NotImplementedError(f"Variable is too large to handle in WebGL backend: {v}")
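
For reference, a minimal standalone sketch (plain NumPy, independent of the WebDNN classes above, with made-up data) of the np.split call that produces the two constant halves in _split_axis:

import numpy as np

# Hypothetical constant data split in half along its second axis,
# mirroring np.split(v.data, [s1], v.order.axes_dict[axis]) above.
data = np.arange(24).reshape(4, 6)
s1 = data.shape[1] // 2                       # 3
first, second = np.split(data, [s1], axis=1)  # shapes (4, 3) and (4, 3)
print(first.shape, second.shape)
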
Example No. 5
    def __init__(self,
                 name: Optional[str],
                 ksize: IntOrTuple,
                 stride: IntOrTuple,
                 padding: IntOrTuple,
                 dilation_rate: Optional[IntOrTuple] = 1):
        super().__init__(name)
        self.parameters["ksize"] = assert_sequence_type(to_tuple(ksize),
                                                        (int, Placeholder),
                                                        message=f"""
[Convolution2D] Parameter "ksize" must be integer or tuple of integer""")
        self.parameters["stride"] = assert_sequence_type(to_tuple(stride),
                                                         (int, Placeholder),
                                                         message=f"""
[Convolution2D] Parameter "stride" must be integer or tuple of integer""")
        self.parameters["padding"] = assert_sequence_type(to_tuple(padding),
                                                          (int, Placeholder),
                                                          message=f"""
[Convolution2D] Parameter "padding" must be integer or tuple of integer""")
        self.parameters["dilation_rate"] = assert_sequence_type(
            to_tuple(dilation_rate), (int, Placeholder),
            message=f"""
[Convolution2D] Parameter "dilation_rate" must be integer or tuple of integer"""
        )
        self.attributes.add(Tensorwise(self, Axis.N))
Example No. 6
    def exec(self):
        x = self.inputs["x"]
        x_shape_dict = x.shape_dict
        N = x_shape_dict[Axis.N]
        H2 = (x_shape_dict[Axis.H] + 2 * self.parameters["padding"][0] + self.parameters["stride"][0] - self.parameters["ksize"][0] - 1) // \
             self.parameters["stride"][0] + 1
        W2 = (x_shape_dict[Axis.W] + 2 * self.parameters["padding"][1] + self.parameters["stride"][1] - self.parameters["ksize"][1] - 1) // \
             self.parameters["stride"][1] + 1
        C2 = x_shape_dict[Axis.C]
        if ((x_shape_dict[Axis.H] + 2 * self.parameters["padding"][0] - self.parameters["ksize"][0]) % self.parameters["stride"][0] != 0) or \
            ((x_shape_dict[Axis.W] + 2 * self.parameters["padding"][1] - self.parameters["ksize"][1]) % self.parameters["stride"][1] != 0):
            # https://github.com/fchollet/keras/issues/5090#issuecomment-279495401
            console.warning(
                "[Pooling2D] Performing pooling with parameters which causes edge is ignored. " +
                "Which edge (left / right) is ignored is different on frameworks," +
                " so slightly different result will be generated.")

        y = Variable([N, H2, W2, C2], OrderNHWC)
        y.change_order(x.order)  # output same order as input to preserve following reshape semantics

        self.append_output("y", y)

        for axis in x.order.axes:
            if axis == Axis.H or axis == Axis.W:
                continue

            self.attributes.add(Tensorwise(self, axis))

        return y,
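
As a quick sanity check (illustrative arithmetic only, with hypothetical parameter values), the output-size formula used in exec() above behaves like this:

# Hypothetical values: input height 5, padding 0, ksize 2, stride 2.
H, padding, ksize, stride = 5, 0, 2, 2
H2 = (H + 2 * padding + stride - ksize - 1) // stride + 1
print(H2)  # 3

# The warning above fires because the pooling window does not tile evenly:
print((H + 2 * padding - ksize) % stride != 0)  # True
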
Example No. 7
    def exec(self):
        x = self.inputs["x"]

        axis = self.parameters["axis"]
        sections = [0] + self.parameters["sections"] + [x.shape_dict[axis]]
        outputs = []

        for i, i_from in enumerate(sections[:-1]):
            i_to = sections[i + 1]
            assert i_from < i_to, f"[SplitAxis] sections must be sorted ascending order: sections={sections}, sections[{i}]={i_from}, " \
                                  f"sections[{i+1}]={i_to}"

            out_shape = list(x.shape)
            out_shape[x.order.axes_dict[axis]] = i_to - i_from
            y = Variable(out_shape, x.order)

            outputs.append(y)
            self.append_output(f"y{i}", y)

        for a in x.order.axes:
            if a == axis:
                continue

            self.attributes.add(Tensorwise(self, a))

        return outputs
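
To illustrate the sections bookkeeping (a hedged sketch with made-up numbers, not part of the operator): sections=[2, 5] on an axis of size 8 gives boundaries [0, 2, 5, 8], hence three outputs of sizes 2, 3 and 3 along that axis.

# Hypothetical: axis size 8, parameter sections=[2, 5].
axis_size = 8
sections = [0] + [2, 5] + [axis_size]          # [0, 2, 5, 8]
sizes = [sections[i + 1] - s for i, s in enumerate(sections[:-1])]
print(sizes)  # [2, 3, 3]
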
Example No. 8
    def exec(self):
        x = self.inputs["x"]
        x_shape_dict = x.shape_dict
        N = x_shape_dict[Axis.N]
        H2 = (x_shape_dict[Axis.H] + 2 * self.PH - self.KH +
              (self.SH - 1 if self.cover_all else 0)) // self.SH + 1
        W2 = (x_shape_dict[Axis.W] + 2 * self.PW - self.KW +
              (self.SW - 1 if self.cover_all else 0)) // self.SW + 1
        C2 = x_shape_dict[Axis.C]

        # odd_padding_height = (x_shape_dict[Axis.H] + 2 * self.PH - self.KH) % self.SH != 0
        # odd_padding_width = (x_shape_dict[Axis.W] + 2 * self.PW - self.KW) % self.SW != 0
        # if odd_padding_height or odd_padding_width:
        #     # https://github.com/fchollet/keras/issues/5090#issuecomment-279495401
        #     console.warning(
        #         "[Pooling2D] Performing pooling with parameters which causes edge is ignored. " +
        #         "Which edge (left / right) is ignored is different on frameworks," +
        #         " so slightly different result will be generated.")

        y = Variable([N, H2, W2, C2], OrderNHWC)
        y.change_order(
            x.order
        )  # output same order as input to preserve following reshape semantics

        self.append_output("y", y)

        for axis in x.order.axes:
            if axis == Axis.H or axis == Axis.W:
                continue

            self.attributes.add(Tensorwise(self, axis))

        return y,
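
Illustrative arithmetic only (hypothetical values): the cover_all term adds SH - 1 to the numerator, which rounds the output size up instead of down.

# Hypothetical values: H=5, PH=0, KH=2, SH=2.
H, PH, KH, SH = 5, 0, 2, 2
without_cover_all = (H + 2 * PH - KH) // SH + 1              # 2
with_cover_all = (H + 2 * PH - KH + (SH - 1)) // SH + 1      # 3
print(without_cover_all, with_cover_all)
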
Example No. 9
    def __call__(self, x: Variable):
        axis = self.parameters["axis"]
        sections = [0] + self.parameters["sections"] + [x.shape_dict[axis]]
        ys = []  # type: List[Tuple[str, Variable]]

        for i, i_from in enumerate(sections[:-1]):
            i_to = sections[i + 1]
            assert i_from < i_to, f"[SplitAxis] sections must be sorted ascending order: sections={sections}, sections[{i}]={i_from}, " \
                                  f"sections[{i+1}]={i_to}"

            out_shape = list(x.shape)
            out_shape[x.order.axes_dict[axis]] = i_to - i_from
            y = Variable(out_shape, x.order)

            ys.append((f"y{i}", y))

        for a in x.order.axes:
            if a == axis:
                continue

            self.attributes.add(Tensorwise(a))

        self.append_input(f"x", x)
        for key, y in ys:
            self.append_output(key, y)

        return tuple(y for _, y in ys)
Example No. 10
    def exec(self):
        xs = [self.inputs[f"x{i}"] for i in range(len(self.inputs))]
        axis = self.axis
        axis_index = xs[0].order.axes_dict[axis]
        axes = xs[0].order.axes

        y_shape = list(xs[0].shape)  # type: List[Placeholder]
        y_shape[axis_index] = 0
        y_order = xs[0].order

        for a in y_order.axes:
            if a == axis:
                continue

            self.attributes.add(Tensorwise(self, a))

        for i, x in enumerate(xs):
            assert x.order.check_same_axes(xs[0].order), f"""
[Concat] Input variable of Concat operator must have same axes
  (x0.order.axes) = {xs[0].order.axes}
  (x{i}.order.axes) = {xs[i].order.axes}"""

            for other_axis in [other_axis for other_axis in axes if other_axis != axis]:
                if Placeholder.check_resolved(xs[0].shape_dict[other_axis]) and Placeholder.check_resolved(x.shape_dict[other_axis]):
                    assert xs[0].shape_dict[other_axis] == x.shape_dict[other_axis], f"""
[Concat] Input variable of Concat operator must be same shape except the specified axis:
  (x0.shape_dict[{other_axis}]) = {xs[0].shape_dict[other_axis]}
  (x{i}.shape_dict[{other_axis}]) = {xs[i].shape_dict[other_axis]}"""

            y_shape[axis_index] += x.shape_dict[axis]

        y = Variable(y_shape, y_order)
        self.append_output("y", y)
        return y,
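
A small sketch of the shape bookkeeping (hypothetical shapes, plain Python only): concatenating NHWC inputs along the channel axis sums the sizes on that axis and leaves the other axes unchanged.

# Hypothetical NHWC shapes concatenated along the channel axis (index 3).
shapes = [(1, 4, 4, 3), (1, 4, 4, 5)]
axis_index = 3
y_shape = list(shapes[0])
y_shape[axis_index] = 0
for s in shapes:
    y_shape[axis_index] += s[axis_index]
print(y_shape)  # [1, 4, 4, 8]
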
Example No. 11
    def __call__(self, x: Variable):
        for axis in x.order.axes:
            if axis == Axis.H or axis == Axis.W:
                continue

            self.attributes.add(Tensorwise(self, axis))

        self.append_input("x", x)
        return self.exec()
Example No. 12
    def exec(self):
        x = self.inputs["x"]

        y = Variable(
            x.shape,
            Order([
                self.out_order.axes[self.in_order.axes_dict[a]]
                for a in x.order.axes
            ]))
        self.append_output("y", y)

        for axis in x.order.axes:
            self.attributes.add(Tensorwise(self, axis))

        return y,
Example No. 13
    def exec(self):
        reduced_axis = self.axis

        x = self.inputs["x"]
        y_axes = [axis for axis in x.order.axes if axis != reduced_axis]
        y_shape = [x.shape_dict[axis] for axis in y_axes]
        y_order = Order(y_axes)

        # Add tensorwise attributes
        for axis in y_order.axes:
            self.attributes.add(Tensorwise(self, axis))

        y = variable.Variable(y_shape, y_order)
        self.append_output("y", y)
        return y,
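
For example (a hedged sketch using plain string axis names instead of WebDNN Axis objects), reducing over "C" in an "NCHW"-ordered input drops that axis from the output order and shape:

# Hypothetical: axes as plain strings instead of WebDNN Axis objects.
x_axes = ["N", "C", "H", "W"]
x_shape_dict = {"N": 2, "C": 3, "H": 4, "W": 5}
reduced_axis = "C"
y_axes = [a for a in x_axes if a != reduced_axis]
y_shape = [x_shape_dict[a] for a in y_axes]
print(y_axes, y_shape)  # ['N', 'H', 'W'] [2, 4, 5]
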
Example No. 14
    def exec(self):
        A = self.inputs["A"]
        B = self.inputs["B"]
        c_shape_dict = AxisKeyDict()

        for axis in A.order.axes:
            if axis not in self.axes[0]:
                c_shape_dict[axis] = A.shape_dict[axis]

        for axis in B.order.axes:
            if axis not in self.axes[1]:
                c_shape_dict[axis] = B.shape_dict[axis]

        C = Variable(list(c_shape_dict.values()), Order(list(c_shape_dict.keys())))
        self.append_output("C", C)
        for axis in C.order.axes:
            self.attributes.add(Tensorwise(self, axis=axis))
        return C,
Example No. 15
    def __call__(self, x: "variable.Variable"):
        reduced_axis = self.axis

        y_axes = list(x.order.axes)
        y_shape = [
            1 if axis == reduced_axis else x.shape_dict[axis]
            for axis in y_axes
        ]
        y_order = Order(y_axes)

        y = variable.Variable(y_shape, y_order)

        for axis in x.order.axes:
            if axis != reduced_axis:
                self.attributes.add(Tensorwise(axis))

        self.append_input("x", x)
        self.append_output("y", y)
        return y,
Example No. 16
    def __call__(self, x: Variable):
        assert self.in_order.check_same_axes(x.order), f"""
[ReinterpretAxis] Order mismatch:
    (op.in_order) = {self.in_order}
    (x.order) = {x.order}"""

        y = Variable(
            x.shape,
            Order([
                self.out_order.axes[self.in_order.axes_dict[a]]
                for a in x.order.axes
            ]))

        for axis in x.order.axes:
            self.attributes.add(Tensorwise(axis))

        self.append_input("x", x)
        self.append_output("y", y)
        return y,
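
The axis remapping is positional: each input axis is replaced by the out_order axis sitting at the same index in in_order. A small sketch with hypothetical string axes:

# Hypothetical: in_order = [N, C] is reinterpreted as out_order = [T, F].
in_order_axes = ["N", "C"]
out_order_axes = ["T", "F"]
in_axes_dict = {a: i for i, a in enumerate(in_order_axes)}

x_order_axes = ["C", "N"]  # input stored in CN order
y_order_axes = [out_order_axes[in_axes_dict[a]] for a in x_order_axes]
print(y_order_axes)  # ['F', 'T']
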
Example No. 17
    def exec(self):
        x = self.inputs["x"]
        x_shape_dict = x.shape_dict
        N = x_shape_dict[Axis.N]
        H2 = self.outsize[0]
        W2 = self.outsize[1]
        C2 = x_shape_dict[Axis.C]
        y = Variable([N, H2, W2, C2], OrderNHWC)
        y.change_order(x.order)  # output same order as input to preserve following reshape semantics

        self.append_output("y", y)

        for axis in x.order.axes:
            if axis == Axis.H or axis == Axis.W:
                continue

            self.attributes.add(Tensorwise(self, axis))

        return y,
Example No. 18
    def __call__(self, *xs: "variable.Variable"):
        y_axes = []
        y_shape_dict = AxisKeyDict()

        # Check variables in descending order of the number of dimensions.
        # Without this step, if x0.order=C and x1.order=NC, the output order becomes CN; the expected result is NC.
        xs_order = [(i, x) for i, x in enumerate(xs)]
        xs_order.sort(key=lambda d: d[1].ndim, reverse=True)

        for i, x in xs_order:
            for axis in x.order.axes:
                if axis in y_axes:
                    if y_shape_dict[axis] == 1:
                        # broadcast
                        y_shape_dict[axis] = x.shape_dict[axis]
                else:
                    y_axes.append(axis)
                    y_shape_dict[axis] = x.shape_dict[axis]

                if Placeholder.check_resolved(x.shape_dict[axis]):
                    if Placeholder.check_resolved(y_shape_dict[axis]):
                        assert y_shape_dict[axis] == x.shape_dict[
                            axis] or x.shape_dict[axis] == 1, f"""
[Elementwise] All input variables of elementwise operator should be same shape:
    (y.shape) = {[y_shape_dict[a] for a in y_axes]}
    (x{i}.shape) = {x.shape}
    (y.shape[{axis}]) = {y_shape_dict[axis]}
    (x{i}.shape[{axis}]) = {x.shape_dict[axis]}"""

                    else:
                        y_shape_dict[axis] = x.shape_dict[axis]

        # Add tensorwise attributes
        for axis in y_axes:
            self.attributes.add(Tensorwise(axis))

        y = variable.Variable([y_shape_dict[axis] for axis in y_axes],
                              Order(y_axes))

        for i, x in enumerate(xs):
            self.append_input(f"x{i}", x)
        self.append_output("y", y)
        return y,
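
A condensed sketch of the shape resolution above (hypothetical string axes, no Placeholder handling): inputs are visited in descending ndim order, unseen axes are appended, and a size-1 entry is overwritten by its broadcast partner.

# Hypothetical inputs: x0 with order NC, x1 with order C.
xs = [
    {"axes": ["N", "C"], "shape": {"N": 2, "C": 1}},
    {"axes": ["C"], "shape": {"C": 3}},
]
xs_sorted = sorted(xs, key=lambda x: len(x["axes"]), reverse=True)

y_axes, y_shape_dict = [], {}
for x in xs_sorted:
    for axis in x["axes"]:
        if axis in y_axes:
            if y_shape_dict[axis] == 1:
                y_shape_dict[axis] = x["shape"][axis]  # broadcast
        else:
            y_axes.append(axis)
            y_shape_dict[axis] = x["shape"][axis]

print(y_axes, [y_shape_dict[a] for a in y_axes])  # ['N', 'C'] [2, 3]
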
Example No. 19
    def __call__(self, x: Variable):
        x_shape_dict = x.shape_dict
        N = x_shape_dict[Axis.N]
        H2 = (x_shape_dict[Axis.H] + 2 * self.PH - self.KH +
              (self.SH - 1 if self.cover_all else 0)) // self.SH + 1
        W2 = (x_shape_dict[Axis.W] + 2 * self.PW - self.KW +
              (self.SW - 1 if self.cover_all else 0)) // self.SW + 1
        C2 = x_shape_dict[Axis.C]

        y = Variable([N, H2, W2, C2], OrderNHWC)
        y.change_order(
            x.order
        )  # output same order as input to preserve following reshape semantics

        for axis in x.order.axes:
            if axis == Axis.H or axis == Axis.W:
                continue

            self.attributes.add(Tensorwise(axis))

        self.append_input("x", x)
        self.append_output("y", y)
        return y,
Example No. 20
    def __call__(self, x: Variable):
        # assert index is valid
        for axis, index in self.indices.items():
            if axis in x.order.axes:
                if isinstance(index, slice):
                    index = normalize_slice(index, x.shape_dict[axis])

                    if not (-x.shape_dict[axis] <= index.start <= x.shape_dict[axis]) or \
                        not (-x.shape_dict[axis] <= index.stop <= x.shape_dict[axis]):
                        raise ValueError(f"""
[Slice] Index {index} in {axis} is out of range:
    (x.order) = {x.order} 
    (x.shape) = {x.shape}
    (indices) = {self.indices} 
    (indices[{axis.name}]) = {index}
""")

                    if ((abs(index.stop - index.start) - 1) //
                            abs(index.step)) + 1 < 0:
                        raise ValueError(f"""
[Slice] Slice operator doesn't support 0-size output:
    (x.order) = {x.order}
    (x.shape) = {x.shape}
    (indices) = {self.indices} 
    (indices[{axis.name}]) = {index}
""")

                elif isinstance(index, int):
                    if not -x.shape_dict[axis] <= index < x.shape_dict[axis]:
                        raise ValueError(f"""
[Slice] Index {index} in {axis} is out of range:
    (x.order) = {x.order} 
    (x.shape) = {x.shape}
    (indices) = {self.indices} 
    (indices[{axis.name}]) = {index}
    (valid range) = [{-x.shape_dict[axis]}, {x.shape_dict[axis]}) 
""")

                elif index is None:
                    raise ValueError(f"""
[Slice] Axis {axis} already exists:
    (x.order) = {x.order} 
    (x.shape) = {x.shape}
    (indices) = {self.indices} 
    (indices[{axis.name}]) = {index}
""")

            else:
                if index is not None:
                    raise ValueError(f"""
[Slice] Axis {axis} does not exist in the input variable. In this case, the index must be "None" (=insert a new axis):
    (x.order) = {x.order} 
    (x.shape) = {x.shape}
    (indices) = {self.indices} 
    (indices[{axis.name}]) = {index}
""")

        if all(isinstance(index, int) for index in self.indices.values()):
            raise NotImplementedError(f"""
[Slice] Accessing a single element is not supported:
    (indices) = {self.indices} 
""")

        self.append_input("x", x)

        # add attribute
        for axis in x.order.axes:
            if axis in self.indices:
                index = self.indices[axis]
                if isinstance(
                        index, slice
                ) and index.start is None and index.stop is None and index.step is None:
                    # This axis is not sliced.
                    self.attributes.add(Tensorwise(self, axis))

            else:
                # This axis is not sliced.
                self.attributes.add(Tensorwise(self, axis))

        return self.exec()
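
The Tensorwise bookkeeping at the end can be summarized as: an axis keeps the attribute only when it is either absent from indices or indexed by the all-None slice. A hedged sketch of that predicate with hypothetical string axes:

# Hypothetical indices keyed by plain string axis names.
def is_tensorwise(axis, indices):
    if axis not in indices:
        return True
    index = indices[axis]
    return (isinstance(index, slice)
            and index.start is None and index.stop is None and index.step is None)

indices = {"H": slice(None), "W": slice(1, 3), "C": 0}
print([a for a in ["N", "H", "W", "C"] if is_tensorwise(a, indices)])  # ['N', 'H']
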
Example No. 21
    def __call__(self, A: Variable, B: Variable):
        for axis in self.axes[0]:
            assert axis in A.order.axes, f"""
[Tensordot] Input variable "A" must have axes "{axis}":
    (op) = {self}
    (op.axes[0]) = {self.axes[0]}
    (A) = {A}"""

        for axis in A.order.axes:
            if axis not in self.axes[0]:
                assert axis in self.axes[1] or axis not in B.order.axes, f"""
[Tensordot] Axes of "A" which are not reduced must not be contained in "B":
    (op) = {self}
    (A.order.axes) = {A.order.axes}
    (B.order.axes) = {B.order.axes}
    (op.axes) = {self.axes}"""

        for axis in self.axes[1]:
            assert axis in B.order.axes, f"""
[Tensordot] Input variable "B" must have axes "{axis}":
    (op) = {self}
    (op.axes[1]) = {self.axes[1]}
    (B) = {B}"""

        for axis in B.order.axes:
            if axis not in self.axes[1]:
                assert axis in self.axes[0] or axis not in A.order.axes, f"""
[Tensordot] Axes of "B" which are not reduced must not be contained in "A":
    (op) = {self}
    (A.order.axes) = {A.order.axes}
    (B.order.axes) = {B.order.axes}
    (op.axes) = {self.axes}"""

        reduction_size_a = mul(A.shape_dict[a] for a in self.axes[0])
        reduction_size_b = mul(B.shape_dict[a] for a in self.axes[1])
        assert reduction_size_a == reduction_size_b, f"""
[Tensordot] Reduction size of "A" and "B" must be same:
    (A) = {A}
    (B) = {B}
    (axes) = {self.axes}
    (reduction size of A) = {reduction_size_a}
    (reduction size of B) = {reduction_size_b}
"""

        c_shape_dict = AxisKeyDict()

        for axis in A.order.axes:
            if axis not in self.axes[0]:
                c_shape_dict[axis] = A.shape_dict[axis]

        for axis in B.order.axes:
            if axis not in self.axes[1]:
                c_shape_dict[axis] = B.shape_dict[axis]

        C = Variable(list(c_shape_dict.values()),
                     Order(list(c_shape_dict.keys())))

        for axis in C.order.axes:
            self.attributes.add(Tensorwise(axis))

        self.append_input("A", A)
        self.append_input("B", B)
        self.append_output("C", C)
        return C,
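
A matrix-multiply-like walkthrough of the output-shape construction above (hypothetical sizes, plain Python dictionaries instead of AxisKeyDict): A has axes X, Y, B has axes Y, Z, and Y is reduced on both sides, so C keeps X from A and Z from B.

# Hypothetical: A has axes X, Y (sizes 2, 3); B has axes Y, Z (sizes 3, 4).
A_axes, A_shape = ["X", "Y"], {"X": 2, "Y": 3}
B_axes, B_shape = ["Y", "Z"], {"Y": 3, "Z": 4}
reduced = (["Y"], ["Y"])  # axes reduced from A and from B

assert A_shape["Y"] == B_shape["Y"]  # reduction sizes must match

c_shape_dict = {}
for axis in A_axes:
    if axis not in reduced[0]:
        c_shape_dict[axis] = A_shape[axis]
for axis in B_axes:
    if axis not in reduced[1]:
        c_shape_dict[axis] = B_shape[axis]

print(list(c_shape_dict), list(c_shape_dict.values()))  # ['X', 'Z'] [2, 4]
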
Example No. 22
    def __init__(self, name: Optional[str], r: int):
        super().__init__(name)
        self.parameters["r"] = int(r)
        self.attributes.add(Tensorwise(self, Axis.N))
Example No. 23
    def __init__(self, name: Optional[str], padding: IntOrTuple):
        super().__init__(name)
        self.parameters["padding"] = to_tuple(padding)
        self.attributes.add(Tensorwise(Axis.C))
        self.attributes.add(Tensorwise(Axis.N))
Example No. 24
def _split_axis(v: Variable, axis: Axis, graph):
    """
    split variable by specified axis
    """
    s1 = v.shape_dict[axis] // 2
    s2 = v.shape_dict[axis] - s1

    if isinstance(v, ConstantVariable):
        v_datum = np.split(v.data, [s1], v.order.axes_dict[axis])
        v1 = ConstantVariable(v_datum[0], v.order)
        v2 = ConstantVariable(v_datum[1], v.order)

    else:
        v1 = Variable([s1 if a == axis else v.shape_dict[a] for a in v.order.axes], v.order)
        v2 = Variable([s2 if a == axis else v.shape_dict[a] for a in v.order.axes], v.order)

    ops = list(v.input_to)
    if v.output_from is not None:
        ops += [v.output_from]

    for op in ops:
        if all(isinstance(v, ConstantVariable) for v in op.inputs.values()):
            op.fold_constance()

        elif isinstance(op, SplitAxis):
            _split_splitaxis(graph, op, v, [v1, v2], axis)

        elif isinstance(op, Concat):
            _split_concat(graph, op, v, [v1, v2], axis)

        elif isinstance(op, Im2Col):
            _split_im2col(graph, op, v, [v1, v2], axis)

        elif isinstance(op, PartialIm2Col):
            _split_partial_im2col(graph, op, v, [v1, v2], axis)

        elif isinstance(op, Reshape):
            _split_reshape(graph, op, v, [v1, v2], axis)

        elif isinstance(op, Sgemm):
            _split_sgemm(graph, op, v, [v1, v2], axis)

        elif Tensorwise.check_splittable(op, axis):
            _split_tensorwise(graph, op, v, [v1, v2], axis)

        else:
            console.debug("-------------------------------------------------")
            console.debug(f"{v}")
            console.debug(f"  original order: {v.order}")
            console.debug(f"  original shape: {v.shape}")
            console.debug(f"")
            console.debug(f"  split axis: {axis}")
            console.debug(f"")
            console.debug(f"  related operators:")
            for related_op in ops:
                console.debug(f"  {related_op}")
            console.debug(f"")

            with open("cg-failed.dot", "w") as f:
                f.write(traverse.dump_dot(graph))

            raise NotImplementedError(f"Variable is too large to handle in WebGL backend: {v}")
Example No. 25
    def __init__(self, name: Optional[str]):
        super().__init__(name)
        self.attributes.add(Tensorwise(self, Axis.N))
        self.attributes.add(Tensorwise(self, Axis.T))