Example #1
def std(x, axis=None, ddof=0, keepdims=False, requires_grad=False):
    return Tensor._op(
        StdDev,
        x,
        op_kwargs=dict(axis=axis, keepdims=keepdims, ddof=ddof),
        requires_grad=requires_grad,
    )
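For reference, the axis/keepdims/ddof arguments follow NumPy's conventions for np.std (the StdDev op itself is defined elsewhere in the library); a minimal NumPy sketch of what ddof controls:

import numpy as np

x = np.array([1.0, 2.0, 3.0, 4.0])
print(np.std(x, ddof=0))  # population standard deviation, divides by N
print(np.std(x, ddof=1))  # sample standard deviation, divides by N - 1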
Example #2
def min(x, axis=None, keepdims=False, requires_grad=False):
    return Tensor._op(
        MaxMin,
        x,
        op_kwargs=dict(axis=axis, keepdims=keepdims, maxmin="min"),
        requires_grad=requires_grad,
    )
Example #3
def reshape(a, *newshape, requires_grad=False):
    if not newshape:
        raise TypeError("reshape() takes at least 1 argument (0 given)")
    return Tensor._op(Reshape,
                      a,
                      op_args=(newshape, ),
                      requires_grad=requires_grad)
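A note on the signature: because the shape is collected with *newshape, it can be passed either as separate integers or as a single tuple, and the captured op_args differ between the two calling styles (presumably the Reshape op normalizes the nested-tuple form). A minimal sketch with a hypothetical capture helper, not part of the source:

def capture(*newshape):
    return newshape

print(capture(2, 3))    # (2, 3), as in reshape(a, 2, 3)
print(capture((2, 3)))  # ((2, 3),), as in reshape(a, (2, 3))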
Example #4
def var(x, axis=None, ddof=0, keepdims=False, requires_grad=False):
    return Tensor._op(
        Variance,
        x,
        op_kwargs=dict(axis=axis, keepdims=keepdims, ddof=ddof),
        requires_grad=requires_grad,
    )
Example #5
def swapaxes(a, axis1, axis2, requires_grad=False):
    """
    交换Tensor的两个维度
    """
    return Tensor._op(SwapAxes,
                      a,
                      op_args=(axis1, axis2),
                      requires_grad=requires_grad)
Example #6
def permute(a, *axes, requires_grad=False):
    """
    重新排列Tensor的各个维度,等同于numpy中的np.transpose操作
    """
    if not axes:
        axes = None
    return Tensor._op(Permute,
                      a,
                      op_args=(axes, ),
                      requires_grad=requires_grad)
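For reference, an axes value of None conventionally reverses all dimensions, matching np.transpose (illustrative NumPy sketch, not part of the source):

import numpy as np

a = np.ones((2, 3, 4))
print(np.transpose(a).shape)             # (4, 3, 2): axes=None reverses every dimension
print(np.transpose(a, (0, 2, 1)).shape)  # (2, 4, 3): explicit permutation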
Example #7
def where(condition, x=None, y=None, requires_grad=False):
    if x is None and y is None:
        if isinstance(condition, Tensor):
            condition = condition.data
        return np.where(condition)

    return Tensor._op(Where,
                      x,
                      y,
                      op_kwargs=dict(condition=condition),
                      requires_grad=requires_grad)
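Note that the single-argument branch falls back to np.where and therefore returns a plain tuple of index arrays rather than a Tensor; only the three-argument form is routed through the Where op. A short NumPy sketch of the two behaviors (for reference only):

import numpy as np

cond = np.array([True, False, True])
print(np.where(cond))                           # (array([0, 2]),): indices, not a Tensor
print(np.where(cond, [1, 2, 3], [10, 20, 30]))  # [ 1 20  3]: elementwise selection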
Example #8
def transpose(a, *axes, requires_grad=False):
    """
    转置矩阵,目前适用二维Tensor
    :param a:
    :param axes:
    :param requires_grad:
    :return:
    """
    if (a.ndim < 2):
        raise NotImplemented("此处应当自动扩维,但是还未实现")
    alist = list(range(a.ndim))
    if not axes:  # 若不指定axes,则默认置换最后两个维度
        axes = [a.ndim - 1, a.ndim - 2]
    alist[axes[0]], alist[axes[1]] = alist[axes[1]], alist[axes[0]],
    return Tensor._op(Permute,
                      a,
                      op_args=(alist, ),
                      requires_grad=requires_grad)
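For reference, the default path builds an identity permutation and exchanges its last two entries, which is the usual batched matrix transpose; a NumPy sketch of the resulting permutation (illustrative only):

import numpy as np

a = np.ones((5, 2, 3))
perm = [0, 2, 1]                # what alist becomes for the default axes
print(a.transpose(perm).shape)  # (5, 3, 2): only the last two dimensions are exchanged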
Example #9
def einsum(*operands, optimize=False, requires_grad=False):
    # this block has not been verified; it was copied over directly
    operands = list(operands)
    if isinstance(operands[0], str):
        # operands form: "ijk, ijk", x, y
        variables = operands[1:]
        if any(isinstance(i, Tensor) for i in operands):
            operands[1:] = (var.data if isinstance(var, Tensor) else var
                            for var in operands[1:])
    else:
        # operands form: op0, sublist0, op1, sublist1, ..., [sublistout]
        end = -1 if len(operands) % 2 else None  # -1 if sublistout is included
        variables = operands[:end:2]
        if any(isinstance(i, Tensor) for i in operands):
            operands[:end:2] = (var.data if isinstance(var, Tensor) else var
                                for var in operands[:end:2])

    in_lbls, out_lbls, _ = _parse_einsum_input(operands)
    return Tensor._op(EinSum,
                      *variables,
                      op_kwargs=dict(in_lbls=in_lbls,
                                     out_lbls=out_lbls,
                                     optimize=optimize),
                      requires_grad=requires_grad)
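The two branches mirror NumPy's two einsum calling conventions, the string form and the interleaved sublist form; a NumPy sketch of both (the wrapper above only replaces Tensor operands with their .data before parsing):

import numpy as np

x = np.arange(6).reshape(2, 3)
y = np.arange(6).reshape(2, 3)

# string form: the first operand is the subscript specification
print(np.einsum("ij,ij->", x, y))           # 55

# sublist form: op0, sublist0, op1, sublist1, ..., [sublistout]
print(np.einsum(x, [0, 1], y, [0, 1], []))  # 55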
Example #10
def positive(a, requires_grad=False):
    return Tensor._op(Positive, a, requires_grad=requires_grad)
Example #11
def log10(a, requires_grad=False):
    return Tensor._op(Log10, a, requires_grad=requires_grad)
Example #12
def exp(a, requires_grad=False):
    return Tensor._op(Exp, a, requires_grad=requires_grad)
Example #13
def matmul(a, b, requires_grad=False):
    return Tensor._op(MatMul, a, b, requires_grad=requires_grad)
Example #14
def add(a, b, requires_grad=False):
    return Tensor._op(Add, a, b, requires_grad=requires_grad)
Example #15
def divide(a, b, requires_grad=False):
    return Tensor._op(Divide, a, b, requires_grad=requires_grad)
Example #16
def subtract(a, b, requires_grad=False):
    return Tensor._op(Subtract, a, b, requires_grad=requires_grad)
Example #17
def minimum(a, b, requires_grad=False):
    return Tensor._op(Minimum, a, b, requires_grad=requires_grad)
Example #18
def flatten(a, requires_grad=False):
    return Tensor._op(Flatten, a, requires_grad=requires_grad)
Example #19
def squeeze(a, axis=None, requires_grad=False):
    return Tensor._op(Squeeze,
                      a,
                      op_args=(axis, ),
                      requires_grad=requires_grad)
Example #20
def multiply(a, b, requires_grad=False):
    return Tensor._op(Multiply, a, b, requires_grad=requires_grad)
Example #21
def power(a, b, requires_grad=False):
    return Tensor._op(Power, a, b, requires_grad=requires_grad)
Example #22
def expand_dims(a, axis, requires_grad=False):
    return Tensor._op(ExpandDims,
                      a,
                      op_args=(axis, ),
                      requires_grad=requires_grad)
Example #23
def negative(a, requires_grad=False):
    return Tensor._op(Negative, a, requires_grad=requires_grad)
Example #24
def cbrt(a, requires_grad=False):
    return Tensor._op(Cbrt, a, requires_grad=requires_grad)
Example #25
def sum(x, axis=None, keepdims=False, requires_grad=False):
    return Tensor._op(Sum,
                      x,
                      op_args=(axis, keepdims),
                      requires_grad=requires_grad)
Example #26
def abs(a, requires_grad=False):
    return Tensor._op(Abs, a, requires_grad=requires_grad)
Example #27
def mean(x, axis=None, keepdims=False, requires_grad=False):
    return Tensor._op(Mean,
                      x,
                      op_args=(axis, keepdims),
                      requires_grad=requires_grad)