Example #1
def transpose(a, axes=None):
    """
    Reverse or permute the axes of an array; returns the modified array.

    Args:
        a (Tensor): A tensor to be transposed.
        axes (Union[None, tuple, list]): The axes order. If axes is None,
            all axes are reversed. Default: None.

    Returns:
        Tensor, the transposed tensor array.

    Supported Platforms:
        ``Ascend`` ``GPU`` ``CPU``

    Examples:
        >>> import mindspore.numpy as np
        >>> x = np.ones((1,2,3))
        >>> x = np.transpose(x)
        >>> print(x.shape)
        (3, 2, 1)
    """
    if axes is None:
        shape = F.shape(a)
        length = F.tuple_len(shape)
        perm = F.make_range(0, length)
        new_order = F.tuple_reversed(perm)
        return P.Transpose()(a, new_order)

    axes = _check_shape_compile(axes)
    return P.Transpose()(a, axes)
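
# A minimal usage sketch (not part of the original source) for the explicit-axes
# branch above; it relies only on the mindspore.numpy API that the docstring
# already demonstrates.
import mindspore.numpy as mnp_demo

demo_x = mnp_demo.ones((1, 2, 3))
demo_y = mnp_demo.transpose(demo_x, (1, 2, 0))  # permute axes instead of reversing them
print(demo_y.shape)  # expected: (2, 3, 1)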
Example #2
    def broadcast_params(self, optim_result):
        """
        Apply Broadcast operations in the sequential order of parameter groups.

        Returns:
             bool, the status flag.
        """
        param_group = []
        key_group = []
        for _ in range(self.dev_num):
            param_group.append(F.make_tuple())
            key_group.append(F.make_tuple())
        # Bucket each parameter, and a ref key to it, under its broadcast root rank.
        for i in range(self.param_length):
            param_group[self.param_rank[i]] = param_group[
                self.param_rank[i]] + (self.parameters[i], )
            key = P.MakeRefKey(self.param_names[i])()
            key_group[
                self.param_rank[i]] = key_group[self.param_rank[i]] + (key, )
        new_param_group = []
        # Broadcast each group from its root rank and write the results back
        # through the collected ref keys.
        for root in range(self.dev_num):
            ops = P.Broadcast(root)
            next_params = ops(param_group[root])
            new_param_group.append(next_params)
            for i in range(F.tuple_len(next_params)):
                F.assign(key_group[root][i], next_params[i])
        # Chain control dependencies so the broadcasts run only after the
        # optimizer update has finished.
        status = F.control_depend(optim_result, new_param_group[0][0])
        for i in range(self.dev_num - 1):
            status = F.depend(
                F.control_depend(new_param_group[i],
                                 new_param_group[i + 1][0]), status)

        return status
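
# A plain-Python sketch (not part of the original source) of the grouping step
# in broadcast_params: parameters are bucketed by their broadcast root rank, so
# each Broadcast later sends exactly its own group. The names dev_num,
# param_rank and params below are illustrative stand-ins for the attributes
# used above.
dev_num = 2
param_rank = [0, 1, 0, 1]            # root rank of each parameter
params = ["w0", "b0", "w1", "b1"]    # placeholder parameter names
param_group = [() for _ in range(dev_num)]
for i, rank in enumerate(param_rank):
    param_group[rank] = param_group[rank] + (params[i],)
print(param_group)  # expected: [('w0', 'w1'), ('b0', 'b1')]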
Example #3
def swapaxes(x, axis1, axis2):
    """
    Interchange two axes of a tensor.

    Args:
        x (Tensor): A Tensor to be transposed.
        axis1 (int): First axis.
        axis2 (int): Second axis.

    Returns:
        Transposed Tensor. Has the same data type as the original tensor x.

    Raises:
        TypeError: If axis1 or axis2 is not an integer.
        ValueError: If axis1 or axis2 is not in the range from -ndim to ndim-1.

    Supported Platforms:
        ``Ascend`` ``GPU`` ``CPU``

    Examples:
        >>> import mindspore
        >>> import mindspore.numpy as mnp
        >>> from mindspore import Tensor
        >>> import numpy as onp
        >>> input_x = Tensor(onp.ones((2,3,4)), mindspore.float32)
        >>> output = mnp.swapaxes(input_x, 0, 2)
        >>> print(output.shape)
        (4, 3, 2)
    """
    _check_is_int(axis1)
    _check_is_int(axis2)

    shape = F.shape(x)
    ndim = F.tuple_len(shape)

    axes = _check_axes_range((axis1, axis2), ndim)
    axis1, axis2 = axes[0], axes[1]

    if axis1 == axis2:
        return x
    if axis1 > axis2:
        axis1, axis2 = axis2, axis1

    perm = F.make_range(0, ndim)
    new_perm = None
    if axis2 + 1 < ndim:
        new_perm = perm[0:axis1] + perm[axis2:axis2+1] + \
            perm[axis1+1:axis2] + perm[axis1:axis1+1] + perm[axis2+1:]
    else:
        new_perm = perm[0:axis1] + perm[axis2:axis2+1] + \
            perm[axis1+1:axis2] + perm[axis1:axis1+1]

    return P.Transpose()(x, new_perm)
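
# A plain-Python sketch (not part of the original source) of the permutation
# the slicing above constructs: the identity permutation with axis1 and axis2
# exchanged. _swap_perm is a hypothetical reference helper, not MindSpore API.
def _swap_perm(ndim, axis1, axis2):
    perm = list(range(ndim))
    perm[axis1], perm[axis2] = perm[axis2], perm[axis1]
    return tuple(perm)

print(_swap_perm(3, 0, 2))  # expected: (2, 1, 0), so shape (2, 3, 4) becomes (4, 3, 2)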
Example #4
    def bprop(x, out, dout):
        # A single tensor gradient is combined with `divisor` (cast to the
        # gradient's dtype) via `op`; boolean gradients pass through unchanged.
        if F.issubclass_(F.typeof(dout), mstype.tensor):
            if F.issubclass_(F.dtype(dout), mstype.bool_):
                return (dout, )
            dx = op(dout, cast(F.scalar_to_array(divisor), dtype(dout)))
            return (dx, )

        # `dout` is a tuple of gradients: apply the same operation to each element.
        dx = ()
        input_nums = F.tuple_len(dout)
        for i in range(input_nums):
            ele_grad = op(dout[i],
                          cast(F.scalar_to_array(divisor), dtype(dout[i])))
            dx = dx + (ele_grad, )
        return (dx, )
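
# A plain-Python sketch (not part of the original source) of what this bprop
# computes, under the assumption that `op` is an element-wise division and
# `divisor` a scalar: every incoming gradient is scaled by 1 / divisor, and a
# tuple of gradients is handled element by element. _scale_grads is a
# hypothetical reference helper.
def _scale_grads(dout, divisor):
    if not isinstance(dout, tuple):       # single tensor-like gradient
        return (dout / divisor,)
    return (tuple(g / divisor for g in dout),)

print(_scale_grads(8.0, 4))          # expected: (2.0,)
print(_scale_grads((8.0, 4.0), 4))   # expected: ((2.0, 1.0),)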
Example #5
    def get_axis(self, x):
        # Return the full axis tuple (0, 1, ..., ndim - 1) of the input tensor.
        shape = F.shape(x)
        length = F.tuple_len(shape)
        perm = F.make_range(0, length)
        return perm
Example #6
def get_axis(x):
    # Return the full axis tuple (0, 1, ..., ndim - 1) of the input tensor,
    # using the Shape primitive directly.
    shape_op = P.Shape()
    shape = shape_op(x)
    length = F.tuple_len(shape)
    perm = F.make_range(0, length)
    return perm
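
# A plain-Python sketch (not part of the original source): for an input of a
# given shape, get_axis simply returns the identity permutation over all
# dimensions. get_axis_reference is a hypothetical helper for illustration.
def get_axis_reference(shape):
    return tuple(range(len(shape)))

print(get_axis_reference((2, 3, 4)))  # expected: (0, 1, 2)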
Example #7
def rollaxis(x, axis, start=0):
    """
    Roll the specified axis backwards until it lies in the given position.
    The positions of the other axes do not change relative to one another.

    Args:
        x (Tensor): A Tensor to be transposed.
        axis (int): The axis to be rolled.
        start (int):
            - When start >= 0:
                - When start <= axis: the axis is rolled back until it lies in this position (start).
                - When start > axis: the axis is rolled until it lies before this position (start).
            - When start < 0: the start will be normalized as follows:
                start ........... Normalized start
                -(x.ndim+1)       raise ValueError
                -x.ndim           0
                ...               ...
                -1                x.ndim-1
                0                 0
                ...               ...
                x.ndim            x.ndim
                x.ndim+1          raise ValueError

    Returns:
        Transposed Tensor. Has the same data type as the original tensor x.

    Raises:
        TypeError: If axis or start is not an integer.
        ValueError: If axis is not in the range from -ndim to ndim-1 or
            start is not in the range from -ndim to ndim.

    Supported Platforms:
        ``Ascend`` ``GPU`` ``CPU``

    Examples:
        >>> import mindspore
        >>> import mindspore.numpy as mnp
        >>> from mindspore import Tensor
        >>> import numpy as onp
        >>> input_x = Tensor(onp.ones((2,3,4)), mindspore.float32)
        >>> output = mnp.rollaxis(input_x, 0, 2)
        >>> print(output.shape)
        (3, 2, 4)
    """
    _check_is_int(axis)
    _check_is_int(start)

    shape = F.shape(x)
    ndim = F.tuple_len(shape)

    axis = _check_axes_range(axis, ndim)
    start = _check_start_normalize(start, ndim)
    if start - axis >= 0 and start - axis <= 1:
        return x
    perm = F.make_range(0, ndim)
    new_perm = None
    if start < axis:
        if axis + 1 < ndim:
            new_perm = perm[0:start] + perm[axis:axis+1] + \
                perm[start:axis] + perm[axis+1:]
        else:
            new_perm = perm[0:start] + perm[axis:axis + 1] + perm[start:axis]
    if start > axis:
        if start < ndim:
            new_perm = perm[0:axis] + perm[axis+1:start] + \
                perm[axis:axis+1] + perm[start:]
        else:
            new_perm = perm[0:axis] + perm[axis+1:start] + \
                perm[axis:axis+1]

    return P.Transpose()(x, new_perm)
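
# A plain-Python sketch (not part of the original source) of the permutation
# rollaxis builds: remove `axis` from the identity permutation and re-insert it
# at the (adjusted) `start` position, matching numpy.rollaxis semantics.
# _roll_perm is a hypothetical reference helper.
def _roll_perm(ndim, axis, start):
    perm = list(range(ndim))
    perm.remove(axis)
    perm.insert(start if start <= axis else start - 1, axis)
    return tuple(perm)

print(_roll_perm(3, 0, 2))  # expected: (1, 0, 2), so shape (2, 3, 4) becomes (3, 2, 4)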