Example #1
def new_empty_op(x,
                 size,
                 dtype=None,
                 device=None,
                 placement=None,
                 sbp=None,
                 requires_grad=False):
    new_size = _single(_handle_size_arg(size))
    new_dtype = dtype
    new_device = device
    new_placement = placement
    new_sbp = sbp

    if dtype is None:
        new_dtype = x.dtype
    if device is None:
        new_device = x.device if x.is_local else None
    if placement is None:
        new_placement = x.placement if x.is_global else None
    if sbp is None:
        new_sbp = x.sbp if x.is_global else None

    return empty_op(
        new_size,
        dtype=new_dtype,
        device=new_device,
        placement=new_placement,
        sbp=new_sbp,
        requires_grad=requires_grad,
    )
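A minimal usage sketch for Example #1, assuming this helper is registered as ``Tensor.new_empty`` (the registration itself is not shown above); it illustrates how unspecified arguments fall back to the source tensor's attributes:

import oneflow as flow

x = flow.ones(2, 3, dtype=flow.float64)
y = x.new_empty((4, 4))  # dtype and device fall back to x's attributes when not given
assert y.dtype == flow.float64
assert y.shape == flow.Size([4, 4])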
Example #2
def full_op(
    size: Union[_size_any_t, flow.Size],
    value: Union[float, int],
    dtype: Optional[flow.dtype] = None,
    device: Union[flow.device, str, None] = None,
    placement: flow.placement = None,
    sbp: flow._oneflow_internal.sbp.sbp = None,
    requires_grad: bool = False,
):
    """
    Creates a tensor of size `size` filled with `value`.
    The tensor's dtype is inferred from `value`.

    Args:
        size (int...): a list, tuple, or oneflow.Size of integers defining the shape of the output tensor.
        value (Scalar): the value to fill the output tensor with.
        dtype (flow.dtype, optional): the desired data type of returned tensor.
        device (flow.device, optional): the desired device of returned tensor. Default: if None, uses the current device for the default tensor type.
        placement (flow.placement, optional): the desired placement of returned global tensor. Default: if None, the returned tensor is local one using the argument `device`.
        sbp (flow.sbp.sbp or tuple of flow.sbp.sbp, optional): the desired sbp descriptor of returned global tensor. Default: if None, the returned tensor is local one using the argument `device`.
        requires_grad (bool, optional): If autograd should record operations on the returned tensor. Default: False.

    For example:

    .. code-block:: python

        >>> import oneflow as flow
        >>> y = flow.full((5,), 5)
        >>> y
        tensor([5, 5, 5, 5, 5], dtype=oneflow.int64)
        >>> y = flow.full((2, 3), 5.0) # construct local tensor
        >>> y
        tensor([[5., 5., 5.],
                [5., 5., 5.]], dtype=oneflow.float32)
        >>> placement = flow.placement("cpu", ranks=[0])
        >>> y = flow.full((2, 3), 5.0, placement=placement, sbp=flow.sbp.broadcast)  # construct global tensor
        >>> y.is_global
        True

    """
    size = _handle_size_arg(size)
    if dtype is None:
        dtype = flow.tensor(value).dtype
    return Full(size, value, dtype, device, placement, sbp, requires_grad)()
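A short sketch of the dtype inference in ``full_op`` (exposed as ``flow.full`` per the docstring above): an integer fill value yields an ``int64`` tensor unless ``dtype`` is passed explicitly:

import oneflow as flow

a = flow.full((2, 2), 7)                      # dtype inferred from the int value -> int64
b = flow.full((2, 2), 7, dtype=flow.float32)  # explicit dtype overrides the inference
assert a.dtype == flow.int64
assert b.dtype == flow.float32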
Example #3
def ones_op(
    *size: Union[_size_any_t, flow.Size, List[int]],
    dtype: Optional[flow.dtype] = None,
    device: Union[flow.device, str, None] = None,
    placement: flow.placement = None,
    sbp: flow._oneflow_internal.sbp.sbp = None,
    requires_grad: bool = False,
):
    """
    Returns a tensor filled with the scalar value 1,
    with the shape defined by the variable argument `size`.

    Args:
        size (an integer or tuple of integer values): defining the shape of the output tensor.
          Can be a variable number of arguments or a collection like a list or tuple.
        dtype (flow.dtype, optional): the desired data type of returned tensor.
        device (flow.device, optional): the desired device of returned tensor. Default: if None, uses the current device for the default tensor type.
        placement (flow.placement, optional): the desired placement of returned global tensor. Default: if None, the returned tensor is local one using the argument `device`.
        sbp (flow.sbp.sbp or tuple of flow.sbp.sbp, optional): the desired sbp descriptor of returned global tensor. Default: if None, the returned tensor is local one using the argument `device`.
        requires_grad (bool, optional): If autograd should record operations on the returned tensor. Default: False.

    For example:

    .. code-block:: python

        >>> import oneflow as flow
        >>> y = flow.ones(5)
        >>> y
        tensor([1., 1., 1., 1., 1.], dtype=oneflow.float32)
        >>> y = flow.ones(2, 3) # construct local tensor
        >>> y
        tensor([[1., 1., 1.],
                [1., 1., 1.]], dtype=oneflow.float32)
        >>> placement = flow.placement("cpu", ranks=[0])
        >>> y = flow.ones(4, 5, placement=placement, sbp=flow.sbp.broadcast) # construct global tensor
        >>> y.is_global
        True


    """
    size = _handle_size_arg(size)
    return Ones(size, dtype, device, placement, sbp, requires_grad)()
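Because ``_handle_size_arg`` normalizes the varargs, ``size`` can be given either as separate integers or as a single collection; a brief sketch (assuming the op is exposed as ``flow.ones`` per the docstring):

import oneflow as flow

a = flow.ones(2, 3)               # variable number of int arguments
b = flow.ones((2, 3))             # a single tuple
c = flow.ones(flow.Size([2, 3]))  # a oneflow.Size object
assert a.shape == b.shape == c.shape == flow.Size([2, 3])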
Example #4
def empty_op(
    *size,
    dtype: Optional[flow.dtype] = None,
    device: Union[flow.device, str] = None,
    placement: flow.placement = None,
    sbp: Union[flow._oneflow_internal.sbp.sbp,
               List[flow._oneflow_internal.sbp.sbp]] = None,
    requires_grad: bool = False,
    pin_memory: bool = False,
):
    """
    Returns a tensor filled with uninitialized data.
    The shape of the tensor is defined by the variable argument ``size``.

    Args:
        size (int... or oneflow.Size): Defining the shape of the output tensor.
          Can be a variable number of arguments or a collection like a list or tuple or oneflow.Size.
        dtype (flow.dtype, optional): The desired data type of returned tensor. Default: ``flow.float32``.
        device (oneflow.device, optional): The desired device of returned local tensor. If None, uses the
          current device.
        placement (flow.placement, optional): The desired placement of returned global tensor. If None, will
          construct local tensor.
        sbp (flow.sbp or List[flow.sbp], optional): The desired sbp of returned global tensor.
        requires_grad (bool, optional): If autograd should record operations on the returned tensor. Default: False.
        pin_memory (bool, optional): If set, the returned tensor will be allocated in pinned memory. Works only for CPU tensors. Default: False.

    For example:

    .. code-block:: python

        >>> import oneflow as flow
        >>> y = flow.empty(4, 5)  # construct local empty tensor
        >>> y.shape
        oneflow.Size([4, 5])
        >>> y.is_global
        False
        >>> placement = flow.placement("cpu", ranks=[0])
        >>> y = flow.empty(4, 5, placement=placement, sbp=flow.sbp.broadcast)  # construct global empty tensor
        >>> y.is_global
        True

    """
    assert size is not None, "shape must not be None"

    shape = _single(_handle_size_arg(size))

    if dtype is None:
        dtype = flow.float32
    if placement is None:
        if device is None:
            device = flow.device("cpu")
    else:
        assert (
            device is None
        ), "argument 'device' must be None when argument 'placement' exists"

    if placement is not None:
        assert (
            sbp is not None
        ), "argument 'sbp' must not be None when argument 'placement' exists"
        assert isinstance(
            sbp, (flow.sbp.sbp, tuple, list)
        ), f"argument 'sbp' must be flow.sbp.sbp, not {type(sbp)}"
        if isinstance(sbp, flow.sbp.sbp):
            sbp = (sbp,)
        else:
            for elem in sbp:
                assert isinstance(
                    elem, flow.sbp.sbp
                ), f"Element in argument 'sbp' must be flow.sbp.sbp, not {type(elem)}"
        assert len(sbp) == len(placement.ranks.shape)
    else:
        assert sbp is None, "argument 'sbp' must be None"

    if placement is not None:
        tensor = flow._C.global_empty(shape,
                                      dtype=dtype,
                                      placement=placement,
                                      sbp=sbp)
    else:
        tensor = flow._C.empty(shape,
                               dtype=dtype,
                               device=device,
                               pin_memory=pin_memory)
    tensor.requires_grad_(requires_grad)
    return tensor
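The sbp validation above requires exactly one sbp descriptor per dimension of ``placement.ranks``; a small sketch of the single-dimension case (the multi-dimension case needs multiple ranks and is only indicated in the comment):

import oneflow as flow

placement = flow.placement("cpu", ranks=[0])  # 1-D rank array -> one sbp descriptor
y = flow.empty(4, 5, placement=placement, sbp=flow.sbp.broadcast)
assert y.is_global
# For a 2-D rank array such as ranks=[[0, 1], [2, 3]], a tuple of two sbp
# descriptors would be required to satisfy len(sbp) == len(placement.ranks.shape).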
Example #5
def new_zeros_op(x,
                 size=None,
                 dtype=None,
                 device=None,
                 placement=None,
                 sbp=None,
                 requires_grad=False):
    if isinstance(device, str):
        device = flow.device(device)
    if size is None or len(size) == 0:
        new_size = x.shape
    else:
        new_size = _handle_size_arg(size)
    new_dtype = dtype
    new_device = device
    new_placement = placement
    new_sbp = sbp
    new_requires_grad = requires_grad

    if dtype is None:
        new_dtype = x.dtype
    if device is None:
        new_device = x.device if x.is_local else None
    if placement is None:
        new_placement = x.placement if x.is_global else None
    if sbp is None:
        new_sbp = x.sbp if x.is_global else None
    if new_placement is not None:
        assert (
            device is None
        ), "argument 'device' must be None when argument 'placement' exists"
        assert (
            new_sbp is not None
        ), "argument 'sbp' must not be None when argument 'placement' exists"
    assert isinstance(
        new_size, (int, tuple, list, flow.Size)
    ), f"argument 'size' must be tuple of ints, not {type(new_size)}"
    assert isinstance(
        new_dtype, flow.dtype
    ), f"argument 'dtype' must be flow.dtype, not {type(new_dtype)}"
    if new_placement is not None:
        assert isinstance(
            new_placement, flow.placement
        ), f"argument 'placement' must be flow.placement, not {type(new_placement)}"
        assert isinstance(
            new_sbp, (flow.sbp.sbp, tuple)
        ), f"argument 'sbp' must be flow.sbp.sbp, not {type(new_sbp)}"
    else:
        assert isinstance(
            new_device, (str, flow.device)
        ), f"argument 'device' must be flow.device, not {type(new_device)}"
    assert isinstance(
        new_requires_grad, bool
    ), f"argument 'requires_grad' must be bool, not {type(new_requires_grad)}"
    if new_placement is not None:
        res = flow._C.global_constant(new_size,
                                      0.0,
                                      dtype=new_dtype,
                                      placement=new_placement,
                                      sbp=new_sbp)
    else:
        res = flow._C.constant(new_size,
                               0.0,
                               dtype=new_dtype,
                               device=new_device)
    res.requires_grad = new_requires_grad
    return res
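A minimal usage sketch for Example #5, assuming this helper is registered as ``Tensor.new_zeros`` (the registration itself is not shown above); omitting ``size`` reuses the source tensor's shape:

import oneflow as flow

x = flow.ones(3, 4, dtype=flow.int64)
y = x.new_zeros()          # size omitted: falls back to x.shape (per the check above)
z = x.new_zeros((2, 2))    # explicit size; dtype is still inherited from x
assert y.shape == x.shape and y.dtype == flow.int64
assert z.shape == flow.Size([2, 2]) and z.dtype == flow.int64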
Example #6
def rand_op(*size,
            out=None,
            generator=None,
            dtype: Optional[flow.dtype] = None,
            layout=None,
            device: Union[flow.device, str, None] = None,
            placement: flow.placement = None,
            sbp: flow._oneflow_internal.sbp.sbp = None,
            requires_grad: bool = False):
    """
    Returns a tensor filled with random numbers from a uniform distribution on the interval [0, 1).

    The shape of the tensor is defined by the variable argument ``size``.

    Args:
        size (int... or oneflow.Size): Defining the shape of the output tensor.
          Can be a variable number of arguments or a collection like a list or tuple or oneflow.Size.
        out (optional): The output tensor.
        dtype (flow.dtype, optional): The desired data type of returned tensor. Default: ``flow.float32``.
        layout (optional): The desired layout of returned Tensor.
        generator (flow.Generator, optional): a pseudorandom number generator for sampling.
        device (flow.device, optional): The desired device of returned local tensor. If None, uses the
          current device.
        placement (flow.placement, optional): The desired placement of returned global tensor. If None, will
          construct local tensor.
        sbp (flow.sbp, optional): The desired sbp of returned global tensor. The number of sbp
          descriptors must match the number of dimensions of the placement.
        requires_grad (bool, optional): If autograd should record operations on the returned tensor. Default: False.

    For example:

    .. code-block:: python

        >>> import oneflow as flow
        >>> x = flow.rand(3, 3) # construct local tensor
        >>> x.shape
        oneflow.Size([3, 3])
        >>> x.is_global
        False
        >>> placement = flow.placement("cpu", ranks=[0])
        >>> sbp = flow.sbp.broadcast
        >>> x = flow.rand(3, 3, placement=placement, sbp=sbp) # construct global tensor
        >>> x.is_global
        True


    """
    size = _handle_size_arg(size)
    assert out is None, "out not supported yet"
    assert layout is None, "layout not supported yet"
    if placement is not None:
        return flow._C.rand(
            size=size,
            placement=placement,
            sbp=sbp,
            dtype=dtype,
            generator=generator,
            requires_grad=requires_grad,
        )
    else:
        return flow._C.rand(
            size=size,
            dtype=dtype,
            device=device,
            generator=generator,
            requires_grad=requires_grad,
        )
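A brief sketch of the ``generator`` argument (``flow.Generator``, as referenced in the docstring): reseeding the same generator reproduces the samples. This is a sketch, not part of the op above:

import oneflow as flow

g = flow.Generator()
g.manual_seed(0)
a = flow.rand(2, 3, generator=g)
g.manual_seed(0)
b = flow.rand(2, 3, generator=g)
assert (a.numpy() == b.numpy()).all()  # same seed and stream -> identical samples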
Example #7
def empty_op(
    *size,
    dtype: Optional[flow.dtype] = None,
    device: Union[flow.device, str] = None,
    placement: flow.placement = None,
    sbp: Union[flow._oneflow_internal.sbp.sbp,
               List[flow._oneflow_internal.sbp.sbp]] = None,
    requires_grad: bool = False,
):
    """
    Returns a tensor filled with uninitialized data.
    The shape of the tensor is defined by the variable argument ``size``.

    Args:
        size (int... or oneflow.Size): Defining the shape of the output tensor.
          Can be a variable number of arguments or a collection like a list or tuple or oneflow.Size.
        dtype (flow.dtype, optional): The desired data type of returned tensor. Default: ``flow.float32``.
        device (flow.device, optional): The desired device of returned local tensor. If None, uses the
          current device.
        placement (flow.placement, optional): The desired placement of returned consistent tensor. If None, will
          construct local tensor.
        sbp (flow.sbp or List[flow.sbp], optional): The desired sbp of returned consistent tensor.
        requires_grad (bool, optional): If autograd should record operations on the returned tensor. Default: False.

    For example:

    .. code-block:: python

        >>> import oneflow as flow
        >>> y = flow.empty(4, 5)  # construct local empty tensor
        >>> y.shape
        oneflow.Size([4, 5])
        >>> y.is_consistent
        False
        >>> placement = flow.placement("cpu", {0: [0]})
        >>> y = flow.empty(4, 5, placement=placement, sbp=flow.sbp.broadcast)  # construct consistent empty tensor
        >>> y.is_consistent
        True

    """
    assert size is not None, "shape must not be None"

    shape = _single(_handle_size_arg(size))
    if dtype is None:
        dtype = flow.float32
    if placement is None:
        if device is None:
            device = flow.device("cpu")
    else:
        assert device is None

    if placement is not None:
        assert isinstance(sbp, (flow.sbp.sbp, tuple, list)), "sbp: %s" % sbp
        if isinstance(sbp, flow.sbp.sbp):
            sbp = (sbp, )
        else:
            for elem in sbp:
                assert isinstance(elem, flow.sbp.sbp), "sbp: %s" % sbp
        assert len(sbp) == len(placement.hierarchy)
    else:
        assert sbp is None, "sbp: %s" % sbp

    if placement is not None:
        tensor = flow._C.consistent_empty(shape,
                                          dtype=dtype,
                                          placement=placement,
                                          sbp=sbp)
    else:
        tensor = flow._C.empty(shape, dtype=dtype, device=device)
    tensor.requires_grad_(requires_grad)
    return tensor
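Note: Example #7 is an earlier revision of the ``empty_op`` shown in Example #4. It uses the older "consistent" terminology (``is_consistent``, ``flow._C.consistent_empty``, ``placement.hierarchy``, and the dict-form ``flow.placement("cpu", {0: [0]})`` constructor) where Example #4 uses the newer "global" API (``is_global``, ``flow._C.global_empty``, ``placement.ranks``). The validation logic is otherwise the same, and Example #4 additionally supports ``pin_memory``.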
Example #8
def expand_op(input, *sizes):
    sizes = _handle_size_arg(sizes)
    sizes = _single(sizes)
    return flow._C.expand(input, sizes)
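A minimal usage sketch, assuming ``expand_op`` backs ``Tensor.expand``; expanding a size-1 dimension broadcasts it to the requested size without copying data:

import oneflow as flow

x = flow.ones(1, 3)
y = x.expand(4, 3)  # _handle_size_arg also accepts a single collection, e.g. x.expand((4, 3))
assert y.shape == flow.Size([4, 3])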