Example 1
def multinomial(input, num_samples, eps=0., out=None):
    """Return a tensor where each row contains ``num_samples``,
     sampled from the multinomial distribution.

    Parameters
    ----------
    input : dragon.vm.torch.Tensor
        The input tensor.
    num_samples : int
        The number of samples.
    eps : float, optional, default=0.
        The probability of taking a uniform sample instead.
    out : dragon.vm.torch.Tensor, optional
        The optional output tensor.

    Returns
    -------
    dragon.vm.torch.Tensor
        The output tensor.

    """
    dev = MakeDevice(inputs=[input])
    key = 'Multinomial/{}' \
          '/num_samples:{}' \
          '/eps:{}'.format(dev, num_samples, eps)
    module = get_module(
        Multinomial,
        key,
        dev,
        eps=eps,
        num_samples=num_samples,
    )
    return module.forward(input, out)
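
A minimal usage sketch for this variant; the ``dragon.vm.torch`` import path follows the library, while the tensor constructor and values are illustrative assumptions:

# Hypothetical usage; assumes the PyTorch-style frontend that
# dragon.vm.torch mirrors.
from dragon.vm import torch

# Each row holds a (possibly unnormalized) distribution over 4 classes.
probs = torch.Tensor([[0.1, 0.2, 0.3, 0.4],
                      [0.4, 0.3, 0.2, 0.1]])

# Draw 2 class indices per row; eps > 0 mixes in uniform sampling.
samples = multinomial(probs, num_samples=2, eps=0.1)  # shape: (2, 2)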
Example 2
def multinomial(input, num_samples, normalize=False, out=None):
    """Return a tensor where each row contains ``num_samples``,
     sampled from the multinomial distribution.

    Parameters
    ----------
    input : dragon.vm.torch.Tensor
        The input tensor.
    num_samples : int
        The number of samples.
    normalize : bool, optional, default=False
        Whether to normalize the input first.
    out : dragon.vm.torch.Tensor, optional
        The optional output tensor.

    Returns
    -------
    dragon.vm.torch.Tensor
        The output tensor.

    """
    dev = MakeDevice(inputs=[input])
    key = 'Multinomial/{}/num_samples:{}/normalize:{}'.format(
        dev, num_samples, normalize)
    module = get_module(Multinomial,
                        key,
                        dev,
                        num_samples=num_samples,
                        normalize=normalize)
    return module.forward(input, out)
Example 3
def _type_to(input, dtype='float32', inplace=False):
    if dtype == input.dtype: return input
    dev = MakeDevice(inputs=[input])
    key = 'Cast/{}/dtype:{}/inplace:{}'.format(
        dev, dtype, 'true' if inplace else 'false')
    module = get_module(Cast, key, dev, dtype=dtype, inplace=inplace)
    return module.forward(input)
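
Since ``_type_to`` is private, user code would normally reach it through PyTorch-style casting methods; a hedged sketch of direct calls, where the constructor and default dtype are assumptions:

from dragon.vm import torch

x = torch.ones(2, 2)                              # assumed float32 by default
same = _type_to(x, dtype='float32')               # no-op: dtype already matches
x16 = _type_to(x, dtype='float16', inplace=True)  # cast in place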
Example 4
def channel_shuffle(input, dim=0, group=1, out=None):
    """Shuffle channels between groups along the given axis.

    Parameters
    ----------
    input : dragon.vm.torch.Tensor
        The input tensor.
    dim : int, optional, default=0
        The axis of channels.
    group : int, optional, default=1
        The number of groups.
    out : dragon.vm.torch.Tensor, optional
        The output tensor.

    Returns
    -------
    dragon.vm.torch.Tensor
        The new tensor.

    """
    dev = MakeDevice([input])
    key = 'ChannelShuffle/{}/dim:{}/group:{}'.format(dev, dim, group)
    module = get_module(
        ChannelShuffle,
        key,
        dev,
        axis=dim,
        group=group,
    )
    return module.forward(input, out)
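
A minimal usage sketch; the tensor constructor is assumed from the PyTorch-like frontend:

from dragon.vm import torch

x = torch.ones(1, 8, 4, 4)  # NCHW feature map with 8 channels
# Shuffle the 8 channels across 2 groups along dim=1, the channel axis.
y = channel_shuffle(x, dim=1, group=2)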
Example 5
def _allreduce(grads):
    if not isinstance(grads, (list, tuple)): grads = [grads]
    dev = MakeDevice(inputs=grads)
    mode = mpi.GetParallelMode() + '_ALLREDUCE'
    key = 'Collective/{}/{}'.format(dev, mode.lower())
    module = get_module(Collective, key, dev, mode=mode)
    return module.forward(grads)
Example 6
def gather(input, dim, index, out=None):
    """Gather the input values along the given axis.

    Note that this is a TensorFlow-style gather: it takes a vector of indices,
    and values along the other dimensions are copied automatically.

    Parameters
    ----------
    input : dragon.vm.torch.Tensor
        The values.
    dim : int
        The dim to gather.
    index : dragon.vm.torch.Tensor
        The indices.
    out : dragon.vm.torch.Tensor, optional
        The optional output tensor.

    Returns
    -------
    dragon.vm.torch.Tensor
        The output tensor.

    """
    dev = MakeDevice(inputs=[input, index], outputs=[out] if out else [])
    key = 'Gather/{}/dim:{}'.format(dev, dim)
    module = get_module(Gather, key, dev, axis=dim)
    return module.forward(input, index, out)
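
A minimal usage sketch of the vector-index gather (values and constructor are illustrative; in practice the index tensor likely needs an integer dtype):

from dragon.vm import torch

x = torch.Tensor([[1., 2.], [3., 4.], [5., 6.]])  # shape (3, 2)
idx = torch.Tensor([2, 0])                        # rows to pick
y = gather(x, dim=0, index=idx)                   # rows 2 and 0 -> shape (2, 2)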
Example 7
def roi_pool(
    feature,
    rois,
    pooled_h,
    pooled_w,
    spatial_scale,
):
    dev = MakeDevice(inputs=[feature])
    key = 'RoIPool/{}' \
          '/pool_h:{}' \
          '/pool_w:{}' \
          '/spatial_scale:{}' \
        .format(dev,
                pooled_h,
                pooled_w,
                spatial_scale)
    module = get_module(
        RoIPool,
        key,
        dev,
        pooled_h=pooled_h,
        pooled_w=pooled_w,
        spatial_scale=spatial_scale,
    )
    return module.forward(feature, rois)
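
A minimal usage sketch following the usual Fast R-CNN conventions; the shapes and the RoI layout are assumptions:

from dragon.vm import torch

feature = torch.ones(1, 256, 32, 32)  # NCHW feature map
# Each RoI is assumed to be (batch_index, x1, y1, x2, y2) in image coordinates.
rois = torch.Tensor([[0., 0., 0., 127., 127.]])
# spatial_scale maps 128x128 image coordinates onto the 32x32 feature map.
pooled = roi_pool(feature, rois, pooled_h=7, pooled_w=7, spatial_scale=1. / 4.)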
Example 8
def _rfundamental(input, value, op='RAdd', out=None):
    if not isinstance(value, Tensor):
        value = WrapScalar(value, input.dtype, input.device)
    dev = MakeDevice(inputs=[input, value])
    key = '{}/{}'.format(op, dev)
    module = get_module(Fundamental, key, dev, op_type=op)
    return module.forward(value, input, out)
Example 9
def roi_align(
    feature,
    rois,
    pooled_h,
    pooled_w,
    spatial_scale,
    sampling_ratio=2,
):
    dev = MakeDevice(inputs=[feature])
    key = 'RoIAlign/{}' \
          '/pool_h:{}' \
          '/pool_w:{}' \
          '/spatial_scale:{}' \
          '/sampling_ratio:{}' \
        .format(dev,
                pooled_h,
                pooled_w,
                spatial_scale,
                sampling_ratio)
    module = get_module(
        RoIAlign,
        key,
        dev,
        pooled_h=pooled_h,
        pooled_w=pooled_w,
        spatial_scale=spatial_scale,
        sampling_ratio=sampling_ratio,
    )
    return module.forward(feature, rois)
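
The call mirrors roi_pool; reusing the hypothetical feature and rois from the roi_pool sketch above, sampling_ratio controls how many bilinear samples are taken per output bin:

# Hypothetical usage; see the roi_pool sketch above for feature/rois.
aligned = roi_align(feature, rois,
                    pooled_h=7, pooled_w=7,
                    spatial_scale=1. / 4.,
                    sampling_ratio=2)  # 2x2 bilinear samples per bin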
Example 10
def minimum(input, other, out=None):
    """Return the min value of given two tensors.

    Parameters
    ----------
    input : dragon.vm.torch.Tensor or number
        The first input.
    other : dragon.vm.torch.Tensor or number
        The second input.
    out : dragon.vm.torch.Tensor, optional
        The output tensor.

    Returns
    -------
    dragon.vm.torch.Tensor
        The output tensor.

    """
    if not isinstance(input, Tensor):
        input = WrapScalar(input, other.dtype, other.device)
    elif not isinstance(other, Tensor):
        other = WrapScalar(other, input.dtype, input.device)
    dev = MakeDevice(inputs=[input])
    key = 'Minimum/{}'.format(dev)
    module = get_module(Minimum, key, dev)
    return module.forward(input, other, out)
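
A minimal usage sketch; either argument may be a plain number, which WrapScalar promotes to a tensor with the other argument's dtype and device (constructor and values assumed):

from dragon.vm import torch

a = torch.Tensor([1., 5., 3.])
b = torch.Tensor([4., 2., 6.])
c = minimum(a, b)    # elementwise -> [1., 2., 3.]
d = minimum(a, 2.5)  # scalar broadcast -> [1., 2.5, 2.5]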
Example 11
def _reshape(input, shape, shape_like=None):
    if shape_like is not None: shape = shape_like.shape
    dev = MakeDevice(inputs=[input])
    ndim = len(shape)
    key = 'Reshape/{}/ndim:{}'.format(dev, ndim)
    module = get_module(Reshape, key, dev, ndim=ndim)
    return module.forward(input, shape)
Example 12
def _compare(input, other, operation, out=None):
    if not isinstance(other, Tensor):
        other = WrapScalar(other, input.dtype, input.device)
    dev = MakeDevice(inputs=[input, other])
    key = 'Compare/{}/{}'.format(operation, dev)
    module = get_module(Compare, key, dev, operation=operation)
    return module.forward(input, other, out)
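
This helper presumably backs the public comparison ops; a hedged sketch, where the operation strings and the constructor are assumptions about the backend's naming:

from dragon.vm import torch

a = torch.Tensor([1., -2., 3.])
b = torch.Tensor([1., 2., 3.])
mask_gt = _compare(a, 0., operation='GT')  # elementwise a > 0
mask_eq = _compare(a, b, operation='EQ')   # elementwise a == b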
Example 13
def _accumulate(grads):
    if not isinstance(grads, (list, tuple)): grads = [grads]
    if len(grads) == 0: return
    dev = MakeDevice(inputs=grads)
    key = 'Accumulate/{}/alpha:1./beta:1.'.format(dev)
    module = get_module(_Accumulate, key, dev)
    return module.forward(grads)
Example 14
def _masked_assign(output, mask, input):
    if not isinstance(input, Tensor):
        if isinstance(input, (tuple, list)):
            input = Tensor(input, dtype=output.dtype, device=output.device)
        else:
            input = WrapScalar(input, output.dtype, output.device)
    dev = MakeDevice(inputs=[input])
    key = 'MaskedAssign/{}'.format(dev)
    module = get_module(MaskedAssign, key, dev)
    return module.forward(input, output, mask)
Example 15
def _update(param, grad, op_type, slot, lr_mult=1.0, decay_mult=1.0):
    dev = MakeDevice(inputs=[param])
    key = '{}/{}/{}/{}'.format(op_type, dev, slot, param.name)
    module = get_module(Update,
                        key,
                        dev,
                        op_type=op_type,
                        lr_mult=lr_mult,
                        decay_mult=decay_mult,
                        slot=slot)
    return module.forward(param, grad)
Example 16
def _reduce(input, operation, dim=None, keepdim=False, out=None):
    if dim is None: keepdim = False
    dev = MakeDevice(inputs=[input])
    key = '{}/{}/dim:{}/keepdim:{}'.format(operation, dev, dim, int(keepdim))
    module = get_module(Reduce,
                        key,
                        dev,
                        operation=operation,
                        dim=dim,
                        keepdim=keepdim)
    return module.forward(input, out)
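
This helper presumably backs reductions such as sum and mean; a hedged sketch, where the operation names and the constructor are assumptions:

from dragon.vm import torch

x = torch.ones(2, 3)
total = _reduce(x, operation='SUM')                           # full reduction
row_mean = _reduce(x, operation='MEAN', dim=1, keepdim=True)  # shape (2, 1)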
Example 17
def _assign(output, starts, sizes, input):
    if not isinstance(input, Tensor):
        if isinstance(input, (tuple, list)):
            input = Tensor(input, dtype=output.dtype, device=output.device)
        else:
            input = WrapScalar(input, output.dtype, output.device)
    nstarts, nsizes = len(starts), len(sizes)
    dev = MakeDevice(inputs=[input])
    key = 'Assign/{}/nstarts:{}/nsizes:{}'.format(dev, nstarts, nsizes)
    module = get_module(Assign, key, dev, nstarts=nstarts, nsizes=nsizes)
    return module.forward(input, output, starts, sizes)
Example 18
def _fill(input, shape, value):
    dev = MakeDevice(inputs=[input])
    n_dim = len(shape)
    key = 'Fill/{}/dtype:{}/n_dim:{}/value:{}'.format(dev, input.dtype, n_dim,
                                                      value)
    module = get_module(Fill,
                        key,
                        dev,
                        n_dim=n_dim,
                        value=value,
                        dtype=input.dtype)
    return module.forward(input, shape)
Example 19
def _uniform(input, shape, low, high):
    dev = MakeDevice(inputs=[input])
    n_dim = len(shape)
    key = 'Uniform/{}/dtype:{}/n_dim:{}/low:{}/high:{}'.format(
        dev, input.dtype, n_dim, float(low), float(high))
    module = get_module(RandomUniform,
                        key,
                        dev,
                        n_dim=n_dim,
                        low=low,
                        high=high,
                        dtype=input.dtype)
    return module.forward(input, shape)
Example 20
def _normal(input, shape, mean, std):
    dev = MakeDevice(inputs=[input])
    n_dim = len(shape)
    key = 'Normal/{}/dtype:{}/n_dim:{}/mean:{}/std:{}'.format(
        dev, input.dtype, n_dim, float(mean), float(std))
    module = get_module(RandomNormal,
                        key,
                        dev,
                        n_dim=n_dim,
                        mean=mean,
                        std=std,
                        dtype=input.dtype)
    return module.forward(input, shape)
Example 21
def _arg_reduce(input, operation, dim=None, keepdim=False, top_k=1, out=None):
    if dim is None: keepdim = False
    dev = MakeDevice(inputs=[input])
    key = '{}/{}/dim:{}/keepdim:{}/top_k:{}'.format(operation, dev, dim,
                                                    int(keepdim), top_k)
    module = get_module(ArgReduce,
                        key,
                        dev,
                        operation=operation,
                        axis=dim,
                        keepdim=keepdim,
                        top_k=top_k)
    return module.forward(input, out)
Example 22
def sqrt(input, out=None):
    """Compute the square-root of input.

    Parameters
    ----------
    input : dragon.vm.torch.Tensor
        The input tensor.
    out : dragon.vm.torch.Tensor, optional
        The output tensor.

    Returns
    -------
    dragon.vm.torch.Tensor
        The output tensor.

    """
    dev = MakeDevice(inputs=[input])
    key = 'Sqrt/{}'.format(dev)
    module = get_module(Sqrt, key, dev)
    return module.forward(input, out)
Example 23
def exp(input, out=None):
    """Compute the exponential of input.

    Parameters
    ----------
    input : dragon.vm.torch.Tensor
        The input tensor.
    out : dragon.vm.torch.Tensor, optional
        The output tensor.

    Returns
    -------
    dragon.vm.torch.Tensor
        The output tensor.

    """
    dev = MakeDevice(inputs=[input])
    key = 'Exp/{}'.format(dev)
    module = get_module(Exp, key, dev)
    return module.forward(input, out)
Example 24
def log(input, out=None):
    """Compute the natural logarithm of input.

    Parameters
    ----------
    input : dragon.vm.torch.Tensor
        The input tensor.
    out : dragon.vm.torch.Tensor, optional
        The output tensor.

    Returns
    -------
    dragon.vm.torch.Tensor
        The output tensor.

    """
    dev = MakeDevice(inputs=[input])
    key = 'Log/{}'.format(dev)
    module = get_module(Log, key, dev)
    return module.forward(input, out)
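
A minimal usage sketch tying the three elementwise ops together (constructor and values assumed):

from dragon.vm import torch

x = torch.Tensor([1., 4., 9.])
y = sqrt(x)      # [1., 2., 3.]
z = log(exp(x))  # recovers x up to floating-point error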
Example 25
def squeeze(input, dim=None, out=None):
    """Return a tensor with all the dimensions of input of size 1 removed.

    Parameters
    ----------
    input : dragon.vm.torch.Tensor
        The input tensor.
    dim : int, optional
        The optional dim to remove.
    out : dragon.vm.torch.Tensor, optional
        The output tensor.

    Returns
    -------
    dragon.vm.torch.Tensor
        The new tensor.

    """
    dev = MakeDevice(inputs=[input])
    key = 'Squeeze/{}/dim:{}'.format(dev, dim if dim is not None else 'None')
    module = get_module(Squeeze, key, dev, dim=dim)
    return module.forward(input, out=out)
Example 26
def unsqueeze(input, dim, out=None):
    """Returns a tensor with a dimension of size 1 inserted at the specified position.

    Parameters
    ----------
    input : dragon.vm.torch.Tensor
        The input tensor.
    dim : int
        The position at which to insert the size-1 dim.
    out : dragon.vm.torch.Tensor, optional
        The output tensor.

    Returns
    -------
    dragon.vm.torch.Tensor
        The new tensor.

    """
    dev = MakeDevice(inputs=[input])
    key = 'Unsqueeze/{}/dim:{}'.format(dev, dim)
    module = get_module(UnSqueeze, key, dev, dim=dim)
    return module.forward(input, out=out)
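
A minimal usage sketch; squeeze and unsqueeze are inverses on size-1 dims (constructor assumed):

from dragon.vm import torch

x = torch.ones(1, 3, 1, 2)
y = squeeze(x)           # -> shape (3, 2): all size-1 dims removed
z = unsqueeze(y, dim=0)  # -> shape (1, 3, 2): size-1 dim inserted at 0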
Example 27
def nonzero(input, out=None):
    """Return the indices of non-zero elements.

    Parameters
    ----------
    input : dragon.vm.torch.Tensor
        The input tensor.
    out : dragon.vm.torch.Tensor, optional
        The optional output tensor.

    Returns
    -------
    dragon.vm.torch.Tensor
        The output tensor.

    """
    dev = MakeDevice(inputs=[input])
    key = 'NonZero/{}'.format(dev)
    module = get_module(NonZero, key, dev)
    return module.forward(input, out)
Example 28
def one_hot(input, depth):
    """Return a ont hot tensor according to given input.

    Parameters
    ----------
    input : dragon.vm.torch.Tensor
        The input tensor.
    depth : int
        The depth of channels.

    Returns
    -------
    dragon.vm.torch.Tensor
        The output tensor.

    """
    dev = MakeDevice(inputs=[input])
    key = 'OneHot/{}/depth:{}'.format(dev, depth)
    module = get_module(OneHot, key, dev, depth=depth)
    return module.forward(input)
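
A minimal usage sketch (values illustrative; the label tensor likely needs an integer dtype in practice):

from dragon.vm import torch

labels = torch.Tensor([0., 2., 1.])
encoded = one_hot(labels, depth=3)  # -> shape (3, 3), one-hot rows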
Example 29
def _resize_2d(input, op_type, dsize, fx, fy):
    if dsize is None:
        if fx < 0 or fy < 0:
            raise ValueError('Set fx and fy if dsize is None.')
    else:
        if len(dsize) != 2:
            raise ValueError('The dsize should be a list with 2 elements.')
    if dsize is None and (fy == -1.0 or fx == -1.0):
        raise RuntimeError('Either dsize or both fx and fy should be specified.')
    dev = MakeDevice(inputs=[input])
    key = '{}/{}/dsize:{}/fx:{}/fy:{}'.format(op_type, dev,
                                              '2' if dsize else 'none', fx, fy)
    module = get_module(Resize2d,
                        key,
                        dev,
                        op_type=op_type,
                        dsize=dsize,
                        fx=fx,
                        fy=fy)
    return module.forward(input, dsize)
Example 30
def cat(seq, dim=0, out=None):
    """Concatenate the inputs along the given axis.

    Parameters
    ----------
    seq : sequence of dragon.vm.torch.Tensor
        The sequence.
    dim : int, optional
        The dim to concatenate.
    out : dragon.vm.torch.Tensor, optional
        The optional output tensor.

    Returns
    -------
    dragon.vm.torch.Tensor
        The output tensor.

    """
    dev = MakeDevice(seq, [out] if out else [])
    key = 'Concat/{}/dim:{}'.format(dev, dim)
    module = get_module(Concat, key, dev, axis=dim)
    return module.forward(seq, out)
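
A minimal usage sketch; inputs must match on every dim except the concatenation dim (constructor assumed):

from dragon.vm import torch

a = torch.ones(2, 3)
b = torch.ones(4, 3)
c = cat([a, b], dim=0)  # -> shape (6, 3)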