Example 1
def _arg_reduce(input, operation, dim=None, keepdim=False, top_k=1, out=None):
    if dim is None: keepdim = False
    dev = MakeDevice(inputs=[input])
    key = '{}/{}/dim:{}/keepdim:{}/top_k:{}'.format(operation, dev, dim,
                                                    int(keepdim), top_k)
    module = get_module(ArgReduce,
                        key,
                        dev,
                        operation=operation,
                        axis=dim,
                        keepdim=keepdim,
                        top_k=top_k)
    return module.forward(input, out)
Example 2
def _maximum(input, other, out=None):
    if not isinstance(input, Tensor):
        input = WrapScalar(input, 'float32', other._ctx)
        dtype = other._dtype
    elif not isinstance(other, Tensor):
        other = WrapScalar(other, 'float32', input._ctx)
        dtype = input._dtype
    else:
        dtype = input._dtype
    ctx = MakeContext(inputs=[input])
    key = 'torch/ops/maximum/{}:{}'.format(ctx[0].lower(), ctx[1])
    module = get_module(Maximum, key, ctx)
    return module.forward(input, other, out, dtype)
Example 3
def _normal(input, shape, mean, std):
    dev = MakeDevice(inputs=[input])
    n_dim = len(shape)
    key = 'Normal/{}/dtype:{}/n_dim:{}/mean:{}/std:{}'.format(
        dev, input.dtype, n_dim, float(mean), float(std))
    module = get_module(RandomNormal,
                        key,
                        dev,
                        n_dim=n_dim,
                        mean=mean,
                        std=std,
                        dtype=input.dtype)
    return module.forward(input, shape)
Example 4
def _uniform(input, shape, low, high):
    dev = MakeDevice(inputs=[input])
    n_dim = len(shape)
    key = 'Uniform/{}/dtype:{}/n_dim:{}/low:{}/high:{}'.format(
        dev, input.dtype, n_dim, float(low), float(high))
    module = get_module(RandomUniform,
                        key,
                        dev,
                        n_dim=n_dim,
                        low=low,
                        high=high,
                        dtype=input.dtype)
    return module.forward(input, shape)
Example 5
def _fill(input, shape, value):
    dev = MakeDevice(inputs=[input])
    ndim = len(shape)
    key = 'Fill/{}/dtype:{}/ndim:{}/value:{}' \
        .format(dev, input.dtype, ndim, value)
    module = get_module(
        Fill,
        key,
        dev,
        ndim=ndim,
        value=value,
        dtype=input.dtype,
    )
    return module.forward(input, shape)
Example 6
def _assigning(output, input, starts, sizes):
    if not isinstance(input, Tensor):
        if isinstance(input, (tuple, list)):
            input = Tensor(input, dtype=output.dtype, device=output.device)
        else:
            input = WrapScalar(input, output.dtype, output.device)
    n_starts, n_sizes = len(starts), len(sizes)
    dev = MakeDevice(inputs=[input])
    key = 'Assign/{}/n_starts:{}/n_sizes:{}'.format(dev, n_starts, n_sizes)
    module = get_module(Assigning,
                        key,
                        dev,
                        n_starts=n_starts,
                        n_sizes=n_sizes)
    return module.forward(input, output, starts, sizes)
Example 7
def _reduce(input, operation, dim=None, keepdim=False, out=None):
    ctx = MakeContext(inputs=[input])
    if dim is None:
        dim = -1
        keepdim = False
    elif dim < 0:
        dim = CanonicalAxis(input, dim)
    key = 'torch/ops/{}/{}:{}/dim[{}]/keep_dims:{}'.format(
        operation.lower(), ctx[0].lower(), ctx[1], dim, int(keepdim))
    module = get_module(Reduce,
                        key,
                        ctx,
                        operation=operation,
                        axis=dim,
                        keep_dims=keepdim)
    return module.forward(input, out)
Example 8
def roi_align(feature,
              rois,
              pooled_h,
              pooled_w,
              spatial_scale,
              sampling_ratio=2):
    dev = MakeDevice(inputs=[feature])
    key = 'RoIAlign/{}/pool_h:{}/pool_w:{}/' \
          'spatial_scale:{}/sampling_ratio:{}'.format(
        dev, pooled_h, pooled_w, spatial_scale, sampling_ratio)
    module = get_module(RoIAlign,
                        key,
                        dev,
                        pooled_h=pooled_h,
                        pooled_w=pooled_w,
                        spatial_scale=spatial_scale,
                        sampling_ratio=sampling_ratio)
    return module.forward(feature, rois)
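A hedged usage sketch for roi_align above. The import path, the torch.ones/Tensor constructors, and the [batch_idx, x1, y1, x2, y2] RoI layout are assumptions about the dragon.vm.torch frontend rather than facts from this listing; the op itself is called by the name defined above.
from dragon.vm import torch  # assumed import path for the Dragon torch frontend

# One 1x8x64x64 feature map (e.g. a stride-16 backbone output) and two RoIs in
# [batch_idx, x1, y1, x2, y2] image coordinates -- layout assumed, not shown above.
feature = torch.ones(1, 8, 64, 64)                  # torch.ones(*sizes) is assumed to exist
rois = torch.Tensor([[0., 0., 0., 127., 127.],
                     [0., 64., 64., 255., 255.]])   # nested-list construction assumed
pooled = roi_align(feature, rois,
                   pooled_h=7, pooled_w=7,
                   spatial_scale=1. / 16.)          # expect shape (2, 8, 7, 7)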
Example 9
def roi_align(feature,
              rois,
              pooled_h,
              pooled_w,
              spatial_scale,
              sampling_ratio=2):
    ctx = MakeContext(inputs=[feature])
    key = 'torch/ops/roi_align/{}:{}/pool_h:{}/pool_w:{}/' \
          'spatial_scale:{}/sampling_ratio:{}'.format(
        ctx[0].lower(), ctx[1], pooled_h, pooled_w, spatial_scale, sampling_ratio)
    module = get_module(RoIAlign,
                        key,
                        ctx,
                        pooled_h=pooled_h,
                        pooled_w=pooled_w,
                        spatial_scale=spatial_scale,
                        sampling_ratio=sampling_ratio)
    return module.forward(feature, rois)
Example 10
def one_hot(input, depth):
    """Return a ont hot tensor according to given input.

    Parameters
    ----------
    input : dragon.vm.torch.Tensor
        The input tensor.
    depth : int
        The depth of channels.

    Returns
    -------
    dragon.vm.torch.Tensor
        The output tensor.

    """
    dev = MakeDevice(inputs=[input])
    key = 'OneHot/{}/depth:{}'.format(dev, depth)
    module = get_module(OneHot, key, dev, depth=depth)
    return module.forward(input)
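A short usage sketch for one_hot, with the same caveats as the roi_align sketch above (the import and tensor construction are assumptions):
from dragon.vm import torch  # assumed import path

indices = torch.Tensor([0., 2., 3.])         # three class indices
encoded = one_hot(indices, depth=4)          # expect shape (3, 4), a single 1 per row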
Example 11
def nonzero(input, out=None):
    """Return the indices of non-zero elements.

    Parameters
    ----------
    input : dragon.vm.torch.Tensor
        The input tensor.
    out : dragon.vm.torch.Tensor, optional
        The optional output tensor.

    Returns
    -------
    dragon.vm.torch.Tensor
        The output tensor.

    """
    dev = MakeDevice(inputs=[input])
    key = 'NonZero/{}'.format(dev)
    module = get_module(NonZero, key, dev)
    return module.forward(input, out)
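A usage sketch for nonzero under the same assumptions:
from dragon.vm import torch  # assumed import path

x = torch.Tensor([0., 3., 0., 5.])
idx = nonzero(x)                             # expect the indices of the non-zero entries: 1 and 3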
Example 12
def unsqueeze(input, dim, out=None):
    """Returns a tensor with a dimension of size 1 inserted at the specified position.

    Parameters
    ----------
    input : dragon.vm.torch.Tensor
        The input tensor.
    dim : int
        The position at which to insert the new dimension.
    out : dragon.vm.torch.Tensor, optional
        The output tensor.

    Returns
    -------
    dragon.vm.torch.Tensor
        The new tensor.

    """
    dev = MakeDevice(inputs=[input])
    key = 'Unsqueeze/{}/dim:{}'.format(dev, dim if dim is not None else 'None')
    module = get_module(UnSqueeze, key, dev, dim=dim)
    return module.forward(input, out=out)
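A usage sketch for unsqueeze (same caveats as the earlier sketches):
from dragon.vm import torch  # assumed import path

x = torch.ones(3, 4)                         # torch.ones(*sizes) is assumed to exist
y = unsqueeze(x, dim=0)                      # expect shape (1, 3, 4)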
Example 13
def squeeze(input, dim=None, out=None):
    """Return a tensor with all the dimensions of input of size 1 removed.

    Parameters
    ----------
    input : dragon.vm.torch.Tensor
        The input tensor.
    dim : int, optional
        The optional dim to remove.
    out : dragon.vm.torch.Tensor, optional
        The output tensor.

    Returns
    -------
    dragon.vm.torch.Tensor
        The new tensor.

    """
    dev = MakeDevice(inputs=[input])
    key = 'Squeeze/{}/dim:{}'.format(dev, dim if dim is not None else 'None')
    module = get_module(Squeeze, key, dev, dim=dim)
    return module.forward(input, out=out)
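A usage sketch for squeeze, pairing it with the unsqueeze example above:
from dragon.vm import torch  # assumed import path

x = torch.ones(1, 3, 1, 4)
y = squeeze(x)                               # expect shape (3, 4): all size-1 dims removed
z = squeeze(x, dim=0)                        # expect shape (3, 1, 4): only dim 0 removed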
Example 14
def _update(
    param,
    grad,
    op_type,
    slot,
    lr_mult=1.0,
    decay_mult=1.0,
):
    dev = MakeDevice(inputs=[param])
    key = '{}/{}/{}/{}'.format(op_type, dev, slot, param.name)
    module = get_module(
        Update,
        key,
        dev,
        op_type=op_type,
        lr_mult=lr_mult,
        decay_mult=decay_mult,
        slot=slot,
    )
    return module.forward(param, grad)
Example 15
def _resize_2d(input, op_type, dsize, fx, fy):
    if dsize is None:
        if fx < 0 or fy < 0:
            raise ValueError('Set fx and fy if dsize is None.')
    else:
        if len(dsize) != 2:
            raise ValueError('The dsize should be a list with 2 elements.')
    if dsize is None and (fy == -1.0 or fx == -1.0):
        raise RuntimeError('Either dsize or fx/fy should be specified.')
    dev = MakeDevice(inputs=[input])
    key = '{}/{}/dsize:{}/fx:{}/fy:{}'.format(op_type, dev,
                                              '2' if dsize else 'none', fx, fy)
    module = get_module(Resize2d,
                        key,
                        dev,
                        op_type=op_type,
                        dsize=dsize,
                        fx=fx,
                        fy=fy)
    return module.forward(input, dsize)
Example 16
def sqrt(input, out=None):
    """Compute the square-root of input.

    Parameters
    ----------
    input : dragon.vm.torch.Tensor
        The input tensor.
    out : dragon.vm.torch.Tensor, optional
        The output tensor.

    Returns
    -------
    dragon.vm.torch.Tensor
        The output tensor.

    """
    dev = MakeDevice(inputs=[input])
    key = 'Sqrt/{}'.format(dev)
    module = get_module(Sqrt, key, dev)
    return module.forward(input, out)
Example 17
def exp(input, out=None):
    """Compute the exponential of input.

    Parameters
    ----------
    input : dragon.vm.torch.Tensor
        The input tensor.
    out : dragon.vm.torch.Tensor, optional
        The output tensor.

    Returns
    -------
    dragon.vm.torch.Tensor
        The output tensor.

    """
    dev = MakeDevice(inputs=[input])
    key = 'Exp/{}'.format(dev)
    module = get_module(Exp, key, dev)
    return module.forward(input, out)
Example 18
def log(input, out=None):
    """Compute the natural logarithm of input.

    Parameters
    ----------
    input : dragon.vm.torch.Tensor
        The input tensor.
    out : dragon.vm.torch.Tensor, optional
        The output tensor.

    Returns
    -------
    dragon.vm.torch.Tensor
        The output tensor.

    """
    dev = MakeDevice(inputs=[input])
    key = 'Log/{}'.format(dev)
    module = get_module(Log, key, dev)
    return module.forward(input, out)
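sqrt, exp, and log above share one calling convention; a combined sketch, with the usual caveat that the import and tensor construction are assumptions:
from dragon.vm import torch  # assumed import path

x = torch.Tensor([1., 4., 9.])
y = sqrt(x)                                  # expect [1., 2., 3.]
z = log(exp(x))                              # expect x back, up to floating-point error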
Example 19
def chunk(tensor, chunks, dim=0):
    """Split the input into several parts along the given axis.

    Parameters
    ----------
    tensor : dragon.vm.torch.Tensor
        The input to split.
    chunks : int
        The number of chunks to split into.
    dim : int, optional, default=0
        The dim to split along.

    Returns
    -------
    sequence of dragon.vm.torch.Tensor
        The output chunks.

    """
    dev = MakeDevice([tensor])
    key = 'Chunk/{}/chunks:{}/dim:{}'.format(dev, chunks, dim)
    module = get_module(Chunk, key, dev, axis=dim, chunks=chunks)
    return module.forward(tensor)
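A usage sketch for chunk (same caveats as above):
from dragon.vm import torch  # assumed import path

x = torch.ones(6, 4)
parts = chunk(x, chunks=3, dim=0)            # expect 3 tensors, each of shape (2, 4)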
Example 20
def cat(seq, dim=0, out=None):
    """Concatenate the inputs along the given axis.

    Parameters
    ----------
    seq : tuple or list of vm.torch.Tensor
        The sequence.
    dim : int
        The dim to concatenate.
    out : vm.torch.Tensor or None
        The optional output tensor.

    Returns
    -------
    vm.torch.Tensor
        The output tensor.

    """
    ctx = MakeContext(inputs=seq, outputs=[out] if out else [])
    key = 'torch/ops/cat/{}:{}/dim:{}'.format(ctx[0].lower(), ctx[1], dim)
    module = get_module(Concat, key, ctx, axis=dim)
    return module.forward(seq, out)
Example 21
def cat(seq, dim=0, out=None):
    """Concatenate the inputs along the given axis.

    Parameters
    ----------
    seq : sequence of dragon.vm.torch.Tensor
        The sequence.
    dim : int, optional
        The dim to concatenate.
    out : dragon.vm.torch.Tensor, optional
        The optional output tensor.

    Returns
    -------
    dragon.vm.torch.Tensor
        The output tensor.

    """
    dev = MakeDevice(seq, [out] if out else [])
    key = 'Concat/{}/dim:{}'.format(dev, dim)
    module = get_module(Concat, key, dev, axis=dim)
    return module.forward(seq, out)
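A usage sketch for cat, applying to either variant above:
from dragon.vm import torch  # assumed import path

a, b = torch.ones(2, 3), torch.ones(4, 3)
c = cat([a, b], dim=0)                       # expect shape (6, 3)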
Example 22
def where(condition, x, y):
    """Select elements from either ``x`` or ``y``, depending on ``condition``.

    Parameters
    ----------
    condition : dragon.vm.torch.Tensor
        The byte condition tensor.
    x : dragon.vm.torch.Tensor
        The elements taken where ``condition`` is *1*.
    y : dragon.vm.torch.Tensor
        The elements taken where ``condition`` is *0*.

    Returns
    -------
    dragon.vm.torch.Tensor
        The output tensor.

    """
    dev = MakeDevice(inputs=[condition, x, y])
    key = 'Where/{}'.format(dev)
    module = get_module(Where, key, dev)
    return module.forward(condition, x, y)
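A usage sketch for where; the dtype keyword of the Tensor constructor is assumed from its use in the _assigning helper above:
from dragon.vm import torch  # assumed import path

cond = torch.Tensor([1, 0, 1], dtype='uint8')   # byte condition tensor
x = torch.Tensor([10., 20., 30.])
y = torch.Tensor([-1., -2., -3.])
z = where(cond, x, y)                           # expect [10., -2., 30.]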
Example 23
def masked_select(input, mask, out=None):
    """Select the input values where mask is *1*.

    Parameters
    ----------
    input : dragon.vm.torch.Tensor
        The values.
    mask : dragon.vm.torch.Tensor
        The mask to select values.
    out : dragon.vm.torch.Tensor, optional
        The optional output tensor.

    Returns
    -------
    dragon.vm.torch.Tensor
        The output tensor.

    """
    dev = MakeDevice([input, mask], [out] if out else [])
    key = 'MaskedSelect/{}'.format(dev)
    module = get_module(MaskedSelect, key, dev)
    return module.forward(input, mask, out)
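A usage sketch for masked_select (same caveats as the where sketch):
from dragon.vm import torch  # assumed import path

x = torch.Tensor([1., 2., 3., 4.])
mask = torch.Tensor([0, 1, 0, 1], dtype='uint8')
picked = masked_select(x, mask)              # expect the 1-D tensor [2., 4.]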
Example 24
def clamp(input, min=None, max=None, out=None):
    """Clamp all elements into the range [min, max].

    Parameters
    ----------
    input : dragon.vm.torch.Tensor
        The input tensor.
    min : number, optional
        The min value.
    max : number, optional
        The max value.
    out : dragon.vm.torch.Tensor, optional
        The output tensor.

    Returns
    -------
    dragon.vm.torch.Tensor
        The output tensor.

    """
    dev = MakeDevice(inputs=[input])
    key = 'Clamp/{}/min:{}/max:{}'.format(dev, min, max)
    module = get_module(Clamp, key, dev, min=min, max=max)
    return module.forward(input, out)
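A usage sketch for clamp:
from dragon.vm import torch  # assumed import path

x = torch.Tensor([-2., 0.5, 3.])
y = clamp(x, min=0., max=1.)                 # expect [0., 0.5, 1.]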
Example 25
def index_select(input, dim, index, out=None):
    """Select the input values along the given axis using index.

    Parameters
    ----------
    input : dragon.vm.torch.Tensor
        The values.
    dim : int
        The dim to gather.
    index : dragon.vm.torch.Tensor
        The indices.
    out : dragon.vm.torch.Tensor, optional
        The optional output tensor.

    Returns
    -------
    dragon.vm.torch.Tensor
        The output tensor.

    """
    dev = MakeDevice([input, index], [out] if out else [])
    key = 'IndexSelect/{}/dim:{}'.format(dev, dim)
    module = get_module(IndexSelect, key, dev, axis=dim)
    return module.forward(input, index, out)
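A usage sketch for index_select; the int64 dtype for the index tensor is an assumption:
from dragon.vm import torch  # assumed import path

x = torch.Tensor([[1., 2.], [3., 4.], [5., 6.]])
idx = torch.Tensor([0, 2], dtype='int64')    # dtype kwarg as used in the _assigning helper
y = index_select(x, dim=0, index=idx)        # expect rows 0 and 2: [[1., 2.], [5., 6.]]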
Example 26
def xw_plus_b(x, w, bias=None, transW=True, out=None):
    """Compute *matmul(x, w) + bias.*``

    Parameters
    ----------
    x : dragon.vm.torch.Tensor
        The input tensor.
    w : dragon.vm.torch.Tensor
        The weight tensor.
    bias : dragon.vm.torch.Tensor, optional
        The optional bias tensor.
    transW : boolean, optional, default=True
        Whether to transpose ``w``.
    out : dragon.vm.torch.Tensor, optional
        The optional output tensor.

    Returns
    -------
    dragon.vm.torch.Tensor
        The output tensor.

    """
    dev = MakeDevice(inputs=[x, w] + ([bias] if bias else []))
    key = 'FullyConnected/{}/transW:{}'.format(dev, transW)
    module = get_module(FullyConnected, key, dev, transW=transW)
    return module.forward(x, w, bias, out)
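A usage sketch for xw_plus_b; the (out_features, in_features) weight layout follows from the default transW=True, everything else is assumed as before:
from dragon.vm import torch  # assumed import path

x = torch.ones(2, 3)                         # (N, in_features)
w = torch.ones(4, 3)                         # (out_features, in_features), transposed internally
b = torch.ones(4)
y = xw_plus_b(x, w, bias=b)                  # expect shape (2, 4), every entry 1*1*3 + 1 = 4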
Example 27
def mm(mat1, mat2, transA=False, transB=False, out=None):
    """Performs a matrix multiplication of the matrices ``mat1`` and ``mat2.``

    Parameters
    ----------
    mat1 : dragon.vm.torch.Tensor
        The matrix A.
    mat2 : dragon.vm.torch.Tensor
        The matrix B.
    transA : boolean, optional, default=False
        Whether to transpose ``mat1``.
    transB : boolean, optional, default=False
        Whether to transpose ``mat2``.
    out : dragon.vm.torch.Tensor, optional
        The optional output tensor.

    Returns
    -------
    dragon.vm.torch.Tensor
        The output tensor.

    """
    dev = MakeDevice(inputs=[mat1, mat2])
    key = 'Matmul/{}/transA:{}/transB:{}'.format(dev, transA, transB)
    module = get_module(MM, key, dev, transA=transA, transB=transB)
    return module.forward(mat1, mat2, out)
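A usage sketch for mm:
from dragon.vm import torch  # assumed import path

a = torch.ones(2, 3)
b = torch.ones(3, 4)
c = mm(a, b)                                 # expect shape (2, 4), every entry equal to 3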
Example 28
def accumulate(input, alpha=1., beta=1., out=None):
    """Compute *out = alpha * input + beta * out*

    Parameters
    ----------
    input : dragon.vm.torch.Tensor
        The input tensor.
    alpha : float, optional, default=1.
        The value of alpha.
    beta : float, optional, default=1.
        The value of beta.
    out : dragon.vm.torch.Tensor, optional
        The output tensor.

    Returns
    -------
    dragon.vm.torch.Tensor
        The output tensor.

    """
    dev = MakeDevice(inputs=[input])
    key = 'Accumulate/{}/alpha:{}/beta:{}'.format(dev, alpha, beta)
    module = get_module(Accumulate, key, dev, alpha=alpha, beta=beta)
    return module.forward(input, out)
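A usage sketch for accumulate, writing the result into an existing tensor:
from dragon.vm import torch  # assumed import path

x = torch.ones(3)
acc = torch.ones(3)
accumulate(x, alpha=0.5, beta=1.0, out=acc)  # expect acc to become [1.5, 1.5, 1.5]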
Example 29
def unsqueeze(input, dim, out=None):
    ctx = MakeContext(inputs=[input])
    key = 'torch/ops/unsqueeze/{}:{}/dim:{}'.format(
        ctx[0].lower(), ctx[1], dim if dim is not None else 'None')
    module = get_module(UnSqueeze, key, ctx, dim=dim)
    return module.forward(input, out=out)
Example 30
def _index(input, starts, sizes):
    nstarts, nsizes = len(starts), len(sizes)
    dev = MakeDevice(inputs=[input])
    key = 'Index/{}/nstarts:{}/nsizes:{}'.format(dev, nstarts, nsizes)
    module = get_module(Indexing, key, dev, nstarts=nstarts, nsizes=nsizes)
    return module.forward(input, starts, sizes)