Example #1
def minimum(input, other, out=None):
    """Return the min value of given two tensors.

    Parameters
    ----------
    input : dragon.vm.torch.Tensor or number
        The input tensor.
    other : dragon.vm.torch.Tensor or number
        The tensor or number to compare with.
    out : dragon.vm.torch.Tensor, optional
        The output tensor.

    Returns
    -------
    dragon.vm.torch.Tensor
        The output tensor.

    """
    if not isinstance(input, Tensor):
        input = WrapScalar(input, other.dtype, other.device)
    elif not isinstance(other, Tensor):
        other = WrapScalar(other, input.dtype, input.device)
    dev = MakeDevice(inputs=[input])
    key = 'Minimum/{}'.format(dev)
    module = get_module(Minimum, key, dev)
    return module.forward(input, other, out)
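
A minimal usage sketch, assuming the function above is exported through the dragon.vm.torch namespace and that a Tensor can be built from a Python list (as Example #6 below does); the values are illustrative only:

from dragon.vm import torch

a = torch.Tensor([1., 5., 3.])
b = torch.Tensor([2., 2., 2.])
c = torch.minimum(a, b)   # both operands are tensors
d = torch.minimum(a, 2.)  # the scalar 2. is promoted internally via WrapScalar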
Example #2
def _minimum(input, other, out=None):
    if not isinstance(input, Tensor):
        input = WrapScalar(input, 'float32', other._ctx)
        dtype = other._dtype
    elif not isinstance(other, Tensor):
        other = WrapScalar(other, 'float32', input._ctx)
        dtype = input._dtype
    else:
        dtype = input._dtype
    ctx = MakeContext(inputs=[input])
    key = 'torch/ops/minimum/{}:{}'.format(ctx[0].lower(), ctx[1])
    module = get_module(Minimum, key, ctx)
    return module.forward(input, other, out, dtype)
Example #3
def _compare(input, other, operation, out=None):
    if not isinstance(other, Tensor):
        other = WrapScalar(other, input.dtype, input.device)
    dev = MakeDevice(inputs=[input, other])
    key = 'Compare/{}/{}'.format(operation, dev)
    module = get_module(Compare, key, dev, operation=operation)
    return module.forward(input, other, out)
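
A sketch of how a public comparison op might delegate to this helper; the wrapper name and the 'EQ' operation string are assumptions, not taken from the source:

def eq(input, other, out=None):
    # Element-wise equality; a non-tensor ``other`` is promoted inside _compare.
    return _compare(input, other, 'EQ', out)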
Example #4
def _rfundamental(input, value, op='RAdd', out=None):
    if not isinstance(value, Tensor):
        value = WrapScalar(value, input.dtype, input.device)
    dev = MakeDevice(inputs=[input, value])
    key = '{}/{}'.format(op, dev)
    module = get_module(Fundamental, key, dev, op_type=op)
    return module.forward(value, input, out)
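
Note that forward receives the wrapped scalar first, which is what the reflected arithmetic operators need. A sketch of how such an operator might delegate to this helper; the method wiring and the 'RSub' op name are assumptions, not taken from the source:

def __rsub__(self, other):
    # Computes ``other - self`` for expressions like ``3. - x``.
    return _rfundamental(self, other, op='RSub')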
Example #5
def _rfundamental(input, value, op='RAdd', out=None):
    if not isinstance(value, Tensor):
        value = WrapScalar(value, input._dtype, input._ctx)
    ctx = MakeContext(inputs=[input, value])
    key = 'torch/ops/{}/{}:{}'.format(op.lower(), ctx[0].lower(), ctx[1])
    module = get_module(Fundamental, key, ctx, op_type=op)
    return module.forward(value, input, out)
Example #6
def _masked_assign(output, mask, input):
    if not isinstance(input, Tensor):
        if isinstance(input, (tuple, list)):
            input = Tensor(input, dtype=output.dtype, device=output.device)
        else:
            input = WrapScalar(input, output.dtype, output.device)
    dev = MakeDevice(inputs=[input])
    key = 'MaskedAssign/{}'.format(dev)
    module = get_module(MaskedAssign, key, dev)
    return module.forward(input, output, mask)
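
A minimal sketch of calling this helper directly, assuming x and mask are existing dragon.vm.torch tensors; the two calls only illustrate which conversion branch each payload takes. In practice a helper like this usually backs masked indexing assignment (e.g. x[mask] = 0.), though that wiring is not shown here:

_masked_assign(x, mask, 0.)        # scalar: promoted with WrapScalar
_masked_assign(x, mask, [1., 2.])  # list: wrapped as a Tensor with x's dtype/device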
Example #7
def _fundamental(input, value, op='Add', out=None):
    if not isinstance(value, Tensor):
        if not isinstance(value, (int, float)):
            raise TypeError(
                'Type of value should be numerical, got {}.'.format(
                    type(value)))
        value = WrapScalar(value, input._dtype, input._ctx)
    ctx = MakeContext(inputs=[input, value])
    key = 'torch/ops/{}/{}:{}'.format(op.lower(), ctx[0].lower(), ctx[1])
    module = get_module(Fundamental, key, ctx, op_type=op)
    return module.forward(input, value, out)
Example #8
def _assign(output, starts, sizes, input):
    if not isinstance(input, Tensor):
        if isinstance(input, (tuple, list)):
            input = Tensor(input, dtype=output.dtype, device=output.device)
        else:
            input = WrapScalar(input, output.dtype, output.device)
    nstarts, nsizes = len(starts), len(sizes)
    dev = MakeDevice(inputs=[input])
    key = 'Assign/{}/nstarts:{}/nsizes:{}'.format(dev, nstarts, nsizes)
    module = get_module(Assign, key, dev, nstarts=nstarts, nsizes=nsizes)
    return module.forward(input, output, starts, sizes)
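
Taken together, the eight helpers share one caching pattern: promote any non-tensor operand with WrapScalar, derive a device (or context) descriptor from the tensor inputs, build a string key from the op name plus that descriptor, and fetch a cached module with get_module before calling forward. A schematic sketch of the pattern, reusing only names that appear above (the helper itself is illustrative, not part of the source):

def _run_binary_op(op_class, op_name, input, other, out=None, **kwargs):
    # Promote a Python number to a tensor on the same device with the same dtype.
    if not isinstance(other, Tensor):
        other = WrapScalar(other, input.dtype, input.device)
    dev = MakeDevice(inputs=[input, other])
    # One cached module per (op, device) pair.
    key = '{}/{}'.format(op_name, dev)
    module = get_module(op_class, key, dev, **kwargs)
    return module.forward(input, other, out)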