Example #1
File: mxnet.py Project: wz741598953/d2l-zh
def try_all_gpus():
    """返回所有可用的GPU,如果没有GPU,则返回[cpu()]。"""
    devices = [npx.gpu(i) for i in range(npx.num_gpus())]
    return devices if devices else [npx.cpu()]
Example #2
def try_gpu(i=0):  #@save
    """Return gpu(i) if exists, otherwise return cpu()."""
    return npx.gpu(i) if npx.num_gpus() >= i + 1 else npx.cpu()
Example #3
File: mxnet.py Project: wz741598953/d2l-zh
def try_gpu(i=0):
    """如果存在,则返回gpu(i),否则返回cpu()。"""
    return npx.gpu(i) if npx.num_gpus() >= i + 1 else npx.cpu()
Example #4
from mxnet import np, npx
from mxnet.gluon import nn

npx.set_np()

npx.cpu(), npx.gpu(), npx.gpu(1)


def try_gpu(i=0):  #@save
    """Return gpu(i) if exists, otherwise return cpu()."""
    return npx.gpu(i) if npx.num_gpus() >= i + 1 else npx.cpu()


def try_all_gpus():  #@save
    """Return all available GPUs, or [cpu()] if no GPU exists."""
    devices = [npx.gpu(i) for i in range(npx.num_gpus())]
    return devices if devices else [npx.cpu()]


try_gpu(), try_gpu(10), try_all_gpus()
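
A minimal follow-up sketch (not one of the listed examples) of how these helpers are typically used: allocate data and parameters on whatever device try_gpu() returns, so the same script runs unchanged on CPU-only machines.

device = try_gpu()                   # gpu(0) if available, otherwise cpu()
X = np.ones((2, 3), ctx=device)      # allocate the tensor directly on `device`

net = nn.Sequential()
net.add(nn.Dense(1))
net.initialize(ctx=device)           # parameters live on the same device
print(net(X))                        # the forward pass runs where the data lives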
Example #5
def try_all_gpus():  #@save
    """Return all available GPUs, or [cpu()] if no GPU exists."""
    devices = [npx.gpu(i) for i in range(npx.num_gpus())]
    return devices if devices else [npx.cpu()]
Example #6
def test_np_fallback_decorator():
    @numpy_fallback
    def dnp_func(a, b=None, split_inputs=(), ret_type=list):
        """
        Dummy Doc:
        dnp_func is using the same np.xxx operators
        """
        ret_lst = []
        # unsupported indexing case
        ret_lst.append(a[:, a[1, :] > 0])
        # unsupported operator
        ret_lst.append(np.nonzero(b))
        # unsupported operator case
        ret_lst.append(tuple(np.split(split_inputs[0], split_inputs[1])))

        return ret_type(ret_lst)

    def onp_func(a, b=None, split_inputs=(), ret_type=list):
        ret_lst = []
        ret_lst.append(a[:, a[1, :] > 0])
        ret_lst.append(_np.nonzero(b))
        ret_lst.append(tuple(_np.split(split_inputs[0], split_inputs[1])))
        return ret_type(ret_lst)

    def get_indices(axis_size):
        if axis_size == 0:
            axis_size = random.randint(3, 6)
        samples = random.randint(1, axis_size - 1)
        indices = sorted(
            random.sample([i for i in range(1, axis_size)], samples))
        indices = tuple(indices)
        return indices

    ret_type = list if random.uniform(0.0, 1.0) > 0.5 else tuple
    mx_a = np.array([[1, 2, 3], [3, 4, 5]])
    np_b = _np.random.uniform(size=(3, 4)) > 0.5
    mx_b = np.array(np_b, dtype=np_b.dtype)
    mx_c_len = random.randint(5, 20)
    mx_c = np.random.uniform(size=(mx_c_len, ))
    mx_indices = np.array(get_indices(mx_c_len), dtype=np.int64)
    assert dnp_func.__doc__ is not None
    assert 'onp' not in dnp_func.__doc__
    fallback_ret = dnp_func(mx_a,
                            b=mx_b,
                            split_inputs=(mx_c, mx_indices),
                            ret_type=ret_type)
    onp_ret = onp_func(mx_a.asnumpy(),
                       b=mx_b.asnumpy(),
                       split_inputs=(mx_c.asnumpy(), mx_indices.asnumpy()),
                       ret_type=ret_type)
    for fallback_out, onp_out in zip(fallback_ret, onp_ret):
        if isinstance(fallback_out, (list, tuple)):
            for fallback_item, onp_item in zip(fallback_out, onp_out):
                assert fallback_item.ctx == mx.context.current_context(
                ), "incorrect output context %s vs desired %s" % (str(
                    fallback_item.ctx), str(mx.context.current_context()))
                assert isinstance(fallback_item, np.ndarray)
                assert_almost_equal(fallback_item.asnumpy(),
                                    onp_item,
                                    rtol=1e-3,
                                    atol=1e-5,
                                    equal_nan=False)
        else:
            assert fallback_out.ctx == mx.context.current_context(
            ), "incorrect output context %s vs desired %s" % (str(
                fallback_out.ctx), str(mx.context.current_context()))
            assert isinstance(fallback_out, np.ndarray)
            assert_almost_equal(fallback_out.asnumpy(),
                                onp_out,
                                rtol=1e-3,
                                atol=1e-5,
                                equal_nan=False)

    # does not support mixed-context inputs
    assertRaises(AssertionError,
                 dnp_func,
                 mx_a.as_in_ctx(npx.cpu(0)),
                 b=mx_b,
                 split_inputs=(mx_c, mx_indices),
                 ret_type=ret_type)
    assertRaises(AssertionError,
                 dnp_func,
                 mx_a,
                 b=mx_b,
                 split_inputs=(mx_c.as_in_ctx(npx.cpu(0)),
                               mx_indices.as_in_ctx(npx.gpu(0))),
                 ret_type=ret_type)

    @numpy_fallback
    def empty_ret_func():
        return

    # does not support functions with no return values
    assertRaises(ValueError, empty_ret_func)
Example #7
File: gpu_mxnet.py Project: nikisix/d2l
import mxnet as mx
from mxnet import np, npx
from mxnet.gluon import nn
npx.set_np()

print(npx.gpu(0))


def gpu_device(gpu_number=0):
    try:
        _ = mx.nd.array([1, 2, 3], ctx=mx.gpu(gpu_number))
    except mx.MXNetError:
        return None
    return mx.gpu(gpu_number)


print(gpu_device())

################################################################################
# NNs on gpus
################################################################################

net = nn.Sequential()
net.add(nn.Dense(1))
net.initialize(ctx=npx.gpu())

X = np.ones((2, 3), ctx=npx.gpu())

print(net(X))
"""
Exercises
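
A short sketch (an addition, not from the original file) of how gpu_device() is typically combined with a CPU fallback; the helper returns None when no GPU is usable, so `or` picks the CPU context instead.

ctx = gpu_device() or mx.cpu()       # fall back to the CPU when no GPU is usable
Y = np.ones((2, 3), ctx=ctx)
print(Y.ctx)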
Example #8
import config
from model import Generator
from model import Discriminator
from mxnet import npx
from mxnet import gluon
from mxnet import init
from tqdm import tqdm
import mxnet as mx
import numpy as np
from mxnet.optimizer import Adam
from mxnet.gluon.data import DataLoader
from mxnet.gluon.loss import SigmoidBCELoss
from engine import train_generator
from engine import train_discriminator

device = npx.gpu() if npx.num_gpus() > 0 else npx.cpu()

gen = Generator()
gen.collect_params().initialize(init=init.Normal(sigma=0.02),
                                force_reinit=True,
                                ctx=device)
# noise = random.randn(1, 100, 1, 1)
# output = gen(noise)
# print(output.shape)

dis = Discriminator()
dis.collect_params().initialize(init=init.Normal(sigma=0.02),
                                force_reinit=True,
                                ctx=device)
# noise = random.randn(1, 3, 64, 64)
# output = dis(noise)
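
A hedged continuation that wires up the remaining imports (SigmoidBCELoss, Adam, gluon.Trainer); the learning rate and beta1 below are typical DCGAN defaults, not values read from this project's config module.

loss_fn = SigmoidBCELoss()  # real/fake binary cross-entropy
# learning_rate and beta1 are assumed values, not taken from config
gen_trainer = gluon.Trainer(gen.collect_params(),
                            Adam(learning_rate=0.0002, beta1=0.5))
dis_trainer = gluon.Trainer(dis.collect_params(),
                            Adam(learning_rate=0.0002, beta1=0.5))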
Example #9
def try_gpu(i=0):
    """
    Return gpu(i) if it exists, else return cpu()
    """
    return npx.gpu(i) if npx.num_gpus() >= i + 1 else npx.cpu()
Example #10
def try_all_gpus():
    """Return all available GPUs, or [cpu(),] if no GPU exists.
       Copy from d2l library"""
    ctxes = [npx.gpu(i) for i in range(npx.num_gpus())]
    return ctxes if ctxes else [npx.cpu()]
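
A brief usage sketch (an addition, assuming the same `from mxnet import np, npx` and `npx.set_np()` setup as in Example #4): place one shard of data on each device returned by the helper, falling back to a single CPU context when no GPU exists.

devices = try_all_gpus()                           # e.g. [gpu(0), gpu(1)] or [cpu()]
shards = [np.ones((2, 3), ctx=d) for d in devices]
print([s.ctx for s in shards])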