def try_gpu(i=0): """如果存在,则返回gpu(i),否则返回cpu()。""" return npx.gpu(i) if npx.num_gpus() >= i + 1 else npx.cpu()
def try_all_gpus(): """返回所有可用的GPU,如果没有GPU,则返回[cpu()]。""" devices = [npx.gpu(i) for i in range(npx.num_gpus())] return devices if devices else [npx.cpu()]
def try_gpu(i=0):  #@save
    """Return gpu(i) if it exists, otherwise return cpu()."""
    return npx.gpu(i) if npx.num_gpus() >= i + 1 else npx.cpu()

def try_all_gpus():  #@save
    """Return all available GPUs, or [cpu()] if no GPU exists."""
    devices = [npx.gpu(i) for i in range(npx.num_gpus())]
    return devices if devices else [npx.cpu()]
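# A minimal usage sketch (an addition, not from the snippets above): it assumes
# the `np`/`npx` aliases from `from mxnet import np, npx` and shows how
# try_gpu()/try_all_gpus() might be used to place an array on the chosen device.
from mxnet import np, npx

device = try_gpu()                  # gpu(0) if available, otherwise cpu()
x = np.ones((2, 3), ctx=device)     # allocate directly on that device
print(x.ctx, try_all_gpus())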
import config
from model import Generator
from model import Discriminator
from mxnet import npx
from mxnet import gluon
from mxnet import init
from tqdm import tqdm
import mxnet as mx
import numpy as np
from mxnet.optimizer import Adam
from mxnet.gluon.data import DataLoader
from mxnet.gluon.loss import SigmoidBCELoss
from engine import train_generator
from engine import train_discriminator

device = npx.gpu() if npx.num_gpus() > 0 else npx.cpu()

gen = Generator()
gen.collect_params().initialize(init=init.Normal(sigma=0.02),
                                force_reinit=True, ctx=device)
# noise = random.randn(1, 100, 1, 1)
# output = gen(noise)
# print(output.shape)

dis = Discriminator()
dis.collect_params().initialize(init=init.Normal(sigma=0.02),
                                force_reinit=True, ctx=device)
# noise = random.randn(1, 3, 64, 64)
# output = dis(noise)
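# A hedged smoke test (a sketch, not part of the original script): the shapes
# follow the commented-out checks above and assume a DCGAN-style `model` module,
# i.e. Generator maps (N, 100, 1, 1) noise to (N, 3, 64, 64) images and
# Discriminator consumes (N, 3, 64, 64) images.
noise = mx.nd.random.normal(loc=0, scale=1, shape=(1, 100, 1, 1), ctx=device)
fake = gen(noise)
print(fake.shape)    # expected (1, 3, 64, 64) under the DCGAN assumption
score = dis(fake)
print(score.shape)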
def try_gpu(i=0):
    """Return gpu(i) if it exists, else return cpu()."""
    return npx.gpu(i) if npx.num_gpus() >= i + 1 else npx.cpu()

def try_all_gpus():
    """Return all available GPUs, or [cpu(),] if no GPU exists. Copied from the d2l library."""
    ctxes = [npx.gpu(i) for i in range(npx.num_gpus())]
    return ctxes if ctxes else [npx.cpu()]