# --- MNIST GAN training setup -------------------------------------------------
# Builds a 28x28 DCGAN generator paired with a LeNet encoder/discriminator that
# uses a minibatch-discrimination layer, wraps them in a GANModule, and begins
# constructing the MNIST data iterator.  (The MNISTIter call is continued
# beyond this chunk.)

# Hyperparameters.
lr = 0.0005                         # Adam learning rate
beta1 = 0.5                         # Adam beta1 (common DCGAN choice)
batch_size = 100
rand_shape = (batch_size, 100)      # latent noise: 100-dim code per sample
num_epoch = 100
data_shape = (batch_size, 1, 28, 28)  # MNIST: single-channel 28x28 images
context = mx.gpu()

logging.basicConfig(level=logging.DEBUG, format='%(asctime)-15s %(message)s')

# Generator emits images through a sigmoid — presumably to match pixel values
# scaled into [0, 1]; confirm against the data iterator's normalization.
sym_gen = generator.dcgan28x28(oshape=data_shape, ngf=32, final_act="sigmoid")

# NOTE(review): this rebinds the name `encoder` from the imported module to a
# symbol, shadowing the module for the rest of the script — works, but a
# distinct variable name (e.g. `sym_enc`) would be clearer.
encoder = encoder.lenet()
# Minibatch discrimination layer (Salimans et al.) appended to the encoder.
encoder = ops.minibatch_layer(encoder, batch_size, num_kernels=100)

gmod = module.GANModule(sym_gen, symbol_encoder=encoder, context=context, data_shape=data_shape, code_shape=rand_shape)
gmod.init_params(mx.init.Xavier(factor_type="in", magnitude=2.34))
gmod.init_optimizer(optimizer="adam", optimizer_params={
    "learning_rate": lr,
    "wd": 0.,
    "beta1": beta1,
})

# MNIST data iterator (call continues past this chunk; arguments incomplete here).
data_dir = './../../mxnet/example/image-classification/mnist/'
train = mx.io.MNISTIter(image=data_dir + "train-images-idx3-ubyte", label=data_dir + "train-labels-idx1-ubyte",
ngf = 64 lr = 0.0003 beta1 = 0.5 batch_size = 100 rand_shape = (batch_size, 100) num_epoch = 100 data_shape = (batch_size, 3, 32, 32) context = mx.gpu() logging.basicConfig(level=logging.DEBUG, format='%(asctime)-15s %(message)s') sym_gen = generator.dcgan32x32(oshape=data_shape, ngf=ngf, final_act="tanh") sym_dec = encoder.dcgan(ngf=ngf / 2) gmod = module.GANModule(sym_gen, sym_dec, context=context, data_shape=data_shape, code_shape=rand_shape) gmod.modG.init_params(mx.init.Normal(0.05)) gmod.modD.init_params(mx.init.Xavier(factor_type="in", magnitude=2.34)) gmod.init_optimizer(optimizer="adam", optimizer_params={ "learning_rate": lr, "wd": 0., "beta1": beta1, }) data_dir = './../../mxnet/example/image-classification/cifar10/' train = mx.io.ImageRecordIter(path_imgrec=data_dir + "train.rec",