"""Training-script header for the CUB-200 128x128 GAN-autoencoder (CUB128GANAE).

Sets up imports, output folders for generated/real images, a model checkpoint
directory, and the training hyperparameters. The model definition and training
loop follow later in the file.
"""
import tensorflow as tf
from nn.layers import conv2d, linear, flatten, nnupsampling, batchnorm, gaussnoise, pool
from nn.activations import lrelu
from op import log_sum_exp
from data_loader import train_loader, validation_loader
from neon.backends import gen_backend
import numpy as np
from utils import drawblock, createfolders, OneHot, image_reshape
from imageio import imsave
import os

# Create folders to store generated and real images.
# NOTE(review): createfolders presumably returns one path per suffix — confirm in utils.
gen_dir, real_dir, gen_dir128 = createfolders("./genimgs/CUB128GANAE", "/gen", "/real", "/gen128")

# Create folder to store model checkpoints.
dir_name = './models/CUB128GANAE'
if not os.path.exists(dir_name):
    os.mkdir(dir_name)

# Training hyperparameters.
init_iter, max_iter = 0, 30000  # start/end iteration (init_iter > 0 would resume)
display_iter = 100              # iterations between console log lines
eval_iter = 100                 # iterations between evaluation passes
store_img_iter = 100            # iterations between image dumps
save_iter = 1000                # iterations between checkpoint saves
lr_init = 0.0002                # initial learning rate
batch_size = 100
zdim = 100                      # latent noise dimensionality
n_classes = 200                 # CUB-200 has 200 bird classes
dropout = 0.2
"""Sampling-script header for the CUB-200 128x128 GAN-autoencoder (CUB128GANAEsample).

Builds the graph inputs (random latent noise plus fixed one-hot class labels)
used to draw samples from a trained generator. The generator definition starts
at the bottom of this section and continues later in the file.
"""
import tensorflow as tf
from layers import conv2d, linear, nnupsampling, batchnorm, pool
from activations import lrelu
import numpy as np
from utils import drawblock, createfolders
from imageio import imsave
import os

# Create folders to store generated images.
gen_dir, gen_dir128 = createfolders("./genimgs/CUB128GANAEsample", "/gen", "/gen128")

# Sampling parameters.
batch_size = 100
zdim = 100            # latent noise dimensionality
n_classes = 200       # CUB-200 has 200 bird classes
im_size = [64, 64]
gname = 'g_'
tf.set_random_seed(
    5555)  # use a different seed to generate a different set of images

# Graph input: uniform latent noise in [-1, 1).
z = tf.random_uniform([batch_size, zdim], -1, 1)
# iny = tf.constant(np.eye(n_classes, dtype=np.float32)[:batch_size, :])  # uncomment to generate the first 100 classes
iny = tf.constant(np.eye(n_classes, dtype=np.float32)[
    batch_size:, :])  # one-hot labels for the second 100 classes


# Generator network (definition continues beyond this section).
def generator(inp_z, inp_y, reuse=False):
    with tf.variable_scope('Generator', reuse=reuse):
"""Training-script header for the CIFAR-10 64x64 GAN-autoencoder (CIFAR64GANAE).

Sets up imports, output folders for generated/real images, a model checkpoint
directory, and the training hyperparameters. The model definition and training
loop follow later in the file.
"""
import tensorflow as tf
from layers import conv2d, linear, flatten, nnupsampling, batchnorm, gaussnoise, pool
from activations import lrelu
from op import log_sum_exp
from data_loader import train_loader, validation_loader
from neon.backends import gen_backend
import numpy as np
from utils import drawblock, createfolders, OneHot, image_reshape
# scipy.misc.imsave was deprecated and removed in SciPy >= 1.2; use imageio
# instead, matching the sibling CUB128GANAE scripts.
from imageio import imsave
import os

# Create folders to store generated and real images.
gen_dir, real_dir, gen_dir64 = createfolders("./genimgs/CIFAR64GANAE", "/gen", "/real", "/gen64")

# Create folder to store model checkpoints.
dir_name = './models/CIFAR64GANAE'
if not os.path.exists(dir_name):
    os.mkdir(dir_name)

# Training hyperparameters.
init_iter, max_iter = 0, 70000  # start/end iteration (init_iter > 0 would resume)
display_iter = 100              # iterations between console log lines
eval_iter = 100                 # iterations between evaluation passes
store_img_iter = 100            # iterations between image dumps
save_iter = 1000                # iterations between checkpoint saves
lr_init = 0.0002                # initial learning rate
batch_size = 100
zdim = 100                      # latent noise dimensionality
n_classes = 10                  # CIFAR-10 has 10 classes
dropout = 0.2
"""Sampling-script header for the CIFAR-10 64x64 GAN-autoencoder (CIFAR64GANAEsample).

Builds the graph inputs (random latent noise plus one-hot class labels cycling
through all 10 classes) used to draw samples from a trained generator. The
generator definition starts at the bottom of this section and continues later
in the file.
"""
import tensorflow as tf
from nn.layers import conv2d, linear, nnupsampling, batchnorm, pool
from nn.activations import lrelu
import numpy as np
from utils import drawblock, createfolders
from imageio import imsave
import os

# Create folders to store generated images.
# NOTE(review): the second folder suffix is "/gen64" although the variable is
# named gen_dir128 — name kept to avoid breaking later references.
gen_dir, gen_dir128 = createfolders("./genimgs/CIFAR64GANAEsample", "/gen", "/gen64")

# Sampling parameters.
batch_size = 100
zdim = 100       # latent noise dimensionality
n_classes = 10   # CIFAR-10 has 10 classes
gname = 'g_'
tf.set_random_seed(
    5555)  # use a different seed to generate a different set of images

# Graph input: uniform latent noise in [-1, 1).
z = tf.random_uniform([batch_size, zdim], -1, 1)
# One-hot labels cycling through all classes, truncated to batch_size rows.
# Floor division (//) is required: under Python 3, / yields a float and
# np.tile raises TypeError on a non-integer repetition count.
iny = tf.constant(
    np.tile(np.eye(n_classes, dtype=np.float32),
            [batch_size // n_classes + 1, 1])[:batch_size, :])


# Generator network (definition continues beyond this section).
def generator(inp_z, inp_y, reuse=False):
    with tf.variable_scope('Generator', reuse=reuse):
        inp = tf.concat([inp_z, inp_y], 1)