# --- Data loading (CUB-200) ---
be = gen_backend(backend='cpu', batch_size=batch_size, datatype=np.float32)
root_files = './dataset/cub200'
manifestfile = os.path.join(root_files, 'train-index.csv')
testmanifest = os.path.join(root_files, 'val-index.csv')
train = train_loader(manifestfile, root_files, be,
                     h=im_size[0], w=im_size[1], scale=[0.875, 0.875])
test = validation_loader(testmanifest, root_files, be,
                         h=im_size[0], w=im_size[1], scale=[0.875, 0.875],
                         ncls=n_classes)
# NOTE(review): rebinding the class name `OneHot` to an instance shadows the
# class for the rest of the module -- confirm no later code needs the class.
OneHot = OneHot(be, n_classes)

# --- Graph inputs ---
is_train = tf.placeholder(tf.bool)      # train/eval switch (e.g. for batchnorm)
keep_prob = tf.placeholder(tf.float32)  # dropout keep probability
x_n = tf.placeholder(tf.float32, [batch_size, 3, im_size[0], im_size[1]])  # CHW images
y = tf.placeholder(tf.float32, [batch_size, n_classes])   # one-hot labels
lr_tf = tf.placeholder(tf.float32)                        # learning rate
z = tf.random_uniform([batch_size, zdim], -1, 1)          # generator noise
iny = tf.placeholder(tf.float32, [batch_size, n_classes]) # generator class labels
# --- Hyperparameters ---
zdim = 100          # latent (noise) vector dimensionality
n_classes = 10      # CIFAR-10 class count
dropout = 0.2       # dropout rate
im_size = [32, 32]  # input image height, width
dname, gname = 'd_', 'g_'  # variable-scope prefixes for discriminator/generator
tf.set_random_seed(1234)   # reproducible graph-level randomness

# --- Data loading (CIFAR-10) ---
be = gen_backend(backend='cpu', batch_size=batch_size, datatype=np.float32)
root_files = './dataset/Cifar10'
manifestfile = os.path.join(root_files, 'train-index.csv')
testmanifest = os.path.join(root_files, 'val-index.csv')
train = train_loader(manifestfile, root_files, be, h=im_size[0], w=im_size[1])
test = validation_loader(testmanifest, root_files, be, h=im_size[0], w=im_size[1])
# NOTE(review): rebinding the class name `OneHot` to an instance shadows the
# class for the rest of the module -- confirm no later code needs the class.
OneHot = OneHot(be, n_classes)

# --- Graph inputs ---
is_train = tf.placeholder(tf.bool)      # train/eval switch (e.g. for batchnorm)
keep_prob = tf.placeholder(tf.float32)  # dropout keep probability
x_n = tf.placeholder(tf.float32, [batch_size, 3, im_size[0], im_size[1]])  # CHW images
y = tf.placeholder(tf.float32, [batch_size, n_classes])  # one-hot labels
lr_tf = tf.placeholder(tf.float32)                       # learning rate
z = tf.random_uniform([batch_size, zdim], -1, 1)         # generator noise
# Fixed one-hot label cycle for the generator: tile the identity matrix so the
# classes repeat in round-robin order, then truncate to exactly batch_size rows.
# BUG FIX: `batch_size / n_classes` is true (float) division under Python 3, and
# np.tile rejects float repetition counts -- use integer floor division `//`.
iny = tf.constant(
    np.tile(np.eye(n_classes, dtype=np.float32),
            [batch_size // n_classes + 1, 1])[:batch_size, :])