def z_iterator_visualize_model(self, nr_iterations=1000, plot_iteration_error=True):
    """Sample images from the generator, then optimize the latent z vector
    and compare the images before and after optimization.

    Saves three grids to self.logs_dir: the initial samples, the samples
    after z optimization, and the absolute pixel difference between them.

    Args:
        nr_iterations: number of z-iterator optimization steps to run.
        plot_iteration_error: if True, show a plot of generator loss
            versus optimization step.
    """
    print("Sampling images from model...")
    batch_z = np.random.uniform(
        -1.0, 1.0, size=[self.batch_size, self.z_dim]).astype(np.float32)
    feed_dict = {self.z_vec: batch_z, self.train_phase: False}
    # Seed the iterated-z variable from the random batch.
    self.sess.run(self.init_z_iterator, feed_dict=feed_dict)
    images = self.sess.run(self.gen_images_out, feed_dict={self.train_phase: False})
    images = utils.unprocess_image(images, 127.5, 127.5).astype(np.uint8)
    shape = [4, self.batch_size // 4]
    utils.save_imshow_grid(images, self.logs_dir, "generated_z_iterator.png", shape=shape)
    iterator_loss_store = []
    # range(), not the Python-2-only xrange(), so this runs on Python 3.
    for i in tqdm(range(nr_iterations)):
        feed_dict = {self.train_phase: False}
        _, iterator_loss = self.sess.run(
            [self.z_iterator_train_op, self.gen_loss], feed_dict=feed_dict)
        iterator_loss_store.append(iterator_loss)
        if i == 0:
            print("begining loss is " + str(iterator_loss))
    print("final loss is " + str(iterator_loss))
    images_iter = self.sess.run(self.gen_images_out, feed_dict=feed_dict)
    images_iter = utils.unprocess_image(images_iter, 127.5, 127.5).astype(np.uint8)
    print("diff is " + str(np.sum(images_iter - images)))
    shape = [4, self.batch_size // 4]
    utils.save_imshow_grid(images_iter, self.logs_dir,
                           "generated_z_iterator_after.png", shape=shape)
    # Cast to Python float (np.float was removed in NumPy 1.20) so the
    # subtraction does not wrap around in uint8 before taking abs().
    utils.save_imshow_grid(
        np.abs(images_iter.astype(float) - images.astype(float)).astype(np.uint8),
        self.logs_dir, "generated_z_iterator_dif.png", shape=shape)
    if plot_iteration_error:
        fig = plt.figure(2)
        iterator_loss_store = np.array(iterator_loss_store)
        plt.plot(iterator_loss_store)
        plt.xlabel("iteration step")
        plt.ylabel("error (lower better)")
        plt.title("Error vs z iteration step")
        plt.show(fig)
def visualize_model(self):
    """Draw one random latent batch, decode it with the generator, and
    save the resulting images as a single grid in the logs directory."""
    print("Sampling images from model...")
    z_sample = np.random.uniform(
        -1.0, 1.0, size=[self.batch_size, self.z_dim]).astype(np.float32)
    generated = self.sess.run(
        self.gen_images,
        feed_dict={self.z_vec: z_sample, self.train_phase: False})
    # Map from the generator's [-1, 1] range back to uint8 pixels.
    generated = utils.unprocess_image(generated, 127.5, 127.5).astype(np.uint8)
    grid_shape = [4, self.batch_size // 4]
    utils.save_imshow_grid(generated, self.logs_dir, "generated.png", shape=grid_shape)
def train_model(self, learning_rate, beta1, beta2, epsilon, max_iteration, check_point):
    """Run style-transfer optimization, yielding intermediate images.

    Builds the VGG graph on a trainable input image initialized with
    scaled Gaussian noise, then minimizes content + style + total-variation
    loss with Adam, logging scalar summaries each step.

    Args:
        learning_rate: Adam learning rate.
        beta1, beta2, epsilon: remaining Adam hyperparameters.
        max_iteration: total number of optimization steps.
        check_point: yield an intermediate image whenever the step index is
            a multiple of this value (falsy disables intermediate yields);
            the final step is always yielded.

    Yields:
        (step_index, output_image) at each checkpoint and at the last step.
    """
    with tf.Graph().as_default():
        # Initialize the trainable image with a random guess.
        logger.info('Initializing tensorflow graph with random guess......')
        # NOTE: the original code also computed an unused NumPy noise array
        # here; removed — the initial guess comes from tf.random_normal.
        initial_guess = tf.random_normal(self.content_shape) * 0.256
        input_image = tf.Variable(initial_guess)
        parsed_net = self.vgg.load_net(input_image)

        # Total loss = content + style + total-variation smoothing.
        content_loss = self._calculate_content_loss(parsed_net)
        style_loss = self._calculate_style_loss(parsed_net)
        tv_loss = self._calculate_tv_loss(input_image)
        loss = content_loss + style_loss + tv_loss

        # Scalar summaries for TensorBoard.
        tf.summary.scalar('content_loss', content_loss)
        tf.summary.scalar('style_loss', style_loss)
        tf.summary.scalar('tv_loss', tv_loss)
        tf.summary.scalar('total_loss', loss)
        summary_loss = tf.summary.merge_all()

        # Initialize optimization.
        train_step = tf.train.AdamOptimizer(
            learning_rate, beta1, beta2, epsilon).minimize(loss)

        with tf.Session() as session:
            summary_writer = tf.summary.FileWriter('logs/neural_network',
                                                   session.graph)
            logger.info('Saving graph......')
            session.run(tf.global_variables_initializer())
            logger.info('Initializing optimization......')
            logger.info('Current total loss: {}'.format(loss.eval()))
            for k in range(max_iteration):
                logger.info('Iteration {} total loss {}'.format(
                    str(k + 1), loss.eval()))
                train_step.run()
                summary = session.run(summary_loss)
                summary_writer.add_summary(summary, k)
                # Save intermediate images at checkpoints and at the end.
                if (check_point and (not k % check_point)) or k == max_iteration - 1:
                    output_temp = input_image.eval()
                    output_image = unprocess_image(
                        output_temp.reshape(self.content_shape[1:]),
                        self.vgg.mean_pixel)
                    yield k, output_image
def visualize_model(self, iterations, shape=None):
    """Sample one grid of generated images per class and save each grid.

    Args:
        iterations: current training iteration, embedded in the filename.
        shape: grid layout [rows, cols] for the saved image; defaults to
            [4, 16]. (None sentinel avoids a mutable default argument.)
    """
    if shape is None:
        shape = [4, 16]
    print("Sampling images from model...")
    batch_z = np.random.uniform(
        -1.0, 1.0, size=[self.batch_size, self.z_dim]).astype(np.float32)
    feed_dict = {self.z_vec: batch_z, self.train_phase: False}
    # Reuse the same latent batch for every class label.
    for cls in range(self.num_cls):
        feed_dict[self.class_num] = cls
        images = self.sess.run(self.gen_images, feed_dict=feed_dict)
        images = utils.unprocess_image(images, 127.5, 127.5).astype(np.uint8)
        save_img_fn = "generated_cls" + str(cls) + "_" + str(
            int(iterations)) + ".png"
        utils.save_imshow_grid(images, self.logs_dir, save_img_fn, shape=shape)
def visualize_model(self):
    """Sample a batch from the generator and save every image as an
    individual JPEG file in the logs directory.

    NOTE: the original body contained a grid-save call after a bare
    `return`; that code was unreachable and has been removed.
    """
    print("Sampling images from model...")
    batch_z = np.random.uniform(
        -1.0, 1.0, size=[self.batch_size, self.z_dim]).astype(np.float32)
    feed_dict = {self.z_vec: batch_z, self.train_phase: False}
    images = self.sess.run(self.gen_images, feed_dict=feed_dict)
    images = utils.unprocess_image(images, 127.5, 127.5).astype(np.uint8)
    for i, image in enumerate(images):
        Image.fromarray(image).save(
            os.path.join(self.logs_dir, "%d_ge_testn.jpg" % i))
def visualize_model(self, logdir=None):
    """Sample a batch from the generator; save one grid image plus one
    PNG file per generated image.

    Args:
        logdir: output directory; falls back to self.logs_dir when falsy.
    """
    if not logdir:
        logdir = self.logs_dir
    print(self.root_scope_name + "Sampling images from model...")
    batch_z = np.random.uniform(
        -1.0, 1.0, size=[self.batch_size, self.z_dim]).astype(np.float32)
    feed_dict = {self.z_vec: batch_z, self.train_phase: False}
    images = self.sess.run(self.gen_images, feed_dict=feed_dict)
    images = utils.unprocess_image(images, 127.5, 127.5).astype(np.uint8)
    shape = [4, self.batch_size // 4]
    utils.save_imshow_grid(images, logdir, "generated_palette.png", shape=shape)
    # NOTE(review): scipy.misc.imsave was removed in SciPy 1.2; switch to
    # imageio.imwrite if this project upgrades SciPy.
    # Filenames are 1-based, matching the original "%d" % (i + 1) naming.
    for idx, image in enumerate(images, start=1):
        scipy.misc.imsave(
            os.path.join(logdir, "generated_image%d.png" % idx), image)