with tf.control_dependencies(update_ops):
    d_train = optimizer.minimize(d_loss, var_list=d_vars)

############ summary writing ########################################
with tf.name_scope('summaries'):
    tf.summary.scalar('wasserstein_scaled', wasserstein_scaled)
    tf.summary.scalar('wasserstein', wasserstein)
    tf.summary.scalar('g_loss', g_loss)
    tf.summary.scalar('d_loss', d_loss)
    tf.summary.scalar('d_regularizer_niqe', niqe_score_mean_grad)
    tf.summary.scalar('d_regularizer_gp', d_regularizer1)
    tf.summary.scalar('learning_rate', learning_rate)
    tf.summary.scalar('added_regularizer', added_regularizer)
    tf.summary.scalar('global_step', global_step)

    atf.image_grid_summary('x_generated', x_generated)

    merged_summary = tf.summary.merge_all()

############### initialize the variables ################
sess.run([tf.global_variables_initializer(),
          tf.local_variables_initializer()])

############ coordinate the loading of the image files #########
coord = tf.train.Coordinator()
threads = tf.train.start_queue_runners(coord=coord)

########### add ops to save and restore the model ##############################
saver = tf.train.Saver(max_to_keep=10)

######## to restore a checkpoint, set reset = False and i to the model number (e.g. i = 1000) #####
if not reset:
    nn = name + "/model.ckpt-" + str(i)
    saver.restore(sess, nn)
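############ hedged sketch: minimal training loop (assumed, not from the original script) ############
# A minimal sketch of how the ops defined above (d_train, merged_summary, saver)
# are typically driven in a TF1-style loop. The names `g_train`, `n_iter`,
# `save_every` and `summary_writer` are assumptions introduced here for
# illustration; only `d_train`, `merged_summary`, `saver`, `sess` and `name`
# come from the listing above.
summary_writer = tf.summary.FileWriter(name, sess.graph)

for it in range(n_iter):
    # Typical WGAN schedule: several critic updates per generator update.
    for _ in range(5):
        sess.run(d_train)
    _, summary = sess.run([g_train, merged_summary])
    summary_writer.add_summary(summary, it)

    if it % save_every == 0:
        saver.save(sess, name + "/model.ckpt", global_step=it)

summary_writer.close()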
    tf.summary.scalar('d_loss', d_loss)
    atf.scalars_summary('d_true', d_true)
    atf.scalars_summary('d_generated', d_generated)
    tf.summary.scalar('d_regularizer', d_regularizer)
    tf.summary.scalar('d_regularizer_mean', d_regularizer_mean)
    tf.summary.scalar('learning_rate', learning_rate)
    tf.summary.scalar('global_step', global_step)

    atf.scalars_summary('x_generated', x_generated)
    atf.scalars_summary('x_true', x_true)
    atf.scalars_summary('gamma', gamma)
    atf.scalars_summary('lamb', lamb)

    atf.image_grid_summary('x_true', x_true)
    atf.image_grid_summary('x_generated', x_generated)
    atf.image_grid_summary('gradients', gradients)
    atf.image_grid_summary('dual_sobolev_gradients', dual_sobolev_gradients)

    atf.scalars_summary('ddx', ddx)
    atf.scalars_summary('gradients', gradients)
    atf.scalars_summary('dual_sobolev_gradients', dual_sobolev_gradients)

    merged_summary = tf.summary.merge_all()

# Advanced metrics
with tf.name_scope('inception'):
    # Specific function to compute inception score for very large
    # number of samples
    def generate_and_classify(z):