# discriminator loss
loss_d_r = tf.reduce_mean(
    tf.nn.sigmoid_cross_entropy_with_logits(logits=disc_real, labels=y_real))
loss_d_f = tf.reduce_mean(
    tf.nn.sigmoid_cross_entropy_with_logits(logits=disc_fake, labels=y_fake))
loss_d = (loss_d_r + loss_d_f) / 2
print('loss_d', loss_d.get_shape())

# generator loss: the generator tries to make the discriminator
# label its samples as real
loss_g = tf.reduce_mean(
    tf.nn.sigmoid_cross_entropy_with_logits(logits=disc_fake, labels=y_real))

# categorical factor loss (mutual-information term for the discrete code)
loss_c = tf.reduce_mean(
    tf.nn.sparse_softmax_cross_entropy_with_logits(logits=cat_fake,
                                                   labels=z_cat))

# continuous factor loss (reconstruction of the continuous code)
loss_con = tf.reduce_mean(tf.square(con_fake - z_con))

# separate train ops for the two sub-networks; both share the
# mutual-information terms. 'Adm' is passed through to the custom
# optim() helper, which presumably maps it to the Adam optimizer.
train_disc, disc_global_step = optim(loss_d + loss_c + loss_con,
                                     lr=0.0001, optim='Adm',
                                     category='discriminator')
train_gen, gen_global_step = optim(loss_g + loss_c + loss_con,
                                   lr=0.001, optim='Adm',
                                   category='generator')

init = tf.global_variables_initializer()
saver = tf.train.Saver()
print(train_gen)

cur_epoch = 0
cur_step = 0
with tf.Session() as sess:
    sess.run(init)
    coord, threads = queue_context(sess)
    try:
        while not coord.should_stop():
            cur_step += 1
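# The optim() helper used above is not part of this excerpt. Below is a
# minimal sketch of what it could look like -- an assumption, not the
# original implementation. It guesses that the generator and discriminator
# were built inside tf.variable_scope('generator') / ('discriminator'),
# that optim='Adm' is a (misspelled) request for Adam, and it would need
# to be defined before the train ops above are constructed.
def optim(loss, lr=0.001, optim='Adm', category=''):
    # Gather only the variables of the named sub-network, so the
    # discriminator update does not touch generator weights and vice versa.
    train_vars = [v for v in tf.trainable_variables()
                  if v.name.startswith(category)]
    global_step = tf.Variable(0, trainable=False,
                              name=category + '_global_step')
    # Assumption: any 'Adm' value selects tf.train.AdamOptimizer.
    optimizer = tf.train.AdamOptimizer(learning_rate=lr)
    train_op = optimizer.minimize(loss, var_list=train_vars,
                                  global_step=global_step)
    return train_op, global_step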
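# queue_context() is also missing from the excerpt. Given how it is used
# (returning a coordinator and feeder threads for a TF1 queue-based input
# pipeline), it is most likely the standard queue-runner boilerplate
# sketched here -- again an assumption about the missing helper:
def queue_context(sess):
    # Create a Coordinator and start the threads that fill the input
    # queues; coord.should_stop() then drives the training loop above.
    coord = tf.train.Coordinator()
    threads = tf.train.start_queue_runners(sess=sess, coord=coord)
    return coord, threads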