def optimization(self):
        """Build the training ops for discriminator, generator and encoder.

        Returns:
            Tuple ``(train_discriminator, train_generator, train_encoder)``
            of TensorFlow training ops.
        """
        # The shared optimizer helper builds the discriminator and generator
        # ops; its third return value (mapping-network op) is unused here.
        train_discriminator, train_generator, _ = optimizer(
            self.beta_1,
            self.loss_gen,
            self.loss_dis,
            self.loss_type,
            self.learning_rate_input_d,
            self.learning_rate_input_g,
            None,
            beta_2=self.beta_2,
            gen_name='generator',
            dis_name='discriminator_gen',
            mapping_name='mapping_',
            encoder_name='encoder')

        # Force any pending UPDATE_OPS (e.g. batch-norm moving averages —
        # TODO confirm) to run before each encoder step.
        update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
        with tf.control_dependencies(update_ops):
            # Quick dirty optimizer for Encoder: train only the variables
            # whose names start with the 'encoder' scope prefix.
            encoder_vars = []
            for var in tf.trainable_variables():
                if var.name.startswith('encoder'):
                    encoder_vars.append(var)
            adam = tf.train.AdamOptimizer(
                learning_rate=self.learning_rate_input_e,
                beta1=self.beta_1)
            train_encoder = adam.minimize(self.loss_enc,
                                          var_list=encoder_vars)

        return train_discriminator, train_generator, train_encoder
Code example #2
 def optimization(self):
     """Create the discriminator and generator training ops.

     Returns:
         Tuple ``(train_discriminator, train_generator)`` of training ops.
     """
     # Delegate entirely to the shared optimizer helper; generator learning
     # rate is passed before the discriminator one in this variant.
     train_ops = optimizer(
         self.beta_1,
         self.loss_gen,
         self.loss_dis,
         self.loss_type,
         self.learning_rate_input_g,
         self.learning_rate_input_d,
         beta_2=self.beta_2)
     train_discriminator, train_generator = train_ops
     return train_discriminator, train_generator
Code example #3 — File: SAGAN.py, Project: AdalbertoCq/Pathology-GAN
	def optimization(self):
		"""Build and return the discriminator and generator train ops."""
		# The shared optimizer helper constructs both ops in one call.
		result = optimizer(
			self.beta_1,
			self.loss_gen,
			self.loss_dis,
			self.loss_type,
			self.learning_rate_input_g,
			self.learning_rate_input_d,
			beta_2=self.beta_2)
		train_discriminator, train_generator = result
		return train_discriminator, train_generator