Example 1
0
    def define_graph(self):
        """
        Sets up the discriminator model graph in TensorFlow.

        Builds one DScaleModel per scale, collects their prediction
        tensors, declares the label placeholder, and defines the
        adversarial loss, the SGD train op and TensorBoard summaries.
        All ops live under the 'discriminator' name scope.
        """
        with tf.name_scope('discriminator'):
            ##
            # Setup scale networks. Each will make the predictions for images at a given scale.
            ##

            self.scale_nets = []
            for scale_num in range(self.num_scale_nets):
                with tf.name_scope('scale_net_' + str(scale_num)):
                    # The coarsest net (scale_num == 0) works at
                    # 1 / 2^(num_scale_nets - 1) resolution; the last one
                    # works at full resolution.
                    scale_factor = 1. / 2**(
                        (self.num_scale_nets - 1) - scale_num)
                    self.scale_nets.append(
                        DScaleModel(scale_num, int(self.height * scale_factor),
                                    int(self.width * scale_factor),
                                    self.scale_conv_layer_fms[scale_num],
                                    self.scale_kernel_sizes[scale_num],
                                    self.scale_fc_layer_sizes[scale_num]))

            # A list of the prediction tensors for each scale network.
            self.scale_preds = [net.preds for net in self.scale_nets]

            ##
            # Data
            ##

            # Real/fake labels, one scalar per batch element.
            self.labels = tf.placeholder(tf.float32,
                                         shape=[None, 1],
                                         name='labels')

            ##
            # Training
            ##

            with tf.name_scope('training'):
                # Global loss is the combined loss from every scale network.
                self.global_loss = adv_loss(self.scale_preds, self.labels)
                self.global_step = tf.Variable(0,
                                               trainable=False,
                                               name='global_step')
                self.optimizer = tf.train.GradientDescentOptimizer(
                    c.LRATE_D, name='optimizer')
                self.train_op = self.optimizer.minimize(
                    self.global_loss,
                    global_step=self.global_step,
                    name='train_op')

                # Add summaries to visualize in TensorBoard.
                loss_summary = tf.summary.scalar('loss_D', self.global_loss)
                self.summaries = tf.summary.merge([loss_summary])
    def setup_scale_nets(self):
        """
        Setup scale networks. Each will make the predictions for images at a given scale. Done
        separately from define_graph() so that the generator can define its graph using the
        discriminator scale nets before this defines its graph using the generator.
        """

        self.scale_nets = []
        # range (not Python-2-only xrange) for Python 3 compatibility,
        # matching the other loops in this file.
        for scale_num in range(self.num_scale_nets):
            with tf.name_scope('scale_net_' + str(scale_num)):
                # Resolution multiplier for this scale (e.g. 1/8 ... 1).
                scale_factor = 1. / self.inverse_scale_factor[scale_num]
                scale_model = DScaleModel(scale_num,
                                          int(self.height * scale_factor),
                                          int(self.width * scale_factor),
                                          self.scale_conv_layer_fms[scale_num],
                                          self.scale_kernel_sizes[scale_num],
                                          self.scale_fc_layer_sizes[scale_num])
                self.scale_nets.append(scale_model)

                # Accumulate this scale net's variables so the
                # discriminator train op can optimize them.
                self.train_vars += scale_model.train_vars
Example 3
0
    def define_graph(self):
        """
        Sets up the discriminator model graph in TensorFlow.

        Builds one DScaleModel per scale, collects their predictions,
        declares the label placeholder, and defines the adversarial
        loss and train op. When self.is_w is truthy the model trains as
        a Wasserstein critic: RMSProp replaces SGD and self.clip holds
        the weight-clipping ops that enforce the Lipschitz constraint.
        """
        with tf.name_scope('discriminator'):
            ##
            # Setup scale networks. Each will make the predictions for images at a given scale.
            ##

            self.scale_nets = []
            for scale_num in range(self.num_scale_nets):
                with tf.name_scope('scale_net_' + str(scale_num)):
                    # Coarsest net runs at 1 / 2^(num_scale_nets - 1)
                    # resolution; the last one runs at full resolution.
                    scale_factor = 1. / 2**(
                        (self.num_scale_nets - 1) - scale_num)
                    self.scale_nets.append(
                        DScaleModel(scale_num, int(self.height * scale_factor),
                                    int(self.width * scale_factor),
                                    self.scale_conv_layer_fms[scale_num],
                                    self.scale_kernel_sizes[scale_num],
                                    self.scale_fc_layer_sizes[scale_num],
                                    self.is_w))

            # A list of the prediction tensors for each scale network
            self.scale_preds = []
            for scale_num in range(self.num_scale_nets):
                self.scale_preds.append(self.scale_nets[scale_num].preds)

            ##
            # Data
            ##

            # Real/fake labels, one scalar per batch element.
            self.labels = tf.placeholder(tf.float32,
                                         shape=[None, 1],
                                         name='labels')

            ##
            # Training
            ##

            with tf.name_scope('training'):
                # global loss is the combined loss from every scale network

                self.global_loss = adv_loss(self.scale_preds, self.labels,
                                            self.is_w)
                # NOTE(review): global_step is conventionally an int
                # variable; this float initializer is kept for behavioral
                # compatibility — confirm no caller relies on int dtype.
                self.global_step = tf.Variable(0.0,
                                               trainable=False,
                                               name='global_step')

                # Run any pending UPDATE_OPS (e.g. batch-norm moving
                # averages) before the optimizer step.
                with tf.control_dependencies(
                        tf.get_collection(tf.GraphKeys.UPDATE_OPS)):
                    if self.is_w:
                        # Wasserstein critic: RMSProp, per the WGAN paper.
                        self.optimizer = tf.train.RMSPropOptimizer(
                            self.c.LRATE_D, name='optimizer')
                    else:
                        self.optimizer = tf.train.GradientDescentOptimizer(
                            self.c.LRATE_D, name='optimizer')
                    self.train_op = self.optimizer.minimize(
                        self.global_loss,
                        global_step=self.global_step,
                        name='train_op',
                        var_list=self.discriminator_vars)

                # Weight clipping enforces the 1-Lipschitz constraint for
                # WGAN training; run these ops after each critic update.
                self.clip = [
                    v.assign(tf.clip_by_value(v, -0.01, 0.01))
                    for v in self.discriminator_vars
                ]
                # Add summaries to visualize in TensorBoard.
                loss_summary = tf.summary.scalar('loss_D', self.global_loss)
                self.summaries = tf.summary.merge([loss_summary])
Example 4
0
    def define_graph(self):
        """
        Sets up the discriminator model graph in TensorFlow.

        Builds one DScaleModel per scale, collects their predictions,
        declares the label placeholder, and defines the adversarial
        loss and Adam train op. With c.WASSERSTEIN the critic is made
        1-Lipschitz either via a gradient penalty (c.W_GP) added to the
        loss, or via weight clipping grouped into the train op.
        """
        with tf.name_scope('discriminator'):
            ##
            # Setup scale networks. Each will make the predictions for images at a given scale.
            ##

            self.scale_nets = []
            for scale_num in range(self.num_scale_nets):
                with tf.name_scope('scale_net_' + str(scale_num)):
                    # Coarsest net runs at 1 / 2^(num_scale_nets - 1)
                    # resolution; the last one runs at full resolution.
                    scale_factor = 1. / 2 ** ((self.num_scale_nets - 1) - scale_num)
                    self.scale_nets.append(DScaleModel(scale_num,
                                                       int(self.height * scale_factor),
                                                       int(self.width * scale_factor),
                                                       self.scale_conv_layer_fms[scale_num],
                                                       self.scale_kernel_sizes[scale_num],
                                                       self.scale_fc_layer_sizes[scale_num]))

            # A list of the prediction tensors for each scale network
            self.scale_preds = []
            for scale_num in range(self.num_scale_nets):
                self.scale_preds.append(self.scale_nets[scale_num].preds)

            ##
            # Data
            ##

            # Real/fake labels, one scalar per batch element.
            self.labels = tf.placeholder(tf.float32, shape=[None, 1], name='labels')

            ##
            # Training
            ##

            with tf.name_scope('training'):
                # global loss is the combined loss from every scale network
                self.global_loss = adv_loss(self.scale_preds, self.labels)

                if c.WASSERSTEIN and c.W_GP:
                    # WGAN-GP: penalize the critic's gradient norm at
                    # random interpolates between real and fake inputs.
                    # NOTE(review): a single scalar epsilon is shared by
                    # the whole batch (the WGAN-GP paper samples one per
                    # example) — confirm this is intentional.
                    epsilon = tf.random_uniform([], 0.0, 1.0)
                    grad_penalties = []
                    for scale_net in self.scale_nets:
                        # NOTE(review): assumes input_frames stacks fake
                        # before real along the batch axis — verify
                        # against how input_frames is fed.
                        fake, real = tf.split(scale_net.input_frames, 2)
                        self.x_hat = real * epsilon + (1 - epsilon) * fake
                        self.d_hat = scale_net.generate_predictions(self.x_hat)
                        grad_penalties.append(grad_penality_loss(self.x_hat, self.d_hat))
                    self.global_loss += c.LAM_GP * tf.reduce_mean(grad_penalties)

                self.global_step = tf.Variable(0, trainable=False, name='global_step')
                self.optimizer = tf.train.AdamOptimizer(c.LRATE_D, name='optimizer')
                self.train_op_ = self.optimizer.minimize(self.global_loss,
                                                        global_step=self.global_step,
                                                        name='train_op')

                # Clipping to enforce 1-Lipschitz function (vanilla WGAN
                # only; WGAN-GP relies on the gradient penalty instead).
                if c.WASSERSTEIN and not c.W_GP:
                    with tf.control_dependencies([self.train_op_]):
                        self.train_op = tf.group(*(tf.assign(var, tf.clip_by_value(var, -c.W_Clip, c.W_Clip)) for var in tf.trainable_variables() if 'discriminator' in var.name))
                else:
                    self.train_op = self.train_op_

                # Add summaries to visualize in TensorBoard.
                loss_summary = tf.summary.scalar('loss_D', self.global_loss)
                self.summaries = tf.summary.merge([loss_summary])