Example #1
 def transition_up(self, x, scope, num_filters=None):
     with tf.variable_scope(scope):
         # BN -> ReLU -> 1x1 convolution compresses the channel count by theta_up
         x = batch_norm(x, is_training=self.is_training_pl, scope='BN_1')
         x = Relu(x)
         x = conv_2d(x,
                     filter_size=1,
                     num_filters=int(x.get_shape().as_list()[-1] *
                                     self.theta_up),
                     layer_name='conv',
                     add_reg=self.conf.use_reg,
                     add_batch_norm=self.conf.use_BN,
                     is_train=self.is_training_pl)
         x = tf.nn.dropout(x, keep_prob=self.keep_prob_pl)
         x = batch_norm(x, is_training=self.is_training_pl, scope='BN_2')
         x = Relu(x)
         if not num_filters:
             num_filters = self.trans_out
         # 3x3 transposed convolution with stride 2 upsamples the feature maps
         x = deconv_2d(inputs=x,
                       filter_size=3,
                       num_filters=num_filters,
                       layer_name='deconv',
                       stride=2,
                       add_reg=self.conf.use_reg,
                       add_batch_norm=False,
                       is_train=self.is_training_pl)
         x = tf.nn.dropout(x, keep_prob=self.keep_prob_pl)
     return x
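The transition_up block above (and the up_conv variants in the next two examples) delegates the upsampling itself to a project-specific deconv_2d wrapper that is not reproduced on this page. As a minimal sketch only, assuming the TF 1.x layers API, 'same' padding, and an arbitrary L2 regularization scale, such a wrapper might look like this:

import tensorflow as tf  # TF 1.x-style API, matching the calls in the examples

def deconv_2d(inputs, filter_size, num_filters, layer_name, stride=1,
              add_reg=False, add_batch_norm=False, is_train=True):
    # Hypothetical stand-in for the project's deconv_2d helper; only the keyword
    # arguments that actually appear in the examples are modelled here.
    with tf.variable_scope(layer_name):
        regularizer = tf.keras.regularizers.l2(1e-4) if add_reg else None  # scale is a guess
        x = tf.layers.conv2d_transpose(inputs,
                                       filters=num_filters,
                                       kernel_size=filter_size,
                                       strides=stride,
                                       padding='same',  # assumed, so stride 2 doubles H and W
                                       kernel_regularizer=regularizer,
                                       name='deconv')
        if add_batch_norm:
            x = tf.layers.batch_normalization(x, training=is_train, name='BN')
        return x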
Example #2
 def up_conv(self, x):
     # stride-2 transposed convolution upsamples while keeping the channel count unchanged
     num_out_channels = x.get_shape().as_list()[-1]
     x = deconv_2d(inputs=x,
                   filter_size=3,
                   num_filters=num_out_channels,
                   layer_name='conv_up',
                   stride=2,
                   add_batch_norm=False,
                   is_train=self.is_training_pl)
     return x
Example #3
 def up_conv(self, x):
     # stride-2 transposed convolution upsamples while halving the channel count
     num_out_channels = get_num_channels(x) // 2
     x = deconv_2d(inputs=x,
                   filter_size=2,
                   num_filters=num_out_channels,
                   layer_name='conv_up',
                   stride=2,
                   add_batch_norm=self.conf.use_BN,
                   is_train=self.is_training_pl)
     return x
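Example #3 also relies on a get_num_channels helper that is not shown. Judging from the inline x.get_shape().as_list()[-1] in Example #2, it presumably just reads the trailing channel dimension; a guessed one-liner:

def get_num_channels(x):
    # Assumed helper: size of the last (channels) dimension of an NHWC tensor.
    return x.get_shape().as_list()[-1]

With that in place, the difference between the last two examples is only bookkeeping: Example #2 upsamples with a 3x3 stride-2 transposed convolution and keeps the channel count, while Example #3 uses a 2x2 kernel, halves the channel count, and lets the wrapper apply batch normalization when self.conf.use_BN is set.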