Example 1
def bottleneck_block(self, x, scope):
    with tf.variable_scope(scope):
        # BN -> ReLU -> 1x1x1 conv: widen to 4*k channels before the 3x3x3 conv
        x = batch_norm(x, is_training=self.is_training, scope='BN1')
        x = Relu(x)
        x = conv_3d(x, filter_size=1, num_filters=4 * self.k, layer_name='conv1', add_reg=self.conf.use_reg)
        x = drop_out(x, keep_prob=self.keep_prob)

        # BN -> ReLU -> 3x3x3 conv: emit k new feature maps (the growth rate)
        x = batch_norm(x, is_training=self.is_training, scope='BN2')
        x = Relu(x)
        x = conv_3d(x, filter_size=3, num_filters=self.k, layer_name='conv2', add_reg=self.conf.use_reg)
        x = drop_out(x, keep_prob=self.keep_prob)
        return x
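
This is the DenseNet-B bottleneck pattern: the cheap 1x1x1 convolution caps the cost of the 3x3x3 convolution that follows, which then produces k new feature maps. The batch_norm, Relu, conv_3d, and drop_out calls above are project-specific wrappers; as a point of reference, here is a minimal stand-alone sketch of the same pattern with stock TF 1.x layers. The function name and signature are assumptions for illustration, not the project's API:

import tensorflow as tf

def bottleneck_block_sketch(x, k, is_training, keep_prob, scope):
    # Hypothetical stand-alone equivalent of bottleneck_block above (TF 1.x).
    with tf.variable_scope(scope):
        # BN -> ReLU -> 1x1x1 conv: widen to 4*k channels
        x = tf.layers.batch_normalization(x, training=is_training, name='BN1')
        x = tf.nn.relu(x)
        x = tf.layers.conv3d(x, filters=4 * k, kernel_size=1, padding='same', name='conv1')
        x = tf.nn.dropout(x, keep_prob=keep_prob)
        # BN -> ReLU -> 3x3x3 conv: k output maps (growth rate)
        x = tf.layers.batch_normalization(x, training=is_training, name='BN2')
        x = tf.nn.relu(x)
        x = tf.layers.conv3d(x, filters=k, kernel_size=3, padding='same', name='conv2')
        x = tf.nn.dropout(x, keep_prob=keep_prob)
        return x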
Example 2
def transition_down(self, x, scope):
    with tf.variable_scope(scope):
        x = batch_norm(x, is_training=self.is_training, scope='BN')
        x = Relu(x)
        # 1x1x1 conv compresses channels; stride-2 pooling then halves D, H, W
        x = conv_3d(x, filter_size=1, num_filters=self.trans_out, layer_name='conv', add_reg=self.conf.use_reg)
        x = drop_out(x, keep_prob=self.keep_prob)
        x = avg_pool(x, ksize=2, stride=2, scope='avg_pool')
        return x
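
The transition-down is the pooling stage between dense blocks: a 1x1x1 convolution compresses the concatenated features to trans_out channels, then 2x2x2 average pooling with stride 2 halves every spatial dimension. A hedged equivalent with stock TF 1.x ops, with the function name and arguments being assumptions:

import tensorflow as tf

def transition_down_sketch(x, out_channels, is_training, keep_prob, scope):
    # Hypothetical stand-alone equivalent of transition_down above (TF 1.x).
    with tf.variable_scope(scope):
        x = tf.layers.batch_normalization(x, training=is_training, name='BN')
        x = tf.nn.relu(x)
        x = tf.layers.conv3d(x, filters=out_channels, kernel_size=1, padding='same', name='conv')
        x = tf.nn.dropout(x, keep_prob=keep_prob)
        # stride-2 pooling: (D, H, W) -> (D/2, H/2, W/2)
        return tf.layers.average_pooling3d(x, pool_size=2, strides=2, padding='same')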
Example 3
def transition_up(self, x, scope, num_filters=None):
    with tf.variable_scope(scope):
        # 1x1 conv compresses the channel count by the factor theta_up
        x = batch_norm(x, is_training=self.is_training, scope='BN1')
        x = Relu(x)
        x = conv_2d(x,
                    filter_size=1,
                    num_filters=int(x.get_shape().as_list()[-1] *
                                    self.theta_up),
                    layer_name='conv',
                    add_reg=self.conf.use_reg)
        x = drop_out(x, keep_prob=self.keep_prob)
        # a distinct scope ('BN2', not 'BN' reused) avoids a variable collision
        x = batch_norm(x, is_training=self.is_training, scope='BN2')
        x = Relu(x)
        if not num_filters:
            num_filters = self.trans_out
        # stride-2 transposed conv doubles the spatial resolution
        x = deconv_2d(inputs=x,
                      filter_size=3,
                      num_filters=num_filters,
                      layer_name='deconv',
                      stride=2,
                      batch_norm=False,
                      is_train=self.is_training)
        x = drop_out(x, keep_prob=self.keep_prob)
    return x
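
Here the transition-up does two jobs: the 1x1 convolution first shrinks the channel count by theta_up (keeping the decoder from growing as skip connections are concatenated in), then a stride-2 transposed convolution doubles the spatial resolution. A hedged 2D sketch with stock TF 1.x layers, where theta_up and the 2x upsampling mirror the code above and everything else is an assumption:

import tensorflow as tf

def transition_up_2d_sketch(x, theta_up, num_filters, is_training, keep_prob, scope):
    # Hypothetical stand-alone equivalent of the 2D transition_up (TF 1.x).
    with tf.variable_scope(scope):
        x = tf.layers.batch_normalization(x, training=is_training, name='BN1')
        x = tf.nn.relu(x)
        # compress channels by theta_up before upsampling
        compressed = int(x.get_shape().as_list()[-1] * theta_up)
        x = tf.layers.conv2d(x, filters=compressed, kernel_size=1, padding='same', name='conv')
        x = tf.nn.dropout(x, keep_prob=keep_prob)
        x = tf.layers.batch_normalization(x, training=is_training, name='BN2')
        x = tf.nn.relu(x)
        # stride-2 transposed conv: (H, W) -> (2H, 2W) with padding='same'
        x = tf.layers.conv2d_transpose(x, filters=num_filters, kernel_size=3,
                                       strides=2, padding='same', name='deconv')
        return tf.nn.dropout(x, keep_prob=keep_prob)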
Example 4
def transition_up(self, x, out_shape, scope, num_filters=None):
    with tf.variable_scope(scope):
        x = batch_norm(x, is_training=self.is_training, scope='BN')
        x = Relu(x)
        if not num_filters:
            num_filters = self.trans_out
        # stride-2 3D transposed conv upsamples to the explicit out_shape
        x = deconv_3d(inputs=x,
                      filter_size=3,
                      num_filters=num_filters,
                      layer_name='deconv',
                      stride=2,
                      add_reg=self.conf.use_reg,
                      add_batch_norm=False,
                      is_train=self.is_training,
                      out_shape=out_shape)
        x = drop_out(x, keep_prob=self.keep_prob)
    return x
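
The 3D variant takes an explicit out_shape because, at stride 2, the output size of a transposed convolution is ambiguous (inputs of size 7 and 8 both downsample to 4), so the caller passes in the shape of the matching encoder feature map. A minimal sketch of the low-level op that makes this explicit; the weight-variable creation is an assumption about what the project's deconv_3d wrapper does internally:

import tensorflow as tf

def deconv3d_sketch(x, out_shape, num_filters, scope):
    # Hypothetical core of deconv_3d: tf.nn.conv3d_transpose requires an
    # explicit output_shape to resolve the stride-2 ambiguity.
    with tf.variable_scope(scope):
        in_channels = x.get_shape().as_list()[-1]
        # filter layout for conv3d_transpose: [D, H, W, out_channels, in_channels]
        w = tf.get_variable('W', shape=[3, 3, 3, num_filters, in_channels])
        return tf.nn.conv3d_transpose(x, w, output_shape=out_shape,
                                      strides=[1, 2, 2, 2, 1], padding='SAME')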