Example #1
0
 def apool(self,
           k_height,
           k_width,
           d_height=2,
           d_width=2,
           mode='VALID',
           input_layer=None,
           num_channels_in=None):
   """Append an average pooling layer to the network.

   When input_layer is None the current top layer is pooled; otherwise
   the supplied layer is pooled and self.top_size is reset to
   num_channels_in. The new layer becomes self.top_layer.
   """
   if input_layer is not None:
     self.top_size = num_channels_in
   else:
     input_layer = self.top_layer
   # Unique per-network name: apool0, apool1, ...
   layer_name = 'apool' + str(self.counts['apool'])
   self.counts['apool'] += 1
   pooled = pooling_layers.average_pooling2d(
       input_layer, [k_height, k_width], [d_height, d_width],
       padding=mode,
       data_format=self.channel_pos,
       name=layer_name)
   self.top_layer = pooled
   return pooled
def NonLocalBlock(input_x,
                  out_channels,
                  sub_sample=1,
                  nltype=0,
                  is_bn=False,
                  scope='NonLocalBlock'):
    '''Build a non-local (self-attention) block over an NHWC feature map.

    Adapted from https://github.com/nnUyi/Non-Local_Nets-Tensorflow
    (an implementation of "Non-local Neural Networks").

    Args:
        input_x: 4-D tensor. Its batch/height/width/channels are read via
            get_shape().as_list(), so the static shape must be defined.
        out_channels: channel count of the internal g/theta/phi embeddings.
        sub_sample: when > 1, average-pools the g and phi branches by this
            factor to reduce the pairwise-similarity cost.
        nltype: pairwise-similarity variant -- 0 embedded_gaussian,
            1 gaussian, 2 dot_product, 3 concat (see typedict).
            NOTE(review): only nltype <= 2 is implemented; nltype == 3
            skips the whole body and implicitly returns None.
        is_bn: unused -- the batch-norm call on the output is commented out.
        scope: tf.variable_scope name under which all variables are created.

    Returns:
        The projected attention output w_y with in_channels channels
        (the residual add with input_x is commented out below), or
        implicitly None when nltype == 3.
    '''
    batchsize, height, width, in_channels = input_x.get_shape().as_list()
    # Human-readable names for the nltype codes.
    # NOTE(review): defined for reference only -- never read below.
    typedict = {
        0: 'embedded_gaussian',
        1: 'gaussian',
        2: 'dot_product',
        3: 'concat'
    }
    with tf.variable_scope(scope) as sc:
        if nltype <= 2:
            # g: 1x1 conv producing the "value" embedding.
            with tf.variable_scope('g') as scope:
                g = conv2d(input_x,
                           out_channels,
                           1,
                           strides=1,
                           padding='same',
                           name='g')
                if sub_sample > 1:
                    g = average_pooling2d(g,
                                          pool_size=sub_sample,
                                          strides=sub_sample,
                                          name='g_pool')

            # phi: "key" embedding. The gaussian variant (nltype == 1)
            # uses the raw input instead of a learned 1x1 projection.
            with tf.variable_scope('phi') as scope:
                if nltype == 0 or nltype == 2:
                    phi = conv2d(input_x,
                                 out_channels,
                                 1,
                                 strides=1,
                                 padding='same',
                                 name='phi')
                elif nltype == 1:
                    phi = input_x
                if sub_sample > 1:
                    phi = average_pooling2d(phi,
                                            pool_size=sub_sample,
                                            strides=sub_sample,
                                            name='phi_pool')

            # theta: "query" embedding; never sub-sampled, so it keeps the
            # full height*width resolution for the output.
            with tf.variable_scope('theta') as scope:
                if nltype == 0 or nltype == 2:
                    theta = conv2d(input_x,
                                   out_channels,
                                   1,
                                   strides=1,
                                   padding='same',
                                   name='theta')
                elif nltype == 1:
                    theta = input_x

            # Flatten spatial dims: [batch, positions, channels].
            # NOTE(review): for nltype == 1 phi/theta are the raw input, so
            # these reshapes assume in_channels == out_channels -- confirm.
            g_x = tf.reshape(g, [batchsize, -1, out_channels])
            theta_x = tf.reshape(theta, [batchsize, -1, out_channels])

            # theta_x = tf.reshape(theta, [batchsize, out_channels, -1])
            # theta_x = tf.transpose(theta_x, [0,2,1])
            phi_x = tf.reshape(phi, [batchsize, -1, out_channels])
            phi_x = tf.transpose(phi_x, [0, 2, 1])
            #phi_x = tf.reshape(phi_x, [batchsize, out_channels, -1])

            # Pairwise similarity: f[b, i, j] = <theta_i, phi_j>.
            f = tf.matmul(theta_x, phi_x)
            # Normalize f into attention weights, per variant.
            if nltype <= 1:
                # f_softmax = tf.nn.softmax(f, -1)
                # Hand-written softmax over the last axis.
                # NOTE(review): tf.exp without max-subtraction can overflow
                # for large logits -- confirm this is acceptable here.
                f = tf.exp(f)
                f_softmax = f / tf.reduce_sum(f, axis=-1, keepdims=True)
            elif nltype == 2:
                f = tf.nn.relu(f)  #/int(f.shape[-1])
                # Despite the name, f_mean holds the per-row *sum*; dividing
                # by it makes each row of weights sum to 1.
                f_mean = tf.reduce_sum(f, axis=[2], keepdims=True)
                #print(f.shape,f_mean.shape)
                f_softmax = f / f_mean
            # Attention-weighted sum of values, restored to spatial layout.
            y = tf.matmul(f_softmax, g_x)
            y = tf.reshape(y, [batchsize, height, width, out_channels])
            # w: 1x1 conv projecting back to the input channel count.
            with tf.variable_scope('w') as scope:
                w_y = conv2d(y,
                             in_channels,
                             1,
                             strides=1,
                             padding='same',
                             name='w')
                # if is_bn:
                #     w_y = slim.batch_norm(w_y)
            # NOTE(review): the residual connection is disabled -- only the
            # attention branch is returned, not input_x + w_y.
            z = w_y  #input_x + w_y
            return z
 def _gap(self, bottom):
     """Global average pool: one window covering the full spatial extent."""
     shape = bottom.get_shape()
     h, w = shape[1], shape[2]
     return average_pooling2d(bottom, [h, w], [h, w])
 def _avgpool(self, bottom, stride):
     """Average pool with a square stride x stride window and matching stride."""
     window = [stride, stride]
     return average_pooling2d(bottom, window, window)